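Adapt mx (mx.py) so that it can drive a GraalVM build inside the Nix
sandbox, where there is no network access and no version-control checkout.

What the patch does:

* _check_file_with_sha1: trust any file that already exists on disk
  instead of verifying its SHA1 checksum; all artifacts are pre-fetched
  by Nix.
* SuiteImport.parse_specification: skip the url/version validation and
  resolve every imported suite from $MX_GIT_CACHE_DIR.
* Suite / SourceSuite: drop the "primary suite must be in a vcs
  repository" check and hard-code vc_dir to $MX_GIT_CACHE_DIR.
* SourceSuite.release_version: take the release version from the
  $version environment variable instead of VCS tags.
* _find_suite_import: replace suite cloning with a fixed table mapping
  suite names to pre-populated paths under $TMP/source and
  $MX_GIT_CACHE_DIR.
* download: stub out all downloads; the pre-fetched files are already
  in place.
* PackedResourceLibrary.get_path: skip extraction of the downloaded
  archive and return the extract path as-is.
* VC.update_to_branch / HgConfig.parent_info: invoke hg directly
  instead of aborting or going through the command wrappers.
* getmtime / Archiver._add_zip: pin file timestamps to 315532800
  (1980-01-01 UTC, the oldest timestamp a ZIP entry can represent), so
  that generated archives come out deterministic.

A few warn()/print() calls ("LOOKING FOR", "FAKE CLONE", "FAKE
download") are left in as build-time tracing.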
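
The patched code reads everything it needs from the environment via
get_env(). A minimal sketch of that contract, with purely illustrative
values — the variable names come from the get_env() calls in the diff
below, but the paths and the version number here are hypothetical:

    # Hypothetical environment for running the patched mx.py.
    import os

    # Pre-fetched suite checkouts, one directory per suite name.
    os.environ['MX_GIT_CACHE_DIR'] = '/build/mx-git-cache'
    # Some suites are instead looked up under $TMP/source/<name>/mx.<name>.
    os.environ['TMP'] = '/build'
    # Returned verbatim by SourceSuite.release_version().
    os.environ['version'] = '19.2.1'
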
diff --git a/mx.py b/mx.py
index a0b9315..b7d67a0 100755
--- a/mx.py
+++ b/mx.py
@@ -238,21 +238,7 @@ def _check_file_with_sha1(path, sha1, sha1path, mustExist=True, newFile=False, l
             f.write(value or sha1OfFile(path))
 
     if exists(path):
-        if sha1Check and sha1:
-            if not _sha1CachedValid() or (newFile and sha1 != _sha1Cached()):
-                logv('Create/update SHA1 cache file ' + sha1path)
-                _writeSha1Cached()
-
-            if sha1 != _sha1Cached():
-                computedSha1 = sha1OfFile(path)
-                if sha1 == computedSha1:
-                    warn('Fixing corrupt SHA1 cache file ' + sha1path)
-                    _writeSha1Cached(computedSha1)
-                    return True
-                if logErrors:
-                    size = os.path.getsize(path)
-                    log_error('SHA1 of {} [size: {}] ({}) does not match expected value ({})'.format(TimeStampFile(path), size, computedSha1, sha1))
-                return False
+        return True
     elif mustExist:
         if logErrors:
             log_error("'{}' does not exist".format(path))
@@ -1057,46 +1043,8 @@ class SuiteImport:
         version = import_dict.get("version")
         suite_dir = None
         version_from = import_dict.get("versionFrom")
-        if version_from and version:
-            abort("In import for '{}': 'version' and 'versionFrom' can not be both set".format(name), context=context)
-        if version is None and version_from is None:
-            if not (in_subdir and (importer.vc_dir != importer.dir or isinstance(importer, BinarySuite))):
-                abort("In import for '{}': No version given and not a 'subdir' suite of the same repository".format(name), context=context)
-            if importer.isSourceSuite():
-                suite_dir = join(importer.vc_dir, name)
-            version = importer.version()
-        if urls is None:
-            if not in_subdir:
-                if import_dict.get("subdir") is None and importer.vc_dir != importer.dir:
-                    warn("In import for '{}': No urls given but 'subdir' is not set, assuming 'subdir=True'".format(name), context)
-                    in_subdir = True
-                else:
-                    abort("In import for '{}': No urls given and not a 'subdir' suite".format(name), context=context)
-            return SuiteImport(name, version, None, None, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
-        # urls a list of alternatives defined as dicts
-        if not isinstance(urls, list):
-            abort('suite import urls must be a list', context=context)
-        urlinfos = []
-        mainKind = None
-        for urlinfo in urls:
-            if isinstance(urlinfo, dict) and urlinfo.get('url') and urlinfo.get('kind'):
-                kind = urlinfo.get('kind')
-                if not VC.is_valid_kind(kind):
-                    abort('suite import kind ' + kind + ' illegal', context=context)
-            else:
-                abort('suite import url must be a dict with {"url", kind", attributes', context=context)
-            vc = vc_system(kind)
-            if kind != 'binary':
-                assert not mainKind or mainKind == kind, "Only expecting one non-binary kind"
-                mainKind = kind
-            url = mx_urlrewrites.rewriteurl(urlinfo.get('url'))
-            urlinfos.append(SuiteImportURLInfo(url, kind, vc))
-        vc_kind = None
-        if mainKind:
-            vc_kind = mainKind
-        elif urlinfos:
-            vc_kind = 'binary'
-        return SuiteImport(name, version, urlinfos, vc_kind, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
+        suite_dir = join(get_env('MX_GIT_CACHE_DIR'), name)
+        return SuiteImport(name, version, [], None, True, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
 
     @staticmethod
     def get_source_urls(source, kind=None):
@@ -1467,8 +1415,6 @@ class Suite(object):
     :type dists: list[Distribution]
     """
     def __init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=False):
-        if primary is True and vc_dir is None:
-            abort("The primary suite must be in a vcs repository")
         self.imported_by = [] if primary else [importing_suite]
         self.mxDir = mxDir
         self.dir = dirname(mxDir)
@@ -1496,7 +1442,7 @@ class Suite(object):
         self._outputRoot = None
         self._preloaded_suite_dict = None
         self.vc = vc
-        self.vc_dir = vc_dir
+        self.vc_dir = get_env('MX_GIT_CACHE_DIR')
         self._preload_suite_dict()
         self._init_imports()
         if load:
@@ -2405,7 +2351,9 @@ class Repository(SuiteConstituent):
 class SourceSuite(Suite):
     """A source suite"""
     def __init__(self, mxDir, primary=False, load=True, internal=False, importing_suite=None, dynamicallyImported=False):
-        vc, vc_dir = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+        vc, vc_dir_test = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+        vc_dir = get_env('MX_GIT_CACHE_DIR')
+        warn("LOOKING FOR: " + mxDir)
         Suite.__init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=dynamicallyImported)
         logvv("SourceSuite.__init__({}), got vc={}, vc_dir={}".format(mxDir, self.vc, self.vc_dir))
         self.projects = []
@@ -2454,17 +2402,7 @@ class SourceSuite(Suite):
         """
         Gets the release tag from VC or create a time based once if VC is unavailable
         """
-        if snapshotSuffix not in self._releaseVersion:
-            _version = self._get_early_suite_dict_property('version')
-            if _version and self.getMxCompatibility().addVersionSuffixToExplicitVersion():
-                if not self.is_release():
-                    _version = _version + '-' + snapshotSuffix
-            if not _version:
-                _version = self.vc.release_version_from_tags(self.vc_dir, self.name, snapshotSuffix=snapshotSuffix)
-            if not _version:
-                _version = 'unknown-{0}-{1}'.format(platform.node(), time.strftime('%Y-%m-%d_%H-%M-%S_%Z'))
-            self._releaseVersion[snapshotSuffix] = _version
-        return self._releaseVersion[snapshotSuffix]
+        return get_env('version')
 
     def scm_metadata(self, abortOnError=False):
         scm = self.scm
@@ -2993,12 +2931,35 @@ def _find_suite_import(importing_suite, suite_import, fatalIfMissing=True, load=
         Attempts to locate an existing suite in the local context
         Returns the path to the mx.name dir if found else None
         """
-        if mode == 'binary':
-            # binary suites are always stored relative to the importing suite in mx-private directory
-            return importing_suite._find_binary_suite_dir(suite_import.name)
+        warn("FAKE CLONE: " + str(suite_import))
+        if (suite_import.name == "truffle"):
+            return join(get_env('TMP'), "source", "truffle", "mx.truffle")
+        if (suite_import.name == "graal-nodejs"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-nodejs", "mx.graal-nodejs")
+        if (suite_import.name == "truffleruby"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "truffleruby", "mx.truffleruby")
+        if (suite_import.name == "graalpython"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graalpython", "mx.graalpython")
+        if (suite_import.name == "vm"):
+            return join(get_env('TMP'), "source", "vm", "mx.vm")
+        if (suite_import.name == "fastr"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "fastr", "mx.fastr")
+        if (suite_import.name == "sdk"):
+            return join(get_env('TMP'), "source", "sdk", "mx.sdk")
+        if (suite_import.name == "graal-js"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-js", "mx.graal-js")
+        if (suite_import.name == "regex"):
+            return join(get_env('TMP'), "source", "regex", "mx.regex")
+        if (suite_import.name == "substratevm"):
+            return join(get_env('TMP'), "source", "substratevm", "mx.substratevm")
+        if (suite_import.name == "tools"):
+            return join(get_env('TMP'), "source", "tools", "mx.tools")
+        if (suite_import.name == "sulong"):
+            return join(get_env('TMP'), "source", "sulong", "mx.sulong")
+        if (suite_import.name == "compiler"):
+            return join(get_env('TMP'), "source", "compiler", "mx.compiler")
         else:
-            # use the SuiteModel to locate a local source copy of the suite
-            return _suitemodel.find_suite_dir(suite_import)
+            return join(get_env('MX_GIT_CACHE_DIR'), suite_import.name)
 
     def _get_import_dir(url, mode):
         """Return directory where the suite will be cloned to"""
@@ -3816,7 +3777,7 @@ def getmtime(name):
     """
     Wrapper for builtin open function that handles long path names on Windows.
     """
-    return os.path.getmtime(_safe_path(name))
+    return 315532800
 
 
 def stat(name):
@@ -4062,57 +4023,8 @@ def _attempt_download(url, path, jarEntryName=None):
     return False
 
 def download(path, urls, verbose=False, abortOnError=True, verifyOnly=False):
-    """
-    Attempts to downloads content for each URL in a list, stopping after the first successful download.
-    If the content cannot be retrieved from any URL, the program is aborted, unless abortOnError=False.
-    The downloaded content is written to the file indicated by `path`.
-    """
-    if not verifyOnly:
-        ensure_dirname_exists(path)
-        assert not path.endswith(os.sep)
-
-    # https://docs.oracle.com/javase/7/docs/api/java/net/JarURLConnection.html
-    jarURLPattern = re.compile('jar:(.*)!/(.*)')
-    verify_errors = {}
-    for url in urls:
-        if not verifyOnly or verbose:
-            log('Downloading ' + url + ' to ' + path)
-        m = jarURLPattern.match(url)
-        jarEntryName = None
-        if m:
-            url = m.group(1)
-            jarEntryName = m.group(2)
-
-        if not _opts.trust_http and (url.lower().startswith('http://') or url.lower().startswith('ftp://')):
-            warn('Downloading from non-https URL {}. Use --trust-http mx option to suppress this warning.'.format(url))
-
-        if verifyOnly:
-            try:
-                conn = _urlopen(url, timeout=10)
-                conn.close()
-            except (IOError, socket.timeout) as e:
-                _suggest_tlsv1_error(e)
-                verify_errors[url] = e
-        else:
-            for i in range(4):
-                if i != 0:
-                    time.sleep(1)
-                    warn('Retry {} to download from {}'.format(i, url))
-                if _attempt_download(url, path, jarEntryName):
-                    return True # Download was successful
-
-    if verifyOnly and len(verify_errors) < len(urls): # verify-mode at least one success -> success
-        return True
-    else: # Either verification error or no download was successful
-        msg = 'Could not download to ' + path + ' from any of the following URLs: ' + ', '.join(urls)
-        if verifyOnly: # verify-mode -> print error details
-            for url, e in verify_errors.items():
-                msg += '\n  ' + url + ': ' + str(e)
-        if abortOnError:
-            abort(msg)
-        else:
-            warn(msg)
-            return False
+    print("FAKE download(path={} urls={} verbose={} abortOnError={} verifyOnly={})".format(path, urls, verbose, abortOnError, verifyOnly))
+    return True
 
 def update_file(path, content, showDiff=False):
     """
@@ -7887,30 +7799,6 @@ class PackedResourceLibrary(ResourceLibrary):
 
     def get_path(self, resolve):
         extract_path = _make_absolute(self.extract_path, self.suite.dir)
-        download_path = super(PackedResourceLibrary, self).get_path(resolve)
-        if resolve and self._check_extract_needed(extract_path, download_path):
-            extract_path_tmp = tempfile.mkdtemp(suffix=basename(extract_path), dir=dirname(extract_path))
-            try:
-                # extract archive
-                Extractor.create(download_path).extract(extract_path_tmp)
-                # ensure modification time is up to date
-                os.utime(extract_path_tmp, None)
-                logv("Moving temporary directory {} to {}".format(extract_path_tmp, extract_path))
-                try:
-                    # attempt atomic overwrite
-                    os.rename(extract_path_tmp, extract_path)
-                except OSError:
-                    # clean destination & re-try for cases where atomic overwrite doesn't work
-                    rmtree(extract_path, ignore_errors=True)
-                    os.rename(extract_path_tmp, extract_path)
-            except OSError as ose:
-                # Rename failed. Race with other process?
-                if self._check_extract_needed(extract_path, download_path):
-                    # ok something really went wrong
-                    abort("Extracting {} failed!".format(download_path), context=ose)
-            finally:
-                rmtree(extract_path_tmp, ignore_errors=True)
-
         return extract_path
 
     def _check_download_needed(self):
@@ -8430,7 +8318,7 @@ class VC(_with_metaclass(ABCMeta, object)):
         :param str branch: a branch name
         :param bool abortOnError: if True abort on error
         """
-        abort(self.kind + " update_to_branch is not implemented")
+        self.run(['hg', vcdir] + cmd)
 
     def is_release_from_tags(self, vcdir, prefix):
         """
@@ -8831,7 +8719,7 @@ class HgConfig(VC):
                 return None
 
     def parent_info(self, vcdir, abortOnError=True):
-        out = self.hg_command(vcdir, ["log", "-r", ".", "--template", "{author}|||{date|hgdate}"], abortOnError=abortOnError)
+        out = _check_output_str(["hg", '-R', vcdir, "log", "-r", ".", "--template", "{author}|||{date|hgdate}"])
         author, date = out.split("|||")
         ts, _ = date.split(" ")
         return self._sanitize_parent_info({
@@ -14069,6 +13957,7 @@ class Archiver(SafeFileCreation):
 
     def _add_zip(self, filename, archive_name, provenance):
         self._add_provenance(archive_name, provenance)
+        os.utime(filename, (315532800, 315532800))
         self.zf.write(filename, archive_name)
 
     def _add_str_zip(self, data, archive_name, provenance):