path: root/nixpkgs/pkgs/development/compilers/graalvm/001_mx.py.patch
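
This patch adapts mx.py, the build driver used by the GraalVM suites, to
the sandboxed Nix build environment, which offers neither network access
nor VCS checkouts. Hunk by hunk, it:

  * PackedResourceLibrary.get_path(): drops the download-and-extract step
    and simply returns extract_path; the build is expected to provide the
    resources already unpacked there.
  * HgConfig: replaces the hg_command() helper calls with direct hg
    invocations via run()/_check_output_str().
  * SuiteImport.parse(): skips the url/version validation and resolves
    every imported suite to a directory under $MX_GIT_CACHE_DIR.
  * Suite/SourceSuite: no longer requires the primary suite to live in a
    VCS repository and forces vc_dir to $MX_GIT_CACHE_DIR.
  * SourceSuite.release_version(): returns the `version` environment
    variable instead of deriving a release tag from VCS history.
  * download(): stubbed out entirely; it logs a "FAKE download" line and
    reports success, since all artifacts are pre-fetched by Nix.
  * Archiver._add_zip(): clamps archive entry mtimes to 315532800
    (1980-01-01 UTC, presumably chosen as the earliest date a ZIP/DOS
    timestamp can encode) so generated archives are bit-for-bit
    reproducible.
  * _find_suite_import(): replaces the clone/locate logic with a
    hardcoded map from suite names to existing checkouts, under
    $TMP/source for suites shipped in the primary source tree and under
    $MX_GIT_CACHE_DIR for external ones (graaljs, truffleruby,
    graalpython, fastr). The "FAKE CLONE"/"LOOKING FOR" warnings merely
    trace this resolution in the build log.

A minimal sketch of the environment the patched mx.py expects. The
variable names come from the get_env() calls below; the values are
hypothetical and would normally be set by the Nix builder:

    export MX_GIT_CACHE_DIR=/build/mx-git-cache  # pre-fetched suite checkouts
    export TMP=/build                            # primary suites under $TMP/source/<name>
    export version=19.2.0                        # echoed back by release_version()
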
diff --git a/mx.py b/mx.py
index d119b62..471fe98 100644
--- a/mx.py
+++ b/mx.py
@@ -4961,30 +4961,6 @@ class PackedResourceLibrary(ResourceLibrary):
 
     def get_path(self, resolve):
         extract_path = _make_absolute(self.extract_path, self.suite.dir)
-        download_path = super(PackedResourceLibrary, self).get_path(resolve)
-        if resolve and self._check_extract_needed(extract_path, download_path):
-            extract_path_tmp = tempfile.mkdtemp(suffix=basename(extract_path), dir=dirname(extract_path))
-            try:
-                # extract archive
-                Extractor.create(download_path).extract(extract_path_tmp)
-                # ensure modification time is up to date
-                os.utime(extract_path_tmp, None)
-                logv("Moving temporary directory {} to {}".format(extract_path_tmp, extract_path))
-                try:
-                    # attempt atomic overwrite
-                    os.rename(extract_path_tmp, extract_path)
-                except OSError:
-                    # clean destination & re-try for cases where atomic overwrite doesn't work
-                    rmtree(extract_path, ignore_errors=True)
-                    os.rename(extract_path_tmp, extract_path)
-            except OSError as ose:
-                # Rename failed. Race with other process?
-                if self._check_extract_needed(extract_path, download_path):
-                    # ok something really went wrong
-                    abort("Extracting {} failed!".format(download_path), context=ose)
-            finally:
-                rmtree(extract_path_tmp, ignore_errors=True)
-
         return extract_path
 
     def _check_download_needed(self):
@@ -5885,7 +5861,7 @@ class HgConfig(VC):
 
     def update_to_branch(self, vcdir, branch, abortOnError=True):
         cmd = ['update', branch]
-        self.hg_command(vcdir, cmd, abortOnError=abortOnError)
+        self.run(['hg', '-R', vcdir] + cmd)
 
     def add(self, vcdir, path, abortOnError=True):
         return self.run(['hg', '-q', '-R', vcdir, 'add', path]) == 0
@@ -5922,7 +5898,7 @@ class HgConfig(VC):
                 return None
 
     def parent_info(self, vcdir, abortOnError=True):
-        out = self.hg_command(vcdir, ["log", "-r", ".", "--template", "{author}|||{date|hgdate}"], abortOnError=abortOnError)
+        out = _check_output_str(["hg", '-R', vcdir, "log", "-r", ".", "--template", "{author}|||{date|hgdate}"])
         author, date = out.split("|||")
         ts, _ = date.split(" ")
         return self._sanitize_parent_info({
@@ -8287,46 +8263,8 @@ class SuiteImport:
         version = import_dict.get("version")
         suite_dir = None
         version_from = import_dict.get("versionFrom")
-        if version_from and version:
-            abort("In import for '{}': 'version' and 'versionFrom' can not be both set".format(name), context=context)
-        if version is None and version_from is None:
-            if not (in_subdir and (importer.vc_dir != importer.dir or isinstance(importer, BinarySuite))):
-                abort("In import for '{}': No version given and not a 'subdir' suite of the same repository".format(name), context=context)
-            if importer.isSourceSuite():
-                suite_dir = join(importer.vc_dir, name)
-            version = importer.version()
-        if urls is None:
-            if not in_subdir:
-                if import_dict.get("subdir") is None and importer.vc_dir != importer.dir:
-                    warn("In import for '{}': No urls given but 'subdir' is not set, assuming 'subdir=True'".format(name), context)
-                    in_subdir = True
-                else:
-                    abort("In import for '{}': No urls given and not a 'subdir' suite".format(name), context=context)
-            return SuiteImport(name, version, None, None, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
-        # urls a list of alternatives defined as dicts
-        if not isinstance(urls, list):
-            abort('suite import urls must be a list', context=context)
-        urlinfos = []
-        mainKind = None
-        for urlinfo in urls:
-            if isinstance(urlinfo, dict) and urlinfo.get('url') and urlinfo.get('kind'):
-                kind = urlinfo.get('kind')
-                if not VC.is_valid_kind(kind):
-                    abort('suite import kind ' + kind + ' illegal', context=context)
-            else:
-                abort('suite import url must be a dict with {"url", kind", attributes', context=context)
-            vc = vc_system(kind)
-            if kind != 'binary':
-                assert not mainKind or mainKind == kind, "Only expecting one non-binary kind"
-                mainKind = kind
-            url = mx_urlrewrites.rewriteurl(urlinfo.get('url'))
-            urlinfos.append(SuiteImportURLInfo(url, kind, vc))
-        vc_kind = None
-        if mainKind:
-            vc_kind = mainKind
-        elif urlinfos:
-            vc_kind = 'binary'
-        return SuiteImport(name, version, urlinfos, vc_kind, dynamicImport=dynamicImport, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
+        suite_dir = join(get_env('MX_GIT_CACHE_DIR'), name)
+        return SuiteImport(name, version, [], None, True, in_subdir=in_subdir, version_from=version_from, suite_dir=suite_dir)
 
     @staticmethod
     def get_source_urls(source, kind=None):
@@ -8367,8 +8305,6 @@ class Suite(object):
     :type dists: list[Distribution]
     """
     def __init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=False):
-        if primary is True and vc_dir is None:
-            abort("The primary suite must be in a vcs repository")
         self.imported_by = [] if primary else [importing_suite]
         self.mxDir = mxDir
         self.dir = dirname(mxDir)
@@ -8396,7 +8332,7 @@ class Suite(object):
         self._outputRoot = None
         self._preloaded_suite_dict = None
         self.vc = vc
-        self.vc_dir = vc_dir
+        self.vc_dir = get_env('MX_GIT_CACHE_DIR')
         self._preload_suite_dict()
         self._init_imports()
         if load:
@@ -9295,7 +9231,9 @@ def get_dynamic_imports():
 class SourceSuite(Suite):
     """A source suite"""
     def __init__(self, mxDir, primary=False, load=True, internal=False, importing_suite=None, dynamicallyImported=False):
-        vc, vc_dir = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+        vc, vc_dir_test = VC.get_vc_root(dirname(mxDir), abortOnError=False)
+        vc_dir = get_env('MX_GIT_CACHE_DIR')
+        warn("LOOKING FOR: " + mxDir)
         Suite.__init__(self, mxDir, primary, internal, importing_suite, load, vc, vc_dir, dynamicallyImported=dynamicallyImported)
         logvv("SourceSuite.__init__({}), got vc={}, vc_dir={}".format(mxDir, self.vc, self.vc_dir))
         self.projects = []
@@ -9344,17 +9282,7 @@ class SourceSuite(Suite):
         """
         Gets the release tag from VC or create a time based once if VC is unavailable
         """
-        if snapshotSuffix not in self._releaseVersion:
-            _version = self._get_early_suite_dict_property('version')
-            if _version and self.getMxCompatibility().addVersionSuffixToExplicitVersion():
-                if not self.is_release():
-                    _version = _version + '-' + snapshotSuffix
-            if not _version:
-                _version = self.vc.release_version_from_tags(self.vc_dir, self.name, snapshotSuffix=snapshotSuffix)
-            if not _version:
-                _version = 'unknown-{0}-{1}'.format(platform.node(), time.strftime('%Y-%m-%d_%H-%M-%S_%Z'))
-            self._releaseVersion[snapshotSuffix] = _version
-        return self._releaseVersion[snapshotSuffix]
+        return get_env('version')
 
     def scm_metadata(self, abortOnError=False):
         scm = self.scm
@@ -12526,55 +12454,8 @@ def _attempt_download(url, path, jarEntryName=None):
     return False
 
 def download(path, urls, verbose=False, abortOnError=True, verifyOnly=False):
-    """
-    Attempts to downloads content for each URL in a list, stopping after the first successful download.
-    If the content cannot be retrieved from any URL, the program is aborted, unless abortOnError=False.
-    The downloaded content is written to the file indicated by `path`.
-    """
-    if not verifyOnly:
-        ensure_dirname_exists(path)
-        assert not path.endswith(os.sep)
-
-    # https://docs.oracle.com/javase/7/docs/api/java/net/JarURLConnection.html
-    jarURLPattern = re.compile('jar:(.*)!/(.*)')
-    verify_errors = {}
-    for url in urls:
-        if not verifyOnly or verbose:
-            log('Downloading ' + url + ' to ' + path)
-        m = jarURLPattern.match(url)
-        jarEntryName = None
-        if m:
-            url = m.group(1)
-            jarEntryName = m.group(2)
-
-        if verifyOnly:
-            try:
-                conn = _urlopen(url, timeout=10)
-                conn.close()
-                return True
-            except (IOError, socket.timeout) as e:
-                _suggest_tlsv1_error(e)
-                verify_errors[url] = e
-            continue
-
-        for i in range(4):
-            if i != 0:
-                time.sleep(1)
-                warn('Retry {} to download from {}'.format(i, url))
-            res = _attempt_download(url, path, jarEntryName)
-            if res is True:
-                return True
-            if res is False:
-                break
-
-    if abortOnError:
-        msg = 'Could not download to ' + path + ' from any of the following URLs: ' + ', '.join(urls)
-        if verifyOnly:
-            for url, e in verify_errors.items():
-                msg += '\n  ' + url + ': ' + str(e)
-        abort(msg)
-    else:
-        return False
+    print("FAKE download(path={} urls={} verbose={} abortOnError={} verifyOnly={})".format(path, urls, verbose, abortOnError, verifyOnly))
+    return True
 
 def update_file(path, content, showDiff=False):
     """
@@ -13378,6 +13259,7 @@ class Archiver(SafeFileCreation):
 
     def _add_zip(self, filename, archive_name, provenance):
         self._add_provenance(archive_name, provenance)
+        os.utime(filename, (315532800, 315532800))
         self.zf.write(filename, archive_name)
 
     def _add_str_zip(self, data, archive_name, provenance):
@@ -18526,12 +18408,35 @@ def _find_suite_import(importing_suite, suite_import, fatalIfMissing=True, load=
         Attempts to locate an existing suite in the local context
         Returns the path to the mx.name dir if found else None
         """
-        if mode == 'binary':
-            # binary suites are always stored relative to the importing suite in mx-private directory
-            return importing_suite._find_binary_suite_dir(suite_import.name)
+        warn("FAKE CLONE: " + str(suite_import))
+        if (suite_import.name == "truffle"):
+            return join(get_env('TMP'), "source", "truffle", "mx.truffle")
+        if (suite_import.name == "graal-nodejs"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-nodejs", "mx.graal-nodejs")
+        if (suite_import.name == "truffleruby"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "truffleruby", "mx.truffleruby")
+        if (suite_import.name == "graalpython"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graalpython", "mx.graalpython")
+        if (suite_import.name == "vm"):
+            return join(get_env('TMP'), "source", "vm", "mx.vm")
+        if (suite_import.name == "fastr"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "fastr", "mx.fastr")
+        if (suite_import.name == "sdk"):
+            return join(get_env('TMP'), "source", "sdk", "mx.sdk")
+        if (suite_import.name == "graal-js"):
+            return join(get_env('MX_GIT_CACHE_DIR'), "graaljs", "graal-js", "mx.graal-js")
+        if (suite_import.name == "regex"):
+            return join(get_env('TMP'), "source", "regex", "mx.regex")
+        if (suite_import.name == "substratevm"):
+            return join(get_env('TMP'), "source", "substratevm", "mx.substratevm")
+        if (suite_import.name == "tools"):
+            return join(get_env('TMP'), "source", "tools", "mx.tools")
+        if (suite_import.name == "sulong"):
+            return join(get_env('TMP'), "source", "sulong", "mx.sulong")
+        if (suite_import.name == "compiler"):
+            return join(get_env('TMP'), "source", "compiler", "mx.compiler")
         else:
-            # use the SuiteModel to locate a local source copy of the suite
-            return _suitemodel.find_suite_dir(suite_import)
+            return join(get_env('MX_GIT_CACHE_DIR'), suite_import.name)
 
     def _get_import_dir(url, mode):
         """Return directory where the suite will be cloned to"""