Diffstat (limited to 'bitbake/lib/bb/fetch2/__init__.py')
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py | 295
1 file changed, 227 insertions, 68 deletions
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 666cc1306a..5bf2c4b8cf 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -113,7 +113,7 @@ class MissingParameterError(BBFetchException):
         self.args = (missing, url)
 
 class ParameterError(BBFetchException):
-    """Exception raised when a url cannot be proccessed due to invalid parameters."""
+    """Exception raised when a url cannot be processed due to invalid parameters."""
     def __init__(self, message, url):
         msg = "URL: '%s' has invalid parameters. %s" % (url, message)
         self.url = url
@@ -182,7 +182,7 @@ class URI(object):
     Some notes about relative URIs: while it's specified that
     a URI beginning with <scheme>:// should either be directly
     followed by a hostname or a /, the old URI handling of the
-    fetch2 library did not comform to this. Therefore, this URI
+    fetch2 library did not conform to this. Therefore, this URI
     class has some kludges to make sure that URIs are parsed in
     a way comforming to bitbake's current usage. This URI class
     supports the following:
@@ -199,7 +199,7 @@ class URI(object):
      file://hostname/absolute/path.diff (would be IETF compliant)
 
     Note that the last case only applies to a list of
-    "whitelisted" schemes (currently only file://), that requires
+    explicitly allowed schemes (currently only file://), that requires
     its URIs to not have a network location.
     """
@@ -290,12 +290,12 @@ class URI(object):
 
     def _param_str_split(self, string, elmdelim, kvdelim="="):
         ret = collections.OrderedDict()
-        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
+        for k, v in [x.split(kvdelim, 1) if kvdelim in x else (x, None) for x in string.split(elmdelim) if x]:
             ret[k] = v
         return ret
 
     def _param_str_join(self, dict_, elmdelim, kvdelim="="):
-        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
+        return elmdelim.join([kvdelim.join([k, v]) if v else k for k, v in dict_.items()])
 
     @property
     def hostport(self):
@@ -388,7 +388,7 @@ def decodeurl(url):
             if s:
                 if not '=' in s:
                     raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
-                s1, s2 = s.split('=')
+                s1, s2 = s.split('=', 1)
                 p[s1] = s2
 
     return type, host, urllib.parse.unquote(path), user, pswd, p
@@ -402,24 +402,24 @@ def encodeurl(decoded):
     if not type:
         raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
 
-    url = '%s://' % type
+    url = ['%s://' % type]
     if user and type != "file":
-        url += "%s" % user
+        url.append("%s" % user)
         if pswd:
-            url += ":%s" % pswd
-        url += "@"
+            url.append(":%s" % pswd)
+        url.append("@")
     if host and type != "file":
-        url += "%s" % host
+        url.append("%s" % host)
     if path:
         # Standardise path to ensure comparisons work
        while '//' in path:
             path = path.replace("//", "/")
-        url += "%s" % urllib.parse.quote(path)
+        url.append("%s" % urllib.parse.quote(path))
    if p:
         for parm in p:
-            url += ";%s=%s" % (parm, p[parm])
+            url.append(";%s=%s" % (parm, p[parm]))
 
-    return url
+    return "".join(url)
 
 def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     if not ud.url or not uri_find or not uri_replace:
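The net effect of the parsing changes above: parameter values may now contain '=' characters, URI parameters may appear without a value, and encodeurl() assembles its result with a single join instead of repeated string concatenation. A quick round-trip sketch (assuming a BitBake checkout on sys.path; the URL itself is made up):

    from bb.fetch2 import decodeurl, encodeurl

    # With s.split('=', 1), a value containing '=' survives decoding intact.
    url = "git://example.com/repo.git;branch=main;signature=abc=="
    scheme, host, path, user, pswd, params = decodeurl(url)
    assert params["signature"] == "abc=="

    # encodeurl() now builds a list and joins once; the output is unchanged.
    assert encodeurl([scheme, host, path, user, pswd, params]) == url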
@@ -430,6 +430,7 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
     uri_replace_decoded = list(decodeurl(uri_replace))
     logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
     result_decoded = ['', '', '', '', '', {}]
+    # 0 - type, 1 - host, 2 - path, 3 - user, 4- pswd, 5 - params
     for loc, i in enumerate(uri_find_decoded):
         result_decoded[loc] = uri_decoded[loc]
         regexp = i
@@ -449,6 +450,9 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 for l in replacements:
                     uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                 result_decoded[loc][k] = uri_replace_decoded[loc][k]
+        elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
+            # User/password in the replacement is just a straight replacement
+            result_decoded[loc] = uri_replace_decoded[loc]
         elif (re.match(regexp, uri_decoded[loc])):
             if not uri_replace_decoded[loc]:
                 result_decoded[loc] = ""
@@ -465,10 +469,18 @@ def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
                 basename = os.path.basename(mirrortarball)
                 # Kill parameters, they make no sense for mirror tarballs
                 uri_decoded[5] = {}
+                uri_find_decoded[5] = {}
             elif ud.localpath and ud.method.supports_checksum(ud):
-                basename = os.path.basename(uri_decoded[loc])
-            if basename and not result_decoded[loc].endswith(basename):
-                result_decoded[loc] = os.path.join(result_decoded[loc], basename)
+                basename = os.path.basename(ud.localpath)
+            if basename:
+                uri_basename = os.path.basename(uri_decoded[loc])
+                # Prefix with a slash as a sentinel in case
+                # result_decoded[loc] does not contain one.
+                path = "/" + result_decoded[loc]
+                if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
+                    result_decoded[loc] = path[1:-len(uri_basename)] + basename
+                elif not path.endswith("/" + basename):
+                    result_decoded[loc] = os.path.join(path[1:], basename)
         else:
             return None
     result = encodeurl(result_decoded)
@@ -506,7 +518,7 @@ def fetcher_init(d):
     else:
         raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
 
-    _checksum_cache.init_cache(d)
+    _checksum_cache.init_cache(d.getVar("BB_CACHEDIR"))
 
     for m in methods:
         if hasattr(m, "init"):
@@ -534,7 +546,7 @@ def mirror_from_string(data):
         bb.warn('Invalid mirror data %s, should have paired members.' % data)
     return list(zip(*[iter(mirrors)]*2))
 
-def verify_checksum(ud, d, precomputed={}):
+def verify_checksum(ud, d, precomputed={}, localpath=None, fatal_nochecksum=True):
     """
     verify the MD5 and SHA256 checksum for downloaded src
 
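The sentinel slash in the path hunk above exists so the endswith() test cannot match a partial filename (or misfire when result_decoded[loc] has no directory part at all). The same logic as a standalone sketch, detached from the fetcher and with hypothetical values:

    import os

    def remap_basename(result_path, uri_basename, local_basename):
        # Prefix with "/" so endswith("/" + name) can only match a whole
        # path component, never the tail of a longer filename.
        path = "/" + result_path
        if uri_basename and local_basename != uri_basename and path.endswith("/" + uri_basename):
            return path[1:-len(uri_basename)] + local_basename
        elif not path.endswith("/" + local_basename):
            return os.path.join(path[1:], local_basename)
        return result_path

    # The downloaded file keeps its local name even when a mirror rule
    # rewrote the path portion of the URL:
    print(remap_basename("downloads/file-1.0.tgz", "file-1.0.tgz", "file-1.0.tar.gz"))
    # -> downloads/file-1.0.tar.gz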
""" - if ud.ignore_checksums or not ud.method.supports_checksum(ud): return {} + if localpath is None: + localpath = ud.localpath + def compute_checksum_info(checksum_id): checksum_name = getattr(ud, "%s_name" % checksum_id) if checksum_id in precomputed: checksum_data = precomputed[checksum_id] else: - checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath) + checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(localpath) checksum_expected = getattr(ud, "%s_expected" % checksum_id) @@ -584,17 +598,13 @@ def verify_checksum(ud, d, precomputed={}): checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])] # If no checksum has been provided - if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): + if fatal_nochecksum and ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos): messages = [] strict = d.getVar("BB_STRICT_CHECKSUM") or "0" # If strict checking enabled and neither sum defined, raise error if strict == "1": - messages.append("No checksum specified for '%s', please add at " \ - "least one to the recipe:" % ud.localpath) - messages.extend(checksum_lines) - logger.error("\n".join(messages)) - raise NoChecksumError("Missing SRC_URI checksum", ud.url) + raise NoChecksumError("\n".join(checksum_lines)) bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d) @@ -616,7 +626,7 @@ def verify_checksum(ud, d, precomputed={}): for ci in checksum_infos: if ci["expected"] and ci["expected"] != ci["data"]: messages.append("File: '%s' has %s checksum '%s' when '%s' was " \ - "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"])) + "expected" % (localpath, ci["id"], ci["data"], ci["expected"])) bad_checksum = ci["data"] if bad_checksum: @@ -734,13 +744,16 @@ def subprocess_setup(): # SIGPIPE errors are known issues with gzip/bash signal.signal(signal.SIGPIPE, signal.SIG_DFL) -def get_autorev(d): - # only not cache src rev in autorev case +def mark_recipe_nocache(d): if d.getVar('BB_SRCREV_POLICY') != "cache": d.setVar('BB_DONT_CACHE', '1') + +def get_autorev(d): + mark_recipe_nocache(d) + d.setVar("__BBAUTOREV_SEEN", True) return "AUTOINC" -def get_srcrev(d, method_name='sortable_revision'): +def _get_srcrev(d, method_name='sortable_revision'): """ Return the revision string, usually for use in the version string (PV) of the current package Most packages usually only have one SCM so we just pass on the call. @@ -754,29 +767,34 @@ def get_srcrev(d, method_name='sortable_revision'): that fetcher provides a method with the given name and the same signature as sortable_revision. 
""" + d.setVar("__BBSRCREV_SEEN", "1") recursion = d.getVar("__BBINSRCREV") if recursion: raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI") d.setVar("__BBINSRCREV", True) scms = [] + revs = [] fetcher = Fetch(d.getVar('SRC_URI').split(), d) urldata = fetcher.ud for u in urldata: if urldata[u].method.supports_srcrev(): scms.append(u) - if len(scms) == 0: - raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") + if not scms: + d.delVar("__BBINSRCREV") + return "", revs + if len(scms) == 1 and len(urldata[scms[0]].names) == 1: autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0]) + revs.append(rev) if len(rev) > 10: rev = rev[:10] d.delVar("__BBINSRCREV") if autoinc: - return "AUTOINC+" + rev - return rev + return "AUTOINC+" + rev, revs + return rev, revs # # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT @@ -792,6 +810,7 @@ def get_srcrev(d, method_name='sortable_revision'): ud = urldata[scm] for name in ud.names: autoinc, rev = getattr(ud.method, method_name)(ud, d, name) + revs.append(rev) seenautoinc = seenautoinc or autoinc if len(rev) > 10: rev = rev[:10] @@ -809,7 +828,21 @@ def get_srcrev(d, method_name='sortable_revision'): format = "AUTOINC+" + format d.delVar("__BBINSRCREV") - return format + return format, revs + +def get_hashvalue(d, method_name='sortable_revision'): + pkgv, revs = _get_srcrev(d, method_name=method_name) + return " ".join(revs) + +def get_pkgv_string(d, method_name='sortable_revision'): + pkgv, revs = _get_srcrev(d, method_name=method_name) + return pkgv + +def get_srcrev(d, method_name='sortable_revision'): + pkgv, revs = _get_srcrev(d, method_name=method_name) + if not pkgv: + raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI") + return pkgv def localpath(url, d): fetcher = bb.fetch2.Fetch([url], d) @@ -827,6 +860,7 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH', 'ALL_PROXY', 'all_proxy', 'GIT_PROXY_COMMAND', 'GIT_SSH', + 'GIT_SSH_COMMAND', 'GIT_SSL_CAINFO', 'GIT_SMART_HTTP', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', @@ -834,10 +868,28 @@ FETCH_EXPORT_VARS = ['HOME', 'PATH', 'DBUS_SESSION_BUS_ADDRESS', 'P4CONFIG', 'SSL_CERT_FILE', + 'NODE_EXTRA_CA_CERTS', 'AWS_PROFILE', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', - 'AWS_DEFAULT_REGION'] + 'AWS_ROLE_ARN', + 'AWS_WEB_IDENTITY_TOKEN_FILE', + 'AWS_DEFAULT_REGION', + 'AWS_SESSION_TOKEN', + 'GIT_CACHE_PATH', + 'REMOTE_CONTAINERS_IPC', + 'SSL_CERT_DIR'] + +def get_fetcher_environment(d): + newenv = {} + origenv = d.getVar("BB_ORIGENV") + for name in bb.fetch2.FETCH_EXPORT_VARS: + value = d.getVar(name) + if not value and origenv: + value = origenv.getVar(name) + if value: + newenv[name] = value + return newenv def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): """ @@ -891,7 +943,10 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): elif e.stderr: output = "output:\n%s" % e.stderr else: - output = "no output" + if log: + output = "see logfile for output" + else: + output = "no output" error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output) except bb.process.CmdError as e: error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg) @@ -953,6 +1008,7 @@ def build_mirroruris(origud, mirrors, ld): try: newud = FetchData(newuri, ld) + newud.ignore_checksums = True newud.setup_localpath(ld) except bb.fetch2.BBFetchException as e: logger.debug("Mirror fetch failure for url %s 
@@ -1062,7 +1118,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
             logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
             logger.debug(str(e))
             try:
-                ud.method.clean(ud, ld)
+                if ud.method.cleanup_upon_failure():
+                    ud.method.clean(ud, ld)
             except UnboundLocalError:
                 pass
             return False
@@ -1073,6 +1130,8 @@ def try_mirror_url(fetch, origud, ud, ld, check = False):
 
 def ensure_symlink(target, link_name):
     if not os.path.exists(link_name):
+        dirname = os.path.dirname(link_name)
+        bb.utils.mkdirhier(dirname)
         if os.path.islink(link_name):
             # Broken symbolic link
             os.unlink(link_name)
@@ -1185,6 +1244,7 @@ def srcrev_internal_helper(ud, d, name):
     if srcrev == "INVALID" or not srcrev:
         raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
     if srcrev == "AUTOINC":
+        d.setVar("__BBAUTOREV_ACTED_UPON", True)
         srcrev = ud.method.latest_revision(ud, d, name)
 
     return srcrev
@@ -1196,23 +1256,21 @@ def get_checksum_file_list(d):
     SRC_URI as a space-separated string
     """
     fetch = Fetch([], d, cache = False, localonly = True)
-
-    dl_dir = d.getVar('DL_DIR')
     filelist = []
     for u in fetch.urls:
         ud = fetch.ud[u]
-
         if ud and isinstance(ud.method, local.Local):
-            paths = ud.method.localpaths(ud, d)
+            found = False
+            paths = ud.method.localfile_searchpaths(ud, d)
             for f in paths:
                 pth = ud.decodedurl
-                if f.startswith(dl_dir):
-                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
-                    if os.path.exists(f):
-                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
-                    else:
-                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
+                if os.path.exists(f):
+                    found = True
                 filelist.append(f + ":" + str(os.path.exists(f)))
+            if not found:
+                bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
+                            "\nThe following paths were searched:"
+                            "\n%s") % (d.getVar('PN'), os.path.basename(f), '\n'.join(paths)))
 
     return " ".join(filelist)
@@ -1259,18 +1317,13 @@ class FetchData(object):
             if checksum_name in self.parm:
                 checksum_expected = self.parm[checksum_name]
-            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
+            elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az", "crate", "gs"]:
                 checksum_expected = None
             else:
                 checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
 
             setattr(self, "%s_expected" % checksum_id, checksum_expected)
 
-        for checksum_id in CHECKSUM_LIST:
-            configure_checksum(checksum_id)
-
-        self.ignore_checksums = False
-
         self.names = self.parm.get("name",'default').split(',')
 
         self.method = None
@@ -1292,6 +1345,11 @@ class FetchData(object):
         if hasattr(self.method, "urldata_init"):
             self.method.urldata_init(self, d)
 
+        for checksum_id in CHECKSUM_LIST:
+            configure_checksum(checksum_id)
+
+        self.ignore_checksums = False
+
         if "localpath" in self.parm:
             # if user sets localpath for file, use it instead.
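The two lines added to ensure_symlink() above make it safe to call before the link's parent directory exists. A simplified standalone equivalent of the resulting behaviour (plain Python, not the fetcher code itself):

    import os

    def ensure_symlink(target, link_name):
        dirname = os.path.dirname(link_name)
        if dirname:
            os.makedirs(dirname, exist_ok=True)   # new: create parents first
        if os.path.islink(link_name) and not os.path.exists(link_name):
            os.unlink(link_name)                  # replace a broken link
        if not os.path.exists(link_name):
            os.symlink(target, link_name)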
""" + # We cannot compute checksums for None + if urldata.localpath is None: + return False # We cannot compute checksums for directories if os.path.isdir(urldata.localpath): return False @@ -1383,6 +1444,12 @@ class FetchMethod(object): """ return False + def cleanup_upon_failure(self): + """ + When a fetch fails, should clean() be called? + """ + return True + def verify_donestamp(self, ud, d): """ Verify the donestamp file @@ -1450,30 +1517,33 @@ class FetchMethod(object): cmd = None if unpack: + tar_cmd = 'tar --extract --no-same-owner' + if 'striplevel' in urldata.parm: + tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel'] if file.endswith('.tar'): - cmd = 'tar x --no-same-owner -f %s' % file + cmd = '%s -f %s' % (tar_cmd, file) elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'): - cmd = 'tar xz --no-same-owner -f %s' % file + cmd = '%s -z -f %s' % (tar_cmd, file) elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'): - cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file + cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd) elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'): cmd = 'gzip -dc %s > %s' % (file, efile) elif file.endswith('.bz2'): cmd = 'bzip2 -dc %s > %s' % (file, efile) elif file.endswith('.txz') or file.endswith('.tar.xz'): - cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file + cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd) elif file.endswith('.xz'): cmd = 'xz -dc %s > %s' % (file, efile) elif file.endswith('.tar.lz'): - cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file + cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd) elif file.endswith('.lz'): cmd = 'lzip -dc %s > %s' % (file, efile) elif file.endswith('.tar.7z'): - cmd = '7z x -so %s | tar x --no-same-owner -f -' % file + cmd = '7z x -so %s | %s -f -' % (file, tar_cmd) elif file.endswith('.7z'): cmd = '7za x -y %s 1>/dev/null' % file elif file.endswith('.tzst') or file.endswith('.tar.zst'): - cmd = 'zstd --decompress --stdout %s | tar x --no-same-owner -f -' % file + cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd) elif file.endswith('.zst'): cmd = 'zstd --decompress --stdout %s > %s' % (file, efile) elif file.endswith('.zip') or file.endswith('.jar'): @@ -1506,7 +1576,7 @@ class FetchMethod(object): raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url) else: raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) - cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) + cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile) # If 'subdir' param exists, create a dir and use it as destination for unpack cmd if 'subdir' in urldata.parm: @@ -1522,6 +1592,7 @@ class FetchMethod(object): unpackdir = rootdir if not unpack or not cmd: + urldata.unpack_tracer.unpack("file-copy", unpackdir) # If file == dest, then avoid any copies, as we already put the file into dest! 
@@ -1522,6 +1592,7 @@ class FetchMethod(object):
             unpackdir = rootdir
 
         if not unpack or not cmd:
+            urldata.unpack_tracer.unpack("file-copy", unpackdir)
             # If file == dest, then avoid any copies, as we already put the file into dest!
             dest = os.path.join(unpackdir, os.path.basename(file))
             if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
@@ -1536,6 +1607,8 @@ class FetchMethod(object):
                     destdir = urlpath.rsplit("/", 1)[0] + '/'
                     bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                 cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)
+        else:
+            urldata.unpack_tracer.unpack("archive-extract", unpackdir)
 
         if not cmd:
             return
+ """ + return + + class Fetch(object): def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None): if localonly and cache: raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") - if len(urls) == 0: + if not urls: urls = d.getVar("SRC_URI").split() self.urls = urls self.d = d @@ -1647,10 +1769,30 @@ class Fetch(object): if key in urldata_cache: self.ud = urldata_cache[key] + # the unpack_tracer object needs to be made available to possible nested + # Fetch instances (when those are created by gitsm and npmsw fetchers) + # so we set it as a global variable + global unpack_tracer + try: + unpack_tracer + except NameError: + class_path = d.getVar("BB_UNPACK_TRACER_CLASS") + if class_path: + # use user-defined unpack tracer class + import importlib + module_name, _, class_name = class_path.rpartition(".") + module = importlib.import_module(module_name) + class_ = getattr(module, class_name) + unpack_tracer = class_() + else: + # fall back to the dummy/abstract class + unpack_tracer = DummyUnpackTracer() + for url in urls: if url not in self.ud: try: self.ud[url] = FetchData(url, d, localonly) + self.ud[url].unpack_tracer = unpack_tracer except NonLocalMethod: if localonly: self.ud[url] = None @@ -1689,6 +1831,7 @@ class Fetch(object): network = self.d.getVar("BB_NO_NETWORK") premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY")) + checksum_missing_messages = [] for u in urls: ud = self.ud[u] ud.setup_localpath(self.d) @@ -1700,7 +1843,6 @@ class Fetch(object): try: self.d.setVar("BB_NO_NETWORK", network) - if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d): done = True elif m.try_premirror(ud, self.d): @@ -1753,7 +1895,7 @@ class Fetch(object): logger.debug(str(e)) firsterr = e # Remove any incomplete fetch - if not verified_stamp: + if not verified_stamp and m.cleanup_upon_failure(): m.clean(ud, self.d) logger.debug("Trying MIRRORS") mirrors = mirror_from_string(self.d.getVar('MIRRORS')) @@ -1772,13 +1914,20 @@ class Fetch(object): raise ChecksumError("Stale Error Detected") except BBFetchException as e: - if isinstance(e, ChecksumError): + if isinstance(e, NoChecksumError): + (message, _) = e.args + checksum_missing_messages.append(message) + continue + elif isinstance(e, ChecksumError): logger.error("Checksum failure fetching %s" % u) raise finally: if ud.lockfile: bb.utils.unlockfile(lf) + if checksum_missing_messages: + logger.error("Missing SRC_URI checksum, please add those to the recipe: \n%s", "\n".join(checksum_missing_messages)) + raise BBFetchException("There was some missing checksums in the recipe") def checkstatus(self, urls=None): """ @@ -1809,7 +1958,7 @@ class Fetch(object): ret = m.try_mirrors(self, ud, self.d, mirrors, True) if not ret: - raise FetchError("URL %s doesn't work" % u, u) + raise FetchError("URL doesn't work", u) def unpack(self, root, urls=None): """ @@ -1819,6 +1968,8 @@ class Fetch(object): if not urls: urls = self.urls + unpack_tracer.start(root, self.ud, self.d) + for u in urls: ud = self.ud[u] ud.setup_localpath(self.d) @@ -1826,11 +1977,15 @@ class Fetch(object): if ud.lockfile: lf = bb.utils.lockfile(ud.lockfile) + unpack_tracer.start_url(u) ud.method.unpack(ud, root, self.d) + unpack_tracer.finish_url(u) if ud.lockfile: bb.utils.unlockfile(lf) + unpack_tracer.complete() + def clean(self, urls=None): """ Clean files that the fetcher gets or places @@ -1931,6 +2086,8 @@ from . import clearcase from . import npm from . import npmsw from . import az +from . 
@@ -1931,6 +2086,8 @@ from . import clearcase
 from . import npm
 from . import npmsw
 from . import az
+from . import crate
+from . import gcp
 
 methods.append(local.Local())
 methods.append(wget.Wget())
@@ -1951,3 +2108,5 @@ methods.append(clearcase.ClearCase())
 methods.append(npm.Npm())
 methods.append(npmsw.NpmShrinkWrap())
 methods.append(az.Az())
+methods.append(crate.Crate())
+methods.append(gcp.GCP())
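With crate and gcp registered alongside the existing methods, recipes can reference Rust crates and Google Cloud Storage objects directly. Illustrative SRC_URI forms (the bucket and crate shown are examples; see the two new fetcher modules for the exact syntax and supported parameters):

    SRC_URI = "crate://crates.io/glob/0.3.0"
    SRC_URI = "gs://my-bucket/path/to/source.tar.gz"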