Diffstat (limited to 'meta/lib/oe')
-rw-r--r--  meta/lib/oe/cve_check.py                      67
-rw-r--r--  meta/lib/oe/go.py                             32
-rw-r--r--  meta/lib/oe/npm_registry.py                  169
-rw-r--r--  meta/lib/oe/overlayfs.py                       6
-rw-r--r--  meta/lib/oe/package_manager/__init__.py        5
-rw-r--r--  meta/lib/oe/package_manager/deb/__init__.py    8
-rw-r--r--  meta/lib/oe/package_manager/ipk/__init__.py   25
-rw-r--r--  meta/lib/oe/package_manager/ipk/manifest.py    2
-rw-r--r--  meta/lib/oe/package_manager/rpm/__init__.py   33
-rw-r--r--  meta/lib/oe/package_manager/rpm/rootfs.py      2
-rw-r--r--  meta/lib/oe/package_manager/rpm/sdk.py         3
-rw-r--r--  meta/lib/oe/patch.py                           8
-rw-r--r--  meta/lib/oe/recipeutils.py                     9
-rw-r--r--  meta/lib/oe/reproducible.py                    4
-rw-r--r--  meta/lib/oe/rootfs.py                         24
-rw-r--r--  meta/lib/oe/sbom.py                            4
-rw-r--r--  meta/lib/oe/sdk.py                             2
-rw-r--r--  meta/lib/oe/spdx.py                            2
-rw-r--r--  meta/lib/oe/sstatesig.py                      25
-rw-r--r--  meta/lib/oe/terminal.py                        4
20 files changed, 369 insertions(+), 65 deletions(-)
diff --git a/meta/lib/oe/cve_check.py b/meta/lib/oe/cve_check.py
index aa06497727..ca2b393116 100644
--- a/meta/lib/oe/cve_check.py
+++ b/meta/lib/oe/cve_check.py
@@ -73,33 +73,33 @@ def get_patched_cves(d):
import re
import oe.patch
- pn = d.getVar("PN")
- cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
+ cve_match = re.compile(r"CVE:( CVE-\d{4}-\d+)+")
# Matches the last "CVE-YYYY-ID" in the file name, also if written
# in lowercase. Possible to have multiple CVE IDs in a single
# file name, but only the last one will be detected from the file name.
# However, patch files contents addressing multiple CVE IDs are supported
# (cve_match regular expression)
-
- cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
+ cve_file_name_match = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE)
patched_cves = set()
- bb.debug(2, "Looking for patches that solves CVEs for %s" % pn)
- for url in oe.patch.src_patches(d):
+ patches = oe.patch.src_patches(d)
+ bb.debug(2, "Scanning %d patches for CVEs" % len(patches))
+ for url in patches:
patch_file = bb.fetch.decodeurl(url)[2]
- # Remote compressed patches may not be unpacked, so silently ignore them
- if not os.path.isfile(patch_file):
- bb.warn("%s does not exist, cannot extract CVE list" % patch_file)
- continue
-
# Check patch file name for CVE ID
fname_match = cve_file_name_match.search(patch_file)
if fname_match:
cve = fname_match.group(1).upper()
patched_cves.add(cve)
- bb.debug(2, "Found CVE %s from patch file name %s" % (cve, patch_file))
+ bb.debug(2, "Found %s from patch file name %s" % (cve, patch_file))
+
+ # Remote patches won't be present and compressed patches won't be
+ # unpacked, so say we're not scanning them
+ if not os.path.isfile(patch_file):
+ bb.note("%s is remote or compressed, not scanning content" % patch_file)
+ continue
with open(patch_file, "r", encoding="utf-8") as f:
try:
@@ -143,7 +143,7 @@ def get_cpe_ids(cve_product, version):
else:
vendor = "*"
- cpe_id = f'cpe:2.3:a:{vendor}:{product}:{version}:*:*:*:*:*:*:*'
+ cpe_id = 'cpe:2.3:a:{}:{}:{}:*:*:*:*:*:*:*'.format(vendor, product, version)
cpe_ids.append(cpe_id)
return cpe_ids
@@ -159,7 +159,7 @@ def cve_check_merge_jsons(output, data):
for product in output["package"]:
if product["name"] == data["package"][0]["name"]:
- bb.error("Error adding the same package twice")
+ bb.error("Error adding the same package %s twice" % product["name"])
return
output["package"].append(data["package"][0])
@@ -173,3 +173,42 @@ def update_symlinks(target_path, link_path):
if os.path.exists(os.path.realpath(link_path)):
os.remove(link_path)
os.symlink(os.path.basename(target_path), link_path)
+
+
+def convert_cve_version(version):
+ """
+ This function converts from CVE format to Yocto version format.
+ eg 8.3_p1 -> 8.3p1, 6.2_rc1 -> 6.2-rc1
+
+ Unless it is redefined using CVE_VERSION in the recipe,
+ cve_check uses the version in the name of the recipe (${PV})
+ to check vulnerabilities against a CVE in the database downloaded from NVD.
+
+ When the version has an update, i.e.
+ "p1" in OpenSSH 8.3p1,
+ "-rc1" in linux kernel 6.2-rc1,
+ the database stores the version as version_update (8.3_p1, 6.2_rc1).
+ Therefore, we must transform this version before comparing to the
+ recipe version.
+
+ In this case, the parameter of the function is 8.3_p1.
+ If the version uses the Release Candidate format, "rc",
+ this function replaces the '_' by '-'.
+ If the version uses the Update format, "p",
+ this function removes the '_' completely.
+ """
+ import re
+
+ matches = re.match('^([0-9.]+)_((p|rc)[0-9]+)$', version)
+
+ if not matches:
+ return version
+
+ version = matches.group(1)
+ update = matches.group(2)
+
+ if matches.group(3) == "rc":
+ return version + '-' + update
+
+ return version + update
+
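For reference, a minimal usage sketch of the helpers touched above (assumes oe.cve_check is importable, e.g. from a BitBake Python environment); the patch file name in the regex demo is made up:

import re
from oe.cve_check import convert_cve_version

# The case-insensitive file-name match keeps only the last CVE ID in a name,
# as the comment in get_patched_cves() notes:
m = re.compile(r".*(CVE-\d{4}-\d+)", re.IGNORECASE).search("fix-cve-2023-1234-and-CVE-2023-5678.patch")
print(m.group(1).upper())                           # CVE-2023-5678

# convert_cve_version() behaviour, per its docstring:
assert convert_cve_version("8.3_p1") == "8.3p1"     # update suffix: '_' dropped
assert convert_cve_version("6.2_rc1") == "6.2-rc1"  # release candidate: '_' -> '-'
assert convert_cve_version("1.2.3") == "1.2.3"      # no suffix: unchanged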
diff --git a/meta/lib/oe/go.py b/meta/lib/oe/go.py
new file mode 100644
index 0000000000..9996057f12
--- /dev/null
+++ b/meta/lib/oe/go.py
@@ -0,0 +1,32 @@
+#
+# Copyright OpenEmbedded Contributors
+#
+# SPDX-License-Identifier: MIT
+#
+
+import re
+
+def map_arch(a):
+ if re.match('i.86', a):
+ return '386'
+ elif a == 'x86_64':
+ return 'amd64'
+ elif re.match('arm.*', a):
+ return 'arm'
+ elif re.match('aarch64.*', a):
+ return 'arm64'
+ elif re.match('mips64el.*', a):
+ return 'mips64le'
+ elif re.match('mips64.*', a):
+ return 'mips64'
+ elif a == 'mips':
+ return 'mips'
+ elif a == 'mipsel':
+ return 'mipsle'
+ elif re.match('p(pc|owerpc)(64le)', a):
+ return 'ppc64le'
+ elif re.match('p(pc|owerpc)(64)', a):
+ return 'ppc64'
+ elif a == 'riscv64':
+ return 'riscv64'
+ return ''
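As an aside, a small usage sketch of the new map_arch() helper, which translates OpenEmbedded architecture names into Go's GOARCH values and returns an empty string for anything the mapping above does not cover (assumes oe.go is on the Python path):

# Minimal sketch; outputs follow the mapping above.
from oe.go import map_arch

print(map_arch("x86_64"))    # amd64
print(map_arch("i686"))      # 386
print(map_arch("aarch64"))   # arm64
print(map_arch("mips64el"))  # mips64le
print(map_arch("sparc64"))   # '' (not covered by the mapping)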
diff --git a/meta/lib/oe/npm_registry.py b/meta/lib/oe/npm_registry.py
new file mode 100644
index 0000000000..3f70e4f5c8
--- /dev/null
+++ b/meta/lib/oe/npm_registry.py
@@ -0,0 +1,169 @@
+import bb
+import json
+import subprocess
+
+_ALWAYS_SAFE = frozenset('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+ 'abcdefghijklmnopqrstuvwxyz'
+ '0123456789'
+ '_.-~()')
+
+MISSING_OK = object()
+
+REGISTRY = "https://registry.npmjs.org"
+
+# we can not use urllib.parse here because npm expects lowercase
+# hex-chars but urllib generates uppercase ones
+def uri_quote(s, safe = '/'):
+ res = ""
+ safe_set = set(safe)
+ for c in s:
+ if c in _ALWAYS_SAFE or c in safe_set:
+ res += c
+ else:
+ res += '%%%02x' % ord(c)
+ return res
+
+class PackageJson:
+ def __init__(self, spec):
+ self.__spec = spec
+
+ @property
+ def name(self):
+ return self.__spec['name']
+
+ @property
+ def version(self):
+ return self.__spec['version']
+
+ @property
+ def empty_manifest(self):
+ return {
+ 'name': self.name,
+ 'description': self.__spec.get('description', ''),
+ 'versions': {},
+ }
+
+ def base_filename(self):
+ return uri_quote(self.name, safe = '@')
+
+ def as_manifest_entry(self, tarball_uri):
+ res = {}
+
+ ## NOTE: 'npm install' requires more than basic meta information;
+ ## e.g. it takes 'bin' from this manifest entry but not the actual
+ ## 'package.json'
+ for (idx,dflt) in [('name', None),
+ ('description', ""),
+ ('version', None),
+ ('bin', MISSING_OK),
+ ('man', MISSING_OK),
+ ('scripts', MISSING_OK),
+ ('directories', MISSING_OK),
+ ('dependencies', MISSING_OK),
+ ('devDependencies', MISSING_OK),
+ ('optionalDependencies', MISSING_OK),
+ ('license', "unknown")]:
+ if idx in self.__spec:
+ res[idx] = self.__spec[idx]
+ elif dflt == MISSING_OK:
+ pass
+ elif dflt != None:
+ res[idx] = dflt
+ else:
+ raise Exception("%s-%s: missing key %s" % (self.name,
+ self.version,
+ idx))
+
+ res['dist'] = {
+ 'tarball': tarball_uri,
+ }
+
+ return res
+
+class ManifestImpl:
+ def __init__(self, base_fname, spec):
+ self.__base = base_fname
+ self.__spec = spec
+
+ def load(self):
+ try:
+ with open(self.filename, "r") as f:
+ res = json.load(f)
+ except IOError:
+ res = self.__spec.empty_manifest
+
+ return res
+
+ def save(self, meta):
+ with open(self.filename, "w") as f:
+ json.dump(meta, f, indent = 2)
+
+ @property
+ def filename(self):
+ return self.__base + ".meta"
+
+class Manifest:
+ def __init__(self, base_fname, spec):
+ self.__base = base_fname
+ self.__spec = spec
+ self.__lockf = None
+ self.__impl = None
+
+ def __enter__(self):
+ self.__lockf = bb.utils.lockfile(self.__base + ".lock")
+ self.__impl = ManifestImpl(self.__base, self.__spec)
+ return self.__impl
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ bb.utils.unlockfile(self.__lockf)
+
+class NpmCache:
+ def __init__(self, cache):
+ self.__cache = cache
+
+ @property
+ def path(self):
+ return self.__cache
+
+ def run(self, type, key, fname):
+ subprocess.run(['oe-npm-cache', self.__cache, type, key, fname],
+ check = True)
+
+class NpmRegistry:
+ def __init__(self, path, cache):
+ self.__path = path
+ self.__cache = NpmCache(cache + '/_cacache')
+ bb.utils.mkdirhier(self.__path)
+ bb.utils.mkdirhier(self.__cache.path)
+
+ @staticmethod
+ ## This function is critical and must match nodejs expectations
+ def _meta_uri(spec):
+ return REGISTRY + '/' + uri_quote(spec.name, safe = '@')
+
+ @staticmethod
+ ## Exact return value does not matter; just make it look like a
+ ## usual registry url
+ def _tarball_uri(spec):
+ return '%s/%s/-/%s-%s.tgz' % (REGISTRY,
+ uri_quote(spec.name, safe = '@'),
+ uri_quote(spec.name, safe = '@/'),
+ spec.version)
+
+ def add_pkg(self, tarball, pkg_json):
+ pkg_json = PackageJson(pkg_json)
+ base = os.path.join(self.__path, pkg_json.base_filename())
+
+ with Manifest(base, pkg_json) as manifest:
+ meta = manifest.load()
+ tarball_uri = self._tarball_uri(pkg_json)
+
+ meta['versions'][pkg_json.version] = pkg_json.as_manifest_entry(tarball_uri)
+
+ manifest.save(meta)
+
+ ## Cache entries are a little bit dependent on the nodejs
+ ## version; version specific cache implementation must
+ ## mitigate differences
+ self.__cache.run('meta', self._meta_uri(pkg_json), manifest.filename);
+ self.__cache.run('tgz', tarball_uri, tarball);
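A hedged sketch of how the new oe.npm_registry pieces fit together, assuming a BitBake environment (the module imports bb at load time); the package metadata and tarball URI below are made up, and a real caller would feed in an actual package.json:

# Illustrative only -- the spec dict and URI are hypothetical.
from oe.npm_registry import PackageJson, uri_quote

spec = {"name": "@scope/pkg", "version": "1.0.0", "license": "MIT"}
pkg = PackageJson(spec)

print(pkg.base_filename())   # '@scope%2fpkg' -- lowercase percent-escapes, as npm expects
entry = pkg.as_manifest_entry("https://example.invalid/pkg-1.0.0.tgz")  # placeholder URI
print(entry["license"], entry["dist"]["tarball"])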
diff --git a/meta/lib/oe/overlayfs.py b/meta/lib/oe/overlayfs.py
index b5d5e88e80..590c0de58a 100644
--- a/meta/lib/oe/overlayfs.py
+++ b/meta/lib/oe/overlayfs.py
@@ -38,7 +38,11 @@ def unitFileList(d):
bb.fatal("Missing required mount point for OVERLAYFS_MOUNT_POINT[%s] in your MACHINE configuration" % mountPoint)
for mountPoint in overlayMountPoints:
- for path in d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint).split():
+ mountPointList = d.getVarFlag('OVERLAYFS_WRITABLE_PATHS', mountPoint)
+ if not mountPointList:
+ bb.debug(1, "No mount points defined for %s flag, don't add to file list", mountPoint)
+ continue
+ for path in mountPointList.split():
fileList.append(mountUnitName(path))
fileList.append(helperUnitName(path))
diff --git a/meta/lib/oe/package_manager/__init__.py b/meta/lib/oe/package_manager/__init__.py
index 80bc1a6bc6..6615258470 100644
--- a/meta/lib/oe/package_manager/__init__.py
+++ b/meta/lib/oe/package_manager/__init__.py
@@ -467,7 +467,10 @@ def create_packages_dir(d, subrepo_dir, deploydir, taskname, filterbydependencie
# Detect bitbake -b usage
nodeps = d.getVar("BB_LIMITEDDEPS") or False
if nodeps or not filterbydependencies:
- oe.path.symlink(deploydir, subrepo_dir, True)
+ for arch in d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").split() + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS").replace("-", "_").split():
+ target = os.path.join(deploydir + "/" + arch)
+ if os.path.exists(target):
+ oe.path.symlink(target, subrepo_dir + "/" + arch, True)
return
start = None
diff --git a/meta/lib/oe/package_manager/deb/__init__.py b/meta/lib/oe/package_manager/deb/__init__.py
index 86ddb130ad..910f217b62 100644
--- a/meta/lib/oe/package_manager/deb/__init__.py
+++ b/meta/lib/oe/package_manager/deb/__init__.py
@@ -80,15 +80,15 @@ class DpkgIndexer(Indexer):
return
oe.utils.multiprocess_launch(create_index, index_cmds, self.d)
- if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1':
- signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True))
+ if self.d.getVar('PACKAGE_FEED_SIGN') == '1':
+ signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND'))
else:
signer = None
if signer:
for f in index_sign_files:
signer.detach_sign(f,
- self.d.getVar('PACKAGE_FEED_GPG_NAME', True),
- self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True),
+ self.d.getVar('PACKAGE_FEED_GPG_NAME'),
+ self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'),
output_suffix="gpg",
use_sha256=True)
diff --git a/meta/lib/oe/package_manager/ipk/__init__.py b/meta/lib/oe/package_manager/ipk/__init__.py
index 4cd3963111..fd61340087 100644
--- a/meta/lib/oe/package_manager/ipk/__init__.py
+++ b/meta/lib/oe/package_manager/ipk/__init__.py
@@ -102,12 +102,14 @@ class OpkgDpkgPM(PackageManager):
This method extracts the common parts for Opkg and Dpkg
"""
- try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
- except subprocess.CalledProcessError as e:
+ proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
+ if proc.returncode:
bb.fatal("Unable to list available packages. Command '%s' "
- "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
- return opkg_query(output)
+ "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
+ elif proc.stderr:
+ bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
+
+ return opkg_query(proc.stdout)
def extract(self, pkg, pkg_info):
"""
@@ -243,7 +245,7 @@ class OpkgPM(OpkgDpkgPM):
"""
if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "":
for arch in self.pkg_archs.split():
- cfg_file_name = os.path.join(self.target_rootfs,
+ cfg_file_name = oe.path.join(self.target_rootfs,
self.d.getVar("sysconfdir"),
"opkg",
"local-%s-feed.conf" % arch)
@@ -443,15 +445,16 @@ class OpkgPM(OpkgDpkgPM):
cmd = "%s %s --noaction install %s " % (self.opkg_cmd,
opkg_args,
' '.join(pkgs))
- try:
- output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
- except subprocess.CalledProcessError as e:
+ proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
+ if proc.returncode:
bb.fatal("Unable to dummy install packages. Command '%s' "
- "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8")))
+ "returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
+ elif proc.stderr:
+ bb.note("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
bb.utils.remove(temp_rootfs, True)
- return output
+ return proc.stdout
def backup_packaging_data(self):
# Save the opkglib for increment ipk image generation
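Stripped of the opkg specifics, the error-handling pattern that replaces subprocess.check_output() in both hunks above looks roughly like this; stderr is reported separately instead of being mixed into the output the caller parses:

# Minimal sketch of the pattern, not the actual class code.
import subprocess

def run_query(cmd):
    proc = subprocess.run(cmd, capture_output=True, encoding="utf-8", shell=True)
    if proc.returncode:
        raise RuntimeError("Command '%s' returned %d:\n%s" % (cmd, proc.returncode, proc.stderr))
    elif proc.stderr:
        print("Command '%s' returned stderr: %s" % (cmd, proc.stderr))
    return proc.stdout  # clean stdout for the caller to parse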
diff --git a/meta/lib/oe/package_manager/ipk/manifest.py b/meta/lib/oe/package_manager/ipk/manifest.py
index ae451c5c70..22669f97c0 100644
--- a/meta/lib/oe/package_manager/ipk/manifest.py
+++ b/meta/lib/oe/package_manager/ipk/manifest.py
@@ -62,7 +62,7 @@ class PkgManifest(Manifest):
if len(pkgs_to_install) == 0:
return
- output = pm.dummy_install(pkgs_to_install).decode('utf-8')
+ output = pm.dummy_install(pkgs_to_install)
with open(self.full_manifest, 'w+') as manifest:
pkg_re = re.compile('^Installing ([^ ]+) [^ ].*')
diff --git a/meta/lib/oe/package_manager/rpm/__init__.py b/meta/lib/oe/package_manager/rpm/__init__.py
index b392581069..97ef387f3b 100644
--- a/meta/lib/oe/package_manager/rpm/__init__.py
+++ b/meta/lib/oe/package_manager/rpm/__init__.py
@@ -96,11 +96,15 @@ class RpmPM(PackageManager):
archs = ["sdk_provides_dummy_target"] + archs
confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/")
bb.utils.mkdirhier(confdir)
- open(confdir + "arch", 'w').write(":".join(archs))
+ with open(confdir + "arch", 'w') as f:
+ f.write(":".join(archs))
+
distro_codename = self.d.getVar('DISTRO_CODENAME')
- open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '')
+ with open(confdir + "releasever", 'w') as f:
+ f.write(distro_codename if distro_codename is not None else '')
- open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("")
+ with open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w') as f:
+ f.write("")
def _configure_rpm(self):
@@ -110,14 +114,17 @@ class RpmPM(PackageManager):
platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/")
rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/")
bb.utils.mkdirhier(platformconfdir)
- open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch)
+ with open(platformconfdir + "platform", 'w') as f:
+ f.write("%s-pc-linux" % self.primary_arch)
with open(rpmrcconfdir + "rpmrc", 'w') as f:
f.write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch))
f.write("buildarch_compat: %s: noarch\n" % self.primary_arch)
- open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n")
+ with open(platformconfdir + "macros", 'w') as f:
+ f.write("%_transaction_color 7\n")
if self.d.getVar('RPM_PREFER_ELF_ARCH'):
- open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
+ with open(platformconfdir + "macros", 'a') as f:
+ f.write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH')))
if self.d.getVar('RPM_SIGN_PACKAGES') == '1':
signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND'))
@@ -164,13 +171,13 @@ class RpmPM(PackageManager):
repo_uri = uri + "/" + arch
repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/"))
repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/"))
- open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write(
- "[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
+ with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a') as f:
+ f.write("[%s]\nname=%s\nbaseurl=%s\n%s\n" % (repo_id, repo_name, repo_uri, gpg_opts))
else:
repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/"))
repo_uri = uri
- open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write(
- "[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
+ with open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w') as f:
+ f.write("[%s]\nname=%s\nbaseurl=%s\n%s" % (repo_base, repo_name, repo_uri, gpg_opts))
def _prepare_pkg_transaction(self):
os.environ['D'] = self.target_rootfs
@@ -329,7 +336,8 @@ class RpmPM(PackageManager):
return e.output.decode("utf-8")
def dump_install_solution(self, pkgs):
- open(self.solution_manifest, 'w').write(" ".join(pkgs))
+ with open(self.solution_manifest, 'w') as f:
+ f.write(" ".join(pkgs))
return pkgs
def load_old_install_solution(self):
@@ -363,7 +371,8 @@ class RpmPM(PackageManager):
bb.utils.mkdirhier(target_path)
num = self._script_num_prefix(target_path)
saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg))
- open(saved_script_name, 'w').write(output)
+ with open(saved_script_name, 'w') as f:
+ f.write(output)
os.chmod(saved_script_name, 0o755)
def _handle_intercept_failure(self, registered_pkgs):
diff --git a/meta/lib/oe/package_manager/rpm/rootfs.py b/meta/lib/oe/package_manager/rpm/rootfs.py
index 00d07cd9cc..a120092b83 100644
--- a/meta/lib/oe/package_manager/rpm/rootfs.py
+++ b/meta/lib/oe/package_manager/rpm/rootfs.py
@@ -108,7 +108,7 @@ class PkgRootfs(Rootfs):
if self.progress_reporter:
self.progress_reporter.next_stage()
- self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
+ self._setup_dbg_rootfs(['/etc/rpm', '/etc/rpmrc', '/etc/dnf', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf'])
execute_pre_post_process(self.d, rpm_post_process_cmds)
diff --git a/meta/lib/oe/package_manager/rpm/sdk.py b/meta/lib/oe/package_manager/rpm/sdk.py
index c5f232431f..04dccf49d7 100644
--- a/meta/lib/oe/package_manager/rpm/sdk.py
+++ b/meta/lib/oe/package_manager/rpm/sdk.py
@@ -110,5 +110,6 @@ class PkgSdk(Sdk):
for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")):
self.movefile(f, native_sysconf_dir)
for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")):
- self.movefile(f, native_sysconf_dir)
+ self.mkdirhier(native_sysconf_dir + "/dnf")
+ self.movefile(f, native_sysconf_dir + "/dnf")
self.remove(os.path.join(self.sdk_output, "etc"), True)
diff --git a/meta/lib/oe/patch.py b/meta/lib/oe/patch.py
index 95b915a6ab..4ec9caed45 100644
--- a/meta/lib/oe/patch.py
+++ b/meta/lib/oe/patch.py
@@ -299,10 +299,10 @@ class GitApplyTree(PatchTree):
PatchTree.__init__(self, dir, d)
self.commituser = d.getVar('PATCH_GIT_USER_NAME')
self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL')
- if not self._isInitialized():
+ if not self._isInitialized(d):
self._initRepo()
- def _isInitialized(self):
+ def _isInitialized(self, d):
cmd = "git rev-parse --show-toplevel"
try:
output = runcmd(cmd.split(), self.dir).strip()
@@ -310,8 +310,8 @@ class GitApplyTree(PatchTree):
## runcmd returned non-zero which most likely means 128
## Not a git directory
return False
- ## Make sure repo is in builddir to not break top-level git repos
- return os.path.samefile(output, self.dir)
+ ## Make sure repo is in builddir to not break top-level git repos, or under workdir
+ return os.path.samefile(output, self.dir) or oe.path.is_path_parent(d.getVar('WORKDIR'), output)
def _initRepo(self):
runcmd("git init".split(), self.dir)
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 872ff97b89..b04992c66d 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -666,7 +666,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
return (appendpath, pathok)
-def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None):
+def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None):
"""
Writes a bbappend file for a recipe
Parameters:
@@ -696,6 +696,9 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
redirect_output:
If specified, redirects writing the output file to the
specified directory (for dry-run purposes)
+ params:
+ Parameters to use when adding entries to SRC_URI. If specified,
+ should be a list of dicts with the same length as srcfiles.
"""
if not removevalues:
@@ -762,12 +765,14 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
copyfiles = {}
if srcfiles:
instfunclines = []
- for newfile, origsrcfile in srcfiles.items():
+ for i, (newfile, origsrcfile) in enumerate(srcfiles.items()):
srcfile = origsrcfile
srcurientry = None
if not srcfile:
srcfile = os.path.basename(newfile)
srcurientry = 'file://%s' % srcfile
+ if params and params[i]:
+ srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
# Double-check it's not there already
# FIXME do we care if the entry is added by another bbappend that might go away?
if not srcurientry in rd.getVar('SRC_URI').split():
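A hedged example of the new params argument to bbappend_recipe(); the file names and SRC_URI parameters are hypothetical, and rd/destlayerdir would come from the caller (e.g. devtool via tinfoil.parse_recipe()):

# Illustrative only -- values are made up; 'rd' is a parsed recipe datastore,
# 'destlayerdir' the layer the bbappend should be written into.
from oe.recipeutils import bbappend_recipe

srcfiles = {"0001-fix.patch": None, "defaults.conf": None}
params = [
    {"striplevel": "2"},   # written as file://0001-fix.patch;striplevel=2
    None,                  # no extra parameters for defaults.conf
]
bbappend_recipe(rd, destlayerdir, srcfiles, params=params)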
diff --git a/meta/lib/oe/reproducible.py b/meta/lib/oe/reproducible.py
index 35b8be6d08..768fd4f19c 100644
--- a/meta/lib/oe/reproducible.py
+++ b/meta/lib/oe/reproducible.py
@@ -113,7 +113,8 @@ def get_source_date_epoch_from_git(d, sourcedir):
return None
bb.debug(1, "git repository: %s" % gitpath)
- p = subprocess.run(['git', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'], check=True, stdout=subprocess.PIPE)
+ p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath, 'log', '-1', '--pretty=%ct'],
+ check=True, stdout=subprocess.PIPE)
return int(p.stdout.decode('utf-8'))
def get_source_date_epoch_from_youngest_file(d, sourcedir):
@@ -152,7 +153,6 @@ def fixed_source_date_epoch(d):
def get_source_date_epoch(d, sourcedir):
return (
get_source_date_epoch_from_git(d, sourcedir) or
- get_source_date_epoch_from_known_files(d, sourcedir) or
get_source_date_epoch_from_youngest_file(d, sourcedir) or
fixed_source_date_epoch(d) # Last resort
)
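The extra '-c log.showSignature=false' matters because, when signature display is enabled in the user's git config, 'git log --pretty=%ct' prepends gpg output and the int() conversion fails. A minimal sketch of the hardened call:

# Minimal sketch; 'gitpath' is a .git directory discovered as in the code above.
import subprocess

def committer_epoch(gitpath):
    # Force signature display off so stdout is only the %ct timestamp.
    p = subprocess.run(['git', '-c', 'log.showSignature=false', '--git-dir', gitpath,
                        'log', '-1', '--pretty=%ct'],
                       check=True, stdout=subprocess.PIPE)
    return int(p.stdout.decode('utf-8'))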
diff --git a/meta/lib/oe/rootfs.py b/meta/lib/oe/rootfs.py
index 9e6b411fb6..2824d4f037 100644
--- a/meta/lib/oe/rootfs.py
+++ b/meta/lib/oe/rootfs.py
@@ -104,7 +104,7 @@ class Rootfs(object, metaclass=ABCMeta):
def _cleanup(self):
pass
- def _setup_dbg_rootfs(self, dirs):
+ def _setup_dbg_rootfs(self, package_paths):
gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0'
if gen_debugfs != '1':
return
@@ -120,11 +120,12 @@ class Rootfs(object, metaclass=ABCMeta):
bb.utils.mkdirhier(self.image_rootfs)
bb.note(" Copying back package database...")
- for dir in dirs:
- if not os.path.isdir(self.image_rootfs + '-orig' + dir):
- continue
- bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(dir))
- shutil.copytree(self.image_rootfs + '-orig' + dir, self.image_rootfs + dir, symlinks=True)
+ for path in package_paths:
+ bb.utils.mkdirhier(self.image_rootfs + os.path.dirname(path))
+ if os.path.isdir(self.image_rootfs + '-orig' + path):
+ shutil.copytree(self.image_rootfs + '-orig' + path, self.image_rootfs + path, symlinks=True)
+ elif os.path.isfile(self.image_rootfs + '-orig' + path):
+ shutil.copyfile(self.image_rootfs + '-orig' + path, self.image_rootfs + path)
# Copy files located in /usr/lib/debug or /usr/src/debug
for dir in ["/usr/lib/debug", "/usr/src/debug"]:
@@ -160,6 +161,13 @@ class Rootfs(object, metaclass=ABCMeta):
bb.note(" Install extra debug packages...")
self.pm.install(extra_debug_pkgs.split(), True)
+ bb.note(" Removing package database...")
+ for path in package_paths:
+ if os.path.isdir(self.image_rootfs + path):
+ shutil.rmtree(self.image_rootfs + path)
+ elif os.path.isfile(self.image_rootfs + path):
+ os.remove(self.image_rootfs + path)
+
bb.note(" Rename debug rootfs...")
try:
shutil.rmtree(self.image_rootfs + '-dbg')
@@ -384,6 +392,10 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None)
def image_list_installed_packages(d, rootfs_dir=None):
+ # There's no rootfs for baremetal images
+ if bb.data.inherits_class('baremetal-image', d):
+ return ""
+
if not rootfs_dir:
rootfs_dir = d.getVar('IMAGE_ROOTFS')
diff --git a/meta/lib/oe/sbom.py b/meta/lib/oe/sbom.py
index 3372f13a9d..52bf51440e 100644
--- a/meta/lib/oe/sbom.py
+++ b/meta/lib/oe/sbom.py
@@ -32,7 +32,7 @@ def get_sdk_spdxid(sdk):
return "SPDXRef-SDK-%s" % sdk
-def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
+def write_doc(d, spdx_doc, subdir, spdx_deploy=None, indent=None):
from pathlib import Path
if spdx_deploy is None:
@@ -41,7 +41,7 @@ def write_doc(d, spdx_doc, subdir, spdx_deploy=None):
dest = spdx_deploy / subdir / (spdx_doc.name + ".spdx.json")
dest.parent.mkdir(exist_ok=True, parents=True)
with dest.open("wb") as f:
- doc_sha1 = spdx_doc.to_json(f, sort_keys=True)
+ doc_sha1 = spdx_doc.to_json(f, sort_keys=True, indent=indent)
l = spdx_deploy / "by-namespace" / spdx_doc.documentNamespace.replace("/", "_")
l.parent.mkdir(exist_ok=True, parents=True)
diff --git a/meta/lib/oe/sdk.py b/meta/lib/oe/sdk.py
index 27347667e8..2383bd58b7 100644
--- a/meta/lib/oe/sdk.py
+++ b/meta/lib/oe/sdk.py
@@ -68,7 +68,7 @@ class Sdk(object, metaclass=ABCMeta):
#FIXME: using umbrella exc catching because bb.utils method raises it
except Exception as e:
bb.debug(1, "printing the stack trace\n %s" %traceback.format_exc())
- bb.error("unable to place %s in final SDK location" % sourcefile)
+ bb.fatal("unable to place %s in final SDK location" % sourcefile)
def mkdirhier(self, dirpath):
try:
diff --git a/meta/lib/oe/spdx.py b/meta/lib/oe/spdx.py
index 14ca706895..6d56ed90df 100644
--- a/meta/lib/oe/spdx.py
+++ b/meta/lib/oe/spdx.py
@@ -218,7 +218,7 @@ class SPDXPackage(SPDXObject):
SPDXID = _String()
versionInfo = _String()
downloadLocation = _String(default="NOASSERTION")
- packageSupplier = _String(default="NOASSERTION")
+ supplier = _String(default="NOASSERTION")
homepage = _String()
licenseConcluded = _String(default="NOASSERTION")
licenseDeclared = _String(default="NOASSERTION")
diff --git a/meta/lib/oe/sstatesig.py b/meta/lib/oe/sstatesig.py
index 7150bd0929..30f27b0f4f 100644
--- a/meta/lib/oe/sstatesig.py
+++ b/meta/lib/oe/sstatesig.py
@@ -24,10 +24,25 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCaches):
return "/allarch.bbclass" in inherits
def isImage(mc, fn):
return "/image.bbclass" in " ".join(dataCaches[mc].inherits[fn])
+ def isSPDXTask(task):
+ return task in ("do_create_spdx", "do_create_runtime_spdx")
depmc, _, deptaskname, depmcfn = bb.runqueue.split_tid_mcfn(dep)
mc, _ = bb.runqueue.split_mc(fn)
+ # We can skip the rm_work task signature to avoid re-running the task
+ # when some tasks are removed from the dependency chain,
+ # i.e. INHERIT:remove = "create-spdx" would otherwise trigger do_rm_work
+ if task == "do_rm_work":
+ return False
+
+ # Keep all dependencies between SPDX tasks in the signature. SPDX documents
+ # are linked together by hashes, which means if a dependent document changes,
+ # all downstream documents must be re-written (even if they are "safe"
+ # dependencies).
+ if isSPDXTask(task) and isSPDXTask(deptaskname):
+ return True
+
# (Almost) always include our own inter-task dependencies (unless it comes
# from a mcdepends). The exception is the special
# do_kernel_configme->do_unpack_and_patch dependency from archiver.bbclass.
@@ -452,11 +467,15 @@ def find_sstate_manifest(taskdata, taskdata2, taskname, d, multilibcache):
pkgarchs.append('allarch')
pkgarchs.append('${SDK_ARCH}_${SDK_ARCH}-${SDKPKGSUFFIX}')
+ searched_manifests = []
+
for pkgarch in pkgarchs:
manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.%s" % (pkgarch, taskdata, taskname))
if os.path.exists(manifest):
return manifest, d2
- bb.fatal("Manifest %s not found in %s (variant '%s')?" % (manifest, d2.expand(" ".join(pkgarchs)), variant))
+ searched_manifests.append(manifest)
+ bb.fatal("The sstate manifest for task '%s:%s' (multilib variant '%s') could not be found.\nThe pkgarchs considered were: %s.\nBut none of these manifests exists:\n %s"
+ % (taskdata, taskname, variant, d2.expand(", ".join(pkgarchs)),"\n ".join(searched_manifests)))
return None, d2
def OEOuthashBasic(path, sigfile, task, d):
@@ -641,6 +660,10 @@ def OEOuthashBasic(path, sigfile, task, d):
if f == 'fixmepath':
continue
process(os.path.join(root, f))
+
+ for dir in dirs:
+ if os.path.islink(os.path.join(root, dir)):
+ process(os.path.join(root, dir))
finally:
os.chdir(prev_dir)
diff --git a/meta/lib/oe/terminal.py b/meta/lib/oe/terminal.py
index de8dcebf94..b674335654 100644
--- a/meta/lib/oe/terminal.py
+++ b/meta/lib/oe/terminal.py
@@ -102,6 +102,10 @@ class Rxvt(XTerminal):
command = 'rxvt -T "{title}" -e {command}'
priority = 1
+class URxvt(XTerminal):
+ command = 'urxvt -T "{title}" -e {command}'
+ priority = 1
+
class Screen(Terminal):
command = 'screen -D -m -t "{title}" -S devshell {command}'