Diffstat (limited to 'meta/classes')
 meta/classes/archiver.bbclass            |  8
 meta/classes/buildhistory.bbclass        | 30
 meta/classes/cross-canadian.bbclass      |  2
 meta/classes/cve-check.bbclass           | 21
 meta/classes/devupstream.bbclass         |  2
 meta/classes/externalsrc.bbclass         |  9
 meta/classes/image_types.bbclass         |  2
 meta/classes/kernel-yocto.bbclass        | 30
 meta/classes/license.bbclass             |  2
 meta/classes/mirrors.bbclass             |  2
 meta/classes/multilib.bbclass            |  1
 meta/classes/package.bbclass             |  6
 meta/classes/package_ipk.bbclass         |  4
 meta/classes/patch.bbclass               |  7
 meta/classes/populate_sdk_base.bbclass   | 14
 meta/classes/reproducible_build.bbclass  | 11
 meta/classes/rm_work.bbclass             |  8
 meta/classes/rootfs-postcommands.bbclass |  2
 meta/classes/sanity.bbclass              |  2
 meta/classes/sstate.bbclass              | 39
 meta/classes/terminal.bbclass            |  5
 meta/classes/testimage.bbclass           |  7
 meta/classes/toaster.bbclass             |  8
 meta/classes/uboot-sign.bbclass          | 16
 meta/classes/uninative.bbclass           |  4
 meta/classes/utils.bbclass               |  2
 26 files changed, 152 insertions(+), 92 deletions(-)
diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass
index a3962306b1..dd31dc0cd8 100644
--- a/meta/classes/archiver.bbclass
+++ b/meta/classes/archiver.bbclass
@@ -281,7 +281,10 @@ python do_ar_configured() {
# ${STAGING_DATADIR}/aclocal/libtool.m4, so we can't re-run the
# do_configure, we archive the already configured ${S} to
# instead of.
- elif pn != 'libtool-native':
+ # The kernel class functions require it to be on work-shared, we
+ # don't unpack, patch, configure again, just archive the already
+ # configured ${S}
+ elif not (pn == 'libtool-native' or is_work_shared(d)):
def runTask(task):
prefuncs = d.getVarFlag(task, 'prefuncs') or ''
for func in prefuncs.split():
@@ -483,6 +486,9 @@ python do_unpack_and_patch() {
src_orig = '%s.orig' % src
oe.path.copytree(src, src_orig)
+ if bb.data.inherits_class('dos2unix', d):
+ bb.build.exec_func('do_convert_crlf_to_lf', d)
+
# Make sure gcc and kernel sources are patched only once
if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
bb.build.exec_func('do_patch', d)
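
For reference, the is_work_shared() helper used in the new condition is defined elsewhere in archiver.bbclass; a minimal sketch of such a check, assuming BitBake's bb module and a datastore d (the exact upstream implementation may differ):

    def is_work_shared(d):
        # Work-shared recipes (e.g. the kernel) keep ${S} outside WORKDIR, so
        # the archiver must not unpack/patch/configure them a second time.
        pn = d.getVar('PN')
        return bb.data.inherits_class('kernel', d) or pn.startswith('gcc-source')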
diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass
index 49af61c9c5..b2cf9aa28a 100644
--- a/meta/classes/buildhistory.bbclass
+++ b/meta/classes/buildhistory.bbclass
@@ -960,23 +960,19 @@ def write_latest_srcrev(d, pkghistdir):
value = value.replace('"', '').strip()
old_tag_srcrevs[key] = value
with open(srcrevfile, 'w') as f:
- orig_srcrev = d.getVar('SRCREV', False) or 'INVALID'
- if orig_srcrev != 'INVALID':
- f.write('# SRCREV = "%s"\n' % orig_srcrev)
- if len(srcrevs) > 1:
- for name, srcrev in sorted(srcrevs.items()):
- orig_srcrev = d.getVar('SRCREV_%s' % name, False)
- if orig_srcrev:
- f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev))
- f.write('SRCREV_%s = "%s"\n' % (name, srcrev))
- else:
- f.write('SRCREV = "%s"\n' % next(iter(srcrevs.values())))
- if len(tag_srcrevs) > 0:
- for name, srcrev in sorted(tag_srcrevs.items()):
- f.write('# tag_%s = "%s"\n' % (name, srcrev))
- if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
- pkg = d.getVar('PN')
- bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
+ for name, srcrev in sorted(srcrevs.items()):
+ suffix = "_" + name
+ if name == "default":
+ suffix = ""
+ orig_srcrev = d.getVar('SRCREV%s' % suffix, False)
+ if orig_srcrev:
+ f.write('# SRCREV%s = "%s"\n' % (suffix, orig_srcrev))
+ f.write('SRCREV%s = "%s"\n' % (suffix, srcrev))
+ for name, srcrev in sorted(tag_srcrevs.items()):
+ f.write('# tag_%s = "%s"\n' % (name, srcrev))
+ if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev:
+ pkg = d.getVar('PN')
+ bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev))
else:
if os.path.exists(srcrevfile):
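
A standalone demonstration of the new suffix logic, with hypothetical revisions, showing the lines it emits into latest_srcrev:

    srcrevs = {"default": "aabbcc", "meta": "ddeeff"}  # hypothetical values
    for name, srcrev in sorted(srcrevs.items()):
        suffix = "" if name == "default" else "_" + name
        print('SRCREV%s = "%s"' % (suffix, srcrev))
    # SRCREV = "aabbcc"
    # SRCREV_meta = "ddeeff"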
diff --git a/meta/classes/cross-canadian.bbclass b/meta/classes/cross-canadian.bbclass
index f5c9f61595..3fc9cac442 100644
--- a/meta/classes/cross-canadian.bbclass
+++ b/meta/classes/cross-canadian.bbclass
@@ -155,7 +155,7 @@ libexecdir = "${exec_prefix}/libexec/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}
FILES_${PN} = "${prefix}"
-export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${layout_libdir}/pkgconfig"
+export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${exec_prefix}/lib/pkgconfig"
export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}"
do_populate_sysroot[stamp-extra-info] = ""
diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 112ee3379d..a95e810605 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -20,7 +20,7 @@
# the only method to check against CVEs. Running this tool
# doesn't guarantee your packages are free of CVEs.
-# The product name that the CVE database uses. Defaults to BPN, but may need to
+# The product name that the CVE database uses defaults to BPN, but may need to
# be overriden per recipe (for example tiff.bb sets CVE_PRODUCT=libtiff).
CVE_PRODUCT ??= "${BPN}"
CVE_VERSION ??= "${PV}"
@@ -56,11 +56,11 @@ CVE_CHECK_WHITELIST ?= ""
# Layers to be excluded
CVE_CHECK_LAYER_EXCLUDELIST ??= ""
-# Layers to be included
+# Layers to be included
CVE_CHECK_LAYER_INCLUDELIST ??= ""
-# set to "alphabetical" for version using single alphabetical character as increament release
+# set to "alphabetical" for version using single alphabetical character as increment release
CVE_VERSION_SUFFIX ??= ""
python cve_save_summary_handler () {
@@ -110,6 +110,7 @@ python do_cve_check () {
}
addtask cve_check before do_build after do_fetch
+do_cve_check[lockfiles] += "${CVE_CHECK_DB_FILE_LOCK}"
do_cve_check[depends] = "cve-update-db-native:do_fetch"
do_cve_check[nostamp] = "1"
@@ -142,6 +143,7 @@ python cve_check_write_rootfs_manifest () {
manifest_name = d.getVar("CVE_CHECK_MANIFEST")
cve_tmp_file = d.getVar("CVE_CHECK_TMP_FILE")
+ bb.utils.mkdirhier(os.path.dirname(manifest_name))
shutil.copyfile(cve_tmp_file, manifest_name)
if manifest_name and os.path.exists(manifest_name):
@@ -166,9 +168,12 @@ def get_patches_cves(d):
pn = d.getVar("PN")
cve_match = re.compile("CVE:( CVE\-\d{4}\-\d+)+")
- # Matches last CVE-1234-211432 in the file name, also if written
- # with small letters. Not supporting multiple CVE id's in a single
- # file name.
+ # Matches the last "CVE-YYYY-ID" in the file name, also if written
+ # in lowercase. Possible to have multiple CVE IDs in a single
+ # file name, but only the last one will be detected from the file name.
+ # However, patch files contents addressing multiple CVE IDs are supported
+ # (cve_match regular expression)
+
cve_file_name_match = re.compile(".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
patched_cves = set()
@@ -230,7 +235,7 @@ def check_cves(d, patched_cves):
return ([], [], [])
pv = d.getVar("CVE_VERSION").split("+git")[0]
- # If the recipe has been whitlisted we return empty lists
+ # If the recipe has been whitelisted we return empty lists
if pn in d.getVar("CVE_CHECK_PN_WHITELIST").split():
bb.note("Recipe has been whitelisted, skipping check")
return ([], [], [])
@@ -355,7 +360,7 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
if include_layers and layer not in include_layers:
return
- nvd_link = "https://web.nvd.nist.gov/view/vuln/detail?vulnId="
+ nvd_link = "https://nvd.nist.gov/vuln/detail/"
write_string = ""
unpatched_cves = []
bb.utils.mkdirhier(os.path.dirname(cve_file))
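
The updated comment can be checked against the hunk's own regular expression; with a hypothetical patch name carrying two IDs, only the last one is captured:

    import re
    cve_file_name_match = re.compile(r".*([Cc][Vv][Ee]\-\d{4}\-\d+)")
    m = cve_file_name_match.match("fix-cve-2020-1111-and-CVE-2020-2222.patch")
    print(m.group(1))  # CVE-2020-2222 -- only the last ID in the name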
diff --git a/meta/classes/devupstream.bbclass b/meta/classes/devupstream.bbclass
index 7780c5482c..97e137cb40 100644
--- a/meta/classes/devupstream.bbclass
+++ b/meta/classes/devupstream.bbclass
@@ -4,7 +4,7 @@
#
# Usage:
# BBCLASSEXTEND = "devupstream:target"
-# SRC_URI_class-devupstream = "git://git.example.com/example"
+# SRC_URI_class-devupstream = "git://git.example.com/example;branch=master"
# SRCREV_class-devupstream = "abcdef"
#
# If the first entry in SRC_URI is a git: URL then S is rewritten to
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 3d6b80bee2..9fe9d36f76 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -109,6 +109,15 @@ python () {
if local_srcuri and task in fetch_tasks:
continue
bb.build.deltask(task, d)
+ if bb.data.inherits_class('reproducible_build', d) and task == 'do_unpack':
+ # The reproducible_build's create_source_date_epoch_stamp function must
+ # be run after the source is available and before the
+ # do_deploy_source_date_epoch task. In the normal case, it's attached
+ # to do_unpack as a postfuncs, but since we removed do_unpack (above)
+ # we need to move the function elsewhere. The easiest thing to do is
+ # move it into the prefuncs of the do_deploy_source_date_epoch task.
+ # This is safe, as externalsrc runs with the source already unpacked.
+ d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
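
The move relies on d.prependVarFlag() placing the stamp function ahead of any existing prefuncs; a plain-dict mimic of that behaviour (the prior prefunc name is hypothetical):

    flags = {('do_deploy_source_date_epoch', 'prefuncs'): 'existing_prefunc '}
    def prepend_var_flag(var, flag, value):
        # Mimics d.prependVarFlag(): new value goes in front of the old one
        flags[(var, flag)] = value + flags.get((var, flag), '')
    prepend_var_flag('do_deploy_source_date_epoch', 'prefuncs',
                     'create_source_date_epoch_stamp ')
    print(flags[('do_deploy_source_date_epoch', 'prefuncs')])
    # create_source_date_epoch_stamp existing_prefunc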
diff --git a/meta/classes/image_types.bbclass b/meta/classes/image_types.bbclass
index 8028691405..cee577d5da 100644
--- a/meta/classes/image_types.bbclass
+++ b/meta/classes/image_types.bbclass
@@ -225,7 +225,7 @@ EXTRA_IMAGECMD_jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLO
EXTRA_IMAGECMD_ext2 ?= "-i 4096"
EXTRA_IMAGECMD_ext3 ?= "-i 4096"
EXTRA_IMAGECMD_ext4 ?= "-i 4096"
-EXTRA_IMAGECMD_btrfs ?= "-n 4096"
+EXTRA_IMAGECMD_btrfs ?= "-n 4096 --shrink"
EXTRA_IMAGECMD_f2fs ?= ""
do_image_cpio[depends] += "cpio-native:do_populate_sysroot"
diff --git a/meta/classes/kernel-yocto.bbclass b/meta/classes/kernel-yocto.bbclass
index d38b60f519..8878573f6f 100644
--- a/meta/classes/kernel-yocto.bbclass
+++ b/meta/classes/kernel-yocto.bbclass
@@ -341,6 +341,21 @@ do_kernel_checkout() {
fi
fi
cd ${S}
+
+ # convert any remote branches to local tracking ones
+ for i in `git branch -a --no-color | grep remotes | grep -v HEAD`; do
+ b=`echo $i | cut -d' ' -f2 | sed 's%remotes/origin/%%'`;
+ git show-ref --quiet --verify -- "refs/heads/$b"
+ if [ $? -ne 0 ]; then
+ git branch $b $i > /dev/null
+ fi
+ done
+
+ # Create a working tree copy of the kernel by checking out a branch
+ machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
+
+ # checkout and clobber any unimportant files
+ git checkout -f ${machine_branch}
else
# case: we have no git repository at all.
# To support low bandwidth options for building the kernel, we'll just
@@ -362,21 +377,6 @@ do_kernel_checkout() {
git commit -q -m "baseline commit: creating repo for ${PN}-${PV}"
git clean -d -f
fi
-
- # convert any remote branches to local tracking ones
- for i in `git branch -a --no-color | grep remotes | grep -v HEAD`; do
- b=`echo $i | cut -d' ' -f2 | sed 's%remotes/origin/%%'`;
- git show-ref --quiet --verify -- "refs/heads/$b"
- if [ $? -ne 0 ]; then
- git branch $b $i > /dev/null
- fi
- done
-
- # Create a working tree copy of the kernel by checking out a branch
- machine_branch="${@ get_machine_branch(d, "${KBRANCH}" )}"
-
- # checkout and clobber any unimportant files
- git checkout -f ${machine_branch}
}
do_kernel_checkout[dirs] = "${S} ${WORKDIR}"
diff --git a/meta/classes/license.bbclass b/meta/classes/license.bbclass
index f7978e266b..0d0faa28d7 100644
--- a/meta/classes/license.bbclass
+++ b/meta/classes/license.bbclass
@@ -31,7 +31,7 @@ python do_populate_lic() {
f.write("%s: %s\n" % (key, info[key]))
}
-PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '')).split())}"
+PSEUDO_IGNORE_PATHS .= ",${@','.join(((d.getVar('COMMON_LICENSE_DIR') or '') + ' ' + (d.getVar('LICENSE_PATH') or '') + ' ' + d.getVar('COREBASE') + '/meta/COPYING').split())}"
# it would be better to copy them in do_install_append, but find_license_filesa is python
python perform_packagecopy_prepend () {
enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d)
diff --git a/meta/classes/mirrors.bbclass b/meta/classes/mirrors.bbclass
index 87bba41472..8a2c153d9b 100644
--- a/meta/classes/mirrors.bbclass
+++ b/meta/classes/mirrors.bbclass
@@ -62,6 +62,8 @@ ftp://.*/.* http://sources.openembedded.org/ \n \
npm://.*/?.* http://sources.openembedded.org/ \n \
${CPAN_MIRROR} http://cpan.metacpan.org/ \n \
${CPAN_MIRROR} http://search.cpan.org/CPAN/ \n \
+https?$://downloads.yoctoproject.org/releases/uninative/ https://mirrors.kernel.org/yocto/uninative/ \n \
+https?://downloads.yoctoproject.org/mirror/sources/ https://mirrors.kernel.org/yocto-sources/ \
"
# Use MIRRORS to provide git repo fallbacks using the https protocol, for cases
diff --git a/meta/classes/multilib.bbclass b/meta/classes/multilib.bbclass
index 9f726e4537..2ef75c0d16 100644
--- a/meta/classes/multilib.bbclass
+++ b/meta/classes/multilib.bbclass
@@ -105,7 +105,6 @@ python __anonymous () {
d.setVar("LINGUAS_INSTALL", "")
# FIXME, we need to map this to something, not delete it!
d.setVar("PACKAGE_INSTALL_ATTEMPTONLY", "")
- bb.build.deltask('do_populate_sdk', d)
bb.build.deltask('do_populate_sdk_ext', d)
return
}
diff --git a/meta/classes/package.bbclass b/meta/classes/package.bbclass
index e3f0a7060b..5e51b89184 100644
--- a/meta/classes/package.bbclass
+++ b/meta/classes/package.bbclass
@@ -2079,12 +2079,12 @@ python package_do_pkgconfig () {
for pkg in packages.split():
pkgconfig_provided[pkg] = []
pkgconfig_needed[pkg] = []
- for file in pkgfiles[pkg]:
+ for file in sorted(pkgfiles[pkg]):
m = pc_re.match(file)
if m:
pd = bb.data.init()
name = m.group(1)
- pkgconfig_provided[pkg].append(name)
+ pkgconfig_provided[pkg].append(os.path.basename(name))
if not os.access(file, os.R_OK):
continue
with open(file, 'r') as f:
@@ -2107,7 +2107,7 @@ python package_do_pkgconfig () {
pkgs_file = os.path.join(shlibswork_dir, pkg + ".pclist")
if pkgconfig_provided[pkg] != []:
with open(pkgs_file, 'w') as f:
- for p in pkgconfig_provided[pkg]:
+ for p in sorted(pkgconfig_provided[pkg]):
f.write('%s\n' % p)
# Go from least to most specific since the last one found wins
diff --git a/meta/classes/package_ipk.bbclass b/meta/classes/package_ipk.bbclass
index 600b3ac90c..67d6007e11 100644
--- a/meta/classes/package_ipk.bbclass
+++ b/meta/classes/package_ipk.bbclass
@@ -230,8 +230,8 @@ def ipk_write_pkg(pkg, d):
shell=True)
if d.getVar('IPK_SIGN_PACKAGES') == '1':
- ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR'))
- ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH'))
+ ipkver = "%s-%s" % (localdata.getVar('PKGV'), localdata.getVar('PKGR'))
+ ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, localdata.getVar('PACKAGE_ARCH'))
sign_ipk(d, ipk_to_sign)
finally:
diff --git a/meta/classes/patch.bbclass b/meta/classes/patch.bbclass
index cd491a563d..72bb1eb946 100644
--- a/meta/classes/patch.bbclass
+++ b/meta/classes/patch.bbclass
@@ -131,6 +131,9 @@ python patch_do_patch() {
patchdir = parm["patchdir"]
if not os.path.isabs(patchdir):
patchdir = os.path.join(s, patchdir)
+ if not os.path.isdir(patchdir):
+ bb.fatal("Target directory '%s' not found, patchdir '%s' is incorrect in patch file '%s'" %
+ (patchdir, parm["patchdir"], parm['patchname']))
else:
patchdir = s
@@ -147,12 +150,12 @@ python patch_do_patch() {
patchset.Import({"file":local, "strippath": parm['striplevel']}, True)
except Exception as exc:
bb.utils.remove(process_tmpdir, True)
- bb.fatal(str(exc))
+ bb.fatal("Importing patch '%s' with striplevel '%s'\n%s" % (parm['patchname'], parm['striplevel'], str(exc)))
try:
resolver.Resolve()
except bb.BBHandledException as e:
bb.utils.remove(process_tmpdir, True)
- bb.fatal(str(e))
+ bb.fatal("Applying patch '%s' on target directory '%s'\n%s" % (parm['patchname'], patchdir, str(e)))
bb.utils.remove(process_tmpdir, True)
del os.environ['TMPDIR']
diff --git a/meta/classes/populate_sdk_base.bbclass b/meta/classes/populate_sdk_base.bbclass
index 33ba3fc3c1..76757a3a9d 100644
--- a/meta/classes/populate_sdk_base.bbclass
+++ b/meta/classes/populate_sdk_base.bbclass
@@ -66,7 +66,7 @@ python () {
SDK_RDEPENDS = "${TOOLCHAIN_TARGET_TASK} ${TOOLCHAIN_HOST_TASK}"
SDK_DEPENDS = "virtual/fakeroot-native ${SDK_ARCHIVE_DEPENDS} cross-localedef-native nativesdk-qemuwrapper-cross ${@' '.join(["%s-qemuwrapper-cross" % m for m in d.getVar("MULTILIB_VARIANTS").split()])} qemuwrapper-cross"
-PATH_prepend = "${STAGING_DIR_HOST}${SDKPATHNATIVE}${bindir}/crossscripts:${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:"
+PATH_prepend = "${WORKDIR}/recipe-sysroot/${SDKPATHNATIVE}${bindir}/crossscripts:${@":".join(all_multilib_tune_values(d, 'STAGING_BINDIR_CROSS').split())}:"
SDK_DEPENDS += "nativesdk-glibc-locale"
# We want the MULTIARCH_TARGET_SYS to point to the TUNE_PKGARCH, not PACKAGE_ARCH as it
@@ -90,6 +90,8 @@ SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest"
SDK_EXT_TARGET_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest"
SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest"
+SDK_PRUNE_SYSROOT_DIRS ?= "/dev"
+
python write_target_sdk_manifest () {
from oe.sdk import sdk_list_installed_packages
from oe.utils import format_pkg_list
@@ -101,6 +103,12 @@ python write_target_sdk_manifest () {
output.write(format_pkg_list(pkgs, 'ver'))
}
+sdk_prune_dirs () {
+ for d in ${SDK_PRUNE_SYSROOT_DIRS}; do
+ rm -rf ${SDK_OUTPUT}${SDKTARGETSYSROOT}$d
+ done
+}
+
python write_sdk_test_data() {
from oe.data import export2json
testdata = "%s/%s.testdata.json" % (d.getVar('SDKDEPLOYDIR'), d.getVar('TOOLCHAIN_OUTPUTNAME'))
@@ -120,8 +128,9 @@ python write_host_sdk_manifest () {
}
POPULATE_SDK_POST_TARGET_COMMAND_append = " write_sdk_test_data ; "
-POPULATE_SDK_POST_TARGET_COMMAND_append_task-populate-sdk = " write_target_sdk_manifest ; "
+POPULATE_SDK_POST_TARGET_COMMAND_append_task-populate-sdk = " write_target_sdk_manifest; sdk_prune_dirs; "
POPULATE_SDK_POST_HOST_COMMAND_append_task-populate-sdk = " write_host_sdk_manifest; "
+
SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}"
SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; archive_sdk; ${SDK_PACKAGING_COMMAND} "
@@ -280,6 +289,7 @@ EOF
# substitute variables
sed -i -e 's#@SDK_ARCH@#${SDK_ARCH}#g' \
-e 's#@SDKPATH@#${SDKPATH}#g' \
+ -e 's#@SDKPATHINSTALL@#${SDKPATHINSTALL}#g' \
-e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \
-e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \
-e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \
diff --git a/meta/classes/reproducible_build.bbclass b/meta/classes/reproducible_build.bbclass
index 1277764fab..62655c2a5b 100644
--- a/meta/classes/reproducible_build.bbclass
+++ b/meta/classes/reproducible_build.bbclass
@@ -90,11 +90,14 @@ python create_source_date_epoch_stamp() {
}
def get_source_date_epoch_value(d):
- cached = d.getVar('__CACHED_SOURCE_DATE_EPOCH')
- if cached:
+ epochfile = d.getVar('SDE_FILE')
+ cached, efile = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
+ if cached and efile == epochfile:
return cached
- epochfile = d.getVar('SDE_FILE')
+ if cached and epochfile != efile:
+ bb.debug(1, "Epoch file changed from %s to %s" % (efile, epochfile))
+
source_date_epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
try:
with open(epochfile, 'r') as f:
@@ -112,7 +115,7 @@ def get_source_date_epoch_value(d):
except FileNotFoundError:
bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, source_date_epoch))
- d.setVar('__CACHED_SOURCE_DATE_EPOCH', str(source_date_epoch))
+ d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(source_date_epoch), epochfile))
return str(source_date_epoch)
export SOURCE_DATE_EPOCH ?= "${@get_source_date_epoch_value(d)}"
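
The cache entry now stores a (value, file) pair so that a changed SDE_FILE invalidates it; the pattern, mimicked with a plain dict and a stand-in reader:

    cache = {}
    def get_epoch(epochfile, read_epoch):
        cached, efile = cache.get('SDE') or (None, None)
        if cached and efile == epochfile:
            return cached                  # hit: same epoch file as last call
        value = read_epoch(epochfile)      # read_epoch is a stand-in reader
        cache['SDE'] = (value, epochfile)  # miss: re-read and re-key the cache
        return value

    print(get_epoch("/tmp/sde", lambda p: "1640995200"))  # hypothetical values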
diff --git a/meta/classes/rm_work.bbclass b/meta/classes/rm_work.bbclass
index 01c2ab1c78..2d5a56c238 100644
--- a/meta/classes/rm_work.bbclass
+++ b/meta/classes/rm_work.bbclass
@@ -73,7 +73,7 @@ do_rm_work () {
# sstate version since otherwise we'd need to leave 'plaindirs' around
# such as 'packages' and 'packages-split' and these can be large. No end
# of chain tasks depend directly on do_package anymore.
- rm -f $i;
+ rm -f -- $i;
;;
*_setscene*)
# Skip stamps which are already setscene versions
@@ -90,7 +90,7 @@ do_rm_work () {
;;
esac
done
- rm -f $i
+ rm -f -- $i
esac
done
@@ -100,9 +100,9 @@ do_rm_work () {
# Retain only logs and other files in temp, safely ignore
# failures of removing pseudo folers on NFS2/3 server.
if [ $dir = 'pseudo' ]; then
- rm -rf $dir 2> /dev/null || true
+ rm -rf -- $dir 2> /dev/null || true
elif ! echo "$excludes" | grep -q -w "$dir"; then
- rm -rf $dir
+ rm -rf -- $dir
fi
done
}
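
The added "--" ends option parsing, so a stamp whose name begins with a dash cannot be misread as an rm option; the same idea expressed via Python's subprocess, with a hypothetical file name:

    import subprocess
    # Without "--", a file literally named "-f.do_compile" would be parsed
    # as options by rm; with it, the name is always treated as an operand.
    subprocess.run(["rm", "-f", "--", "-f.do_compile"], check=False)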
diff --git a/meta/classes/rootfs-postcommands.bbclass b/meta/classes/rootfs-postcommands.bbclass
index e66ed5938b..87b5751e24 100644
--- a/meta/classes/rootfs-postcommands.bbclass
+++ b/meta/classes/rootfs-postcommands.bbclass
@@ -60,7 +60,7 @@ python () {
}
systemd_create_users () {
- for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd.conf ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd-remote.conf; do
+ for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/*.conf; do
[ -e $conffile ] || continue
grep -v "^#" $conffile | sed -e '/^$/d' | while read type name id comment; do
if [ "$type" = "u" ]; then
diff --git a/meta/classes/sanity.bbclass b/meta/classes/sanity.bbclass
index a2ac4eeb80..c8a42dc8bf 100644
--- a/meta/classes/sanity.bbclass
+++ b/meta/classes/sanity.bbclass
@@ -395,7 +395,7 @@ def check_connectivity(d):
msg += " Please ensure your host's network is configured correctly.\n"
msg += " If your ISP or network is blocking the above URL,\n"
msg += " try with another domain name, for example by setting:\n"
- msg += " CONNECTIVITY_CHECK_URIS = \"https://www.yoctoproject.org/\""
+ msg += " CONNECTIVITY_CHECK_URIS = \"https://www.example.com/\""
msg += " You could also set BB_NO_NETWORK = \"1\" to disable network\n"
msg += " access if all required sources are on local disk.\n"
retval = msg
diff --git a/meta/classes/sstate.bbclass b/meta/classes/sstate.bbclass
index 2b5d94dd1f..de6e7fa960 100644
--- a/meta/classes/sstate.bbclass
+++ b/meta/classes/sstate.bbclass
@@ -20,7 +20,7 @@ def generate_sstatefn(spec, hash, taskname, siginfo, d):
components = spec.split(":")
# Fields 0,5,6 are mandatory, 1 is most useful, 2,3,4 are just for information
# 7 is for the separators
- avail = (254 - len(hash + "_" + taskname + extension) - len(components[0]) - len(components[1]) - len(components[5]) - len(components[6]) - 7) // 3
+ avail = (limit - len(hash + "_" + taskname + extension) - len(components[0]) - len(components[1]) - len(components[5]) - len(components[6]) - 7) // 3
components[2] = components[2][:avail]
components[3] = components[3][:avail]
components[4] = components[4][:avail]
@@ -123,8 +123,6 @@ SSTATE_HASHEQUIV_REPORT_TASKDATA[doc] = "Report additional useful data to the \
python () {
if bb.data.inherits_class('native', d):
d.setVar('SSTATE_PKGARCH', d.getVar('BUILD_ARCH', False))
- if d.getVar("PN") == "pseudo-native":
- d.appendVar('SSTATE_PKGARCH', '_${ORIGNATIVELSBSTRING}')
elif bb.data.inherits_class('crosssdk', d):
d.setVar('SSTATE_PKGARCH', d.expand("${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}"))
elif bb.data.inherits_class('cross', d):
@@ -707,6 +705,7 @@ def sstate_package(ss, d):
pass
except OSError as e:
# Handle read-only file systems gracefully
+ import errno
if e.errno != errno.EROFS:
raise e
@@ -732,6 +731,7 @@ def pstaging_fetch(sstatefetch, d):
localdata.setVar('FILESPATH', dldir)
localdata.setVar('DL_DIR', dldir)
localdata.setVar('PREMIRRORS', mirrors)
+ localdata.setVar('SRCPV', d.getVar('SRCPV'))
# if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK,
# we'll want to allow network access for the current set of fetches.
@@ -756,6 +756,9 @@ def pstaging_fetch(sstatefetch, d):
except bb.fetch2.BBFetchException:
pass
+pstaging_fetch[vardepsexclude] += "SRCPV"
+
+
def sstate_setscene(d):
shared_state = sstate_state_fromvars(d)
accelerate = sstate_installpkg(shared_state, d)
@@ -797,7 +800,7 @@ sstate_task_postfunc[dirs] = "${WORKDIR}"
sstate_create_package () {
# Exit early if it already exists
if [ -e ${SSTATE_PKG} ]; then
- [ ! -w ${SSTATE_PKG} ] || touch ${SSTATE_PKG}
+ touch ${SSTATE_PKG} 2>/dev/null || true
return
fi
@@ -824,14 +827,18 @@ sstate_create_package () {
fi
chmod 0664 $TFILE
# Skip if it was already created by some other process
- if [ ! -e ${SSTATE_PKG} ]; then
+ if [ -h ${SSTATE_PKG} ] && [ ! -e ${SSTATE_PKG} ]; then
+ # There is a symbolic link, but it links to nothing.
+ # Forcefully replace it with the new file.
+ ln -f $TFILE ${SSTATE_PKG} || true
+ elif [ ! -e ${SSTATE_PKG} ]; then
# Move into place using ln to attempt an atomic op.
# Abort if it already exists
- ln $TFILE ${SSTATE_PKG} && rm $TFILE
+ ln $TFILE ${SSTATE_PKG} || true
else
- rm $TFILE
+ touch ${SSTATE_PKG} 2>/dev/null || true
fi
- [ ! -w ${SSTATE_PKG} ] || touch ${SSTATE_PKG}
+ rm $TFILE
}
python sstate_sign_package () {
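
The new first branch in sstate_create_package above targets a dangling symlink, i.e. [ -h ... ] && [ ! -e ... ]; the equivalent check in Python:

    import os
    def is_dangling_symlink(path):
        # os.path.exists() follows symlinks, so it is False for a broken link;
        # os.path.islink() is True for any symlink, broken or not.
        return os.path.islink(path) and not os.path.exists(path)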
@@ -860,12 +867,12 @@ python sstate_report_unihash() {
#
sstate_unpack_package () {
tar -xvzf ${SSTATE_PKG}
- # update .siginfo atime on local/NFS mirror
- [ -O ${SSTATE_PKG}.siginfo ] && [ -w ${SSTATE_PKG}.siginfo ] && [ -h ${SSTATE_PKG}.siginfo ] && touch -a ${SSTATE_PKG}.siginfo
- # Use "! -w ||" to return true for read only files
- [ ! -w ${SSTATE_PKG} ] || touch --no-dereference ${SSTATE_PKG}
- [ ! -w ${SSTATE_PKG}.sig ] || [ ! -e ${SSTATE_PKG}.sig ] || touch --no-dereference ${SSTATE_PKG}.sig
- [ ! -w ${SSTATE_PKG}.siginfo ] || [ ! -e ${SSTATE_PKG}.siginfo ] || touch --no-dereference ${SSTATE_PKG}.siginfo
+ # update .siginfo atime on local/NFS mirror if it is a symbolic link
+ [ ! -h ${SSTATE_PKG}.siginfo ] || [ ! -e ${SSTATE_PKG}.siginfo ] || touch -a ${SSTATE_PKG}.siginfo 2>/dev/null || true
+ # update each symbolic link instead of any referenced file
+ touch --no-dereference ${SSTATE_PKG} 2>/dev/null || true
+ [ ! -e ${SSTATE_PKG}.sig ] || touch --no-dereference ${SSTATE_PKG}.sig 2>/dev/null || true
+ [ ! -e ${SSTATE_PKG}.siginfo ] || touch --no-dereference ${SSTATE_PKG}.siginfo 2>/dev/null || true
}
BB_HASHCHECK_FUNCTION = "sstate_checkhashes"
@@ -943,7 +950,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
localdata2 = bb.data.createCopy(localdata)
srcuri = "file://" + sstatefile
- localdata.setVar('SRC_URI', srcuri)
+ localdata2.setVar('SRC_URI', srcuri)
bb.debug(2, "SState: Attempting to fetch %s" % srcuri)
try:
@@ -1020,6 +1027,7 @@ def sstate_checkhashes(sq_data, d, siginfo=False, currentcount=0, summary=True,
bb.parse.siggen.checkhashes(sq_data, missed, found, d)
return found
+setscene_depvalid[vardepsexclude] = "SSTATE_EXCLUDEDEPS_SYSROOT"
BB_SETSCENE_DEPVALID = "setscene_depvalid"
@@ -1153,6 +1161,7 @@ python sstate_eventhandler() {
pass
except OSError as e:
# Handle read-only file systems gracefully
+ import errno
if e.errno != errno.EROFS:
raise e
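
Both hunks add the import errno that the existing EROFS check depends on; the guarded pattern, sketched with a temporary file standing in for a mirror object:

    import errno, os, tempfile
    path = tempfile.NamedTemporaryFile(delete=False).name  # stand-in file
    try:
        os.utime(path)              # "touch": refresh the timestamps
    except OSError as e:
        if e.errno != errno.EROFS:  # tolerate read-only file systems only
            raise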
diff --git a/meta/classes/terminal.bbclass b/meta/classes/terminal.bbclass
index 6059ae95e0..a564ee7494 100644
--- a/meta/classes/terminal.bbclass
+++ b/meta/classes/terminal.bbclass
@@ -26,6 +26,9 @@ def emit_terminal_func(command, envdata, d):
bb.utils.mkdirhier(os.path.dirname(runfile))
with open(runfile, 'w') as script:
+ # Override the shell shell_trap_code specifies.
+ # If our shell is bash, we might well face silent death.
+ script.write("#!/bin/bash\n")
script.write(bb.build.shell_trap_code())
bb.data.emit_func(cmd_func, script, envdata)
script.write(cmd_func)
@@ -37,7 +40,7 @@ def emit_terminal_func(command, envdata, d):
def oe_terminal(command, title, d):
import oe.data
import oe.terminal
-
+
envdata = bb.data.init()
for v in os.environ:
diff --git a/meta/classes/testimage.bbclass b/meta/classes/testimage.bbclass
index e613759503..538ec4fc8a 100644
--- a/meta/classes/testimage.bbclass
+++ b/meta/classes/testimage.bbclass
@@ -230,9 +230,10 @@ def testimage_main(d):
tdname = "%s.testdata.json" % image_name
try:
- td = json.load(open(tdname, "r"))
- except (FileNotFoundError) as err:
- bb.fatal('File %s Not Found. Have you built the image with INHERIT+="testimage" in the conf/local.conf?' % tdname)
+ with open(tdname, "r") as f:
+ td = json.load(f)
+ except FileNotFoundError as err:
+ bb.fatal('File %s not found (%s).\nHave you built the image with INHERIT += "testimage" in the conf/local.conf?' % (tdname, err))
# Some variables need to be updates (mostly paths) with the
# ones of the current environment because some tests require them.
diff --git a/meta/classes/toaster.bbclass b/meta/classes/toaster.bbclass
index 9518ddf7a4..f365c09142 100644
--- a/meta/classes/toaster.bbclass
+++ b/meta/classes/toaster.bbclass
@@ -101,12 +101,12 @@ def _toaster_load_pkgdatafile(dirpath, filepath):
for line in fin:
try:
kn, kv = line.strip().split(": ", 1)
- m = re.match(r"^PKG_([^A-Z:]*)", kn)
+ m = re.match(r"^PKG:([^A-Z:]*)", kn)
if m:
pkgdata['OPKGN'] = m.group(1)
- kn = "_".join([x for x in kn.split("_") if x.isupper()])
- pkgdata[kn] = kv.strip()
- if kn == 'FILES_INFO':
+ kn = kn.split(":")[0]
+ pkgdata[kn] = kv
+ if kn.startswith('FILES_INFO'):
pkgdata[kn] = json.loads(kv)
except ValueError:
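
The change follows pkgdata's switch to colon-based overrides (PKG:<name> rather than PKG_<name>); a quick check of the updated expression with a hypothetical pkgdata line:

    import re
    kn, kv = "PKG:libfoo: libfoo".split(": ", 1)  # hypothetical pkgdata line
    m = re.match(r"^PKG:([^A-Z:]*)", kn)
    print(m.group(1), "->", kv)  # libfoo -> libfoo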
diff --git a/meta/classes/uboot-sign.bbclass b/meta/classes/uboot-sign.bbclass
index ba48f24b10..fca9de2934 100644
--- a/meta/classes/uboot-sign.bbclass
+++ b/meta/classes/uboot-sign.bbclass
@@ -131,6 +131,20 @@ concat_dtb_helper() {
elif [ -e "${DEPLOYDIR}/${UBOOT_NODTB_IMAGE}" -a -e "$deployed_uboot_dtb_binary" ]; then
cd ${DEPLOYDIR}
cat ${UBOOT_NODTB_IMAGE} $deployed_uboot_dtb_binary | tee ${B}/${CONFIG_B_PATH}/${UBOOT_BINARY} > ${UBOOT_IMAGE}
+
+ if [ -n "${UBOOT_CONFIG}" ]
+ then
+ for config in ${UBOOT_MACHINE}; do
+ i=$(expr $i + 1);
+ for type in ${UBOOT_CONFIG}; do
+ j=$(expr $j + 1);
+ if [ $j -eq $i ]
+ then
+ cp ${UBOOT_IMAGE} ${B}/${CONFIG_B_PATH}/u-boot-$type.${UBOOT_SUFFIX}
+ fi
+ done
+ done
+ fi
else
bbwarn "Failure while adding public key to u-boot binary. Verified boot won't be available."
fi
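
The nested counting loop pairs the n-th word of UBOOT_MACHINE with the n-th word of UBOOT_CONFIG; the intended pairing, sketched with hypothetical values (the real suffix comes from UBOOT_SUFFIX):

    machines = "board_a_defconfig board_b_defconfig".split()  # ${UBOOT_MACHINE}
    types = "boardA boardB".split()                           # ${UBOOT_CONFIG}
    for machine, utype in zip(machines, types):
        print("%s -> u-boot-%s.bin" % (machine, utype))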
@@ -205,7 +219,7 @@ install_helper() {
fi
}
-# Install SPL dtb and u-boot nodtb to datadir,
+# Install SPL dtb and u-boot nodtb to datadir,
install_spl_helper() {
if [ -f "${SPL_DIR}/${SPL_DTB_BINARY}" ]; then
install -Dm 0644 ${SPL_DIR}/${SPL_DTB_BINARY} ${D}${datadir}/${SPL_DTB_IMAGE}
diff --git a/meta/classes/uninative.bbclass b/meta/classes/uninative.bbclass
index 1e19917a97..4412d7c567 100644
--- a/meta/classes/uninative.bbclass
+++ b/meta/classes/uninative.bbclass
@@ -2,7 +2,7 @@ UNINATIVE_LOADER ?= "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/lib/
UNINATIVE_STAGING_DIR ?= "${STAGING_DIR}"
UNINATIVE_URL ?= "unset"
-UNINATIVE_TARBALL ?= "${BUILD_ARCH}-nativesdk-libc.tar.xz"
+UNINATIVE_TARBALL ?= "${BUILD_ARCH}-nativesdk-libc-${UNINATIVE_VERSION}.tar.xz"
# Example checksums
#UNINATIVE_CHECKSUM[aarch64] = "dead"
#UNINATIVE_CHECKSUM[i686] = "dead"
@@ -100,7 +100,7 @@ ${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py \
${UNINATIVE_LOADER} \
${UNINATIVE_LOADER} \
${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative \
- ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so" % chksum)
+ ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so*" % chksum)
subprocess.check_output(cmd, shell=True)
with open(loaderchksum, "w") as f:
diff --git a/meta/classes/utils.bbclass b/meta/classes/utils.bbclass
index 120bcc64a6..072ea1f63c 100644
--- a/meta/classes/utils.bbclass
+++ b/meta/classes/utils.bbclass
@@ -214,7 +214,7 @@ create_cmdline_wrapper () {
#!/bin/bash
realpath=\`readlink -fn \$0\`
realdir=\`dirname \$realpath\`
-exec -a \`dirname \$realpath\`/$cmdname \`dirname \$realpath\`/$cmdname.real $cmdoptions "\$@"
+exec -a \$realdir/$cmdname \$realdir/$cmdname.real $cmdoptions "\$@"
END
chmod +x $cmd
}
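
The fix computes realdir once and uses it both for argv[0] and for the .real binary. Shell's `exec -a NAME` corresponds to overriding argv[0], as this small Python analogue shows (assuming /bin/echo exists):

    import os
    # args[0] becomes the new process's argv[0], mirroring shell's
    # `exec -a $realdir/$cmdname $realdir/$cmdname.real ...`;
    # here /bin/echo runs but believes it was invoked as "wrapper-name".
    os.execv("/bin/echo", ["wrapper-name", "hello"])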