Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh      |   4
-rwxr-xr-x  scripts/contrib/bbvars.py                      |   6
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh     |  15
-rwxr-xr-x  scripts/contrib/convert-overrides.py           | 111
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py       | 145
-rwxr-xr-x  scripts/contrib/convert-srcuri.py              |  77
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py    | 116
-rwxr-xr-x  scripts/contrib/ddimage                        |   2
-rwxr-xr-x  scripts/contrib/dialog-power-control           |   2
-rwxr-xr-x  scripts/contrib/documentation-audit.sh         |   6
-rwxr-xr-x  scripts/contrib/image-manifest                 |   2
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py  | 167
-rwxr-xr-x  scripts/contrib/patchreview.py                 |  71
-rwxr-xr-x  scripts/contrib/test_build_time.sh             |   2
-rwxr-xr-x  scripts/contrib/test_build_time_worker.sh      |   2
-rwxr-xr-x  scripts/contrib/verify-homepage.py             |   2
16 files changed, 481 insertions, 249 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
index e7bd129e9e..6672189c95 100755
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh
@@ -16,8 +16,8 @@
# Setup the defaults
DATFILE="bb-matrix.dat"
-XLABEL="BB_NUMBER_THREADS"
-YLABEL="PARALLEL_MAKE"
+XLABEL="BB\\\\_NUMBER\\\\_THREADS"
+YLABEL="PARALLEL\\\\_MAKE"
FIELD=3
DEF_TITLE="Elapsed Time (seconds)"
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
index 090133600b..a9cdf082ab 100755
--- a/scripts/contrib/bbvars.py
+++ b/scripts/contrib/bbvars.py
@@ -36,8 +36,8 @@ def bbvar_is_documented(var, documented_vars):
def collect_documented_vars(docfiles):
''' Walk the docfiles and collect the documented variables '''
documented_vars = []
- prog = re.compile(".*($|[^A-Z_])<glossentry id=\'var-")
- var_prog = re.compile('<glossentry id=\'var-(.*)\'>')
+ prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
+ var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
for d in docfiles:
with open(d) as f:
documented_vars += var_prog.findall(f.read())
@@ -45,7 +45,7 @@ def collect_documented_vars(docfiles):
return documented_vars
def bbvar_doctag(var, docconf):
- prog = re.compile('^%s\[doc\] *= *"(.*)"' % (var))
+ prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
if docconf == "":
return "?"
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
index fa71d4a2e9..0a85e6e708 100755
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ b/scripts/contrib/build-perf-test-wrapper.sh
@@ -87,21 +87,10 @@ if [ $# -ne 0 ]; then
exit 1
fi
-if [ -n "$email_to" ]; then
- if ! [ -x "$(command -v phantomjs)" ]; then
- echo "ERROR: Sending email needs phantomjs."
- exit 1
- fi
- if ! [ -x "$(command -v optipng)" ]; then
- echo "ERROR: Sending email needs optipng."
- exit 1
- fi
-fi
-
# Open a file descriptor for flock and acquire lock
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
- echo "ERROR: Unable to open lock file"
+ echo "ERROR: Unable to open loemack file"
exit 1
fi
if ! flock -n 3; then
@@ -226,7 +215,7 @@ if [ -n "$results_repo" ]; then
if [ -n "$email_to" ]; then
echo "Emailing test report"
os_name=`get_os_release_var PRETTY_NAME`
- "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt --html $report_html "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
+ "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
fi
# Upload report files, unless we're on detached head
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
index 4d41a4c475..c69acb4095 100755
--- a/scripts/contrib/convert-overrides.py
+++ b/scripts/contrib/convert-overrides.py
@@ -22,66 +22,78 @@ import sys
import tempfile
import shutil
import mimetypes
+import argparse
-if len(sys.argv) < 2:
- print("Please specify a directory to run the conversion script against.")
- sys.exit(1)
+parser = argparse.ArgumentParser(description="Convert override syntax")
+parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros")
+parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional string to skip and not consider an override")
+parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
+parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
+parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
+parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
+parser.add_argument("path", nargs="+", help="Paths to convert")
+
+args = parser.parse_args()
# List of strings to treat as overrides
-vars = ["append", "prepend", "remove"]
-vars = vars + ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
-vars = vars + ["genericx86", "edgerouter", "beaglebone-yocto"]
-vars = vars + ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
-vars = vars + ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
-vars = vars + ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
-vars = vars + ["tune-", "pn-", "forcevariable"]
-vars = vars + ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
-vars = vars + ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
-vars = vars + ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
-vars = vars + ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
-vars = vars + ["linux-gnueabi", "eabi"]
-vars = vars + ["virtclass-multilib", "virtclass-mcextend"]
+vars = args.override
+vars += ["append", "prepend", "remove"]
+vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
+vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
+vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
+vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
+vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
+vars += ["tune-", "pn-", "forcevariable"]
+vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
+vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
+vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
+vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
+vars += ["linux-gnueabi", "eabi"]
+vars += ["virtclass-multilib", "virtclass-mcextend"]
# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
-shortvars = ["arc", "mips", "mipsel", "sh4"]
+shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override
# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
"pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
"PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
"pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
- "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"]
+ "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars
# Expressions to skip if encountered, these are not overrides
-skips = ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
-skips = skips + ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
-skips = skips + ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
-skips = skips + ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
-skips = skips + ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
-skips = skips + ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
-skips = skips + ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
-skips = skips + ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
-skips = skips + ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
-
-imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"]
-packagevars = packagevars + imagevars
+skips = args.skip
+skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
+skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
+skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
+skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
+skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
+skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
+skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
+skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
+skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]
+
+imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
+packagevars += imagevars
+
+skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext
vars_re = {}
for exp in vars:
- vars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
+ vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)
shortvars_re = {}
for exp in shortvars:
- shortvars_re[exp] = (re.compile('((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + '([\(\'"\s:])'), r"\1:" + exp + r"\3")
+ shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")
package_re = {}
for exp in packagevars:
- package_re[exp] = (re.compile('(^|[#\'"\s\-\+]+)' + exp + '_' + '([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
+ package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")
# Other substitutions to make
subs = {
- 'r = re.compile("([^:]+):\s*(.*)")' : 'r = re.compile("(^.+?):\s+(.*)")',
+ 'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
"val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
"f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
"d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
@@ -124,21 +136,20 @@ def processfile(fn):
ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"
-if os.path.isfile(sys.argv[1]):
- processfile(sys.argv[1])
- sys.exit(0)
-
-for targetdir in sys.argv[1:]:
- print("processing directory '%s'" % targetdir)
- for root, dirs, files in os.walk(targetdir):
- for name in files:
- if name == ourname:
- continue
- fn = os.path.join(root, name)
- if os.path.islink(fn):
- continue
- if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
- continue
- processfile(fn)
+for p in args.path:
+ if os.path.isfile(p):
+ processfile(p)
+ else:
+ print("processing directory '%s'" % p)
+ for root, dirs, files in os.walk(p):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
+ continue
+ processfile(fn)
print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
new file mode 100755
index 0000000000..4e194dee3f
--- /dev/null
+++ b/scripts/contrib/convert-spdx-licenses.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python3
+#
+# Conversion script to change LICENSE entries to SPDX identifiers
+#
+# Copyright (C) 2021-2022 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+license_map = {
+"AGPL-3" : "AGPL-3.0-only",
+"AGPL-3+" : "AGPL-3.0-or-later",
+"AGPLv3" : "AGPL-3.0-only",
+"AGPLv3+" : "AGPL-3.0-or-later",
+"AGPLv3.0" : "AGPL-3.0-only",
+"AGPLv3.0+" : "AGPL-3.0-or-later",
+"AGPL-3.0" : "AGPL-3.0-only",
+"AGPL-3.0+" : "AGPL-3.0-or-later",
+"BSD-0-Clause" : "0BSD",
+"GPL-1" : "GPL-1.0-only",
+"GPL-1+" : "GPL-1.0-or-later",
+"GPLv1" : "GPL-1.0-only",
+"GPLv1+" : "GPL-1.0-or-later",
+"GPLv1.0" : "GPL-1.0-only",
+"GPLv1.0+" : "GPL-1.0-or-later",
+"GPL-1.0" : "GPL-1.0-only",
+"GPL-1.0+" : "GPL-1.0-or-later",
+"GPL-2" : "GPL-2.0-only",
+"GPL-2+" : "GPL-2.0-or-later",
+"GPLv2" : "GPL-2.0-only",
+"GPLv2+" : "GPL-2.0-or-later",
+"GPLv2.0" : "GPL-2.0-only",
+"GPLv2.0+" : "GPL-2.0-or-later",
+"GPL-2.0" : "GPL-2.0-only",
+"GPL-2.0+" : "GPL-2.0-or-later",
+"GPL-3" : "GPL-3.0-only",
+"GPL-3+" : "GPL-3.0-or-later",
+"GPLv3" : "GPL-3.0-only",
+"GPLv3+" : "GPL-3.0-or-later",
+"GPLv3.0" : "GPL-3.0-only",
+"GPLv3.0+" : "GPL-3.0-or-later",
+"GPL-3.0" : "GPL-3.0-only",
+"GPL-3.0+" : "GPL-3.0-or-later",
+"LGPLv2" : "LGPL-2.0-only",
+"LGPLv2+" : "LGPL-2.0-or-later",
+"LGPLv2.0" : "LGPL-2.0-only",
+"LGPLv2.0+" : "LGPL-2.0-or-later",
+"LGPL-2.0" : "LGPL-2.0-only",
+"LGPL-2.0+" : "LGPL-2.0-or-later",
+"LGPL2.1" : "LGPL-2.1-only",
+"LGPL2.1+" : "LGPL-2.1-or-later",
+"LGPLv2.1" : "LGPL-2.1-only",
+"LGPLv2.1+" : "LGPL-2.1-or-later",
+"LGPL-2.1" : "LGPL-2.1-only",
+"LGPL-2.1+" : "LGPL-2.1-or-later",
+"LGPLv3" : "LGPL-3.0-only",
+"LGPLv3+" : "LGPL-3.0-or-later",
+"LGPL-3.0" : "LGPL-3.0-only",
+"LGPL-3.0+" : "LGPL-3.0-or-later",
+"MPL-1" : "MPL-1.0",
+"MPLv1" : "MPL-1.0",
+"MPLv1.1" : "MPL-1.1",
+"MPLv2" : "MPL-2.0",
+"MIT-X" : "MIT",
+"MIT-style" : "MIT",
+"openssl" : "OpenSSL",
+"PSF" : "PSF-2.0",
+"PSFv2" : "PSF-2.0",
+"Python-2" : "Python-2.0",
+"Apachev2" : "Apache-2.0",
+"Apache-2" : "Apache-2.0",
+"Artisticv1" : "Artistic-1.0",
+"Artistic-1" : "Artistic-1.0",
+"AFL-2" : "AFL-2.0",
+"AFL-1" : "AFL-1.2",
+"AFLv2" : "AFL-2.0",
+"AFLv1" : "AFL-1.2",
+"CDDLv1" : "CDDL-1.0",
+"CDDL-1" : "CDDL-1.0",
+"EPLv1.0" : "EPL-1.0",
+"FreeType" : "FTL",
+"Nauman" : "Naumen",
+"tcl" : "TCL",
+"vim" : "Vim",
+"SGIv1" : "SGI-1",
+}
+
+def processfile(fn):
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ if not line.startswith("LICENSE"):
+ new_file.write(line)
+ continue
+ orig = line
+ for license in sorted(license_map, key=len, reverse=True):
+ for ending in ['"', "'", " ", ")"]:
+ line = line.replace(license + ending, license_map[license] + ending)
+ if orig != line:
+ modified = True
+ new_file.write(line)
+ new_file.close()
+ if modified:
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.01"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
new file mode 100755
index 0000000000..587392334f
--- /dev/null
+++ b/scripts/contrib/convert-srcuri.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#
+# Conversion script to update SRC_URI to add branch to git urls
+#
+# Copyright (C) 2021 Richard Purdie
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+def processfile(fn):
+ def matchline(line):
+ if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
+ return False
+ return True
+ print("processing file '%s'" % fn)
+ try:
+ if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
+ return
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ for line in old_file:
+ if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
+ if line.endswith('"\n'):
+ line = line.replace('"\n', ';branch=master"\n')
+ elif re.search('\s*\\\\$', line):
+ line = re.sub('\s*\\\\$', ';branch=master \\\\', line)
+ modified = True
+ if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
+ if "protocol=git" in line:
+ line = line.replace('protocol=git', 'protocol=https')
+ elif line.endswith('"\n'):
+ line = line.replace('"\n', ';protocol=https"\n')
+ elif re.search('\s*\\\\$', line):
+ line = re.sub('\s*\\\\$', ';protocol=https \\\\', line)
+ modified = True
+ new_file.write(line)
+ if modified:
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
new file mode 100755
index 0000000000..eded90ca61
--- /dev/null
+++ b/scripts/contrib/convert-variable-renames.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python3
+#
+# Conversion script to rename variables to versions with improved terminology.
+# Also highlights potentially problematic language and removed variables.
+#
+# Copyright (C) 2021 Richard Purdie
+# Copyright (C) 2022 Wind River Systems, Inc.
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import re
+import os
+import sys
+import tempfile
+import shutil
+import mimetypes
+
+if len(sys.argv) < 2:
+ print("Please specify a directory to run the conversion script against.")
+ sys.exit(1)
+
+renames = {
+"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
+"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
+"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
+"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
+"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
+"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
+"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
+"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
+"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
+"PNBLACKLIST" : "SKIP_RECIPE",
+"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
+"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
+"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
+"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
+"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
+"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
+"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
+"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
+"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
+"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
+"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
+}
+
+removed_list = [
+"BB_STAMP_WHITELIST",
+"BB_STAMP_POLICY",
+"INHERIT_BLACKLIST",
+"TUNEABI_WHITELIST",
+]
+
+context_check_list = [
+"blacklist",
+"whitelist",
+"abort",
+]
+
+def processfile(fn):
+
+ print("processing file '%s'" % fn)
+ try:
+ fh, abs_path = tempfile.mkstemp()
+ modified = False
+ with os.fdopen(fh, 'w') as new_file:
+ with open(fn, "r") as old_file:
+ lineno = 0
+ for line in old_file:
+ lineno += 1
+ if not line or "BB_RENAMED_VARIABLE" in line:
+ continue
+ # Do the renames
+ for old_name, new_name in renames.items():
+ if old_name in line:
+ line = line.replace(old_name, new_name)
+ modified = True
+ # Find removed names
+ for removed_name in removed_list:
+ if removed_name in line:
+ print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
+ for check_word in context_check_list:
+ if re.search(check_word, line, re.IGNORECASE):
+ print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
+ new_file.write(line)
+ new_file.close()
+ if modified:
+ print("*** Modified file '%s'" % (fn))
+ shutil.copymode(fn, abs_path)
+ os.remove(fn)
+ shutil.move(abs_path, fn)
+ except UnicodeDecodeError:
+ pass
+
+ourname = os.path.basename(sys.argv[0])
+ourversion = "0.1"
+
+if os.path.isfile(sys.argv[1]):
+ processfile(sys.argv[1])
+ sys.exit(0)
+
+for targetdir in sys.argv[1:]:
+ print("processing directory '%s'" % targetdir)
+ for root, dirs, files in os.walk(targetdir):
+ for name in files:
+ if name == ourname:
+ continue
+ fn = os.path.join(root, name)
+ if os.path.islink(fn):
+ continue
+ if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
+ continue
+ processfile(fn)
+
+print("All files processed with version %s" % ourversion)
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
index 7f2ad112a6..70eee8ebea 100755
--- a/scripts/contrib/ddimage
+++ b/scripts/contrib/ddimage
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
index ad6070c369..82c84baa1d 100755
--- a/scripts/contrib/dialog-power-control
+++ b/scripts/contrib/dialog-power-control
@@ -1,5 +1,7 @@
#!/bin/sh
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Simple script to show a manual power prompt for when you want to use
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
index f436f9bae0..7197a2fcea 100755
--- a/scripts/contrib/documentation-audit.sh
+++ b/scripts/contrib/documentation-audit.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# Perform an audit of which packages provide documentation and which
@@ -26,8 +28,8 @@ if [ -z "$BITBAKE" ]; then
fi
echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
-echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or "
-echo " you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"commercial\""
+echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
+echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
index 3c07a73a4e..4d65a99258 100755
--- a/scripts/contrib/image-manifest
+++ b/scripts/contrib/image-manifest
@@ -392,7 +392,7 @@ def export_manifest_info(args):
for key in rd.getVarFlags('PACKAGECONFIG').keys():
if key == 'doc':
continue
- rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key, True)
+ rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
if config['patches'] == 'yes':
patches = oe.recipeutils.get_recipe_patches(rd)
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
index de3862c897..7192113c28 100755
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ b/scripts/contrib/oe-build-perf-report-email.py
@@ -19,8 +19,6 @@ import socket
import subprocess
import sys
import tempfile
-from email.mime.image import MIMEImage
-from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
@@ -29,30 +27,6 @@ logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger('oe-build-perf-report')
-# Find js scaper script
-SCRAPE_JS = os.path.join(os.path.dirname(__file__), '..', 'lib', 'build_perf',
- 'scrape-html-report.js')
-if not os.path.isfile(SCRAPE_JS):
- log.error("Unableto find oe-build-perf-report-scrape.js")
- sys.exit(1)
-
-
-class ReportError(Exception):
- """Local errors"""
- pass
-
-
-def check_utils():
- """Check that all needed utils are installed in the system"""
- missing = []
- for cmd in ('phantomjs', 'optipng'):
- if not shutil.which(cmd):
- missing.append(cmd)
- if missing:
- log.error("The following tools are missing: %s", ' '.join(missing))
- sys.exit(1)
-
-
def parse_args(argv):
"""Parse command line arguments"""
description = """Email build perf test report"""
@@ -77,137 +51,19 @@ def parse_args(argv):
"the email parts")
parser.add_argument('--text',
help="Plain text message")
- parser.add_argument('--html',
- help="HTML peport generated by oe-build-perf-report")
- parser.add_argument('--phantomjs-args', action='append',
- help="Extra command line arguments passed to PhantomJS")
args = parser.parse_args(argv)
- if not args.html and not args.text:
- parser.error("Please specify --html and/or --text")
+ if not args.text:
+ parser.error("Please specify --text")
return args
-def decode_png(infile, outfile):
- """Parse/decode/optimize png data from a html element"""
- with open(infile) as f:
- raw_data = f.read()
-
- # Grab raw base64 data
- b64_data = re.sub('^.*href="data:image/png;base64,', '', raw_data, 1)
- b64_data = re.sub('">.+$', '', b64_data, 1)
-
- # Replace file with proper decoded png
- with open(outfile, 'wb') as f:
- f.write(base64.b64decode(b64_data))
-
- subprocess.check_output(['optipng', outfile], stderr=subprocess.STDOUT)
-
-
-def mangle_html_report(infile, outfile, pngs):
- """Mangle html file into a email compatible format"""
- paste = True
- png_dir = os.path.dirname(outfile)
- with open(infile) as f_in:
- with open(outfile, 'w') as f_out:
- for line in f_in.readlines():
- stripped = line.strip()
- # Strip out scripts
- if stripped == '<!--START-OF-SCRIPTS-->':
- paste = False
- elif stripped == '<!--END-OF-SCRIPTS-->':
- paste = True
- elif paste:
- if re.match('^.+href="data:image/png;base64', stripped):
- # Strip out encoded pngs (as they're huge in size)
- continue
- elif 'www.gstatic.com' in stripped:
- # HACK: drop references to external static pages
- continue
-
- # Replace charts with <img> elements
- match = re.match('<div id="(?P<id>\w+)"', stripped)
- if match and match.group('id') in pngs:
- f_out.write('<img src="cid:{}"\n'.format(match.group('id')))
- else:
- f_out.write(line)
-
-
-def scrape_html_report(report, outdir, phantomjs_extra_args=None):
- """Scrape html report into a format sendable by email"""
- tmpdir = tempfile.mkdtemp(dir='.')
- log.debug("Using tmpdir %s for phantomjs output", tmpdir)
-
- if not os.path.isdir(outdir):
- os.mkdir(outdir)
- if os.path.splitext(report)[1] not in ('.html', '.htm'):
- raise ReportError("Invalid file extension for report, needs to be "
- "'.html' or '.htm'")
-
- try:
- log.info("Scraping HTML report with PhangomJS")
- extra_args = phantomjs_extra_args if phantomjs_extra_args else []
- subprocess.check_output(['phantomjs', '--debug=true'] + extra_args +
- [SCRAPE_JS, report, tmpdir],
- stderr=subprocess.STDOUT)
-
- pngs = []
- images = []
- for fname in os.listdir(tmpdir):
- base, ext = os.path.splitext(fname)
- if ext == '.png':
- log.debug("Decoding %s", fname)
- decode_png(os.path.join(tmpdir, fname),
- os.path.join(outdir, fname))
- pngs.append(base)
- images.append(fname)
- elif ext in ('.html', '.htm'):
- report_file = fname
- else:
- log.warning("Unknown file extension: '%s'", ext)
- #shutil.move(os.path.join(tmpdir, fname), outdir)
-
- log.debug("Mangling html report file %s", report_file)
- mangle_html_report(os.path.join(tmpdir, report_file),
- os.path.join(outdir, report_file), pngs)
- return (os.path.join(outdir, report_file),
- [os.path.join(outdir, i) for i in images])
- finally:
- shutil.rmtree(tmpdir)
-
-def send_email(text_fn, html_fn, image_fns, subject, recipients, copy=[],
- blind_copy=[]):
- """Send email"""
+def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
# Generate email message
- text_msg = html_msg = None
- if text_fn:
- with open(text_fn) as f:
- text_msg = MIMEText("Yocto build performance test report.\n" +
- f.read(), 'plain')
- if html_fn:
- html_msg = msg = MIMEMultipart('related')
- with open(html_fn) as f:
- html_msg.attach(MIMEText(f.read(), 'html'))
- for img_fn in image_fns:
- # Expect that content id is same as the filename
- cid = os.path.splitext(os.path.basename(img_fn))[0]
- with open(img_fn, 'rb') as f:
- image_msg = MIMEImage(f.read())
- image_msg['Content-ID'] = '<{}>'.format(cid)
- html_msg.attach(image_msg)
-
- if text_msg and html_msg:
- msg = MIMEMultipart('alternative')
- msg.attach(text_msg)
- msg.attach(html_msg)
- elif text_msg:
- msg = text_msg
- elif html_msg:
- msg = html_msg
- else:
- raise ReportError("Neither plain text nor html body specified")
+ with open(text_fn) as f:
+ msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
pw_data = pwd.getpwuid(os.getuid())
full_name = pw_data.pw_gecos.split(',')[0]
@@ -234,8 +90,6 @@ def main(argv=None):
if args.debug:
log.setLevel(logging.DEBUG)
- check_utils()
-
if args.outdir:
outdir = args.outdir
if not os.path.exists(outdir):
@@ -245,25 +99,16 @@ def main(argv=None):
try:
log.debug("Storing email parts in %s", outdir)
- html_report = images = None
- if args.html:
- html_report, images = scrape_html_report(args.html, outdir,
- args.phantomjs_args)
-
if args.to:
log.info("Sending email to %s", ', '.join(args.to))
if args.cc:
log.info("Copying to %s", ', '.join(args.cc))
if args.bcc:
log.info("Blind copying to %s", ', '.join(args.bcc))
- send_email(args.text, html_report, images, args.subject,
- args.to, args.cc, args.bcc)
+ send_email(args.text, args.subject, args.to, args.cc, args.bcc)
except subprocess.CalledProcessError as err:
log.error("%s, with output:\n%s", str(err), err.output.decode())
return 1
- except ReportError as err:
- log.error(err)
- return 1
finally:
if not args.outdir:
log.debug("Wiping %s", outdir)
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
index 62c509f51c..bceae06561 100755
--- a/scripts/contrib/patchreview.py
+++ b/scripts/contrib/patchreview.py
@@ -1,14 +1,25 @@
#! /usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
+import argparse
+import collections
+import json
+import os
+import os.path
+import pathlib
+import re
+import subprocess
+
# TODO
# - option to just list all broken files
# - test suite
# - validate signed-off-by
-status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied")
+status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
class Result:
# Whether the patch has an Upstream-Status or not
@@ -33,20 +44,18 @@ def blame_patch(patch):
From a patch filename, return a list of "commit summary (author name <author
email>)" strings representing the history.
"""
- import subprocess
return subprocess.check_output(("git", "log",
"--follow", "--find-renames", "--diff-filter=A",
"--format=%s (%aN <%aE>)",
"--", patch)).decode("utf-8").splitlines()
-def patchreview(path, patches):
- import re, os.path
+def patchreview(patches):
# General pattern: start of line, optional whitespace, tag with optional
# hyphen or spaces, maybe a colon, some whitespace, then the value, all case
# insensitive.
sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
- status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*(\w*)", re.IGNORECASE | re.MULTILINE)
+ status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
@@ -54,11 +63,10 @@ def patchreview(path, patches):
for patch in patches:
- fullpath = os.path.join(path, patch)
result = Result()
- results[fullpath] = result
+ results[patch] = result
- content = open(fullpath, encoding='ascii', errors='ignore').read()
+ content = open(patch, encoding='ascii', errors='ignore').read()
# Find the Signed-off-by tag
match = sob_re.search(content)
@@ -191,29 +199,56 @@ Patches in Pending state: %s""" % (total_patches,
def histogram(results):
from toolz import recipes, dicttoolz
import math
+
counts = recipes.countby(lambda r: r.upstream_status, results.values())
bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
for k in bars:
print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
+def find_layers(candidate):
+ # candidate can either be the path to a layer directly (eg meta-intel), or a
+ # repository that contains other layers (meta-arm). We can determine what by
+ # looking for a conf/layer.conf file. If that file exists then it's a layer,
+ # otherwise its a repository of layers and we can assume they're called
+ # meta-*.
+
+ if (candidate / "conf" / "layer.conf").exists():
+ return [candidate.absolute()]
+ else:
+ return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
+
+# TODO these don't actually handle dynamic-layers/
+
+def gather_patches(layers):
+ patches = []
+ for directory in layers:
+ filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
+ patches += [os.path.join(directory, f) for f in filenames]
+ return patches
+
+def count_recipes(layers):
+ count = 0
+ for directory in layers:
+ output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
+ count += len(output.splitlines())
+ return count
if __name__ == "__main__":
- import argparse, subprocess, os
-
args = argparse.ArgumentParser(description="Patch Review Tool")
args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
args.add_argument("-j", "--json", help="update JSON")
- args.add_argument("directory", help="directory to scan")
+ args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
args = args.parse_args()
- patches = subprocess.check_output(("git", "-C", args.directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff")).decode("utf-8").split()
- results = patchreview(args.directory, patches)
+ layers = find_layers(args.directory)
+ print(f"Found layers {' '.join((d.name for d in layers))}")
+ patches = gather_patches(layers)
+ results = patchreview(patches)
analyse(results, want_blame=args.blame, verbose=args.verbose)
if args.json:
- import json, os.path, collections
if os.path.isfile(args.json):
data = json.load(open(args.json))
else:
@@ -221,7 +256,11 @@ if __name__ == "__main__":
row = collections.Counter()
row["total"] = len(results)
- row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"]).decode("utf-8").strip()
+ row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
+ row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
+ row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
+ row['recipe_count'] = count_recipes(layers)
+
for r in results.values():
if r.upstream_status in status_values:
row[r.upstream_status] += 1
@@ -231,7 +270,7 @@ if __name__ == "__main__":
row['malformed-sob'] += 1
data.append(row)
- json.dump(data, open(args.json, "w"))
+ json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
if args.histogram:
print()
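
The tool now accepts either a single layer or a repository of meta-* layers, and records extra metadata (commit, commit count, recipe count) in the JSON output. A hypothetical invocation (paths invented):

    $ scripts/contrib/patchreview.py --histogram --json patchreview.json ~/work/meta-arm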
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
index 23f238adf6..4012ac7ba7 100755
--- a/scripts/contrib/test_build_time.sh
+++ b/scripts/contrib/test_build_time.sh
@@ -97,7 +97,7 @@ if [ $? != 0 ] ; then
exit 251
fi
-if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then
+if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
fi
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
index 478e8b0d03..a2879d2336 100755
--- a/scripts/contrib/test_build_time_worker.sh
+++ b/scripts/contrib/test_build_time_worker.sh
@@ -1,5 +1,7 @@
#!/bin/bash
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This is an example script to be used in conjunction with test_build_time.sh
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
index 7bffa78e23..a90b5010bc 100755
--- a/scripts/contrib/verify-homepage.py
+++ b/scripts/contrib/verify-homepage.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3
#
+# Copyright OpenEmbedded Contributors
+#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script can be used to verify HOMEPAGE values for all recipes in