Diffstat (limited to 'scripts/lib/checklayer/__init__.py')
-rw-r--r--  scripts/lib/checklayer/__init__.py | 72
1 file changed, 60 insertions(+), 12 deletions(-)
diff --git a/scripts/lib/checklayer/__init__.py b/scripts/lib/checklayer/__init__.py
index f625d59896..62ecdfe390 100644
--- a/scripts/lib/checklayer/__init__.py
+++ b/scripts/lib/checklayer/__init__.py
@@ -16,6 +16,7 @@ class LayerType(Enum):
BSP = 0
DISTRO = 1
SOFTWARE = 2
+ CORE = 3
ERROR_NO_LAYER_CONF = 98
ERROR_BSP_DISTRO = 99
@@ -43,7 +44,7 @@ def _get_layer_collections(layer_path, lconf=None, data=None):
ldata.setVar('LAYERDIR', layer_path)
try:
- ldata = bb.parse.handle(lconf, ldata, include=True)
+ ldata = bb.parse.handle(lconf, ldata, include=True, baseconfig=True)
except:
raise RuntimeError("Parsing of layer.conf from layer: %s failed" % layer_path)
ldata.expandVarref('LAYERDIR')
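
# Illustrative sketch (not part of the patch): _get_layer_collections() above parses a
# single layer.conf in isolation, now passing baseconfig=True through to bb.parse.handle().
# A minimal standalone parse along the same lines might look like this; the layer path is
# hypothetical and the exact semantics of baseconfig are defined by BitBake itself.
import bb.data
import bb.parse

ldata = bb.data.init()
ldata.setVar('LAYERDIR', '/srv/layers/meta-example')            # hypothetical layer checkout
ldata = bb.parse.handle('/srv/layers/meta-example/conf/layer.conf',
                        ldata, include=True, baseconfig=True)
ldata.expandVarref('LAYERDIR')
print((ldata.getVar('BBFILE_COLLECTIONS') or '').split())       # collection names the layer declares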
@@ -106,7 +107,13 @@ def _detect_layer(layer_path):
     if distros:
         is_distro = True
 
-    if is_bsp and is_distro:
+    layer['collections'] = _get_layer_collections(layer['path'])
+
+    if layer_name == "meta" and "core" in layer['collections']:
+        layer['type'] = LayerType.CORE
+        layer['conf']['machines'] = machines
+        layer['conf']['distros'] = distros
+    elif is_bsp and is_distro:
         layer['type'] = LayerType.ERROR_BSP_DISTRO
     elif is_bsp:
         layer['type'] = LayerType.BSP
@@ -117,8 +124,6 @@ def _detect_layer(layer_path):
     else:
         layer['type'] = LayerType.SOFTWARE
 
-    layer['collections'] = _get_layer_collections(layer['path'])
-
     return layer
 
 def detect_layers(layer_directories, no_auto):
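
# Illustrative sketch (not part of the patch): with the reordering above, a layer's
# collections are computed before its type is decided, so oe-core's "meta" directory can
# be reported as LayerType.CORE rather than whatever its machine and distro configs would
# otherwise imply. The checkout path below is hypothetical.
layer = _detect_layer('/srv/poky/meta')
if layer and layer['type'] == LayerType.CORE:
    print('%s is the core layer, collections: %s'
          % (layer['name'], ', '.join(layer['collections'])))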
@@ -146,7 +151,7 @@ def detect_layers(layer_directories, no_auto):
return layers
-def _find_layer_depends(depend, layers):
+def _find_layer(depend, layers):
for layer in layers:
if 'collections' not in layer:
continue
@@ -156,7 +161,28 @@ def _find_layer_depends(depend, layers):
                 return layer
     return None
 
-def add_layer_dependencies(bblayersconf, layer, layers, logger):
+def sanity_check_layers(layers, logger):
+    """
+    Check that we didn't find duplicate collection names, as the layer that will
+    be used is non-deterministic. The precise check is duplicate collections
+    with different patterns, as the same pattern being repeated won't cause
+    problems.
+    """
+    import collections
+
+    passed = True
+    seen = collections.defaultdict(set)
+    for layer in layers:
+        for name, data in layer.get("collections", {}).items():
+            seen[name].add(data["pattern"])
+
+    for name, patterns in seen.items():
+        if len(patterns) > 1:
+            passed = False
+            logger.error("Collection %s found multiple times: %s" % (name, ", ".join(patterns)))
+    return passed
+
+def get_layer_dependencies(layer, layers, logger):
     def recurse_dependencies(depends, layer, layers, logger, ret = []):
         logger.debug('Processing dependencies %s for layer %s.' % \
                 (depends, layer['name']))
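
# Usage sketch (illustrative): sanity_check_layers() flags BBFILE_COLLECTIONS names that
# appear in more than one detected layer with differing BBFILE_PATTERN values. The logger
# name and search path below are hypothetical.
import logging

logger = logging.getLogger('yocto-check-layer')
layers = detect_layers(['/srv/layers'], no_auto=False)
if not sanity_check_layers(layers, logger):
    raise SystemExit('Duplicate collection names with different patterns were found')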
@@ -166,7 +192,7 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger):
if depend == 'core':
continue
- layer_depend = _find_layer_depends(depend, layers)
+ layer_depend = _find_layer(depend, layers)
if not layer_depend:
logger.error('Layer %s depends on %s and isn\'t found.' % \
(layer['name'], depend))
@@ -203,6 +229,11 @@ def add_layer_dependencies(bblayersconf, layer, layers, logger):
     layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends)
 
     # Note: [] (empty) is allowed, None is not!
+    return layer_depends
+
+def add_layer_dependencies(bblayersconf, layer, layers, logger):
+
+    layer_depends = get_layer_dependencies(layer, layers, logger)
     if layer_depends is None:
         return False
     else:
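
# Illustrative sketch (not part of the patch): after this split, get_layer_dependencies()
# returns the resolved dependency layers (or None on failure) without touching any
# configuration, while add_layer_dependencies() keeps writing them to bblayers.conf.
# The path is hypothetical; 'layers' and 'logger' are as in the sketch above.
layer = layers[0]                      # one detected layer from the earlier sketch
deps = get_layer_dependencies(layer, layers, logger)
if deps is None:
    logger.error('Could not resolve dependencies of %s' % layer['name'])
elif not add_layer_dependencies('/srv/build/conf/bblayers.conf', layer, layers, logger):
    logger.error('Failed to add dependencies of %s to bblayers.conf' % layer['name'])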
@@ -229,6 +260,20 @@ def add_layers(bblayersconf, layers, logger):
f.write("\nBBLAYERS += \"%s\"\n" % path)
return True
+def check_bblayers(bblayersconf, layer_path, logger):
+    '''
+    If layer_path found in BBLAYERS return True
+    '''
+    import bb.parse
+    import bb.data
+
+    ldata = bb.parse.handle(bblayersconf, bb.data.init(), include=True)
+    for bblayer in (ldata.getVar('BBLAYERS') or '').split():
+        if os.path.normpath(bblayer) == os.path.normpath(layer_path):
+            return True
+
+    return False
+
def check_command(error_msg, cmd, cwd=None):
'''
Run a command under a shell, capture stdout and stderr in a single stream,
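
# Usage sketch (illustrative): check_bblayers() lets a caller skip layers that the build
# configuration already lists in BBLAYERS before appending new ones with add_layers().
# The bblayers.conf path is hypothetical; 'layers' and 'logger' are as in the earlier sketches.
bblayersconf = '/srv/build/conf/bblayers.conf'
missing = [l for l in layers if not check_bblayers(bblayersconf, l['path'], logger)]
if missing and not add_layers(bblayersconf, missing, logger):
    logger.error('Failed to add layers to %s' % bblayersconf)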
@@ -242,7 +287,7 @@ def check_command(error_msg, cmd, cwd=None):
raise RuntimeError(msg)
return output
-def get_signatures(builddir, failsafe=False, machine=None):
+def get_signatures(builddir, failsafe=False, machine=None, extravars=None):
import re
# some recipes needs to be excluded like meta-world-pkgdata
@@ -253,13 +298,16 @@ def get_signatures(builddir, failsafe=False, machine=None):
     sigs = {}
     tune2tasks = {}
 
-    cmd = 'BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
+    cmd = 'BB_ENV_PASSTHROUGH_ADDITIONS="$BB_ENV_PASSTHROUGH_ADDITIONS BB_SIGNATURE_HANDLER" BB_SIGNATURE_HANDLER="OEBasicHash" '
+    if extravars:
+        cmd += extravars
+        cmd += ' '
     if machine:
         cmd += 'MACHINE=%s ' % machine
     cmd += 'bitbake '
     if failsafe:
         cmd += '-k '
-    cmd += '-S none world'
+    cmd += '-S lockedsigs world'
     sigs_file = os.path.join(builddir, 'locked-sigs.inc')
     if os.path.exists(sigs_file):
         os.unlink(sigs_file)
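
# Illustrative call (not part of the patch): extravars is inserted verbatim into the
# bitbake command line, so it can carry additional VAR=value assignments; the values
# below, and the build directory, are hypothetical.
builddir = '/srv/build'
sigs, tune2tasks = get_signatures(builddir, failsafe=True, machine='qemux86-64',
                                  extravars='BB_HASHSERVE=""')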
@@ -276,8 +324,8 @@ def get_signatures(builddir, failsafe=False, machine=None):
else:
raise
- sig_regex = re.compile("^(?P<task>.*:.*):(?P<hash>.*) .$")
- tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
+ sig_regex = re.compile(r"^(?P<task>.*:.*):(?P<hash>.*) .$")
+ tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
current_tune = None
with open(sigs_file, 'r') as f:
for line in f.readlines():
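
# Why the r'' prefixes above: escapes such as "\S" and "\s" are not valid string escapes,
# and recent Python releases warn about them in plain literals; a raw string hands the
# backslash straight to the regex engine. Minimal demonstration with a made-up input line:
import re

tune_regex = re.compile(r"(^|\s)SIGGEN_LOCKEDSIGS_t-(?P<tune>\S*)\s*=\s*")
match = tune_regex.search('SIGGEN_LOCKEDSIGS_t-core2-64 = "..."')
print(match.group('tune') if match else None)   # -> core2-64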