Diffstat (limited to 'bitbake/lib/bb/data.py')
-rw-r--r--  bitbake/lib/bb/data.py | 145
1 file changed, 75 insertions, 70 deletions
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 9d18b1e2bf..505f42950f 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -4,14 +4,16 @@ BitBake 'Data' implementations
Functions for interacting with the data structure used by the
BitBake build tools.
-The expandKeys and update_data are the most expensive
-operations. At night the cookie monster came by and
+expandKeys and datastore iteration are the most expensive
+operations. Updating overrides is now "on the fly" but still based
+on the idea of the cookie monster introduced by zecke:
+"At night the cookie monster came by and
suggested 'give me cookies on setting the variables and
things will work out'. Taking this suggestion into account
applying the skills from the not yet passed 'Entwurf und
Analyse von Algorithmen' lecture and the cookie
monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
+to have faster datastore operations."
This is a trade-off between speed and memory again but
the speed is more critical here.
@@ -26,11 +28,6 @@ the speed is more critical here.
import sys, os, re
import hashlib
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
from itertools import groupby
from bb import data_smart
@@ -70,10 +67,6 @@ def keys(d):
"""Return a list of keys in d"""
return d.keys()
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
def expand(s, d, varname = None):
"""Variable expansion using the data store"""
return d.expand(s, varname)
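
The two module-level patterns removed here matched BitBake's two reference forms: plain ${VAR} expansion and inline ${@...} Python expressions. Since expansion is delegated to the datastore (d.expand() above), the compiled regexes were presumably dead at this level. A standalone check of what each deleted pattern matches, with invented sample strings:

    import re

    # The exact patterns deleted above, exercised outside BitBake for illustration.
    expand_var = re.compile(r"\${[^{}]+}")    # plain ${VAR} references
    expand_python = re.compile(r"\${@.+?}")   # inline ${@<python>} expressions

    print(expand_var.findall("install -d ${D}${bindir}"))
    # ['${D}', '${bindir}']
    print(expand_python.findall("PR = \"r${@int(d.getVar('PR')[1:]) + 1}\""))
    # ["${@int(d.getVar('PR')[1:]) + 1}"]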
@@ -121,8 +114,8 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
if d.getVarFlag(var, 'python', False) and func:
return False
- export = d.getVarFlag(var, "export", False)
- unexport = d.getVarFlag(var, "unexport", False)
+ export = bb.utils.to_boolean(d.getVarFlag(var, "export"))
+ unexport = bb.utils.to_boolean(d.getVarFlag(var, "unexport"))
if not all and not export and not unexport and not func:
return False
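
emit_var() now interprets the export/unexport flags with bb.utils.to_boolean() instead of relying on the raw flag value's truthiness, so a flag set to "0" or "false" no longer counts as exported. A minimal sketch of the difference; to_boolean_like below is a local stand-in approximating bb.utils.to_boolean, not the real helper:

    # Local stand-in approximating bb.utils.to_boolean, for illustration only.
    def to_boolean_like(value, default=False):
        if not value:
            return default
        value = str(value).lower()
        if value in ("1", "y", "yes", "true"):
            return True
        if value in ("0", "n", "no", "false"):
            return False
        raise ValueError("Invalid boolean value '%s'" % value)

    flag = "0"                    # e.g. FOO[export] = "0"
    print(bool(flag))             # True  -- old truthiness check: treated as exported
    print(to_boolean_like(flag))  # False -- new check: not exported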
@@ -195,8 +188,8 @@ def emit_env(o=sys.__stdout__, d = init(), all=False):
def exported_keys(d):
return (key for key in d.keys() if not key.startswith('__') and
- d.getVarFlag(key, 'export', False) and
- not d.getVarFlag(key, 'unexport', False))
+ bb.utils.to_boolean(d.getVarFlag(key, 'export')) and
+ not bb.utils.to_boolean(d.getVarFlag(key, 'unexport')))
def exported_vars(d):
k = list(exported_keys(d))
@@ -268,65 +261,72 @@ def emit_func_python(func, o=sys.__stdout__, d = init()):
newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split())
newdeps -= seen
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize(parent = True)
+def build_dependencies(key, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparsedata):
+ def handle_contains(value, contains, exclusions, d):
+ newvalue = []
+ if value:
+ newvalue.append(str(value))
+ for k in sorted(contains):
+ if k in exclusions or k in ignored_vars:
+ continue
+ l = (d.getVar(k) or "").split()
+ for item in sorted(contains[k]):
+ for word in item.split():
+ if not word in l:
+ newvalue.append("\n%s{%s} = Unset" % (k, item))
+ break
+ else:
+ newvalue.append("\n%s{%s} = Set" % (k, item))
+ return "".join(newvalue)
+
+ def handle_remove(value, deps, removes, d):
+ for r in sorted(removes):
+ r2 = d.expandWithRefs(r, None)
+ value += "\n_remove of %s" % r
+ deps |= r2.references
+ deps = deps | (keys & r2.execs)
+ value = handle_contains(value, r2.contains, exclusions, d)
+ return value
-def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps = set()
try:
+ if key in mod_funcs:
+ exclusions = set()
+ moddep = bb.codeparser.modulecode_deps[key]
+ value = handle_contains("", moddep[3], exclusions, d)
+ return frozenset((moddep[0] | keys & moddep[1]) - ignored_vars), value
+
if key[-1] == ']':
vf = key[:-1].split('[')
+ if vf[1] == "vardepvalueexclude":
+ return deps, ""
value, parser = d.getVarFlag(vf[0], vf[1], False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
- return deps, value
+ deps -= ignored_vars
+ return frozenset(deps), value
varflags = d.getVarFlags(key, ["vardeps", "vardepvalue", "vardepsexclude", "exports", "postfuncs", "prefuncs", "lineno", "filename"]) or {}
vardeps = varflags.get("vardeps")
-
- def handle_contains(value, contains, d):
- newvalue = ""
- for k in sorted(contains):
- l = (d.getVar(k) or "").split()
- for item in sorted(contains[k]):
- for word in item.split():
- if not word in l:
- newvalue += "\n%s{%s} = Unset" % (k, item)
- break
- else:
- newvalue += "\n%s{%s} = Set" % (k, item)
- if not newvalue:
- return value
- if not value:
- return newvalue
- return value + newvalue
-
- def handle_remove(value, deps, removes, d):
- for r in sorted(removes):
- r2 = d.expandWithRefs(r, None)
- value += "\n_remove of %s" % r
- deps |= r2.references
- deps = deps | (keys & r2.execs)
- return value
+ exclusions = varflags.get("vardepsexclude", "").split()
if "vardepvalue" in varflags:
value = varflags.get("vardepvalue")
elif varflags.get("func"):
if varflags.get("python"):
- value = d.getVarFlag(key, "_content", False)
+ value = codeparsedata.getVarFlag(key, "_content", False)
parser = bb.codeparser.PythonParser(key, logger)
parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno"))
deps = deps | parser.references
deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
+ value = handle_contains(value, parser.contains, exclusions, d)
else:
- value, parsedvar = d.getVarFlag(key, "_content", False, retparser=True)
+ value, parsedvar = codeparsedata.getVarFlag(key, "_content", False, retparser=True)
parser = bb.codeparser.ShellParser(key, logger)
parser.parse_shell(parsedvar.value)
deps = deps | shelldeps
deps = deps | parsedvar.references
deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
- value = handle_contains(value, parsedvar.contains, d)
+ value = handle_contains(value, parsedvar.contains, exclusions, d)
if hasattr(parsedvar, "removes"):
value = handle_remove(value, deps, parsedvar.removes, d)
if vardeps is None:
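
handle_contains() turns the contains data collected by the code parser (from calls such as bb.utils.contains('DISTRO_FEATURES', 'x11', ...)) into "VAR{item} = Set/Unset" markers that become part of the signature value, skipping anything listed in vardepsexclude or in the ignored variables. A simplified standalone rendering of that logic over a plain dict, with invented variable names and values:

    # Simplified, standalone version of handle_contains() above, using a
    # plain dict in place of the datastore.
    def render_contains(value, contains, exclusions, ignored_vars, data):
        newvalue = [str(value)] if value else []
        for k in sorted(contains):
            if k in exclusions or k in ignored_vars:
                continue
            words = (data.get(k) or "").split()
            for item in sorted(contains[k]):
                state = "Set" if all(w in words for w in item.split()) else "Unset"
                newvalue.append("\n%s{%s} = %s" % (k, item, state))
        return "".join(newvalue)

    data = {"DISTRO_FEATURES": "systemd usrmerge wayland"}
    contains = {"DISTRO_FEATURES": {"x11", "systemd usrmerge"}}
    print(render_contains("base value", contains, set(), set(), data))
    # base value
    # DISTRO_FEATURES{systemd usrmerge} = Set
    # DISTRO_FEATURES{x11} = Unset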
@@ -341,7 +341,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
value, parser = d.getVarFlag(key, "_content", False, retparser=True)
deps |= parser.references
deps = deps | (keys & parser.execs)
- value = handle_contains(value, parser.contains, d)
+ value = handle_contains(value, parser.contains, exclusions, d)
if hasattr(parser, "removes"):
value = handle_remove(value, deps, parser.removes, d)
@@ -361,43 +361,50 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d):
deps |= set(varfdeps)
deps |= set((vardeps or "").split())
- deps -= set(varflags.get("vardepsexclude", "").split())
+ deps -= set(exclusions)
+ deps -= ignored_vars
except bb.parse.SkipRecipe:
raise
except Exception as e:
bb.warn("Exception during build_dependencies for %s" % key)
raise
- return deps, value
+ return frozenset(deps), value
#bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
#d.setVarFlag(key, "vardeps", deps)
-def generate_dependencies(d, whitelist):
+def generate_dependencies(d, ignored_vars):
- keys = set(key for key in d if not key.startswith("__"))
- shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False))
+ mod_funcs = set(bb.codeparser.modulecode_deps.keys())
+ keys = set(key for key in d if not key.startswith("__")) | mod_funcs
+ shelldeps = set(key for key in d.getVar("__exportlist", False) if bb.utils.to_boolean(d.getVarFlag(key, "export")) and not bb.utils.to_boolean(d.getVarFlag(key, "unexport")))
varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS')
+ codeparserd = d.createCopy()
+ for forced in (d.getVar('BB_HASH_CODEPARSER_VALS') or "").split():
+ key, value = forced.split("=", 1)
+ codeparserd.setVar(key, value)
+
deps = {}
values = {}
tasklist = d.getVar('__BBTASKS', False) or []
for task in tasklist:
- deps[task], values[task] = build_dependencies(task, keys, shelldeps, varflagsexcl, d)
+ deps[task], values[task] = build_dependencies(task, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps = deps[task]
seen = set()
while newdeps:
- nextdeps = newdeps - whitelist
+ nextdeps = newdeps
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
if dep not in deps:
- deps[dep], values[dep] = build_dependencies(dep, keys, shelldeps, varflagsexcl, d)
+ deps[dep], values[dep] = build_dependencies(dep, keys, mod_funcs, shelldeps, varflagsexcl, ignored_vars, d, codeparserd)
newdeps |= deps[dep]
newdeps -= seen
#print "For %s: %s" % (task, str(deps[task]))
return tasklist, deps, values
-def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
+def generate_dependency_hash(tasklist, gendeps, lookupcache, ignored_vars, fn):
taskdeps = {}
basehash = {}
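
Both generate_dependencies() and generate_dependency_hash() walk the dependency graph to a fixed point with the same worklist pattern: start from a task's direct references, keep pulling in the references of anything newly seen, and stop when nothing new appears. The whitelist pruning that used to happen inside these loops is gone because build_dependencies() now subtracts ignored_vars itself. A minimal standalone version of the walk, with invented dependency data:

    # Minimal standalone version of the fixed-point walk: given direct
    # dependencies per name, collect the full transitive set for one task.
    def transitive_deps(task, direct):
        seen = set()
        newdeps = set(direct.get(task, ()))
        while newdeps:
            nextdeps = newdeps - seen
            seen |= nextdeps
            newdeps = set()
            for dep in nextdeps:
                newdeps |= set(direct.get(dep, ()))
            newdeps -= seen
        return frozenset(seen)

    direct = {
        "do_compile": {"CC", "CFLAGS"},
        "CFLAGS": {"DEBUG_FLAGS"},
    }
    print(sorted(transitive_deps("do_compile", direct)))
    # ['CC', 'CFLAGS', 'DEBUG_FLAGS']

The codeparserd copy created in generate_dependencies() means values forced through BB_HASH_CODEPARSER_VALS only affect the function content handed to the code parser, not the datastore the dependency values are ultimately read from.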
@@ -406,9 +413,10 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
if data is None:
bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- data = ''
+ data = []
+ else:
+ data = [data]
- gendeps[task] -= whitelist
newdeps = gendeps[task]
seen = set()
while newdeps:
@@ -416,27 +424,24 @@ def generate_dependency_hash(tasklist, gendeps, lookupcache, whitelist, fn):
seen |= nextdeps
newdeps = set()
for dep in nextdeps:
- if dep in whitelist:
- continue
- gendeps[dep] -= whitelist
newdeps |= gendeps[dep]
newdeps -= seen
alldeps = sorted(seen)
for dep in alldeps:
- data = data + dep
+ data.append(dep)
var = lookupcache[dep]
if var is not None:
- data = data + str(var)
+ data.append(str(var))
k = fn + ":" + task
- basehash[k] = hashlib.sha256(data.encode("utf-8")).hexdigest()
- taskdeps[task] = alldeps
+ basehash[k] = hashlib.sha256("".join(data).encode("utf-8")).hexdigest()
+ taskdeps[task] = frozenset(seen)
return taskdeps, basehash
def inherits_class(klass, d):
val = d.getVar('__inherit_cache', False) or []
- needle = os.path.join('classes', '%s.bbclass' % klass)
+ needle = '/%s.bbclass' % klass
for v in val:
if v.endswith(needle):
return True
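
generate_dependency_hash() now builds the hash input as a list of fragments joined once at the end rather than by repeated string concatenation, and records taskdeps as a frozenset of everything seen. A standalone sketch of the per-task hash assembly, with invented task content, dependency names and values:

    import hashlib

    # Standalone sketch of the basehash assembly, with invented inputs.
    def basehash_for(fn, task, content, alldeps, lookupcache):
        data = [content] if content is not None else []
        for dep in sorted(alldeps):
            data.append(dep)
            var = lookupcache.get(dep)
            if var is not None:
                data.append(str(var))
        key = fn + ":" + task
        return key, hashlib.sha256("".join(data).encode("utf-8")).hexdigest()

    lookupcache = {"CC": "gcc", "CFLAGS": "-O2 -g"}
    key, digest = basehash_for("demo.bb", "do_compile",
                               "    ${CC} ${CFLAGS} -o demo demo.c\n",
                               {"CC", "CFLAGS"}, lookupcache)
    print(key, digest)

The inherits_class() change at the end relaxes the needle from classes/<klass>.bbclass to any path ending in /<klass>.bbclass, so inherit-cache entries outside a literal classes/ directory also match.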