Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r--  bitbake/lib/bb/__init__.py                   |    2
-rw-r--r--  bitbake/lib/bb/cache.py                      |   14
-rw-r--r--  bitbake/lib/bb/codeparser.py                 |    2
-rw-r--r--  bitbake/lib/bb/command.py                    |    9
-rw-r--r--  bitbake/lib/bb/cooker.py                     |  274
-rw-r--r--  bitbake/lib/bb/cookerdata.py                 |   11
-rw-r--r--  bitbake/lib/bb/data.py                       |    8
-rw-r--r--  bitbake/lib/bb/data_smart.py                 |   22
-rw-r--r--  bitbake/lib/bb/event.py                      |    9
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py            |  168
-rw-r--r--  bitbake/lib/bb/fetch2/git.py                 |   54
-rw-r--r--  bitbake/lib/bb/fetch2/local.py               |   54
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py                 |    3
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py                |  275
-rwxr-xr-x  bitbake/lib/bb/main.py                       |  390
-rw-r--r--  bitbake/lib/bb/parse/__init__.py             |    5
-rw-r--r--  bitbake/lib/bb/parse/ast.py                  |    6
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py |   20
-rw-r--r--  bitbake/lib/bb/runqueue.py                   |   11
-rw-r--r--  bitbake/lib/bb/server/process.py             |    5
-rw-r--r--  bitbake/lib/bb/server/xmlrpc.py              |    9
-rw-r--r--  bitbake/lib/bb/siggen.py                     |   57
-rw-r--r--  bitbake/lib/bb/tests/data.py                 |   13
-rw-r--r--  bitbake/lib/bb/tests/fetch.py                |   81
-rw-r--r--  bitbake/lib/bb/tests/utils.py                |   37
-rw-r--r--  bitbake/lib/bb/tinfoil.py                    |    2
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py         |  496
-rw-r--r--  bitbake/lib/bb/ui/depexp.py                  |    9
-rw-r--r--  bitbake/lib/bb/ui/knotty.py                  |   47
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py                 |    4
-rw-r--r--  bitbake/lib/bb/ui/toasterui.py               |   83
-rw-r--r--  bitbake/lib/bb/ui/uievent.py                 |    7
-rw-r--r--  bitbake/lib/bb/utils.py                      |  185
33 files changed, 2038 insertions, 334 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 4d69552c44..64ccd445b4 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -21,7 +21,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-__version__ = "1.24.0"
+__version__ = "1.26.0"
import sys
if sys.version_info < (2, 7, 3):
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
index ac0c27f922..a1dde96425 100644
--- a/bitbake/lib/bb/cache.py
+++ b/bitbake/lib/bb/cache.py
@@ -43,7 +43,7 @@ except ImportError:
logger.info("Importing cPickle failed. "
"Falling back to a very slow implementation.")
-__cache_version__ = "147"
+__cache_version__ = "148"
def getCacheFile(path, filename, data_hash):
return os.path.join(path, filename + "." + data_hash)
@@ -529,8 +529,11 @@ class Cache(object):
if hasattr(info_array[0], 'file_checksums'):
for _, fl in info_array[0].file_checksums.items():
for f in fl.split():
- if not ('*' in f or os.path.exists(f)):
- logger.debug(2, "Cache: %s's file checksum list file %s was removed",
+ if "*" in f:
+ continue
+ f, exist = f.split(":")
+ if (exist == "True" and not os.path.exists(f)) or (exist == "False" and os.path.exists(f)):
+ logger.debug(2, "Cache: %s's file checksum list file %s changed",
fn, f)
self.remove(fn)
return False
@@ -620,10 +623,13 @@ class Cache(object):
def mtime(cachefile):
return bb.parse.cached_mtime_noerror(cachefile)
- def add_info(self, filename, info_array, cacheData, parsed=None):
+ def add_info(self, filename, info_array, cacheData, parsed=None, watcher=None):
if isinstance(info_array[0], CoreRecipeInfo) and (not info_array[0].skipped):
cacheData.add_from_recipeinfo(filename, info_array)
+ if watcher:
+ watcher(info_array[0].file_depends)
+
if not self.has_cache:
return
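The cache validity check above relies on the new encoding of file_checksums entries, which now record whether each file existed at parse time as "<path>:True" or "<path>:False" (see the get_checksum_file_list() change in fetch2/__init__.py below). A minimal standalone sketch of the same test (rsplit is used here to tolerate ':' in paths; the hunk itself uses a plain split):

    import os

    def checksum_entry_still_valid(entry):
        # Glob entries cannot be checked cheaply; leave them alone.
        if "*" in entry:
            return True
        path, existed = entry.rsplit(":", 1)
        # Stale if a file that existed at parse time vanished, or a file
        # that was missing at parse time has since appeared.
        return os.path.exists(path) == (existed == "True")

    # checksum_entry_still_valid("/srv/files/defconfig:True")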
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 8b8f91a762..21a36f64ca 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -178,7 +178,7 @@ class BufferedLogger(Logger):
class PythonParser():
getvars = (".getVar", ".appendVar", ".prependVar")
- containsfuncs = ("bb.utils.contains", "base_contains", "oe.utils.contains", "bb.utils.contains_any")
+ containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any")
execfuncs = ("bb.build.exec_func", "bb.build.exec_task")
def warn(self, func, arg):
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
index 60f9ac08aa..24ff341045 100644
--- a/bitbake/lib/bb/command.py
+++ b/bitbake/lib/bb/command.py
@@ -123,11 +123,11 @@ class Command:
def finishAsyncCommand(self, msg=None, code=None):
if msg or msg == "":
- bb.event.fire(CommandFailed(msg), self.cooker.event_data)
+ bb.event.fire(CommandFailed(msg), self.cooker.expanded_data)
elif code:
- bb.event.fire(CommandExit(code), self.cooker.event_data)
+ bb.event.fire(CommandExit(code), self.cooker.expanded_data)
else:
- bb.event.fire(CommandCompleted(), self.cooker.event_data)
+ bb.event.fire(CommandCompleted(), self.cooker.expanded_data)
self.currentAsyncCommand = None
self.cooker.finishcommand()
@@ -273,7 +273,8 @@ class CommandsSync:
def updateConfig(self, command, params):
options = params[0]
- command.cooker.updateConfigOpts(options)
+ environment = params[1]
+ command.cooker.updateConfigOpts(options, environment)
class CommandsAsync:
"""
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c6c69c30ea..2176167eb7 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -39,6 +39,7 @@ from bb import utils, data, parse, event, cache, providers, taskdata, runqueue
import Queue
import signal
import prserv.serv
+import pyinotify
logger = logging.getLogger("BitBake")
collectlog = logging.getLogger("BitBake.Collection")
@@ -120,8 +121,34 @@ class BBCooker:
self.configuration = configuration
+ self.configwatcher = pyinotify.WatchManager()
+ self.configwatcher.bbseen = []
+ self.confignotifier = pyinotify.Notifier(self.configwatcher, self.config_notifications)
+ self.watchmask = pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE | pyinotify.IN_DELETE | \
+ pyinotify.IN_DELETE_SELF | pyinotify.IN_MODIFY | pyinotify.IN_MOVE_SELF | \
+ pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO
+ self.watcher = pyinotify.WatchManager()
+ self.watcher.bbseen = []
+ self.notifier = pyinotify.Notifier(self.watcher, self.notifications)
+
+
self.initConfigurationData()
+ self.inotify_modified_files = []
+
+ def _process_inotify_updates(server, notifier_list, abort):
+ for n in notifier_list:
+ if n.check_events(timeout=0):
+ # read notified events and enqueue them
+ n.read_events()
+ n.process_events()
+ return 1.0
+
+ self.configuration.server_register_idlecallback(_process_inotify_updates, [self.confignotifier, self.notifier])
+
+ self.baseconfig_valid = True
+ self.parsecache_valid = False
+
# Take a lock so only one copy of bitbake can run against a given build
# directory at a time
lockfile = self.data.expand("${TOPDIR}/bitbake.lock")
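A standalone sketch of the pyinotify pattern wired in above: a WatchManager plus a Notifier that is polled from the server's idle callback instead of running a blocking event loop (the watched path here is illustrative):

    import pyinotify

    wm = pyinotify.WatchManager()
    modified = []

    def on_change(event):
        # Mirrors config_notifications()/notifications() below.
        if event.path not in modified:
            modified.append(event.path)

    notifier = pyinotify.Notifier(wm, on_change)
    wm.add_watch("/tmp/conf", pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE)

    # The equivalent of one _process_inotify_updates() pass:
    if notifier.check_events(timeout=0):
        notifier.read_events()
        notifier.process_events()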
@@ -153,20 +180,62 @@ class BBCooker:
self.parser = None
signal.signal(signal.SIGTERM, self.sigterm_exception)
+ # Let SIGHUP exit as SIGTERM
+ signal.signal(signal.SIGHUP, self.sigterm_exception)
+
+ def config_notifications(self, event):
+ if not event.path in self.inotify_modified_files:
+ self.inotify_modified_files.append(event.path)
+ self.baseconfig_valid = False
+
+ def notifications(self, event):
+ if not event.path in self.inotify_modified_files:
+ self.inotify_modified_files.append(event.path)
+ self.parsecache_valid = False
+
+ def add_filewatch(self, deps, watcher=None):
+ if not watcher:
+ watcher = self.watcher
+ for i in deps:
+ f = os.path.dirname(i[0])
+ if f in watcher.bbseen:
+ continue
+ watcher.bbseen.append(f)
+ while True:
+ # We try and add watches for files that don't exist but if they did, would influence
+ # the parser. The parent directory of these files may not exist, in which case we need
+ # to watch any parent that does exist for changes.
+ try:
+ watcher.add_watch(f, self.watchmask, quiet=False)
+ break
+ except pyinotify.WatchManagerError as e:
+ if 'ENOENT' in str(e):
+ f = os.path.dirname(f)
+ watcher.bbseen.append(f)
+ continue
+ if 'ENOSPC' in str(e):
+ providerlog.error("No space left on device or exceeds fs.inotify.max_user_watches?")
+ providerlog.error("To check max_user_watches: sysctl -n fs.inotify.max_user_watches.")
+ providerlog.error("To modify max_user_watches: sysctl -n -w fs.inotify.max_user_watches=<value>.")
+ providerlog.error("Root privilege is required to modify max_user_watches.")
+ raise
def sigterm_exception(self, signum, stackframe):
- bb.warn("Cooker recieved SIGTERM, shutting down...")
+ if signum == signal.SIGTERM:
+ bb.warn("Cooker recieved SIGTERM, shutting down...")
+ elif signum == signal.SIGHUP:
+ bb.warn("Cooker recieved SIGHUP, shutting down...")
self.state = state.forceshutdown
def setFeatures(self, features):
# we only accept a new feature set if we're in state initial, so we can reset without problems
- if self.state != state.initial:
+ if not self.state in [state.initial, state.shutdown, state.forceshutdown, state.stopped, state.error]:
raise Exception("Illegal state for feature set change")
original_featureset = list(self.featureset)
for feature in features:
self.featureset.setFeature(feature)
bb.debug(1, "Features set %s (was %s)" % (original_featureset, list(self.featureset)))
- if (original_featureset != list(self.featureset)):
+ if (original_featureset != list(self.featureset)) and self.state != state.error:
self.reset()
def initConfigurationData(self):
@@ -200,12 +269,82 @@ class BBCooker:
self.data = self.databuilder.data
self.data_hash = self.databuilder.data_hash
+
+ # we log all events to a file if so directed
+ if self.configuration.writeeventlog:
+ import json, pickle
+ DEFAULT_EVENTFILE = self.configuration.writeeventlog
+ class EventLogWriteHandler():
+
+ class EventWriter():
+ def __init__(self, cooker):
+ self.file_inited = None
+ self.cooker = cooker
+ self.event_queue = []
+
+ def init_file(self):
+ try:
+ # delete the old log
+ os.remove(DEFAULT_EVENTFILE)
+ except:
+ pass
+
+ # write current configuration data
+ with open(DEFAULT_EVENTFILE, "w") as f:
+ f.write("%s\n" % json.dumps({ "allvariables" : self.cooker.getAllKeysWithFlags(["doc", "func"])}))
+
+ def write_event(self, event):
+ with open(DEFAULT_EVENTFILE, "a") as f:
+ try:
+ f.write("%s\n" % json.dumps({"class":event.__module__ + "." + event.__class__.__name__, "vars":json.dumps(pickle.dumps(event)) }))
+ except Exception as e:
+ import traceback
+ print(e, traceback.format_exc(e))
+
+
+ def send(self, event):
+ event_class = event.__module__ + "." + event.__class__.__name__
+
+ # init on bb.event.BuildStarted
+ if self.file_inited is None:
+ if event_class == "bb.event.BuildStarted":
+ self.init_file()
+ self.file_inited = True
+
+ # write pending events
+ for e in self.event_queue:
+ self.write_event(e)
+
+ # also write the current event
+ self.write_event(event)
+
+ else:
+ # queue all events until the file is inited
+ self.event_queue.append(event)
+
+ else:
+ # we have the file, just write the event
+ self.write_event(event)
+
+ # set our handler's event processor
+ event = EventWriter(self) # self is the cooker here
+
+
+ # set up cooker features for this mock UI handler
+
+ # we need to write the dependency tree in the log
+ self.featureset.setFeature(CookerFeatures.SEND_DEPENDS_TREE)
+ # register the log file writer as UI Handler
+ bb.event.register_UIHhandler(EventLogWriteHandler())
+
+
#
- # Special updated configuration we use for firing events
+ # Copy of the data store which has been expanded.
+ # Used for firing events and accessing variables where expansion needs to be accounted for
#
- self.event_data = bb.data.createCopy(self.data)
- bb.data.update_data(self.event_data)
- bb.parse.init_parser(self.event_data)
+ self.expanded_data = bb.data.createCopy(self.data)
+ bb.data.update_data(self.expanded_data)
+ bb.parse.init_parser(self.expanded_data)
if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset:
self.disableDataTracking()
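For reference, the log that EventWriter produces is line-oriented JSON: a first line holding all variables, then one {"class": ..., "vars": ...} object per event, where "vars" is a JSON-encoded pickle of the event. A hedged reader sketch (Python 2, matching the codebase; the file name is whatever the writeeventlog option was given):

    import json
    import pickle

    # Needs the bb modules importable so the pickled event classes resolve.
    with open("eventlog.json") as f:
        header = json.loads(f.readline())   # {"allvariables": {...}}
        for line in f:
            entry = json.loads(line)
            event = pickle.loads(json.loads(entry["vars"]))
            print("%s: %s" % (entry["class"], event))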
@@ -240,7 +379,7 @@ class BBCooker:
f.write(total)
#add to history
- loginfo = {"op":append, "file":default_file, "line":total.count("\n")}
+ loginfo = {"op":"append", "file":default_file, "line":total.count("\n")}
self.data.appendVar(var, val, **loginfo)
def saveConfigurationVar(self, var, val, default_file, op):
@@ -309,7 +448,7 @@ class BBCooker:
f.write(total)
#add to history
- loginfo = {"op":set, "file":default_file, "line":total.count("\n")}
+ loginfo = {"op":"set", "file":default_file, "line":total.count("\n")}
self.data.setVar(var, val, **loginfo)
def removeConfigurationVar(self, var):
@@ -371,9 +510,29 @@ class BBCooker:
self.handleCollections( self.data.getVar("BBFILE_COLLECTIONS", True) )
- def updateConfigOpts(self,options):
+ def updateConfigOpts(self, options, environment):
for o in options:
setattr(self.configuration, o, options[o])
+ clean = True
+ for k in bb.utils.approved_variables():
+ if k in environment and k not in self.configuration.env:
+ logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
+ self.configuration.env[k] = environment[k]
+ clean = False
+ if k in self.configuration.env and k not in environment:
+ logger.debug(1, "Updating environment variable %s (deleted)" % (k))
+ del self.configuration.env[k]
+ clean = False
+ if k not in self.configuration.env and k not in environment:
+ continue
+ if environment[k] != self.configuration.env[k]:
+ logger.debug(1, "Updating environment variable %s to %s" % (k, environment[k]))
+ self.configuration.env[k] = environment[k]
+ clean = False
+ if not clean:
+ logger.debug(1, "Base environment change, triggering reparse")
+ self.baseconfig_valid = False
+ self.reset()
def runCommands(self, server, data, abort):
"""
@@ -420,7 +579,7 @@ class BBCooker:
fn = self.matchFile(fn)
fn = bb.cache.Cache.realfn2virtual(fn, cls)
elif len(pkgs_to_build) == 1:
- ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
+ ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
if pkgs_to_build[0] in set(ignore.split()):
bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0])
@@ -500,7 +659,7 @@ class BBCooker:
taskdata, runlist, pkgs_to_build = self.buildTaskData(pkgs_to_build, task, False)
return runlist, taskdata
-
+
######## WARNING : this function requires cache_extra to be enabled ########
def generateTaskDepTreeData(self, pkgs_to_build, task):
@@ -952,42 +1111,30 @@ class BBCooker:
# Check dependencies and store information for priority calculation
deps = self.data.getVar("LAYERDEPENDS_%s" % c, True)
if deps:
- depnamelist = []
- deplist = deps.split()
- for dep in deplist:
- depsplit = dep.split(':')
- if len(depsplit) > 1:
- try:
- depver = int(depsplit[1])
- except ValueError:
- parselog.error("invalid version value in LAYERDEPENDS_%s: \"%s\"", c, dep)
- errors = True
- continue
- else:
- depver = None
- dep = depsplit[0]
- depnamelist.append(dep)
-
+ try:
+ deplist = bb.utils.explode_dep_versions2(deps)
+ except bb.utils.VersionStringException as vse:
+ bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
+ for dep, oplist in deplist.iteritems():
if dep in collection_list:
- if depver:
+ for opstr in oplist:
layerver = self.data.getVar("LAYERVERSION_%s" % dep, True)
+ (op, depver) = opstr.split()
if layerver:
try:
- lver = int(layerver)
- except ValueError:
- parselog.error("invalid value for LAYERVERSION_%s: \"%s\"", c, layerver)
- errors = True
- continue
- if lver != depver:
- parselog.error("Layer '%s' depends on version %d of layer '%s', but version %d is enabled in your configuration", c, depver, dep, lver)
+ res = bb.utils.vercmp_string_op(layerver, depver, op)
+ except bb.utils.VersionStringException as vse:
+ bb.fatal('Error parsing LAYERDEPENDS_%s: %s' % (c, str(vse)))
+ if not res:
+ parselog.error("Layer '%s' depends on version %s of layer '%s', but version %s is currently enabled in your configuration. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep, layerver)
errors = True
else:
- parselog.error("Layer '%s' depends on version %d of layer '%s', which exists in your configuration but does not specify a version", c, depver, dep)
+ parselog.error("Layer '%s' depends on version %s of layer '%s', which exists in your configuration but does not specify a version. Check that you are using the correct matching versions/branches of these two layers.", c, opstr, dep)
errors = True
else:
parselog.error("Layer '%s' depends on layer '%s', but this layer is not enabled in your configuration", c, dep)
errors = True
- collection_depends[c] = depnamelist
+ collection_depends[c] = deplist.keys()
else:
collection_depends[c] = []
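bb.utils.explode_dep_versions2() returns a mapping from dependency name to a list of "<op> <version>" constraint strings, which is what the rewritten loop above iterates; for example (layer names illustrative):

    import bb.utils

    deps = bb.utils.explode_dep_versions2("core (>= 3) meta-foo")
    # deps == {'core': ['>= 3'], 'meta-foo': []}
    for dep, oplist in deps.iteritems():      # Python 2, as in the hunk
        for opstr in oplist:
            op, depver = opstr.split()
            # then compared via bb.utils.vercmp_string_op(layerver, depver, op)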
@@ -1039,7 +1186,7 @@ class BBCooker:
bf = os.path.abspath(bf)
self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
- filelist, masked = self.collection.collect_bbfiles(self.data, self.event_data)
+ filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data)
try:
os.stat(bf)
bf = os.path.abspath(bf)
@@ -1129,7 +1276,7 @@ class BBCooker:
taskdata.add_provider(self.data, self.recipecache, item)
buildname = self.data.getVar("BUILDNAME")
- bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.event_data)
+ bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data)
# Execute the runqueue
runlist = [[item, "do_%s" % task]]
@@ -1156,8 +1303,8 @@ class BBCooker:
return False
if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.event_data)
self.command.finishAsyncCommand(msg)
+ bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, item, failures), self.expanded_data)
return False
if retval is True:
return True
@@ -1189,8 +1336,8 @@ class BBCooker:
return False
if not retval:
- bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
self.command.finishAsyncCommand(msg)
+ bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runq_fnid), buildname, targets, failures), self.data)
return False
if retval is True:
return True
@@ -1281,29 +1428,45 @@ class BBCooker:
if self.state == state.running:
return
- if self.state in (state.shutdown, state.forceshutdown):
+ if self.state in (state.shutdown, state.forceshutdown, state.error):
if hasattr(self.parser, 'shutdown'):
self.parser.shutdown(clean=False, force = True)
raise bb.BBHandledException()
if self.state != state.parsing:
+
+ # reload files for which we got notifications
+ for p in self.inotify_modified_files:
+ bb.parse.update_cache(p)
+ self.inotify_modified_files = []
+
+ if not self.baseconfig_valid:
+ logger.debug(1, "Reloading base configuration data")
+ self.initConfigurationData()
+ self.baseconfig_valid = True
+ self.parsecache_valid = False
+
+ if self.state != state.parsing and not self.parsecache_valid:
self.parseConfiguration ()
if CookerFeatures.SEND_SANITYEVENTS in self.featureset:
bb.event.fire(bb.event.SanityCheck(False), self.data)
- ignore = self.data.getVar("ASSUME_PROVIDED", True) or ""
+ ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or ""
self.recipecache.ignored_dependencies = set(ignore.split())
for dep in self.configuration.extra_assume_provided:
self.recipecache.ignored_dependencies.add(dep)
self.collection = CookerCollectFiles(self.recipecache.bbfile_config_priorities)
- (filelist, masked) = self.collection.collect_bbfiles(self.data, self.event_data)
+ (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data)
self.data.renameVar("__depends", "__base_depends")
+ self.add_filewatch(self.data.getVar("__base_depends"), self.configwatcher)
self.parser = CookerParser(self, filelist, masked)
- self.state = state.parsing
+ self.parsecache_valid = True
+
+ self.state = state.parsing
if not self.parser.parse_next():
collectlog.debug(1, "parsing complete")
@@ -1311,7 +1474,7 @@ class BBCooker:
raise bb.BBHandledException()
self.show_appends_with_no_recipes()
self.handlePrefProviders()
- self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn)
+ self.recipecache.bbfile_priority = self.collection.collection_priorities(self.recipecache.pkg_fn, self.data)
self.state = state.running
return None
@@ -1325,7 +1488,7 @@ class BBCooker:
if len(pkgs_to_build) == 0:
raise NothingToBuild
- ignore = (self.data.getVar("ASSUME_PROVIDED", True) or "").split()
+ ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split()
for pkg in pkgs_to_build:
if pkg in ignore:
parselog.warn("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg)
@@ -1355,13 +1518,13 @@ class BBCooker:
try:
self.prhost = prserv.serv.auto_start(self.data)
except prserv.serv.PRServiceConfigError:
- bb.event.fire(CookerExit(), self.event_data)
+ bb.event.fire(CookerExit(), self.expanded_data)
self.state = state.error
return
def post_serve(self):
prserv.serv.auto_shutdown(self.data)
- bb.event.fire(CookerExit(), self.event_data)
+ bb.event.fire(CookerExit(), self.expanded_data)
def shutdown(self, force = False):
if force:
@@ -1536,7 +1699,7 @@ class CookerCollectFiles(object):
filelist.append(filename)
return filelist
- def collection_priorities(self, pkgfns):
+ def collection_priorities(self, pkgfns, d):
priorities = {}
@@ -1545,10 +1708,10 @@ class CookerCollectFiles(object):
for p in pkgfns:
realfn, cls = bb.cache.Cache.virtualfn2realfn(p)
priorities[p] = self.calc_bbfile_priority(realfn, matched)
-
+
# Don't show the warning if the BBFILE_PATTERN did match .bbappend files
unmatched = set()
- for _, _, regex, pri in self.bbfile_config_priorities:
+ for _, _, regex, pri in self.bbfile_config_priorities:
if not regex in matched:
unmatched.add(regex)
@@ -1565,7 +1728,8 @@ class CookerCollectFiles(object):
for collection, pattern, regex, _ in self.bbfile_config_priorities:
if regex in unmatched:
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
+ if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1':
+ collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
return priorities
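The new BBFILE_PATTERN_IGNORE_EMPTY_<collection> switch lets a layer declare that an empty match is expected; e.g. in that layer's conf/layer.conf (collection name illustrative):

    # Suppress "No bb files matched BBFILE_PATTERN_mylayer" warnings
    BBFILE_PATTERN_IGNORE_EMPTY_mylayer = "1"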
@@ -1865,7 +2029,7 @@ class CookerParser(object):
self.skipped += 1
self.cooker.skiplist[virtualfn] = SkippedPackage(info_array[0])
self.bb_cache.add_info(virtualfn, info_array, self.cooker.recipecache,
- parsed=parsed)
+ parsed=parsed, watcher = self.cooker.add_filewatch)
return True
def reparse(self, filename):
diff --git a/bitbake/lib/bb/cookerdata.py b/bitbake/lib/bb/cookerdata.py
index 470d5381ae..6c11a60e0f 100644
--- a/bitbake/lib/bb/cookerdata.py
+++ b/bitbake/lib/bb/cookerdata.py
@@ -33,8 +33,8 @@ logger = logging.getLogger("BitBake")
parselog = logging.getLogger("BitBake.Parsing")
class ConfigParameters(object):
- def __init__(self):
- self.options, targets = self.parseCommandLine()
+ def __init__(self, argv=sys.argv):
+ self.options, targets = self.parseCommandLine(argv)
self.environment = self.parseEnvironment()
self.options.pkgs_to_build = targets or []
@@ -46,7 +46,7 @@ class ConfigParameters(object):
for key, val in self.options.__dict__.items():
setattr(self, key, val)
- def parseCommandLine(self):
+ def parseCommandLine(self, argv=sys.argv):
raise Exception("Caller must implement commandline option parsing")
def parseEnvironment(self):
@@ -69,14 +69,14 @@ class ConfigParameters(object):
if bbpkgs:
self.options.pkgs_to_build.extend(bbpkgs.split())
- def updateToServer(self, server):
+ def updateToServer(self, server, environment):
options = {}
for o in ["abort", "tryaltconfigs", "force", "invalidate_stamp",
"verbose", "debug", "dry_run", "dump_signatures",
"debug_domains", "extra_assume_provided", "profile"]:
options[o] = getattr(self.options, o)
- ret, error = server.runCommand(["updateConfig", options])
+ ret, error = server.runCommand(["updateConfig", options, environment])
if error:
raise Exception("Unable to update the server configuration with local parameters: %s" % error)
@@ -139,6 +139,7 @@ class CookerConfiguration(object):
self.dry_run = False
self.tracking = False
self.interface = []
+ self.writeeventlog = False
self.env = {}
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
index 91b1eb1298..82eefef1a6 100644
--- a/bitbake/lib/bb/data.py
+++ b/bitbake/lib/bb/data.py
@@ -219,6 +219,13 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
val = str(val)
+ if varExpanded.startswith("BASH_FUNC_"):
+ varExpanded = varExpanded[10:-2]
+ val = val[3:] # Strip off "() "
+ o.write("%s() %s\n" % (varExpanded, val))
+ o.write("export -f %s\n" % (varExpanded))
+ return 1
+
if func:
# NOTE: should probably check for unbalanced {} within the var
o.write("%s() {\n%s\n}\n" % (varExpanded, val))
@@ -231,6 +238,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False):
# to a shell, we need to escape the quotes in the var
alter = re.sub('"', '\\"', val)
alter = re.sub('\n', ' \\\n', alter)
+ alter = re.sub('\\$', '\\\\$', alter)
o.write('%s="%s"\n' % (varExpanded, alter))
return 0
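The BASH_FUNC_ branch above handles bash's exported shell functions, which post-Shellshock appear in the environment as BASH_FUNC_name%%='() { body; }' (older bashes used BASH_FUNC_name(); the [10:-2] slice strips either suffix). A worked example with a hypothetical function f:

    var = "BASH_FUNC_f%%"
    val = "() { echo hi; }"
    name = var[10:-2]                  # drop "BASH_FUNC_" and "%%" -> "f"
    body = val[3:]                     # drop "() "  -> "{ echo hi; }"
    print("%s() %s" % (name, body))    # f() { echo hi; }
    print("export -f %s" % name)       # re-exports it for child shells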
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
index d862308355..7bb7b4aae3 100644
--- a/bitbake/lib/bb/data_smart.py
+++ b/bitbake/lib/bb/data_smart.py
@@ -296,9 +296,14 @@ class VariableHistory(object):
self.variables[var] = []
class DataSmart(MutableMapping):
- def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
+ def __init__(self, special = None, seen = None ):
self.dict = {}
+ if special is None:
+ special = COWDictBase.copy()
+ if seen is None:
+ seen = COWDictBase.copy()
+
self.inchistory = IncludeHistory()
self.varhistory = VariableHistory(self)
self._tracking = False
@@ -589,7 +594,7 @@ class DataSmart(MutableMapping):
self._makeShadowCopy(var)
self.dict[var][flag] = value
- if flag == "defaultval" and '_' in var:
+ if flag == "_defaultval" and '_' in var:
self._setvar_update_overrides(var)
if flag == "unexport" or flag == "export":
@@ -605,8 +610,8 @@ class DataSmart(MutableMapping):
if local_var is not None:
if flag in local_var:
value = copy.copy(local_var[flag])
- elif flag == "_content" and "defaultval" in local_var and not noweakdefault:
- value = copy.copy(local_var["defaultval"])
+ elif flag == "_content" and "_defaultval" in local_var and not noweakdefault:
+ value = copy.copy(local_var["_defaultval"])
if expand and value:
# Only getvar (flag == _content) hits the expand cache
cachename = None
@@ -616,9 +621,10 @@ class DataSmart(MutableMapping):
cachename = var + "[" + flag + "]"
value = self.expand(value, cachename)
if value and flag == "_content" and local_var is not None and "_removeactive" in local_var:
- removes = [self.expand(r) for r in local_var["_removeactive"]]
+ removes = [self.expand(r).split() for r in local_var["_removeactive"]]
+ removes = reduce(lambda a, b: a+b, removes, [])
filtered = filter(lambda v: v not in removes,
- value.split(" "))
+ value.split())
value = " ".join(filtered)
if expand:
# We need to ensure the expand cache has the correct value
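With this change each _removeactive entry is expanded and then split, so a single remove can drop several items and whitespace no longer has to match exactly. The resulting semantics, sketched in recipe terms:

    FOO = "a  b c d"
    FOO_remove = "b d"
    # FOO now evaluates to "a c": the remove value is expanded, split,
    # and each item is filtered out of FOO's whitespace-split value.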
@@ -740,12 +746,16 @@ class DataSmart(MutableMapping):
yield key
def __iter__(self):
+ deleted = set()
def keylist(d):
klist = set()
for key in d:
if key == "_data":
continue
+ if key in deleted:
+ continue
if not d[key]:
+ deleted.add(key)
continue
klist.add(key)
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
index 32df779786..fec6a05b38 100644
--- a/bitbake/lib/bb/event.py
+++ b/bitbake/lib/bb/event.py
@@ -55,6 +55,7 @@ def get_class_handlers():
return _handlers
def set_class_handlers(h):
+ global _handlers
_handlers = h
def clean_class_handlers():
@@ -67,6 +68,7 @@ _ui_logfilters = {}
_ui_handler_seq = 0
_event_handler_map = {}
_catchall_handlers = {}
+_eventfilter = None
def execute_handler(name, handler, event, d):
event.data = d
@@ -94,6 +96,9 @@ def fire_class_handlers(event, d):
evt_hmap = _event_handler_map.get(eid, {})
for name, handler in _handlers.iteritems():
if name in _catchall_handlers or name in evt_hmap:
+ if _eventfilter:
+ if not _eventfilter(name, handler, event, d):
+ continue
execute_handler(name, handler, event, d)
ui_queue = []
@@ -204,6 +209,10 @@ def remove(name, handler):
"""Remove an Event handler"""
_handlers.pop(name)
+def set_eventfilter(func):
+ global _eventfilter
+ _eventfilter = func
+
def register_UIHhandler(handler):
bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
_ui_handlers[_ui_handler_seq] = handler
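set_eventfilter() installs a predicate that fire_class_handlers() consults before each handler; returning False suppresses delivery. A minimal sketch:

    import bb.event

    def only_bb_build(name, handler, event, d):
        # Deliver only events defined in bb.build, drop the rest.
        return event.__module__ == "bb.build"

    bb.event.set_eventfilter(only_bb_build)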
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
index 59a7f1b6bd..b004dae0d4 100644
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ b/bitbake/lib/bb/fetch2/__init__.py
@@ -45,6 +45,13 @@ _checksum_cache = bb.checksum.FileChecksumCache()
logger = logging.getLogger("BitBake.Fetcher")
+try:
+ import cPickle as pickle
+except ImportError:
+ import pickle
+ logger.info("Importing cPickle failed. "
+ "Falling back to a very slow implementation.")
+
class BBFetchException(Exception):
"""Class all fetch exceptions inherit from"""
def __init__(self, message):
@@ -525,7 +532,7 @@ def fetcher_compare_revisions(d):
def mirror_from_string(data):
return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-def verify_checksum(ud, d):
+def verify_checksum(ud, d, precomputed={}):
"""
verify the MD5 and SHA256 checksum for downloaded src
@@ -533,17 +540,32 @@ def verify_checksum(ud, d):
the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
checksums specified.
+ Returns a dict of checksums that can be stored in a done stamp file and
+ passed in as precomputed parameter in a later call to avoid re-computing
+ the checksums from the file. This allows verifying the checksums of the
+ file against those in the recipe each time, rather than only after
+ downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
"""
- if not ud.method.supports_checksum(ud):
- return
+ _MD5_KEY = "md5"
+ _SHA256_KEY = "sha256"
+
+ if ud.ignore_checksums or not ud.method.supports_checksum(ud):
+ return {}
- md5data = bb.utils.md5_file(ud.localpath)
- sha256data = bb.utils.sha256_file(ud.localpath)
+ if _MD5_KEY in precomputed:
+ md5data = precomputed[_MD5_KEY]
+ else:
+ md5data = bb.utils.md5_file(ud.localpath)
+
+ if _SHA256_KEY in precomputed:
+ sha256data = precomputed[_SHA256_KEY]
+ else:
+ sha256data = bb.utils.sha256_file(ud.localpath)
if ud.method.recommends_checksum(ud):
# If strict checking enabled and neither sum defined, raise error
- strict = d.getVar("BB_STRICT_CHECKSUM", "1") or "0"
+ strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0"
if (strict == "1") and not (ud.md5_expected or ud.sha256_expected):
logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
@@ -589,6 +611,72 @@ def verify_checksum(ud, d):
if len(msg):
raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)
+ return {
+ _MD5_KEY: md5data,
+ _SHA256_KEY: sha256data
+ }
+
+
+def verify_donestamp(ud, d):
+ """
+ Check whether the done stamp file has the right checksums (if the fetch
+ method supports them). If it doesn't, delete the done stamp and force
+ a re-download.
+
+ Returns True, if the donestamp exists and is valid, False otherwise. When
+ returning False, any existing done stamps are removed.
+ """
+ if not os.path.exists(ud.donestamp):
+ return False
+
+ if not ud.method.supports_checksum(ud):
+ # done stamp exists, checksums not supported; assume the local file is
+ # current
+ return True
+
+ if not os.path.exists(ud.localpath):
+ # done stamp exists, but the downloaded file does not; the done stamp
+ # must be incorrect, re-trigger the download
+ bb.utils.remove(ud.donestamp)
+ return False
+
+ precomputed_checksums = {}
+ # Only re-use the precomputed checksums if the donestamp is newer than the
+ # file. Do not rely on the mtime of directories, though. If ud.localpath is
+ # a directory, there will probably not be any checksums anyway.
+ if (os.path.isdir(ud.localpath) or
+ os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
+ try:
+ with open(ud.donestamp, "rb") as cachefile:
+ pickled = pickle.Unpickler(cachefile)
+ precomputed_checksums.update(pickled.load())
+ except Exception as e:
+ # Avoid the warnings on the upgrade path from empty done stamp
+ # files to those containing the checksums.
+ if not isinstance(e, EOFError):
+ # Ignore errors, they aren't fatal
+ logger.warn("Couldn't load checksums from donestamp %s: %s "
+ "(msg: %s)" % (ud.donestamp, type(e).__name__,
+ str(e)))
+
+ try:
+ checksums = verify_checksum(ud, d, precomputed_checksums)
+ # If the cache file did not have the checksums, compute and store them
+ # as an upgrade path from the previous done stamp file format.
+ if checksums != precomputed_checksums:
+ with open(ud.donestamp, "wb") as cachefile:
+ p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p.dump(checksums)
+ return True
+ except ChecksumError as e:
+ # Checksums failed to verify, trigger re-download and remove the
+ # incorrect stamp file.
+ logger.warn("Checksum mismatch for local file %s\n"
+ "Cleaning and trying again." % ud.localpath)
+ rename_bad_checksum(ud, e.checksum)
+ bb.utils.remove(ud.donestamp)
+ return False
+
def update_stamp(ud, d):
"""
@@ -603,8 +691,11 @@ def update_stamp(ud, d):
# Errors aren't fatal here
pass
else:
- verify_checksum(ud, d)
- open(ud.donestamp, 'w').close()
+ checksums = verify_checksum(ud, d)
+ # Store the checksums for later re-verification against the recipe
+ with open(ud.donestamp, "wb") as cachefile:
+ p = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
+ p.dump(checksums)
def subprocess_setup():
# Python installs a SIGPIPE handler by default. This is usually not what
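With update_stamp() rewritten as above, a done stamp is now a pickle of the checksum dict rather than an empty marker file; a hedged sketch of inspecting one (path illustrative):

    import cPickle as pickle

    with open("downloads/foo-1.0.tar.gz.done", "rb") as f:
        try:
            checksums = pickle.Unpickler(f).load()
            print(checksums)                 # e.g. {'md5': '...', 'sha256': '...'}
        except EOFError:
            print("old-style empty stamp")   # pre-upgrade format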
@@ -620,11 +711,13 @@ def get_autorev(d):
def get_srcrev(d):
"""
- Return the version string for the current package
- (usually to be used as PV)
+ Return the revision string, usually for use in the version string (PV) of the current package
Most packages usually only have one SCM so we just pass on the call.
In the multi SCM case, we build a value based on SRCREV_FORMAT which must
have been set.
+
+ The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
+ incremental; other code is then responsible for turning that into an increasing value (if needed)
"""
scms = []
@@ -803,7 +896,7 @@ def try_mirror_url(origud, ud, ld, check = False):
os.chdir(ld.getVar("DL_DIR", True))
- if not os.path.exists(ud.donestamp) or ud.method.need_update(ud, ld):
+ if not verify_donestamp(ud, ld) or ud.method.need_update(ud, ld):
ud.method.download(ud, ld)
if hasattr(ud.method,"build_mirror_data"):
ud.method.build_mirror_data(ud, ld)
@@ -819,12 +912,13 @@ def try_mirror_url(origud, ud, ld, check = False):
dldir = ld.getVar("DL_DIR", True)
if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \
and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
+ # Create donestamp in old format to avoid triggering a re-download
bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
open(ud.donestamp, 'w').close()
dest = os.path.join(dldir, os.path.basename(ud.localpath))
if not os.path.exists(dest):
os.symlink(ud.localpath, dest)
- if not os.path.exists(origud.donestamp) or origud.method.need_update(origud, ld):
+ if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
origud.method.download(origud, ld)
if hasattr(origud.method,"build_mirror_data"):
origud.method.build_mirror_data(origud, ld)
@@ -936,22 +1030,21 @@ def get_checksum_file_list(d):
ud = fetch.ud[u]
if ud and isinstance(ud.method, local.Local):
- ud.setup_localpath(d)
- f = ud.localpath
- pth = ud.decodedurl
- if '*' in pth:
- f = os.path.join(os.path.abspath(f), pth)
- if f.startswith(dl_dir):
- # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
- if os.path.exists(f):
- bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
- else:
- bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
- filelist.append(f)
+ paths = ud.method.localpaths(ud, d)
+ for f in paths:
+ pth = ud.decodedurl
+ if '*' in pth:
+ f = os.path.join(os.path.abspath(f), pth)
+ if f.startswith(dl_dir):
+ # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
+ if os.path.exists(f):
+ bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f)))
+ else:
+ bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f)))
+ filelist.append(f + ":" + str(os.path.exists(f)))
return " ".join(filelist)
-
def get_file_checksums(filelist, pn):
"""Get a list of the checksums for a list of local files
@@ -981,6 +1074,10 @@ def get_file_checksums(filelist, pn):
checksums = []
for pth in filelist.split():
+ exist = pth.split(":")[1]
+ if exist == "False":
+ continue
+ pth = pth.split(":")[0]
if '*' in pth:
# Handle globs
for f in glob.glob(pth):
@@ -988,14 +1085,12 @@ def get_file_checksums(filelist, pn):
checksums.extend(checksum_dir(f))
else:
checksum = checksum_file(f)
- if checksum:
- checksums.append((f, checksum))
+ checksums.append((f, checksum))
elif os.path.isdir(pth):
checksums.extend(checksum_dir(pth))
else:
checksum = checksum_file(pth)
- if checksum:
- checksums.append((pth, checksum))
+ checksums.append((pth, checksum))
checksums.sort(key=operator.itemgetter(1))
return checksums
@@ -1041,6 +1136,7 @@ class FetchData(object):
self.sha256_expected = None
else:
self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
+ self.ignore_checksums = False
self.names = self.parm.get("name",'default').split(',')
@@ -1197,9 +1293,9 @@ class FetchMethod(object):
bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
(file, urldata.parm.get('unpack')))
- dots = file.split(".")
- if dots[-1] in ['gz', 'bz2', 'Z', 'xz']:
- efile = os.path.join(rootdir, os.path.basename('.'.join(dots[0:-1])))
+ base, ext = os.path.splitext(file)
+ if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
+ efile = os.path.join(rootdir, os.path.basename(base))
else:
efile = file
cmd = None
@@ -1219,6 +1315,10 @@ class FetchMethod(object):
cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
elif file.endswith('.xz'):
cmd = 'xz -dc %s > %s' % (file, efile)
+ elif file.endswith('.tar.lz'):
+ cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
+ elif file.endswith('.lz'):
+ cmd = 'lzip -dc %s > %s' % (file, efile)
elif file.endswith('.zip') or file.endswith('.jar'):
try:
dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
@@ -1414,7 +1514,7 @@ class Fetch(object):
try:
self.d.setVar("BB_NO_NETWORK", network)
- if os.path.exists(ud.donestamp) and not m.need_update(ud, self.d):
+ if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
localpath = ud.localpath
elif m.try_premirror(ud, self.d):
logger.debug(1, "Trying PREMIRRORS")
@@ -1427,7 +1527,7 @@ class Fetch(object):
os.chdir(self.d.getVar("DL_DIR", True))
firsterr = None
- if not localpath and ((not os.path.exists(ud.donestamp)) or m.need_update(ud, self.d)):
+ if not localpath and ((not verify_donestamp(ud, self.d)) or m.need_update(ud, self.d)):
try:
logger.debug(1, "Trying Upstream")
m.download(ud, self.d)
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
index 799fb6c0fe..44fc27193e 100644
--- a/bitbake/lib/bb/fetch2/git.py
+++ b/bitbake/lib/bb/fetch2/git.py
@@ -67,6 +67,7 @@ Supported SRC_URI options are:
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
+import re
import bb
from bb import data
from bb.fetch2 import FetchMethod
@@ -339,9 +340,56 @@ class Git(FetchMethod):
"""
Compute the HEAD revision for the url
"""
- search = "refs/heads/%s refs/tags/%s^{}" % (ud.unresolvedrev[name], ud.unresolvedrev[name])
- output = self._lsremote(ud, d, search)
- return output.split()[0]
+ output = self._lsremote(ud, d, "")
+ # Tags of the form ^{} may not work, need to fallback to other form
+ if ud.unresolvedrev[name][:5] == "refs/":
+ head = ud.unresolvedrev[name]
+ tag = ud.unresolvedrev[name]
+ else:
+ head = "refs/heads/%s" % ud.unresolvedrev[name]
+ tag = "refs/tags/%s" % ud.unresolvedrev[name]
+ for s in [head, tag + "^{}", tag]:
+ for l in output.split('\n'):
+ if s in l:
+ return l.split()[0]
+ raise bb.fetch2.FetchError("Unable to resolve '%s' in upstream git repository in git ls-remote output" % ud.unresolvedrev[name])
+
+ def latest_versionstring(self, ud, d):
+ """
+ Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
+ by searching through the tags output of ls-remote, comparing
+ versions and returning the highest match.
+ """
+ verstring = ""
+ tagregex = re.compile(d.getVar('GITTAGREGEX', True) or "(?P<pver>([0-9][\.|_]?)+)")
+ try:
+ output = self._lsremote(ud, d, "refs/tags/*^{}")
+ except (bb.fetch2.FetchError, bb.fetch2.NetworkAccess):
+ return ""
+
+ for line in output.split("\n"):
+ if not line:
+ break
+
+ line = line.split("/")[-1]
+ # Ignore non-released branches
+ m = re.search("(alpha|beta|rc|final)+", line)
+ if m:
+ continue
+
+ # search for version in the line
+ tag = tagregex.search(line)
+ if tag == None:
+ continue
+
+ tag = tag.group('pver')
+ tag = tag.replace("_", ".")
+
+ if verstring and bb.utils.vercmp(("0", tag, ""), ("0", verstring, "")) < 0:
+ continue
+ verstring = tag
+
+ return verstring
def _build_revision(self, ud, d, name):
return ud.revisions[name]
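The new latest_versionstring() backs upstream release checking; a recipe can narrow which tags are considered via GITTAGREGEX, with the version captured in a 'pver' named group. An illustrative recipe line:

    # Only tags shaped like "v1.2.3" count as releases:
    GITTAGREGEX = "v(?P<pver>\d+(\.\d+)+)"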
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
index 6fa188fc48..0785236a6b 100644
--- a/bitbake/lib/bb/fetch2/local.py
+++ b/bitbake/lib/bb/fetch2/local.py
@@ -51,29 +51,41 @@ class Local(FetchMethod):
"""
Return the local filename of a given url assuming a successful fetch.
"""
+ return self.localpaths(urldata, d)[-1]
+
+ def localpaths(self, urldata, d):
+ """
+ Return the local filename of a given url assuming a successful fetch.
+ """
+ searched = []
path = urldata.decodedurl
newpath = path
- if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, True)
- if filespath:
- logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
- newpath = bb.utils.which(filespath, path)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, True)
- if filesdir:
- logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
- newpath = os.path.join(filesdir, path)
- if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
- # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
- newpath = bb.utils.which(filespath, ".")
- logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
- return newpath
- if not os.path.exists(newpath):
- dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
- logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
- bb.utils.mkdirhier(os.path.dirname(dldirfile))
- return dldirfile
- return newpath
+ if path[0] == "/":
+ return [path]
+ filespath = data.getVar('FILESPATH', d, True)
+ if filespath:
+ logger.debug(2, "Searching for %s in paths:\n %s" % (path, "\n ".join(filespath.split(":"))))
+ newpath, hist = bb.utils.which(filespath, path, history=True)
+ searched.extend(hist)
+ if not newpath:
+ filesdir = data.getVar('FILESDIR', d, True)
+ if filesdir:
+ logger.debug(2, "Searching for %s in path: %s" % (path, filesdir))
+ newpath = os.path.join(filesdir, path)
+ searched.append(newpath)
+ if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1:
+ # For expressions using '*', best we can do is take the first directory in FILESPATH that exists
+ newpath, hist = bb.utils.which(filespath, ".", history=True)
+ searched.extend(hist)
+ logger.debug(2, "Searching for %s in path: %s" % (path, newpath))
+ return searched
+ if not os.path.exists(newpath):
+ dldirfile = os.path.join(d.getVar("DL_DIR", True), path)
+ logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path))
+ bb.utils.mkdirhier(os.path.dirname(dldirfile))
+ searched.append(dldirfile)
+ return searched
+ return searched
def need_update(self, ud, d):
if ud.url.find("*") != -1:
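localpaths() above leans on the new history mode of bb.utils.which(), which returns both the hit and every candidate it tried; a sketch (paths illustrative):

    import bb.utils

    path, tried = bb.utils.which("/layer/files:/layer/foo", "defconfig",
                                 history=True)
    # tried lists each candidate in search order, e.g.
    #   ['/layer/files/defconfig', '/layer/foo/defconfig']
    # path is the first existing candidate, or "" if none exist.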
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
index 4ae979472c..635578a711 100644
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ b/bitbake/lib/bb/fetch2/ssh.py
@@ -87,7 +87,8 @@ class SSH(FetchMethod):
m = __pattern__.match(urldata.url)
path = m.group('path')
host = m.group('host')
- urldata.localpath = os.path.join(d.getVar('DL_DIR', True), os.path.basename(path))
+ urldata.localpath = os.path.join(d.getVar('DL_DIR', True),
+ os.path.basename(os.path.normpath(path)))
def download(self, urldata, d):
dldir = d.getVar('DL_DIR', True)
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
index 0456490368..162a6bd3be 100644
--- a/bitbake/lib/bb/fetch2/wget.py
+++ b/bitbake/lib/bb/fetch2/wget.py
@@ -25,6 +25,9 @@ BitBake build tools.
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
+import re
+import tempfile
+import subprocess
import os
import logging
import bb
@@ -34,6 +37,7 @@ from bb.fetch2 import FetchMethod
from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+from bs4 import BeautifulSoup
class Wget(FetchMethod):
"""Class to fetch urls via 'wget'"""
@@ -104,3 +108,274 @@ class Wget(FetchMethod):
self._runwget(ud, d, fetchcmd, True)
return True
+
+ def _parse_path(self, regex, s):
+ """
+ Find and group name, version and archive type in the given string s
+ """
+
+ m = regex.search(s)
+ if m:
+ pname = ''
+ pver = ''
+ ptype = ''
+
+ mdict = m.groupdict()
+ if 'name' in mdict.keys():
+ pname = mdict['name']
+ if 'pver' in mdict.keys():
+ pver = mdict['pver']
+ if 'type' in mdict.keys():
+ ptype = mdict['type']
+
+ bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))
+
+ return (pname, pver, ptype)
+
+ return None
+
+ def _modelate_version(self, version):
+ if version[0] in ['.', '-']:
+ if version[1].isdigit():
+ version = version[1] + version[0] + version[2:len(version)]
+ else:
+ version = version[1:len(version)]
+
+ version = re.sub('-', '.', version)
+ version = re.sub('_', '.', version)
+ version = re.sub('(rc)+', '.1000.', version)
+ version = re.sub('(beta)+', '.100.', version)
+ version = re.sub('(alpha)+', '.10.', version)
+ if version[0] == 'v':
+ version = version[1:len(version)]
+ return version
+
+ def _vercmp(self, old, new):
+ """
+ Check whether 'new' is newer than 'old' version. We use existing vercmp() for the
+ purpose. PE is cleared in comparison as it's not for build, and PR is cleared too
+ for simplicity, as it's somewhat difficult to get from various upstream formats
+ """
+
+ (oldpn, oldpv, oldsuffix) = old
+ (newpn, newpv, newsuffix) = new
+
+ """
+ Check for a new suffix type that we have never heard of before
+ """
+ if (newsuffix):
+ m = self.suffix_regex_comp.search(newsuffix)
+ if not m:
+ bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
+ return False
+
+ """
+ Not our package so ignore it
+ """
+ if oldpn != newpn:
+ return False
+
+ oldpv = self._modelate_version(oldpv)
+ newpv = self._modelate_version(newpv)
+
+ return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))
+
+ def _fetch_index(self, uri, ud, d):
+ """
+ Run fetch checkstatus to get directory information
+ """
+ f = tempfile.NamedTemporaryFile()
+
+ agent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.12) Gecko/20101027 Ubuntu/9.10 (karmic) Firefox/3.6.12"
+ fetchcmd = self.basecmd
+ fetchcmd += " -O " + f.name + " --user-agent='" + agent + "' '" + uri + "'"
+ try:
+ self._runwget(ud, d, fetchcmd, True)
+ fetchresult = f.read()
+ except bb.fetch2.BBFetchException:
+ fetchresult = ""
+
+ f.close()
+ return fetchresult
+
+ def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
+ """
+ Return the latest version of a package inside a given directory path
+ If error or no version, return ""
+ """
+ valid = 0
+ version = ['', '', '']
+
+ bb.debug(3, "VersionURL: %s" % (url))
+ soup = BeautifulSoup(self._fetch_index(url, ud, d))
+ if not soup:
+ bb.debug(3, "*** %s NO SOUP" % (url))
+ return ""
+
+ for line in soup.find_all('a', href=True):
+ bb.debug(3, "line['href'] = '%s'" % (line['href']))
+ bb.debug(3, "line = '%s'" % (str(line)))
+
+ newver = self._parse_path(package_regex, line['href'])
+ if not newver:
+ newver = self._parse_path(package_regex, str(line))
+
+ if newver:
+ bb.debug(3, "Upstream version found: %s" % newver[1])
+ if valid == 0:
+ version = newver
+ valid = 1
+ elif self._vercmp(version, newver) < 0:
+ version = newver
+
+ pupver = re.sub('_', '.', version[1])
+
+ bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
+ (package, pupver or "N/A", current_version[1]))
+
+ if valid:
+ return pupver
+
+ return ""
+
+ def _check_latest_version_by_dir(self, dirver, package, package_regex,
+ current_version, ud, d):
+ """
+ Scan every directory in order to get upstream version.
+ """
+ version_dir = ['', '', '']
+ version = ['', '', '']
+
+ dirver_regex = re.compile("(\D*)((\d+[\.-_])+(\d+))")
+ s = dirver_regex.search(dirver)
+ if s:
+ version_dir[1] = s.group(2)
+ else:
+ version_dir[1] = dirver
+
+ dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
+ ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
+ bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
+
+ soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d))
+ if not soup:
+ return version[1]
+
+ for line in soup.find_all('a', href=True):
+ s = dirver_regex.search(line['href'].strip("/"))
+ if s:
+ version_dir_new = ['', s.group(2), '']
+ if self._vercmp(version_dir, version_dir_new) <= 0:
+ dirver_new = s.group(1) + s.group(2)
+ path = ud.path.replace(dirver, dirver_new, True) \
+ .split(package)[0]
+ uri = bb.fetch.encodeurl([ud.type, ud.host, path,
+ ud.user, ud.pswd, {}])
+
+ pupver = self._check_latest_version(uri,
+ package, package_regex, current_version, ud, d)
+ if pupver:
+ version[1] = pupver
+
+ version_dir = version_dir_new
+
+ return version[1]
+
+ def _init_regexes(self, package, ud, d):
+ """
+ Match as many patterns as possible such as:
+ gnome-common-2.20.0.tar.gz (most common format)
+ gtk+-2.90.1.tar.gz
+ xf86-input-synaptics-12.6.9.tar.gz
+ dri2proto-2.3.tar.gz
+ blktool_4.orig.tar.gz
+ libid3tag-0.15.1b.tar.gz
+ unzip552.tar.gz
+ icu4c-3_6-src.tgz
+ genext2fs_1.3.orig.tar.gz
+ gst-fluendo-mp3
+ """
+ # match most patterns which use "-" as separator to version digits
+ pn_prefix1 = "[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
+ # a loose pattern such as for unzip552.tar.gz
+ pn_prefix2 = "[a-zA-Z]+"
+ # a loose pattern such as for 80325-quicky-0.4.tar.gz
+ pn_prefix3 = "[0-9]+[-]?[a-zA-Z]+"
+ # Save the Package Name (pn) Regex for use later
+ pn_regex = "(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)
+
+ # match version
+ pver_regex = "(([A-Z]*\d+[a-zA-Z]*[\.-_]*)+)"
+
+ # match arch
+ parch_regex = "-source|_all_"
+
+ # src.rpm extension was added only for rpm packages. Can be removed if rpm
+ # packages will always be considered as having to be manually upgraded
+ psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)"
+
+ # match name, version and archive type of a package
+ package_regex_comp = re.compile("(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
+ % (pn_regex, pver_regex, parch_regex, psuffix_regex))
+ self.suffix_regex_comp = re.compile(psuffix_regex)
+
+ # compile regex, can be specific by package or generic regex
+ pn_regex = d.getVar('REGEX', True)
+ if pn_regex:
+ package_custom_regex_comp = re.compile(pn_regex)
+ else:
+ version = self._parse_path(package_regex_comp, package)
+ if version:
+ package_custom_regex_comp = re.compile(
+ "(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
+ (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
+ else:
+ package_custom_regex_comp = None
+
+ return package_custom_regex_comp
+
+ def latest_versionstring(self, ud, d):
+ """
+ Manipulate the URL and try to obtain the latest package version
+
+ sanity check to ensure same name and type.
+ """
+ package = ud.path.split("/")[-1]
+ current_version = ['', d.getVar('PV', True), '']
+
+ """possible to have no version in pkg name, such as spectrum-fw"""
+ if not re.search("\d+", package):
+ current_version[1] = re.sub('_', '.', current_version[1])
+ current_version[1] = re.sub('-', '.', current_version[1])
+ return current_version[1]
+
+ package_regex = self._init_regexes(package, ud, d)
+ if package_regex is None:
+ bb.warn("latest_versionstring: package %s don't match pattern" % (package))
+ return ""
+ bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))
+
+ uri = ""
+ regex_uri = d.getVar("REGEX_URI", True)
+ if not regex_uri:
+ path = ud.path.split(package)[0]
+
+ # search for version matches on folders inside the path, like:
+ # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
+ dirver_regex = re.compile("(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
+ m = dirver_regex.search(path)
+ if m:
+ pn = d.getVar('PN', True)
+ dirver = m.group('dirver')
+
+ dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn)))
+ if not dirver_pn_regex.search(dirver):
+ return self._check_latest_version_by_dir(dirver,
+ package, package_regex, current_version, ud, d)
+
+ uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
+ else:
+ uri = regex_uri
+
+ return self._check_latest_version(uri, package, package_regex,
+ current_version, ud, d)
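The scan above can be steered per recipe through the two variables it reads, REGEX and REGEX_URI; illustrative values:

    # Scrape a specific listing page instead of the SRC_URI directory:
    REGEX_URI = "https://example.org/releases/"
    # Custom match with the version in the 'pver' group:
    REGEX = "foo-(?P<pver>\d+(\.\d+)+)\.tar\.gz"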
diff --git a/bitbake/lib/bb/main.py b/bitbake/lib/bb/main.py
new file mode 100755
index 0000000000..1f19cc5dcf
--- /dev/null
+++ b/bitbake/lib/bb/main.py
@@ -0,0 +1,390 @@
+#!/usr/bin/env python
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Copyright (C) 2003, 2004 Chris Larson
+# Copyright (C) 2003, 2004 Phil Blundell
+# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
+# Copyright (C) 2005 Holger Hans Peter Freyther
+# Copyright (C) 2005 ROAD GmbH
+# Copyright (C) 2006 Richard Purdie
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import logging
+import optparse
+import warnings
+
+import bb
+from bb import event
+import bb.msg
+from bb import cooker
+from bb import ui
+from bb import server
+from bb import cookerdata
+
+__version__ = "1.26.0"
+logger = logging.getLogger("BitBake")
+
+class BBMainException(bb.BBHandledException):
+ pass
+
+def get_ui(config):
+ if not config.ui:
+ # modify 'ui' attribute because it is also read by cooker
+ config.ui = os.environ.get('BITBAKE_UI', 'knotty')
+
+ interface = config.ui
+
+ try:
+ # Dynamically load the UI based on the ui name. Although we
+ # suggest a fixed set this allows you to have flexibility in which
+ # ones are available.
+ module = __import__("bb.ui", fromlist = [interface])
+ return getattr(module, interface)
+ except AttributeError:
+ raise BBMainException("FATAL: Invalid user interface '%s' specified.\n"
+ "Valid interfaces: depexp, goggle, ncurses, hob, knotty [default]." % interface)
+
+
+# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
+warnlog = logging.getLogger("BitBake.Warnings")
+_warnings_showwarning = warnings.showwarning
+def _showwarning(message, category, filename, lineno, file=None, line=None):
+ if file is not None:
+ if _warnings_showwarning is not None:
+ _warnings_showwarning(message, category, filename, lineno, file, line)
+ else:
+ s = warnings.formatwarning(message, category, filename, lineno)
+ warnlog.warn(s)
+
+warnings.showwarning = _showwarning
+warnings.filterwarnings("ignore")
+warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
+warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
+warnings.filterwarnings("ignore", category=ImportWarning)
+warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
+warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
+
+class BitBakeConfigParameters(cookerdata.ConfigParameters):
+
+ def parseCommandLine(self, argv=sys.argv):
+ parser = optparse.OptionParser(
+ version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
+ usage = """%prog [options] [recipename/target ...]
+
+ Executes the specified task (default is 'build') for a given set of target recipes (.bb files).
+ It is assumed there is a conf/bblayers.conf available in cwd or in BBPATH which
+ will provide the layer, BBFILES and other configuration information.""")
+
+ parser.add_option("-b", "--buildfile", help = "Execute tasks from a specific .bb recipe directly. WARNING: Does not handle any dependencies from other recipes.",
+ action = "store", dest = "buildfile", default = None)
+
+ parser.add_option("-k", "--continue", help = "Continue as much as possible after an error. While the target that failed and anything depending on it cannot be built, as much as possible will be built before stopping.",
+ action = "store_false", dest = "abort", default = True)
+
+ parser.add_option("-a", "--tryaltconfigs", help = "Continue with builds by trying to use alternative providers where possible.",
+ action = "store_true", dest = "tryaltconfigs", default = False)
+
+ parser.add_option("-f", "--force", help = "Force the specified targets/task to run (invalidating any existing stamp file).",
+ action = "store_true", dest = "force", default = False)
+
+ parser.add_option("-c", "--cmd", help = "Specify the task to execute. The exact options available depend on the metadata. Some examples might be 'compile' or 'populate_sysroot' or 'listtasks' may give a list of the tasks available.",
+ action = "store", dest = "cmd")
+
+ parser.add_option("-C", "--clear-stamp", help = "Invalidate the stamp for the specified task such as 'compile' and then run the default task for the specified target(s).",
+ action = "store", dest = "invalidate_stamp")
+
+ parser.add_option("-r", "--read", help = "Read the specified file before bitbake.conf.",
+ action = "append", dest = "prefile", default = [])
+
+ parser.add_option("-R", "--postread", help = "Read the specified file after bitbake.conf.",
+ action = "append", dest = "postfile", default = [])
+
+ parser.add_option("-v", "--verbose", help = "Output more log message data to the terminal.",
+ action = "store_true", dest = "verbose", default = False)
+
+ parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
+ action = "count", dest="debug", default = 0)
+
+ parser.add_option("-n", "--dry-run", help = "Don't execute, just go through the motions.",
+ action = "store_true", dest = "dry_run", default = False)
+
+ parser.add_option("-S", "--dump-signatures", help = "Dump out the signature construction information, with no task execution. The SIGNATURE_HANDLER parameter is passed to the handler. Two common values are none and printdiff but the handler may define more/less. none means only dump the signature, printdiff means compare the dumped signature with the cached one.",
+ action = "append", dest = "dump_signatures", default = [], metavar="SIGNATURE_HANDLER")
+
+ parser.add_option("-p", "--parse-only", help = "Quit after parsing the BB recipes.",
+ action = "store_true", dest = "parse_only", default = False)
+
+ parser.add_option("-s", "--show-versions", help = "Show current and preferred versions of all recipes.",
+ action = "store_true", dest = "show_versions", default = False)
+
+ parser.add_option("-e", "--environment", help = "Show the global or per-recipe environment complete with information about where variables were set/changed.",
+ action = "store_true", dest = "show_environment", default = False)
+
+ parser.add_option("-g", "--graphviz", help = "Save dependency tree information for the specified targets in the dot syntax.",
+ action = "store_true", dest = "dot_graph", default = False)
+
+ parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
+ action = "append", dest = "extra_assume_provided", default = [])
+
+ parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
+ action = "append", dest = "debug_domains", default = [])
+
+ parser.add_option("-P", "--profile", help = "Profile the command and save reports.",
+ action = "store_true", dest = "profile", default = False)
+
+ parser.add_option("-u", "--ui", help = "The user interface to use (e.g. knotty, hob, depexp).",
+ action = "store", dest = "ui")
+
+ parser.add_option("-t", "--servertype", help = "Choose which server to use, process or xmlrpc.",
+ action = "store", dest = "servertype")
+
+ parser.add_option("", "--token", help = "Specify the connection token to be used when connecting to a remote server.",
+ action = "store", dest = "xmlrpctoken")
+
+ parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not.",
+ action = "store_true", dest = "revisions_changed", default = False)
+
+ parser.add_option("", "--server-only", help = "Run bitbake without a UI, only starting a server (cooker) process.",
+ action = "store_true", dest = "server_only", default = False)
+
+ parser.add_option("-B", "--bind", help = "The name/address for the bitbake server to bind to.",
+ action = "store", dest = "bind", default = False)
+
+ parser.add_option("", "--no-setscene", help = "Do not run any setscene tasks. sstate will be ignored and everything needed, built.",
+ action = "store_true", dest = "nosetscene", default = False)
+
+ parser.add_option("", "--remote-server", help = "Connect to the specified server.",
+ action = "store", dest = "remote_server", default = False)
+
+ parser.add_option("-m", "--kill-server", help = "Terminate the remote server.",
+ action = "store_true", dest = "kill_server", default = False)
+
+ parser.add_option("", "--observe-only", help = "Connect to a server as an observing-only client.",
+ action = "store_true", dest = "observe_only", default = False)
+
+ parser.add_option("", "--status-only", help = "Check the status of the remote bitbake server.",
+ action = "store_true", dest = "status_only", default = False)
+
+ parser.add_option("-w", "--write-log", help = "Writes the event log of the build to a bitbake event json file. Use '' (empty string) to assign the name automatically.",
+ action = "store", dest = "writeeventlog")
+
+ options, targets = parser.parse_args(argv)
+
+        # some environment variables also set configuration options
+ if "BBSERVER" in os.environ:
+ options.servertype = "xmlrpc"
+ options.remote_server = os.environ["BBSERVER"]
+
+ if "BBTOKEN" in os.environ:
+ options.xmlrpctoken = os.environ["BBTOKEN"]
+
+ if "BBEVENTLOG" is os.environ:
+ options.writeeventlog = os.environ["BBEVENTLOG"]
+
+ # fill in proper log name if not supplied
+ if options.writeeventlog is not None and len(options.writeeventlog) == 0:
+ import datetime
+ options.writeeventlog = "bitbake_eventlog_%s.json" % datetime.datetime.now().strftime("%Y%m%d%H%M%S")
+
+ # if BBSERVER says to autodetect, let's do that
+ if options.remote_server:
+ [host, port] = options.remote_server.split(":", 2)
+ port = int(port)
+            # a port of -1 means 'automatic': read the address from the
+            # bitbake.lock file. This is a bit tricky, but we always expect
+            # to be in the base of the build directory if we are to have any
+            # chance of starting the server later anyway
+ if port == -1:
+ lock_location = "./bitbake.lock"
+ # we try to read the address at all times; if the server is not started,
+ # we'll try to start it after the first connect fails, below
+ try:
+ lf = open(lock_location, 'r')
+ remotedef = lf.readline()
+ [host, port] = remotedef.split(":")
+ port = int(port)
+ lf.close()
+ options.remote_server = remotedef
+ except Exception as e:
+ raise BBMainException("Failed to read bitbake.lock (%s), invalid port" % str(e))
+
+ return options, targets[1:]
+
+
+def start_server(servermodule, configParams, configuration, features):
+ server = servermodule.BitBakeServer()
+ if configParams.bind:
+ (host, port) = configParams.bind.split(':')
+ server.initServer((host, int(port)))
+ configuration.interface = [ server.serverImpl.host, server.serverImpl.port ]
+ else:
+ server.initServer()
+ configuration.interface = []
+
+ try:
+ configuration.setServerRegIdleCallback(server.getServerIdleCB())
+
+ cooker = bb.cooker.BBCooker(configuration, features)
+
+ server.addcooker(cooker)
+ server.saveConnectionDetails()
+ except Exception as e:
+ exc_info = sys.exc_info()
+ while hasattr(server, "event_queue"):
+ try:
+ import queue
+ except ImportError:
+ import Queue as queue
+ try:
+ event = server.event_queue.get(block=False)
+ except (queue.Empty, IOError):
+ break
+ if isinstance(event, logging.LogRecord):
+ logger.handle(event)
+ raise exc_info[1], None, exc_info[2]
+ server.detach()
+ return server
+
+
+def bitbake_main(configParams, configuration):
+
+ # Python multiprocessing requires /dev/shm on Linux
+ if sys.platform.startswith('linux') and not os.access('/dev/shm', os.W_OK | os.X_OK):
+ raise BBMainException("FATAL: /dev/shm does not exist or is not writable")
+
+ # Unbuffer stdout to avoid log truncation in the event
+    # of a disorderly exit as well as to provide timely
+ # updates to log files for use with tail
+ try:
+ if sys.stdout.name == '<stdout>':
+ sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
+ except:
+ pass
+
+
+ configuration.setConfigParameters(configParams)
+
+ ui_module = get_ui(configParams)
+
+    # The server type can currently be xmlrpc or process; if nothing is
+    # specified, the default is process
+ if configParams.servertype:
+ server_type = configParams.servertype
+ else:
+ server_type = 'process'
+
+ try:
+ module = __import__("bb.server", fromlist = [server_type])
+ servermodule = getattr(module, server_type)
+ except AttributeError:
+ raise BBMainException("FATAL: Invalid server type '%s' specified.\n"
+ "Valid interfaces: xmlrpc, process [default]." % server_type)
+
+ if configParams.server_only:
+ if configParams.servertype != "xmlrpc":
+ raise BBMainException("FATAL: If '--server-only' is defined, we must set the "
+ "servertype as 'xmlrpc'.\n")
+ if not configParams.bind:
+ raise BBMainException("FATAL: The '--server-only' option requires a name/address "
+ "to bind to with the -B option.\n")
+ if configParams.remote_server:
+ raise BBMainException("FATAL: The '--server-only' option conflicts with %s.\n" %
+ ("the BBSERVER environment variable" if "BBSERVER" in os.environ \
+ else "the '--remote-server' option" ))
+
+ if configParams.bind and configParams.servertype != "xmlrpc":
+ raise BBMainException("FATAL: If '-B' or '--bind' is defined, we must "
+ "set the servertype as 'xmlrpc'.\n")
+
+ if configParams.remote_server and configParams.servertype != "xmlrpc":
+ raise BBMainException("FATAL: If '--remote-server' is defined, we must "
+ "set the servertype as 'xmlrpc'.\n")
+
+ if configParams.observe_only and (not configParams.remote_server or configParams.bind):
+ raise BBMainException("FATAL: '--observe-only' can only be used by UI clients "
+ "connecting to a server.\n")
+
+ if configParams.kill_server and not configParams.remote_server:
+ raise BBMainException("FATAL: '--kill-server' can only be used to terminate a remote server")
+
+ if "BBDEBUG" in os.environ:
+ level = int(os.environ["BBDEBUG"])
+ if level > configuration.debug:
+ configuration.debug = level
+
+ bb.msg.init_msgconfig(configParams.verbose, configuration.debug,
+ configuration.debug_domains)
+
+ # Ensure logging messages get sent to the UI as events
+ handler = bb.event.LogHandler()
+ if not configParams.status_only:
+ # In status only mode there are no logs and no UI
+ logger.addHandler(handler)
+
+ # Clear away any spurious environment variables while we stoke up the cooker
+ cleanedvars = bb.utils.clean_environment()
+
+ featureset = []
+ if not configParams.server_only:
+ # Collect the feature set for the UI
+ featureset = getattr(ui_module, "featureSet", [])
+
+ if not configParams.remote_server:
+ # we start a server with a given configuration
+ server = start_server(servermodule, configParams, configuration, featureset)
+ bb.event.ui_queue = []
+ else:
+        # we start a stub server that is actually an XMLRPC client connecting to a real server
+ server = servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken)
+ server.saveConnectionDetails(configParams.remote_server)
+
+
+ if not configParams.server_only:
+ try:
+ server_connection = server.establishConnection(featureset)
+ except Exception as e:
+ if configParams.kill_server:
+ return 0
+ bb.fatal("Could not connect to server %s: %s" % (configParams.remote_server, str(e)))
+
+ # Restore the environment in case the UI needs it
+ for k in cleanedvars:
+ os.environ[k] = cleanedvars[k]
+
+ logger.removeHandler(handler)
+
+
+ if configParams.status_only:
+ server_connection.terminate()
+ return 0
+
+ if configParams.kill_server:
+ server_connection.connection.terminateServer()
+ bb.event.ui_queue = []
+ return 0
+
+ try:
+ return ui_module.main(server_connection.connection, server_connection.events, configParams)
+ finally:
+ bb.event.ui_queue = []
+ server_connection.terminate()
+ else:
+ print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, server.serverImpl.port))
+ return 0
+
+ return 1
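A minimal sketch (not part of the patch) of the lock-file step behind the port -1 autodetection above; read_lock_address is a hypothetical helper, and the single host:port line format is assumed from the parseCommandLine code in this commit.

    def read_lock_address(lock_location="./bitbake.lock"):
        # bitbake.lock is assumed to hold one "host:port" line written by a
        # running server, as read in parseCommandLine above
        with open(lock_location, "r") as lf:
            remotedef = lf.readline().strip()
        host, port = remotedef.split(":")
        return host, int(port)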
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index 2303f15b9e..25effc2200 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -73,6 +73,11 @@ def update_mtime(f):
__mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
return __mtime_cache[f]
+def update_cache(f):
+ if f in __mtime_cache:
+ logger.debug(1, "Updating mtime cache for %s" % f)
+ update_mtime(f)
+
def mark_dependency(d, f):
if f.startswith('./'):
f = "%s/%s" % (os.getcwd(), f[2:])
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
index 4e5a06e761..c53ab17d68 100644
--- a/bitbake/lib/bb/parse/ast.py
+++ b/bitbake/lib/bb/parse/ast.py
@@ -128,7 +128,7 @@ class DataNode(AstNode):
if 'flag' in groupd and groupd['flag'] != None:
flag = groupd['flag']
elif groupd["lazyques"]:
- flag = "defaultval"
+ flag = "_defaultval"
loginfo['op'] = op
loginfo['detail'] = groupd["value"]
@@ -139,7 +139,7 @@ class DataNode(AstNode):
data.setVar(key, val, **loginfo)
class MethodNode(AstNode):
- tr_tbl = string.maketrans('/.+-@%', '______')
+ tr_tbl = string.maketrans('/.+-@%&', '_______')
def __init__(self, filename, lineno, func_name, body):
AstNode.__init__(self, filename, lineno)
@@ -226,6 +226,8 @@ class ExportFuncsNode(AstNode):
if data.getVarFlag(calledfunc, "python"):
data.setVar(func, " bb.build.exec_func('" + calledfunc + "', d)\n")
else:
+ if "-" in self.classname:
+ bb.fatal("The classname %s contains a dash character and is calling an sh function %s using EXPORT_FUNCTIONS. Since a dash is illegal in sh function names, this cannot work, please rename the class or don't use EXPORT_FUNCTIONS." % (self.classname, calledfunc))
data.setVar(func, " " + calledfunc + "\n")
data.setVarFlag(func, 'export_func', '1')
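The tr_tbl change above only adds '&' to the set of characters mapped to underscores. A quick Python 2 illustration (this BitBake release targets Python 2) of what the translation does:

    import string

    # characters legal in recipe/class names but illegal in Python/shell
    # function names are mapped to underscores before the function is defined
    tr_tbl = string.maketrans('/.+-@%&', '_______')
    print("my-pkg+1.0&x".translate(tr_tbl))  # -> my_pkg_1_0_x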
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
index 978ebe4608..861faf0e76 100644
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
@@ -27,7 +27,7 @@
import re, os
import logging
import bb.utils
-from bb.parse import ParseError, resolve_file, ast, logger
+from bb.parse import ParseError, resolve_file, ast, logger, handle
__config_regexp__ = re.compile( r"""
^
@@ -66,38 +66,36 @@ def init(data):
def supports(fn, d):
return fn[-5:] == ".conf"
-def include(oldfn, fn, lineno, data, error_out):
+def include(parentfn, fn, lineno, data, error_out):
"""
error_out: A string indicating the verb (e.g. "include", "inherit") to be
used in a ParseError that will be raised if the file to be included could
not be included. Specify False to avoid raising an error in this case.
"""
- if oldfn == fn: # prevent infinite recursion
+ if parentfn == fn: # prevent infinite recursion
return None
- import bb
fn = data.expand(fn)
- oldfn = data.expand(oldfn)
+ parentfn = data.expand(parentfn)
if not os.path.isabs(fn):
- dname = os.path.dirname(oldfn)
+ dname = os.path.dirname(parentfn)
bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True))
abs_fn, attempts = bb.utils.which(bbpath, fn, history=True)
if abs_fn and bb.parse.check_dependency(data, abs_fn):
- bb.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
+ logger.warn("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True)))
for af in attempts:
bb.parse.mark_dependency(data, af)
if abs_fn:
fn = abs_fn
elif bb.parse.check_dependency(data, fn):
- bb.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
+ logger.warn("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True)))
- from bb.parse import handle
try:
- ret = handle(fn, data, True)
+ ret = bb.parse.handle(fn, data, True)
except (IOError, OSError):
if error_out:
- raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), oldfn, lineno)
+ raise ParseError("Could not %(error_out)s file %(fn)s" % vars(), parentfn, lineno)
logger.debug(2, "CONF file '%s' not found", fn)
bb.parse.mark_dependency(data, fn)
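The include() path above relies on bb.utils.which(..., history=True) returning both the first match and every candidate tried, so each attempt can be marked as a parse dependency. A simplified reimplementation, for illustration only:

    import os

    def which_with_history(search_path, name):
        # walk a colon-separated search path, recording every candidate tried
        attempts = []
        for d in search_path.split(":"):
            candidate = os.path.join(d, name)
            attempts.append(candidate)
            if os.path.exists(candidate):
                return candidate, attempts
        return "", attempts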
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index 6d9cf3f4df..c503980666 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -430,7 +430,7 @@ class RunQueueData:
# Nothing to do
return 0
- logger.info("Preparing runqueue")
+ logger.info("Preparing RunQueue")
# Step A - Work out a list of tasks to run
#
@@ -1064,7 +1064,7 @@ class RunQueue:
retval = self.rqexe.execute()
if self.state is runQueueCleanUp:
- self.rqexe.finish()
+ retval = self.rqexe.finish()
if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe:
self.teardown_workers()
@@ -1306,15 +1306,14 @@ class RunQueueExecute:
if self.stats.active > 0:
bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
self.rq.read_workers()
-
- return
+ return self.rq.active_fds()
if len(self.failed_fnids) != 0:
self.rq.state = runQueueFailed
- return
+ return True
self.rq.state = runQueueComplete
- return
+ return True
def check_dependencies(self, task, taskdeps, setscene = False):
if not self.rq.depvalidate:
diff --git a/bitbake/lib/bb/server/process.py b/bitbake/lib/bb/server/process.py
index d362f8d7fe..c9286ddba7 100644
--- a/bitbake/lib/bb/server/process.py
+++ b/bitbake/lib/bb/server/process.py
@@ -115,7 +115,7 @@ class ProcessServer(Process, BaseImplServer):
self.quitout.recv()
self.quit = True
- self.idle_commands(.1, [self.event_queue._reader, self.command_channel, self.quitout])
+ self.idle_commands(.1, [self.command_channel, self.quitout])
except Exception:
logger.exception('Running command %s', command)
@@ -135,6 +135,9 @@ class ProcessServer(Process, BaseImplServer):
nextsleep = None
elif retval is True:
nextsleep = None
+ elif isinstance(retval, float):
+ if (retval < nextsleep):
+ nextsleep = retval
elif nextsleep is None:
continue
else:
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
index 4205a4c35f..75ec8556f4 100644
--- a/bitbake/lib/bb/server/xmlrpc.py
+++ b/bitbake/lib/bb/server/xmlrpc.py
@@ -235,12 +235,16 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
fds = [self]
nextsleep = 0.1
for function, data in self._idlefuns.items():
+ retval = None
try:
retval = function(self, data, False)
if retval is False:
del self._idlefuns[function]
elif retval is True:
nextsleep = 0
+ elif isinstance(retval, float):
+ if (retval < nextsleep):
+ nextsleep = retval
else:
fds = fds + retval
except SystemExit:
@@ -248,6 +252,9 @@ class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
except:
import traceback
traceback.print_exc()
+                if retval is None:
+                    # the function's execution failed; unregister it
+                    del self._idlefuns[function]
pass
socktimeout = self.socket.gettimeout() or nextsleep
@@ -299,6 +306,8 @@ class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
_, error = self.connection.runCommand(["setFeatures", self.featureset])
if error:
+        # disconnect the client; we can't make setFeatures work
+ self.connection.removeClient()
# no need to log it here, the error shall be sent to the client
raise BaseException(error)
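The process.py and xmlrpc.py hunks above extend a shared idle-callback protocol: False unregisters the callback, True asks to run again without sleeping, a float caps the next sleep, and anything else is treated as a list of file descriptors to wait on. A schematic dispatcher under those assumptions (not the actual server loop):

    import select

    def run_idle_pass(idlefuns, server, default_sleep=0.1):
        fds = []
        nextsleep = default_sleep
        for function, data in list(idlefuns.items()):
            retval = function(server, data, False)
            if retval is False:
                del idlefuns[function]          # callback asked to be removed
            elif retval is True:
                nextsleep = 0                   # more work pending, no sleep
            elif isinstance(retval, float):
                nextsleep = min(nextsleep, retval)
            else:
                fds.extend(retval)              # fds to select() on
        if nextsleep:
            select.select(fds, [], [], nextsleep)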
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 86d9ca0593..2de3aff332 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -62,6 +62,13 @@ class SignatureGenerator(object):
def dump_sigs(self, dataCache, options):
return
+ def get_taskdata(self):
+ return (self.runtaskdeps, self.taskhash, self.file_checksum_values)
+
+ def set_taskdata(self, data):
+ self.runtaskdeps, self.taskhash, self.file_checksum_values = data
+
+
class SignatureGeneratorBasic(SignatureGenerator):
"""
"""
@@ -185,7 +192,14 @@ class SignatureGeneratorBasic(SignatureGenerator):
checksums = bb.fetch2.get_file_checksums(dataCache.file_checksums[fn][task], recipename)
for (f,cs) in checksums:
self.file_checksum_values[k][f] = cs
- data = data + cs
+ if cs:
+ data = data + cs
+
+ taskdep = dataCache.task_deps[fn]
+ if 'nostamp' in taskdep and task in taskdep['nostamp']:
+ # Nostamp tasks need an implicit taint so that they force any dependent tasks to run
+ import uuid
+ data = data + str(uuid.uuid4())
taint = self.read_taint(fn, task, dataCache.stamp[fn])
if taint:
@@ -197,12 +211,6 @@ class SignatureGeneratorBasic(SignatureGenerator):
#d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
return h
- def get_taskdata(self):
- return (self.runtaskdeps, self.taskhash, self.file_checksum_values)
-
- def set_taskdata(self, data):
- self.runtaskdeps, self.taskhash, self.file_checksum_values = data
-
def dump_sigtask(self, fn, task, stampbase, runtime):
k = fn + "." + task
if runtime == "customfile":
@@ -295,10 +303,9 @@ def dump_this_task(outfile, d):
bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
def clean_basepath(a):
+ b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2]
if a.startswith("virtual:"):
- b = a.rsplit("/", 1)[1] + ":" + a.rsplit(":", 1)[0]
- else:
- b = a.rsplit("/", 1)[1]
+ b = b + ":" + a.rsplit(":", 1)[0]
return b
def clean_basepaths(a):
@@ -307,6 +314,12 @@ def clean_basepaths(a):
b[clean_basepath(x)] = a[x]
return b
+def clean_basepaths_list(a):
+ b = []
+ for x in a:
+ b.append(clean_basepath(x))
+ return b
+
def compare_sigfiles(a, b, recursecb = None):
output = []
@@ -406,6 +419,17 @@ def compare_sigfiles(a, b, recursecb = None):
for f in removed:
output.append("Dependency on checksum of file %s was removed" % (f))
+ changed = []
+ for idx, task in enumerate(a_data['runtaskdeps']):
+ a = a_data['runtaskdeps'][idx]
+ b = b_data['runtaskdeps'][idx]
+ if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]:
+ changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b]))
+
+ if changed:
+ output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps'])))
+ output.append("\n".join(changed))
+
if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
a = a_data['runtaskhashes']
@@ -482,4 +506,17 @@ def dump_sigfile(a):
if 'taint' in a_data:
output.append("Tainted (by forced/invalidated task): %s" % a_data['taint'])
+ data = a_data['basehash']
+ for dep in a_data['runtaskdeps']:
+ data = data + a_data['runtaskhashes'][dep]
+
+ for c in a_data['file_checksum_values']:
+ data = data + c[1]
+
+ if 'taint' in a_data:
+ data = data + a_data['taint']
+
+ h = hashlib.md5(data).hexdigest()
+ output.append("Computed Hash is %s" % h)
+
return output
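The "Computed Hash" block added to dump_sigfile() rebuilds the task hash from the dumped data. Restated as a standalone helper (the name is hypothetical), the recipe is md5 over the base hash, the dependency task hashes, the file checksums and any taint, in that order:

    import hashlib

    def recompute_taskhash(sigdata):
        data = sigdata['basehash']
        for dep in sigdata['runtaskdeps']:
            data += sigdata['runtaskhashes'][dep]
        for _, checksum in sigdata['file_checksum_values']:
            data += checksum
        if 'taint' in sigdata:
            data += sigdata['taint']
        return hashlib.md5(data).hexdigest()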
diff --git a/bitbake/lib/bb/tests/data.py b/bitbake/lib/bb/tests/data.py
index 9b09ff4c61..7994a88a78 100644
--- a/bitbake/lib/bb/tests/data.py
+++ b/bitbake/lib/bb/tests/data.py
@@ -121,6 +121,12 @@ class DataExpansions(unittest.TestCase):
keys = self.d.keys()
self.assertEqual(keys, ['value_of_foo', 'foo', 'bar'])
+ def test_keys_deletion(self):
+ newd = bb.data.createCopy(self.d)
+ newd.delVar("bar")
+ keys = newd.keys()
+ self.assertEqual(keys, ['value_of_foo', 'foo'])
+
class TestNestedExpansions(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
@@ -266,6 +272,13 @@ class TestConcatOverride(unittest.TestCase):
bb.data.update_data(self.d)
self.assertEqual(self.d.getVar("TEST", True), "Y")
+ def test_remove_expansion_items(self):
+ self.d.setVar("TEST", "A B C D")
+ self.d.setVar("BAR", "B D")
+ self.d.setVar("TEST_remove", "${BAR}")
+ bb.data.update_data(self.d)
+ self.assertEqual(self.d.getVar("TEST", True), "A C")
+
class TestOverrides(unittest.TestCase):
def setUp(self):
self.d = bb.data.init()
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index d95b43a5e3..d56ef49948 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -24,6 +24,7 @@ import tempfile
import subprocess
import os
from bb.fetch2 import URI
+from bb.fetch2 import FetchMethod
import bb
class URITest(unittest.TestCase):
@@ -565,5 +566,83 @@ class URLHandle(unittest.TestCase):
result = bb.fetch.encodeurl(v)
self.assertEqual(result, k)
+class FetchMethodTest(FetcherTest):
+
+ test_git_uris = {
+ # version pattern "X.Y.Z"
+ ("mx-1.0", "git://github.com/clutter-project/mx.git;branch=mx-1.4", "9b1db6b8060bd00b121a692f942404a24ae2960f", "")
+ : "1.99.4",
+ # version pattern "vX.Y"
+ ("mtd-utils", "git://git.infradead.org/mtd-utils.git", "ca39eb1d98e736109c64ff9c1aa2a6ecca222d8f", "")
+ : "1.5.0",
+ # version pattern "pkg_name-X.Y"
+ ("presentproto", "git://anongit.freedesktop.org/git/xorg/proto/presentproto", "24f3a56e541b0a9e6c6ee76081f441221a120ef9", "")
+ : "1.0",
+ # version pattern "pkg_name-vX.Y.Z"
+ ("dtc", "git://git.qemu.org/dtc.git", "65cc4d2748a2c2e6f27f1cf39e07a5dbabd80ebf", "")
+ : "1.4.0",
+ # combination version pattern
+ ("sysprof", "git://git.gnome.org/sysprof", "cd44ee6644c3641507fb53b8a2a69137f2971219", "")
+ : "1.2.0",
+ ("u-boot-mkimage", "git://git.denx.de/u-boot.git;branch=master;protocol=git", "62c175fbb8a0f9a926c88294ea9f7e88eb898f6c", "")
+ : "2014.01",
+ # version pattern "yyyymmdd"
+ ("mobile-broadband-provider-info", "git://git.gnome.org/mobile-broadband-provider-info", "4ed19e11c2975105b71b956440acdb25d46a347d", "")
+ : "20120614",
+ # packages with a valid GITTAGREGEX
+ ("xf86-video-omap", "git://anongit.freedesktop.org/xorg/driver/xf86-video-omap", "ae0394e687f1a77e966cf72f895da91840dffb8f", "(?P<pver>(\d+\.(\d\.?)*))")
+ : "0.4.3",
+ ("build-appliance-image", "git://git.yoctoproject.org/poky", "b37dd451a52622d5b570183a81583cc34c2ff555", "(?P<pver>(([0-9][\.|_]?)+[0-9]))")
+ : "11.0.0",
+ ("chkconfig-alternatives-native", "git://github.com/kergoth/chkconfig;branch=sysroot", "cd437ecbd8986c894442f8fce1e0061e20f04dee", "chkconfig\-(?P<pver>((\d+[\.\-_]*)+))")
+ : "1.3.59",
+ ("remake", "git://github.com/rocky/remake.git", "f05508e521987c8494c92d9c2871aec46307d51d", "(?P<pver>(\d+\.(\d+\.)*\d*(\+dbg\d+(\.\d+)*)*))")
+ : "3.82+dbg0.9",
+ }
-
+ test_wget_uris = {
+ # packages with versions inside directory name
+ ("util-linux", "http://kernel.org/pub/linux/utils/util-linux/v2.23/util-linux-2.24.2.tar.bz2", "", "")
+ : "2.24.2",
+ ("enchant", "http://www.abisource.com/downloads/enchant/1.6.0/enchant-1.6.0.tar.gz", "", "")
+ : "1.6.0",
+ ("cmake", "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz", "", "")
+ : "2.8.12.1",
+ # packages with versions only in current directory
+ ("eglic", "http://downloads.yoctoproject.org/releases/eglibc/eglibc-2.18-svnr23787.tar.bz2", "", "")
+ : "2.19",
+ ("gnu-config", "http://downloads.yoctoproject.org/releases/gnu-config/gnu-config-20120814.tar.bz2", "", "")
+ : "20120814",
+ # packages with "99" in the name of possible version
+ ("pulseaudio", "http://freedesktop.org/software/pulseaudio/releases/pulseaudio-4.0.tar.xz", "", "")
+ : "5.0",
+ ("xserver-xorg", "http://xorg.freedesktop.org/releases/individual/xserver/xorg-server-1.15.1.tar.bz2", "", "")
+ : "1.15.1",
+ # packages with valid REGEX_URI and REGEX
+ ("cups", "http://www.cups.org/software/1.7.2/cups-1.7.2-source.tar.bz2", "http://www.cups.org/software.php", "(?P<name>cups\-)(?P<pver>((\d+[\.\-_]*)+))\-source\.tar\.gz")
+ : "2.0.0",
+ ("db", "http://download.oracle.com/berkeley-db/db-5.3.21.tar.gz", "http://www.oracle.com/technetwork/products/berkeleydb/downloads/index-082944.html", "http://download.oracle.com/otn/berkeley-db/(?P<name>db-)(?P<pver>((\d+[\.\-_]*)+))\.tar\.gz")
+ : "6.1.19",
+ }
+ if os.environ.get("BB_SKIP_NETTESTS") == "yes":
+ print("Unset BB_SKIP_NETTESTS to run network tests")
+ else:
+ def test_git_latest_versionstring(self):
+ for k, v in self.test_git_uris.items():
+ self.d.setVar("PN", k[0])
+ self.d.setVar("SRCREV", k[2])
+ self.d.setVar("GITTAGREGEX", k[3])
+ ud = bb.fetch2.FetchData(k[1], self.d)
+ verstring = ud.method.latest_versionstring(ud, self.d)
+ r = bb.utils.vercmp_string(v, verstring)
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
+
+ def test_wget_latest_versionstring(self):
+ for k, v in self.test_wget_uris.items():
+ self.d.setVar("PN", k[0])
+ self.d.setVar("REGEX_URI", k[2])
+ self.d.setVar("REGEX", k[3])
+ ud = bb.fetch2.FetchData(k[1], self.d)
+ verstring = ud.method.latest_versionstring(ud, self.d)
+ r = bb.utils.vercmp_string(v, verstring)
+ self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
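The network tests above are gated by a conditional inside the class body: because the body executes at import time, defining the test methods in the else branch means unittest never discovers them when BB_SKIP_NETTESTS=yes. The pattern in isolation:

    import os
    import unittest

    class NetworkTests(unittest.TestCase):
        if os.environ.get("BB_SKIP_NETTESTS") == "yes":
            print("Unset BB_SKIP_NETTESTS to run network tests")
        else:
            def test_reachable(self):
                # only defined (and hence only run) when network tests are on
                self.assertTrue(True)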
diff --git a/bitbake/lib/bb/tests/utils.py b/bitbake/lib/bb/tests/utils.py
index 7c50b1d786..507de2de3c 100644
--- a/bitbake/lib/bb/tests/utils.py
+++ b/bitbake/lib/bb/tests/utils.py
@@ -35,6 +35,10 @@ class VerCmpString(unittest.TestCase):
self.assertTrue(result < 0)
result = bb.utils.vercmp_string('1.1', '1_p2')
self.assertTrue(result < 0)
+ result = bb.utils.vercmp_string('1.0', '1.0+1.1-beta1')
+ self.assertTrue(result < 0)
+ result = bb.utils.vercmp_string('1.1', '1.0+1.1-beta1')
+ self.assertTrue(result > 0)
def test_explode_dep_versions(self):
correctresult = {"foo" : ["= 1.10"]}
@@ -51,3 +55,36 @@ class VerCmpString(unittest.TestCase):
result = bb.utils.explode_dep_versions2("foo ( =1.10 )")
self.assertEqual(result, correctresult)
+ def test_vercmp_string_op(self):
+ compareops = [('1', '1', '=', True),
+ ('1', '1', '==', True),
+ ('1', '1', '!=', False),
+ ('1', '1', '>', False),
+ ('1', '1', '<', False),
+ ('1', '1', '>=', True),
+ ('1', '1', '<=', True),
+ ('1', '0', '=', False),
+ ('1', '0', '==', False),
+ ('1', '0', '!=', True),
+ ('1', '0', '>', True),
+ ('1', '0', '<', False),
+ ('1', '0', '>>', True),
+ ('1', '0', '<<', False),
+ ('1', '0', '>=', True),
+ ('1', '0', '<=', False),
+ ('0', '1', '=', False),
+ ('0', '1', '==', False),
+ ('0', '1', '!=', True),
+ ('0', '1', '>', False),
+ ('0', '1', '<', True),
+ ('0', '1', '>>', False),
+ ('0', '1', '<<', True),
+ ('0', '1', '>=', False),
+ ('0', '1', '<=', True)]
+
+ for arg1, arg2, op, correctresult in compareops:
+ result = bb.utils.vercmp_string_op(arg1, arg2, op)
+ self.assertEqual(result, correctresult, 'vercmp_string_op("%s", "%s", "%s") != %s' % (arg1, arg2, op, correctresult))
+
+ # Check that clearly invalid operator raises an exception
+ self.assertRaises(bb.utils.VersionStringException, bb.utils.vercmp_string_op, '0', '0', '$')
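A short usage sketch for the operator-based comparison the new test exercises, assuming an environment where bb.utils is importable; per the table above, '>>' and '<<' behave like '>' and '<', and unknown operators raise VersionStringException:

    import bb.utils

    assert bb.utils.vercmp_string_op('1.0', '1.0+1.1-beta1', '<')
    assert bb.utils.vercmp_string_op('1', '0', '>>')
    try:
        bb.utils.vercmp_string_op('0', '0', '$')
    except bb.utils.VersionStringException:
        pass  # invalid operators raise, exactly as the test asserts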
diff --git a/bitbake/lib/bb/tinfoil.py b/bitbake/lib/bb/tinfoil.py
index 6bcbd47ab3..8fc9be3039 100644
--- a/bitbake/lib/bb/tinfoil.py
+++ b/bitbake/lib/bb/tinfoil.py
@@ -90,7 +90,7 @@ class TinfoilConfigParameters(ConfigParameters):
self.initial_options = options
super(TinfoilConfigParameters, self).__init__()
- def parseCommandLine(self):
+ def parseCommandLine(self, argv=sys.argv):
class DummyOptions:
def __init__(self, initial_options):
for key, val in initial_options.items():
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index 7017305ae7..64bd94e5e8 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -26,12 +26,20 @@ os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
-from toaster.orm.models import Target_Image_File
+from toaster.orm.models import Target_Image_File, BuildArtifact
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
+from django.db import models
+from pprint import pformat
+import logging
+
+from django.db import transaction, connection
+
+logger = logging.getLogger("BitBake")
+
class NotExisting(Exception):
pass
@@ -43,8 +51,57 @@ class ORMWrapper(object):
"""
def __init__(self):
+ self.layer_version_objects = []
+ self.task_objects = {}
+ self.recipe_objects = {}
pass
+ @staticmethod
+ def _build_key(**kwargs):
+ key = "0"
+ for k in sorted(kwargs.keys()):
+ if isinstance(kwargs[k], models.Model):
+ key += "-%d" % kwargs[k].id
+ else:
+ key += "-%s" % str(kwargs[k])
+ return key
+
+
+ def _cached_get_or_create(self, clazz, **kwargs):
+ """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
+ database through any other means.
+ """
+
+ assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ created = False
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.create(**kwargs)
+ created = True
+
+ return (vars(self)[dictname][key], created)
+
+
+ def _cached_get(self, clazz, **kwargs):
+ """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
+ """
+ assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.get(**kwargs)
+
+ return vars(self)[dictname][key]
def create_build_object(self, build_info, brbe):
assert 'machine' in build_info
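The _cached_get_or_create()/_cached_get() pair above memoizes per model class. A standalone sketch of the same idea, simplified (plain classes stand in for Django models, and the Model-instance key handling is omitted):

    class CachedFactory(object):
        def __init__(self):
            self.caches = {}

        def get_or_create(self, clazz, **kwargs):
            # one dict per class, keyed by the sorted kwargs, so repeated
            # lookups during a build never hit the backing store twice
            cache = self.caches.setdefault(clazz.__name__, {})
            key = "0" + "".join("-%s" % kwargs[k] for k in sorted(kwargs))
            created = key not in cache
            if created:
                cache[key] = clazz(**kwargs)  # stands in for clazz.objects.create
            return cache[key], created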
@@ -65,13 +122,18 @@ class ORMWrapper(object):
build_name=build_info['build_name'],
bitbake_version=build_info['bitbake_version'])
+ logger.debug(1, "buildinfohelper: build is created %s" % build)
if brbe is not None:
+ logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")
+
buildrequest = BuildRequest.objects.get(pk = br)
+ buildrequest.build = build
+ buildrequest.save()
+
build.project_id = buildrequest.project_id
build.save()
-
return build
def create_target_objects(self, target_info):
@@ -83,7 +145,7 @@ class ORMWrapper(object):
tgt_object = Target.objects.create( build = target_info['build'],
target = tgt_name,
is_image = False,
- );
+ )
targets.append(tgt_object)
return targets
@@ -103,8 +165,7 @@ class ORMWrapper(object):
build.outcome = outcome
build.save()
- def update_target_object(self, target, license_manifest_path):
-
+ def update_target_set_license_manifest(self, target, license_manifest_path):
target.license_manifest_path = license_manifest_path
target.save()
@@ -113,39 +174,47 @@ class ORMWrapper(object):
assert 'recipe' in task_information
assert 'task_name' in task_information
- task_object, created = Task.objects.get_or_create(
- build=task_information['build'],
- recipe=task_information['recipe'],
- task_name=task_information['task_name'],
- )
-
- if must_exist and created:
- task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
- task_object.delete()
- raise NotExisting("Task object created when expected to exist", task_information)
+ # we use must_exist info for database look-up optimization
+ task_object, created = self._cached_get_or_create(Task,
+ build=task_information['build'],
+ recipe=task_information['recipe'],
+ task_name=task_information['task_name']
+ )
+ if created and must_exist:
+ task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
+ raise NotExisting("Task object created when expected to exist", task_information)
+ object_changed = False
for v in vars(task_object):
if v in task_information.keys():
- vars(task_object)[v] = task_information[v]
+ if vars(task_object)[v] != task_information[v]:
+ vars(task_object)[v] = task_information[v]
+ object_changed = True
- # update setscene-related information
- if 1 == Task.objects.related_setscene(task_object).count():
- if task_object.outcome == Task.OUTCOME_COVERED:
- task_object.outcome = Task.OUTCOME_CACHED
+ # update setscene-related information if the task has a setscene
+ if task_object.outcome == Task.OUTCOME_COVERED and 1 == task_object.get_related_setscene().count():
+ task_object.outcome = Task.OUTCOME_CACHED
+ object_changed = True
outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
task_object.sstate_result = Task.SSTATE_RESTORED
+ object_changed = True
elif outcome_task_setscene == Task.OUTCOME_FAILED:
task_object.sstate_result = Task.SSTATE_FAILED
+ object_changed = True
# mark down duration if we have a start time and a current time
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
duration = task_information['end_time'] - task_information['start_time']
task_object.elapsed_time = duration
+ object_changed = True
+ del task_information['start_time']
+ del task_information['end_time']
- task_object.save()
+ if object_changed:
+ task_object.save()
return task_object
@@ -153,20 +222,22 @@ class ORMWrapper(object):
assert 'layer_version' in recipe_information
assert 'file_path' in recipe_information
+ if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
+ recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")
- recipe_object, created = Recipe.objects.get_or_create(
- layer_version=recipe_information['layer_version'],
- file_path=recipe_information['file_path'])
-
- if must_exist and created:
- recipe_object.delete()
+ recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
+ file_path=recipe_information['file_path'])
+ if created and must_exist:
raise NotExisting("Recipe object created when expected to exist", recipe_information)
+ object_changed = False
for v in vars(recipe_object):
if v in recipe_information.keys():
+ object_changed = True
vars(recipe_object)[v] = recipe_information[v]
- recipe_object.save()
+ if object_changed:
+ recipe_object.save()
return recipe_object
@@ -185,19 +256,53 @@ class ORMWrapper(object):
priority = layer_version_information['priority']
)
+ self.layer_version_objects.append(layer_version_object)
+
return layer_version_object
- def get_update_layer_object(self, layer_information):
+ def get_update_layer_object(self, layer_information, brbe):
assert 'name' in layer_information
assert 'local_path' in layer_information
assert 'layer_index_url' in layer_information
- layer_object, created = Layer.objects.get_or_create(
+ if brbe is None:
+ layer_object, created = Layer.objects.get_or_create(
name=layer_information['name'],
local_path=layer_information['local_path'],
layer_index_url=layer_information['layer_index_url'])
+ return layer_object
+ else:
+ # we are under managed mode; we must match the layer used in the Project Layer
+ from bldcontrol.models import BuildEnvironment, BuildRequest
+ br_id, be_id = brbe.split(":")
+
+ # find layer by checkout path;
+ from bldcontrol import bbcontroller
+ bc = bbcontroller.getBuildEnvironmentController(pk = be_id)
+
+ # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
+            # but we can only match on the layer name, so the worst that can happen is a mis-identification of the layer, not a total failure
+
+ # note that this is different
+ buildrequest = BuildRequest.objects.get(pk = br_id)
+ for brl in buildrequest.brlayer_set.all():
+ localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
+ # we get a relative path, unless running in HEAD mode where the path is absolute
+ if not localdirname.startswith("/"):
+ localdirname = os.path.join(bc.be.sourcedir, localdirname)
+ #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path']))
+ if localdirname.startswith(layer_information['local_path']):
+ # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build()
+ #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname))
+ for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name):
+ if pl.layercommit.layer.vcs_url == brl.giturl :
+ layer = pl.layercommit.layer
+ layer.local_path = layer_information['local_path']
+ layer.save()
+ return layer
+
+ raise NotExisting("Unidentified layer %s" % pformat(layer_information))
- return layer_object
def save_target_file_information(self, build_obj, target_obj, filedata):
assert isinstance(build_obj, Build)
@@ -229,7 +334,7 @@ class ORMWrapper(object):
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
if len(parent_path) == 0:
parent_path = "/"
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
@@ -263,7 +368,7 @@ class ORMWrapper(object):
permission = permission,
owner = user,
group = group)
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj.directory = parent_obj
tf_obj.save()
@@ -318,8 +423,7 @@ class ORMWrapper(object):
searchname = pkgpnmap[p]['OPKGN']
packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
- if created:
- # package was not build in the current build, but
+            if created or packagedict[p]['object'].size == -1: # save the data any way we can, not just if it was not created here; bug [YOCTO #6887]
# fill in everything we can from the runtime-reverse package data
try:
packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -333,11 +437,14 @@ class ORMWrapper(object):
packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
# no files recorded for this package, so save files info
+ packagefile_objects = []
for targetpath in pkgpnmap[p]['FILES_INFO']:
targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
- Package_File.objects.create( package = packagedict[p]['object'],
+ packagefile_objects.append(Package_File( package = packagedict[p]['object'],
path = targetpath,
- size = targetfilesize)
+ size = targetfilesize))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
except KeyError as e:
errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
@@ -347,6 +454,7 @@ class ORMWrapper(object):
Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
+ packagedeps_objs = []
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -354,19 +462,32 @@ class ORMWrapper(object):
elif deptype == 'recommends':
tdeptype = Package_Dependency.TYPE_TRECOMMENDS
- Package_Dependency.objects.create( package = packagedict[p]['object'],
+ packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
dep_type = tdeptype,
- target = target_obj);
+ target = target_obj))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
if (len(errormsg) > 0):
- raise Exception(errormsg)
+ logger.warn("buildinfohelper: target_package_info could not identify recipes: \n%s" % errormsg)
def save_target_image_file_information(self, target_obj, file_name, file_size):
target_image_file = Target_Image_File.objects.create( target = target_obj,
file_name = file_name,
file_size = file_size)
- target_image_file.save()
+
+ def save_artifact_information(self, build_obj, file_name, file_size):
+ # we skip the image files from other builds
+ if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ # do not update artifacts found in other builds
+ if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
def create_logmessage(self, log_information):
assert 'build' in log_information
@@ -408,10 +529,13 @@ class ORMWrapper(object):
bp_object.save()
# save any attached file information
+ packagefile_objects = []
for path in package_info['FILES_INFO']:
- fo = Package_File.objects.create( package = bp_object,
+ packagefile_objects.append(Package_File( package = bp_object,
path = path,
- size = package_info['FILES_INFO'][path] )
+ size = package_info['FILES_INFO'][path] ))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
def _po_byname(p):
pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -420,39 +544,44 @@ class ORMWrapper(object):
pkg.save()
return pkg
+ packagedeps_objs = []
# save soft dependency information
if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
for p in bb.utils.explode_deps(package_info['RDEPENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
for p in bb.utils.explode_deps(package_info['RPROVIDES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
if 'RREPLACES' in package_info and package_info['RREPLACES']:
for p in bb.utils.explode_deps(package_info['RREPLACES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
return bp_object
def save_build_variables(self, build_obj, vardump):
assert isinstance(build_obj, Build)
+ helptext_objects = []
for k in vardump:
- desc = vardump[k]['doc'];
+ desc = vardump[k]['doc']
if desc is None:
var_words = [word for word in k.split('_')]
root_var = "_".join([word for word in var_words if word.isupper()])
@@ -460,25 +589,31 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
- if desc:
- helptext_obj = HelpText.objects.create(build=build_obj,
+ if len(desc):
+ helptext_objects.append(HelpText(build=build_obj,
area=HelpText.VARIABLE,
key=k,
- text=desc)
+ text=desc))
if not bool(vardump[k]['func']):
- value = vardump[k]['v'];
+ value = vardump[k]['v']
if value is None:
value = ''
variable_obj = Variable.objects.create( build = build_obj,
variable_name = k,
variable_value = value,
description = desc)
+
+ varhist_objects = []
for vh in vardump[k]['history']:
if not 'documentation.conf' in vh['file']:
- VariableHistory.objects.create( variable = variable_obj,
+ varhist_objects.append(VariableHistory( variable = variable_obj,
file_name = vh['file'],
line_number = vh['line'],
- operation = vh['op'])
+ operation = vh['op']))
+ if len(varhist_objects):
+ VariableHistory.objects.bulk_create(varhist_objects)
+
+ HelpText.objects.bulk_create(helptext_objects)
class MockEvent: pass # sometimes we mock an event, declare it here
@@ -495,11 +630,16 @@ class BuildInfoHelper(object):
self.internal_state = {}
self.internal_state['taskdata'] = {}
self.task_order = 0
+ self.autocommit_step = 1
self.server = server
+ # we use manual transactions if the database doesn't autocommit on us
+ if not connection.features.autocommits_when_autocommit_is_off:
+ transaction.set_autocommit(False)
self.orm_wrapper = ORMWrapper()
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
+ logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
def _configure_django(self):
@@ -544,19 +684,47 @@ class BuildInfoHelper(object):
assert path.startswith("/")
assert 'build' in self.internal_state
- def _slkey(layer_version):
- assert isinstance(layer_version, Layer_Version)
- return len(layer_version.layer.local_path)
+ if self.brbe is None:
+ def _slkey_interactive(layer_version):
+ assert isinstance(layer_version, Layer_Version)
+ return len(layer_version.layer.local_path)
- # Heuristics: we always match recipe to the deepest layer path that
- # we can match to the recipe file path
- for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
- if (path.startswith(bl.layer.local_path)):
- return bl
+ # Heuristics: we always match recipe to the deepest layer path in the discovered layers
+ for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
+ # we can match to the recipe file path
+ if path.startswith(lvo.layer.local_path):
+ return lvo
- #TODO: if we get here, we didn't read layers correctly
- assert False
- return None
+ else:
+ br_id, be_id = self.brbe.split(":")
+ from bldcontrol.bbcontroller import getBuildEnvironmentController
+ from bldcontrol.models import BuildRequest
+ bc = getBuildEnvironmentController(pk = be_id)
+
+ def _slkey_managed(layer_version):
+ return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)
+
+ # Heuristics: we match the path to where the layers have been checked out
+ for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
+ localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath)
+ # we get a relative path, unless running in HEAD mode where the path is absolute
+ if not localdirname.startswith("/"):
+ localdirname = os.path.join(bc.be.sourcedir, localdirname)
+ if path.startswith(localdirname):
+ #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
+ # we matched the BRLayer, but we need the layer_version that generated this br
+ for lvo in self.orm_wrapper.layer_version_objects:
+ if brl.name == lvo.layer.name:
+ return lvo
+
+ #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
+ logger.error("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))
+
+ #mockup the new layer
+ unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", local_path="/", layer_index_url="")
+ unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
+
+ return unknown_layer_version_obj
def _get_recipe_information_from_taskfile(self, taskfile):
localfilepath = taskfile.split(":")[-1]
@@ -599,13 +767,25 @@ class BuildInfoHelper(object):
################################
## external available methods to store information
+ @staticmethod
+ def _get_data_from_event(event):
+ evdata = None
+ if '_localdata' in vars(event):
+ evdata = event._localdata
+ elif 'data' in vars(event):
+ evdata = event.data
+ else:
+ raise Exception("Event with neither _localdata or data properties")
+ return evdata
def store_layer_info(self, event):
- assert '_localdata' in vars(event)
- layerinfos = event._localdata
+ layerinfos = BuildInfoHelper._get_data_from_event(event)
self.internal_state['lvs'] = {}
for layer in layerinfos:
- self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']
+ try:
+ self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
+ except NotExisting as nee:
+ logger.warn("buildinfohelper: cannot identify layer exception:%s " % nee)
def store_started_build(self, event):
@@ -617,10 +797,13 @@ class BuildInfoHelper(object):
self.internal_state['build'] = build_obj
# save layer version information for this build
- for layer_obj in self.internal_state['lvs']:
- self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
+ if not 'lvs' in self.internal_state:
+ logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
+ else:
+ for layer_obj in self.internal_state['lvs']:
+ self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
- del self.internal_state['lvs']
+ del self.internal_state['lvs']
# create target information
target_information = {}
@@ -630,20 +813,27 @@ class BuildInfoHelper(object):
self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
# Save build configuration
- self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])
+ data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
+ self.orm_wrapper.save_build_variables(build_obj, data)
return self.brbe
-
def update_target_image_file(self, event):
image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
+ evdata = BuildInfoHelper._get_data_from_event(event)
+
for t in self.internal_state['targets']:
if t.is_image == True:
- output_files = list(event._localdata.viewkeys())
+ output_files = list(evdata.viewkeys())
for output in output_files:
- if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
- self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output])
+ if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
+ self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
+
+ def update_artifact_image_file(self, event):
+ evdata = BuildInfoHelper._get_data_from_event(event)
+ for artifact_path in evdata.keys():
+ self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
def update_build_information(self, event, errors, warnings, taskfailures):
if 'build' in self.internal_state:
@@ -651,12 +841,12 @@ class BuildInfoHelper(object):
def store_license_manifest_path(self, event):
- deploy_dir = event._localdata['deploy_dir']
- image_name = event._localdata['image_name']
- path = deploy_dir + "/licenses/" + image_name + "/"
+ deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
+ image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
+ path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
for target in self.internal_state['targets']:
if target.target in image_name:
- self.orm_wrapper.update_target_object(target, path)
+ self.orm_wrapper.update_target_set_license_manifest(target, path)
def store_started_task(self, event):
@@ -700,14 +890,21 @@ class BuildInfoHelper(object):
def store_tasks_stats(self, event):
- for (taskfile, taskname, taskstats, recipename) in event._localdata:
+ for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
localfilepath = taskfile.split(":")[-1]
assert localfilepath.startswith("/")
recipe_information = self._get_recipe_information_from_taskfile(taskfile)
- recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
+ try:
+ if recipe_information['file_path'].startswith(recipe_information['layer_version'].layer.local_path):
+ recipe_information['file_path'] = recipe_information['file_path'][len(recipe_information['layer_version'].layer.local_path):].lstrip("/")
+
+ recipe_object = Recipe.objects.get(layer_version = recipe_information['layer_version'],
file_path__endswith = recipe_information['file_path'],
name = recipename)
+ except Recipe.DoesNotExist:
+ logger.error("Could not find recipe for recipe_information %s name %s" % (pformat(recipe_information), recipename))
+ raise
task_information = {}
task_information['build'] = self.internal_state['build']
@@ -715,6 +912,8 @@ class BuildInfoHelper(object):
task_information['task_name'] = taskname
task_information['cpu_usage'] = taskstats['cpu_usage']
task_information['disk_io'] = taskstats['disk_io']
+ if 'elapsed_time' in taskstats:
+ task_information['elapsed_time'] = taskstats['elapsed_time']
task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist
def update_and_store_task(self, event):
@@ -768,11 +967,18 @@ class BuildInfoHelper(object):
task_information['outcome'] = Task.OUTCOME_FAILED
del self.internal_state['taskdata'][identifier]
+ if not connection.features.autocommits_when_autocommit_is_off:
+ # we force a sync point here, to get the progress bar to show
+ if self.autocommit_step % 3 == 0:
+ transaction.set_autocommit(True)
+ transaction.set_autocommit(False)
+ self.autocommit_step += 1
+
self.orm_wrapper.get_update_task_object(task_information, True) # must exist
def store_missed_state_tasks(self, event):
- for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -790,7 +996,7 @@ class BuildInfoHelper(object):
self.orm_wrapper.get_update_task_object(task_information)
- for (fn, taskname, taskhash, sstatefile) in event._localdata['found']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -806,15 +1012,14 @@ class BuildInfoHelper(object):
def store_target_package_data(self, event):
- assert '_localdata' in vars(event)
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
try:
- pkgdata = event._localdata['pkgdata']
- imgdata = event._localdata['imgdata'][target.target]
+ pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
+ imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
- filedata = event._localdata['filedata'][target.target]
+ filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError:
# we must have not got the data for this image, nothing to save
@@ -850,14 +1055,29 @@ class BuildInfoHelper(object):
recipe_info = {}
recipe_info['name'] = pn
- recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
recipe_info['layer_version'] = layer_version_obj
- recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
- recipe_info['license'] = event._depgraph['pn'][pn]['license']
- recipe_info['description'] = event._depgraph['pn'][pn]['description']
- recipe_info['section'] = event._depgraph['pn'][pn]['section']
- recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
- recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
+ if 'version' in event._depgraph['pn'][pn]:
+ recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
+
+ if 'summary' in event._depgraph['pn'][pn]:
+ recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
+
+ if 'license' in event._depgraph['pn'][pn]:
+ recipe_info['license'] = event._depgraph['pn'][pn]['license']
+
+ if 'description' in event._depgraph['pn'][pn]:
+ recipe_info['description'] = event._depgraph['pn'][pn]['description']
+
+ if 'section' in event._depgraph['pn'][pn]:
+ recipe_info['section'] = event._depgraph['pn'][pn]['section']
+
+ if 'homepage' in event._depgraph['pn'][pn]:
+ recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
+
+ if 'bugtracker' in event._depgraph['pn'][pn]:
+ recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
recipe_info['file_path'] = file_name
recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
recipe.is_image = False
@@ -879,20 +1099,22 @@ class BuildInfoHelper(object):
# save recipe dependency
# buildtime
+ recipedeps_objects = []
for recipe in event._depgraph['depends']:
try:
target = self.internal_state['recipes'][recipe]
for dep in event._depgraph['depends'][recipe]:
dependency = self.internal_state['recipes'][dep]
- Recipe_Dependency.objects.get_or_create( recipe = target,
- depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
+ recipedeps_objects.append(Recipe_Dependency( recipe = target,
+ depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
except KeyError as e:
if e not in assume_provided and not str(e).startswith("virtual/"):
errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
+ Recipe_Dependency.objects.bulk_create(recipedeps_objects)
# save all task information
def _save_a_task(taskdesc):
- spec = re.split(r'\.', taskdesc);
+ spec = re.split(r'\.', taskdesc)
pn = ".".join(spec[0:-1])
taskname = spec[-1]
e = event
@@ -909,6 +1131,7 @@ class BuildInfoHelper(object):
tasks[taskdesc] = _save_a_task(taskdesc)
# create dependencies between tasks
+ taskdeps_objects = []
for taskdesc in event._depgraph['tdepends']:
target = tasks[taskdesc]
for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -917,29 +1140,32 @@ class BuildInfoHelper(object):
dep = _save_a_task(taskdep)
else:
dep = tasks[taskdep]
- Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
+ taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
+ Task_Dependency.objects.bulk_create(taskdeps_objects)
if (len(errormsg) > 0):
- raise Exception(errormsg)
+ logger.warn("buildinfohelper: dependency info not identify recipes: \n%s" % errormsg)
def store_build_package_information(self, event):
- assert '_localdata' in vars(event)
- package_info = event._localdata
+ package_info = BuildInfoHelper._get_data_from_event(event)
self.orm_wrapper.save_build_package_information(self.internal_state['build'],
package_info,
self.internal_state['recipes'],
)
- def _store_build_done(self):
+ def _store_build_done(self, errorcode):
br_id, be_id = self.brbe.split(":")
from bldcontrol.models import BuildEnvironment, BuildRequest
be = BuildEnvironment.objects.get(pk = be_id)
be.lock = BuildEnvironment.LOCK_LOCK
be.save()
br = BuildRequest.objects.get(pk = br_id)
- br.state = BuildRequest.REQ_COMPLETED
- br.build = self.internal_state['build']
+ if errorcode == 0:
+ # request archival of the project artifacts
+ br.state = BuildRequest.REQ_ARCHIVE
+ else:
+ br.state = BuildRequest.REQ_FAILED
br.save()
@@ -947,8 +1173,19 @@ class BuildInfoHelper(object):
mockevent = MockEvent()
mockevent.levelno = format.ERROR
mockevent.msg = text
+ mockevent.pathname = '-- None'
+ mockevent.lineno = -1
self.store_log_event(mockevent)
+ def store_log_exception(self, text, backtrace = ""):
+ mockevent = MockEvent()
+ mockevent.levelno = -1
+ mockevent.msg = text
+ mockevent.pathname = backtrace
+ mockevent.lineno = -1
+ self.store_log_event(mockevent)
+
+
def store_log_event(self, event):
if event.levelno < format.WARNING:
return
@@ -963,40 +1200,51 @@ class BuildInfoHelper(object):
self.internal_state['backlog'].append(event)
else: # we're under Toaster control, post the errors to the build request
from bldcontrol.models import BuildRequest, BRError
- br, be = brbe.split(":")
+ br, be = self.brbe.split(":")
buildrequest = BuildRequest.objects.get(pk = br)
brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
return
if 'build' in self.internal_state and 'backlog' in self.internal_state:
+ # if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
- print " Saving stored event ", tempevent
+ logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
self.store_log_event(tempevent)
else:
+ logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
del self.internal_state['backlog']
log_information = {}
log_information['build'] = self.internal_state['build']
- if event.levelno >= format.ERROR:
- log_information['level'] = event.levelno
+ if event.levelno == format.ERROR:
+ log_information['level'] = LogMessage.ERROR
elif event.levelno == format.WARNING:
log_information['level'] = LogMessage.WARNING
- elif event.levelno == format.INFO:
- log_information['level'] = LogMessage.INFO
+ elif event.levelno == -1: # toaster self-logging
+ log_information['level'] = -1
else:
- log_information['level'] = event.levelno
+ log_information['level'] = LogMessage.INFO
log_information['message'] = event.msg
log_information['pathname'] = event.pathname
log_information['lineno'] = event.lineno
self.orm_wrapper.create_logmessage(log_information)
- def close(self):
+ def close(self, errorcode):
if self.brbe is not None:
- buildinfohelper._store_build_done()
+ self._store_build_done(errorcode)
if 'backlog' in self.internal_state:
- for event in self.internal_state['backlog']:
- print "NOTE: Unsaved log: ", event.msg
+ if 'build' in self.internal_state:
+ # we save missed events in the database for the current build
+ tempevent = self.internal_state['backlog'].pop()
+ self.store_log_event(tempevent)
+ else:
+ # we have no build, and we still have events; something amazingly wrong happened
+ for event in self.internal_state['backlog']:
+ logger.error("UNSAVED log: %s", event.msg)
+
+ if not connection.features.autocommits_when_autocommit_is_off:
+ transaction.set_autocommit(True)
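
The get_or_create() to bulk_create() changes above replace one database round-trip per dependency edge with a single batched INSERT. A minimal sketch of the pattern, using Toaster's Recipe_Dependency model from the hunks above and a hypothetical iterable 'edges' of already-resolved (recipe, dependency) object pairs:

    # Collect unsaved model instances first, then write them in one query.
    # bulk_create() skips per-object save() signals, which is acceptable
    # here because the rows are plain dependency edges.
    recipedeps = []
    for recipe_obj, dep_obj in edges:  # 'edges' is a hypothetical input
        recipedeps.append(Recipe_Dependency(recipe=recipe_obj,
                                            depends_on=dep_obj,
                                            dep_type=Recipe_Dependency.TYPE_DEPENDS))
    Recipe_Dependency.objects.bulk_create(recipedeps)

The trade-off is that duplicates are no longer filtered the way get_or_create() did, so the caller must ensure each edge is produced only once.
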
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
index 5d13b5b79e..240aafc3e7 100644
--- a/bitbake/lib/bb/ui/depexp.py
+++ b/bitbake/lib/bb/ui/depexp.py
@@ -17,6 +17,7 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import sys
import gobject
import gtk
import Queue
@@ -215,6 +216,12 @@ def main(server, eventHandler, params):
print("XMLRPC Fault getting commandline:\n %s" % x)
return
+ try:
+ gtk.init_check()
+ except RuntimeError:
+ sys.stderr.write("Please set DISPLAY variable before running this command \n")
+ return
+
shutdown = 0
gtkgui = gtkthread(shutdown)
@@ -236,7 +243,7 @@ def main(server, eventHandler, params):
try:
event = eventHandler.waitEvent(0.25)
if gtkthread.quit.isSet():
- _, error = server.runCommand(["stateStop"])
+ _, error = server.runCommand(["stateForceShutdown"])
if error:
print('Unable to cleanly stop: %s' % error)
break
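
The gtk.init_check() guard added above lets depexp exit with a readable message instead of aborting when no X display is available. A standalone sketch of the same guard (PyGTK raises RuntimeError from init_check() when the display cannot be opened):

    import sys
    import gtk

    def have_display():
        # gtk.init_check() raises RuntimeError if no display can be opened,
        # unlike gtk.init(), which terminates the process outright.
        try:
            gtk.init_check()
        except RuntimeError:
            sys.stderr.write("Please set the DISPLAY variable before running this command\n")
            return False
        return True
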
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
index 9e58b31727..2bee242eb0 100644
--- a/bitbake/lib/bb/ui/knotty.py
+++ b/bitbake/lib/bb/ui/knotty.py
@@ -271,7 +271,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
server.terminateServer()
return
- if consolelogfile and not params.options.show_environment:
+ if consolelogfile and not params.options.show_environment and not params.options.show_versions:
bb.utils.mkdirhier(os.path.dirname(consolelogfile))
conlogformat = bb.msg.BBLogFormatter(format_str)
consolelog = logging.FileHandler(consolelogfile)
@@ -284,7 +284,7 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if not params.observe_only:
params.updateFromServer(server)
- params.updateToServer(server)
+ params.updateToServer(server, os.environ.copy())
cmdline = params.parseActions()
if not cmdline:
print("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
@@ -536,24 +536,29 @@ def main(server, eventHandler, params, tf = TerminalFilter):
if not params.observe_only:
_, error = server.runCommand(["stateForceShutdown"])
main.shutdown = 2
- summary = ""
- if taskfailures:
- summary += pluralise("\nSummary: %s task failed:",
- "\nSummary: %s tasks failed:", len(taskfailures))
- for failure in taskfailures:
- summary += "\n %s" % failure
- if warnings:
- summary += pluralise("\nSummary: There was %s WARNING message shown.",
- "\nSummary: There were %s WARNING messages shown.", warnings)
- if return_value and errors:
- summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
- "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
- if summary:
- print(summary)
-
- if interrupted:
- print("Execution was interrupted, returning a non-zero exit code.")
- if return_value == 0:
- return_value = 1
+ try:
+ summary = ""
+ if taskfailures:
+ summary += pluralise("\nSummary: %s task failed:",
+ "\nSummary: %s tasks failed:", len(taskfailures))
+ for failure in taskfailures:
+ summary += "\n %s" % failure
+ if warnings:
+ summary += pluralise("\nSummary: There was %s WARNING message shown.",
+ "\nSummary: There were %s WARNING messages shown.", warnings)
+ if return_value and errors:
+ summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.",
+ "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors)
+ if summary:
+ print(summary)
+
+ if interrupted:
+ print("Execution was interrupted, returning a non-zero exit code.")
+ if return_value == 0:
+ return_value = 1
+ except IOError as e:
+ import errno
+ if e.errno == errno.EPIPE:
+ # a broken pipe (the output consumer exited) is a normal early exit
+ pass
+ else:
+ raise
return return_value
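
Wrapping the summary output in try/except means a consumer that closes the pipe early (for example 'bitbake ... | head') no longer triggers a backtrace. The idiom in isolation, as a sketch:

    import errno

    def print_ignoring_broken_pipe(text):
        # In Python 2, writing to a closed pipe raises IOError with
        # errno.EPIPE; treat that as a normal early exit and re-raise
        # anything else.
        try:
            print(text)
        except IOError as e:
            if e.errno != errno.EPIPE:
                raise
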
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
index b6c20ec388..9589a77d75 100644
--- a/bitbake/lib/bb/ui/ncurses.py
+++ b/bitbake/lib/bb/ui/ncurses.py
@@ -361,13 +361,13 @@ class NCursesUI:
shutdown = shutdown + 1
pass
-def main(server, eventHandler):
+def main(server, eventHandler, params):
if not os.isatty(sys.stdout.fileno()):
print("FATAL: Unable to run 'ncurses' UI without a TTY.")
return
ui = NCursesUI()
try:
- curses.wrapper(ui.main, server, eventHandler)
+ curses.wrapper(ui.main, server, eventHandler, params)
except:
import traceback
traceback.print_exc()
diff --git a/bitbake/lib/bb/ui/toasterui.py b/bitbake/lib/bb/ui/toasterui.py
index d81b8a989c..f0f853be14 100644
--- a/bitbake/lib/bb/ui/toasterui.py
+++ b/bitbake/lib/bb/ui/toasterui.py
@@ -41,7 +41,7 @@ import sys
import time
import xmlrpclib
-featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING]
+featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES, bb.cooker.CookerFeatures.SEND_DEPENDS_TREE, bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING, bb.cooker.CookerFeatures.SEND_SANITYEVENTS]
logger = logging.getLogger("BitBake")
interactive = sys.stdout.isatty()
@@ -58,18 +58,14 @@ def _log_settings_from_server(server):
if error:
logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
raise BaseException(error)
- return includelogs, loglines
-
-def main(server, eventHandler, params ):
+ consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
+ if error:
+ logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
+ raise BaseException(error)
+ return includelogs, loglines, consolelogfile
- includelogs, loglines = _log_settings_from_server(server)
- # verify and warn
- build_history_enabled = True
- inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
- if not "buildhistory" in inheritlist.split(" "):
- logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
- build_history_enabled = False
+def main(server, eventHandler, params ):
helper = uihelper.BBUIHelper()
@@ -80,6 +76,16 @@ def main(server, eventHandler, params ):
console.setFormatter(format)
logger.addHandler(console)
+ includelogs, loglines, consolelogfile = _log_settings_from_server(server)
+
+ # verify and warn
+ build_history_enabled = True
+ inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
+
+ if not "buildhistory" in inheritlist.split(" "):
+ logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
+ build_history_enabled = False
+
if not params.observe_only:
logger.error("ToasterUI can only work in observer mode")
return
@@ -95,6 +101,16 @@ def main(server, eventHandler, params ):
buildinfohelper = BuildInfoHelper(server, build_history_enabled)
+ if buildinfohelper.brbe is not None and consolelogfile:
+ # if we are under managed mode we have no other UI and we need to write our own file
+ bb.utils.mkdirhier(os.path.dirname(consolelogfile))
+ conlogformat = bb.msg.BBLogFormatter(format_str)
+ consolelog = logging.FileHandler(consolelogfile)
+ bb.msg.addDefaultlogFilter(consolelog)
+ consolelog.setFormatter(conlogformat)
+ logger.addHandler(consolelog)
+
+
while True:
try:
event = eventHandler.waitEvent(0.25)
@@ -114,8 +130,12 @@ def main(server, eventHandler, params ):
if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
buildinfohelper.update_and_store_task(event)
+ logger.warn("Logfile for task %s" % event.logfile)
continue
+ if isinstance(event, bb.build.TaskBase):
+ logger.info(event._message)
+
if isinstance(event, bb.event.LogExecTTY):
logger.warn(event.msg)
continue
@@ -161,7 +181,12 @@ def main(server, eventHandler, params ):
if isinstance(event, bb.event.CacheLoadCompleted):
continue
if isinstance(event, bb.event.MultipleProviders):
+ logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
+ event._item,
+ ", ".join(event._candidates))
+ logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
continue
+
if isinstance(event, bb.event.NoProvider):
return_value = 1
errors = errors + 1
@@ -219,6 +244,7 @@ def main(server, eventHandler, params ):
if isinstance(event, (bb.command.CommandCompleted,
bb.command.CommandFailed,
bb.command.CommandExit)):
+ errorcode = 0
if (isinstance(event, bb.command.CommandFailed)):
event.levelno = format.ERROR
event.msg = "Command Failed " + event.error
@@ -226,18 +252,21 @@ def main(server, eventHandler, params ):
event.lineno = 0
buildinfohelper.store_log_event(event)
errors += 1
+ errorcode = 1
+ logger.error("Command execution failed: %s", event.error)
buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
- buildinfohelper.close()
-
+ buildinfohelper.close(errorcode)
+ # mark the log output; controllers may kill the toasterUI after seeing this log
+ logger.info("ToasterUI build done")
# we start a new build info
if buildinfohelper.brbe is not None:
- print "we are under BuildEnvironment management - after the build, we exit"
+ logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
server.terminateServer()
else:
- print "prepared for new build"
+ logger.debug(1, "ToasterUI prepared for new build")
errors = 0
warnings = 0
taskfailures = []
@@ -258,8 +287,12 @@ def main(server, eventHandler, params ):
buildinfohelper.store_missed_state_tasks(event)
elif event.type == "ImageFileSize":
buildinfohelper.update_target_image_file(event)
+ elif event.type == "ArtifactFileSize":
+ buildinfohelper.update_artifact_image_file(event)
elif event.type == "LicenseManifestPath":
buildinfohelper.store_license_manifest_path(event)
+ else:
+ logger.error("Unprocessed MetadataEvent %s " % str(event))
continue
if isinstance(event, bb.cooker.CookerExit):
@@ -292,9 +325,25 @@ def main(server, eventHandler, params ):
main.shutdown = 1
pass
except Exception as e:
- logger.error(e)
+ # print errors to log
import traceback
- traceback.print_exc()
+ from pprint import pformat
+ exception_data = traceback.format_exc()
+ logger.error("%s\n%s" % (e, exception_data))
+
+ exc_type, exc_value, tb = sys.exc_info()
+ if tb is not None:
+ curr = tb
+ while curr is not None:
+ logger.warn("Error data dump %s\n%s\n" % (traceback.format_tb(curr,1), pformat(curr.tb_frame.f_locals)))
+ curr = curr.tb_next
+
+ # save them to database, if possible; if it fails, we already logged to console.
+ try:
+ buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
+ except Exception as ce:
+ logger.error("CRITICAL - Failed to to save toaster exception to the database: %s" % str(ce))
+
pass
if interrupted:
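
The rewritten exception handler above walks the traceback and logs each frame's local variables, which is invaluable when toasterui runs headless under Toaster control. A minimal sketch of the traversal, assuming a configured 'logger':

    import sys
    import traceback
    from pprint import pformat

    def log_frame_locals(logger):
        # Walk the traceback of the exception currently being handled;
        # tb_next is None once the innermost frame is reached.
        _, _, tb = sys.exc_info()
        while tb is not None:
            logger.warn("Error data dump %s\n%s\n" %
                        (traceback.format_tb(tb, 1), pformat(tb.tb_frame.f_locals)))
            tb = tb.tb_next
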
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
index c6b100c840..7fc50c759a 100644
--- a/bitbake/lib/bb/ui/uievent.py
+++ b/bitbake/lib/bb/ui/uievent.py
@@ -106,7 +106,12 @@ class BBUIEventQueue:
self.server.timeout = 1
while not self.server.quit:
- self.server.handle_request()
+ try:
+ self.server.handle_request()
+ except Exception as e:
+ import traceback
+ logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc(e)))
+
self.server.server_close()
def system_quit( self ):
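
The try/except added above keeps the event-queue thread alive when a single request handler fails. The loop shape in isolation, assuming a SimpleXMLRPCServer-style object with a 'quit' flag like the one BBUIEventQueue uses:

    import traceback

    def serve_until_quit(server, logger):
        server.timeout = 1
        while not server.quit:
            try:
                # handle_request() services at most one request, or returns
                # after 'timeout' seconds if none arrives
                server.handle_request()
            except Exception as e:
                # log and keep serving; one bad request must not kill the UI
                logger.error("Exception while handling request: %s\n%s"
                             % (e, traceback.format_exc()))
        server.server_close()
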
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
index 670e592fe0..5ac9bcfbd4 100644
--- a/bitbake/lib/bb/utils.py
+++ b/bitbake/lib/bb/utils.py
@@ -53,6 +53,9 @@ def set_context(ctx):
# Context used in better_exec, eval
_context = clean_context()
+class VersionStringException(Exception):
+ """Exception raised when an invalid version specification is found"""
+
def explode_version(s):
r = []
alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
@@ -128,6 +131,28 @@ def vercmp_string(a, b):
tb = split_version(b)
return vercmp(ta, tb)
+def vercmp_string_op(a, b, op):
+ """
+ Compare two versions and check if the specified comparison operator matches the result of the comparison.
+ This function is fairly liberal about what operators it will accept since there are a variety of styles
+ depending on the context.
+ """
+ res = vercmp_string(a, b)
+ if op in ('=', '=='):
+ return res == 0
+ elif op == '<=':
+ return res <= 0
+ elif op == '>=':
+ return res >= 0
+ elif op in ('>', '>>'):
+ return res > 0
+ elif op in ('<', '<<'):
+ return res < 0
+ elif op == '!=':
+ return res != 0
+ else:
+ raise VersionStringException('Unsupported comparison operator "%s"' % op)
+
def explode_deps(s):
"""
Take an RDEPENDS style string of format:
@@ -188,6 +213,7 @@ def explode_dep_versions2(s):
i = i[1:]
else:
# This is an unsupported case!
+ raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
lastcmp = (i or "")
i = ""
i.strip()
@@ -522,7 +548,7 @@ def filter_environment(good_vars):
os.unsetenv(key)
del os.environ[key]
- if len(removed_vars):
+ if removed_vars:
logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
return removed_vars
@@ -893,3 +919,160 @@ def multiprocessingpool(*args, **kwargs):
return multiprocessing.Pool(*args, **kwargs)
+def exec_flat_python_func(func, *args, **kwargs):
+ """Execute a flat python function (defined with def funcname(args):...)"""
+ # Prepare a small piece of python code which calls the requested function
+ # To do this we need to prepare two things - a set of variables we can use to pass
+ # the values of arguments into the calling function, and the list of arguments for
+ # the function being called
+ context = {}
+ funcargs = []
+ # Handle unnamed arguments
+ aidx = 1
+ for arg in args:
+ argname = 'arg_%s' % aidx
+ context[argname] = arg
+ funcargs.append(argname)
+ aidx += 1
+ # Handle keyword arguments
+ context.update(kwargs)
+ funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
+ code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
+ comp = bb.utils.better_compile(code, '<string>', '<string>')
+ bb.utils.better_exec(comp, context, code, '<string>')
+ return context['retval']
+
+def edit_metadata_file(meta_file, variables, func):
+ """Edit a recipe or config file and modify one or more specified
+ variable values set in the file using a specified callback function.
+ The file is only written to if the value(s) actually change.
+ """
+ var_res = {}
+ for var in variables:
+ var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)
+
+ updated = False
+ varset_start = ''
+ newlines = []
+ in_var = None
+ full_value = ''
+
+ def handle_var_end():
+ (newvalue, indent, minbreak) = func(in_var, full_value)
+ if newvalue != full_value:
+ if isinstance(newvalue, list):
+ indentspc = ' ' * indent
+ if minbreak:
+ # First item on first line
+ if len(newvalue) == 1:
+ newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
+ else:
+ newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
+ for item in newvalue[1:]:
+ newlines.append('%s%s \\\n' % (indentspc, item))
+ newlines.append('%s"\n' % indentspc)
+ else:
+ # No item on first line
+ newlines.append('%s " \\\n' % varset_start)
+ for item in newvalue:
+ newlines.append('%s%s \\\n' % (indentspc, item))
+ newlines.append('%s"\n' % indentspc)
+ else:
+ newlines.append('%s "%s"\n' % (varset_start, newvalue))
+ return True
+ return False
+
+ with open(meta_file, 'r') as f:
+ for line in f:
+ if in_var:
+ value = line.rstrip()
+ full_value += value[:-1]
+ if value.endswith('"') or value.endswith("'"):
+ if handle_var_end():
+ updated = True
+ in_var = None
+ else:
+ matched = False
+ for (varname, var_re) in var_res.iteritems():
+ if var_re.match(line):
+ splitvalue = line.split('"', 1)
+ varset_start = splitvalue[0].rstrip()
+ value = splitvalue[1].rstrip()
+ if value.endswith('\\'):
+ value = value[:-1]
+ full_value = value
+ if value.endswith('"') or value.endswith("'"):
+ if handle_var_end():
+ updated = True
+ else:
+ in_var = varname
+ matched = True
+ break
+ if not matched:
+ newlines.append(line)
+ if updated:
+ with open(meta_file, 'w') as f:
+ f.writelines(newlines)
+
+def edit_bblayers_conf(bblayers_conf, add, remove):
+ """Edit bblayers.conf, adding and/or removing layers"""
+
+ import fnmatch
+
+ def remove_trailing_sep(pth):
+ if pth and pth[-1] == os.sep:
+ pth = pth[:-1]
+ return pth
+
+ def layerlist_param(value):
+ if not value:
+ return []
+ elif isinstance(value, list):
+ return [remove_trailing_sep(x) for x in value]
+ else:
+ return [remove_trailing_sep(value)]
+
+ notadded = []
+ notremoved = []
+
+ addlayers = layerlist_param(add)
+ removelayers = layerlist_param(remove)
+
+ # Need to use a list here because we can't set non-local variables from a callback in python 2.x
+ bblayercalls = []
+
+ def handle_bblayers(varname, origvalue):
+ bblayercalls.append(varname)
+ updated = False
+ bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
+ if removelayers:
+ for removelayer in removelayers:
+ matched = False
+ for layer in bblayers:
+ if fnmatch.fnmatch(layer, removelayer):
+ updated = True
+ matched = True
+ bblayers.remove(layer)
+ break
+ if not matched:
+ notremoved.append(removelayer)
+ if addlayers:
+ for addlayer in addlayers:
+ if addlayer not in bblayers:
+ updated = True
+ bblayers.append(addlayer)
+ else:
+ notadded.append(addlayer)
+
+ if updated:
+ return (bblayers, 2, False)
+ else:
+ return (origvalue, 2, False)
+
+ edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
+
+ if not bblayercalls:
+ raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
+
+ return (notadded, notremoved)
+
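
A brief usage sketch for two of the new utils helpers added above; the file and layer paths are hypothetical:

    import bb.utils

    # vercmp_string_op() maps an operator string onto the -1/0/1 result of
    # vercmp_string(); unknown operators raise VersionStringException.
    assert bb.utils.vercmp_string_op("1.2.3", "1.2", ">=")
    assert not bb.utils.vercmp_string_op("1.0", "1.0", "!=")

    # edit_bblayers_conf() returns the layers it could not add or remove,
    # so the caller can report them instead of failing outright.
    notadded, notremoved = bb.utils.edit_bblayers_conf(
        "conf/bblayers.conf",          # hypothetical path
        add="/home/user/meta-custom",  # hypothetical layer
        remove=None)
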