Diffstat (limited to 'bitbake/lib/bb/ui/buildinfohelper.py')
-rw-r--r--  bitbake/lib/bb/ui/buildinfohelper.py | 432
1 file changed, 315 insertions, 117 deletions
diff --git a/bitbake/lib/bb/ui/buildinfohelper.py b/bitbake/lib/bb/ui/buildinfohelper.py
index a0a2d80ffa..491fd1566d 100644
--- a/bitbake/lib/bb/ui/buildinfohelper.py
+++ b/bitbake/lib/bb/ui/buildinfohelper.py
@@ -26,12 +26,18 @@ os.environ["DJANGO_SETTINGS_MODULE"] = "toaster.toastermain.settings"
import toaster.toastermain.settings as toaster_django_settings
from toaster.orm.models import Build, Task, Recipe, Layer_Version, Layer, Target, LogMessage, HelpText
-from toaster.orm.models import Target_Image_File
+from toaster.orm.models import Target_Image_File, BuildArtifact
from toaster.orm.models import Variable, VariableHistory
from toaster.orm.models import Package, Package_File, Target_Installed_Package, Target_File
from toaster.orm.models import Task_Dependency, Package_Dependency
from toaster.orm.models import Recipe_Dependency
from bb.msg import BBLogFormatter as format
+from django.db import models
+import logging
+
+
+logger = logging.getLogger("BitBake")
+
class NotExisting(Exception):
pass
@@ -43,8 +49,57 @@ class ORMWrapper(object):
"""
def __init__(self):
+ self.layer_version_objects = []
+ self.task_objects = {}
+ self.recipe_objects = {}
pass
+ @staticmethod
+ def _build_key(**kwargs):
+ key = "0"
+ for k in sorted(kwargs.keys()):
+ if isinstance(kwargs[k], models.Model):
+ key += "-%d" % kwargs[k].id
+ else:
+ key += "-%s" % str(kwargs[k])
+ return key
+
+
+ def _cached_get_or_create(self, clazz, **kwargs):
+ """ This is a memory-cached get_or_create. We assume that the objects will not be created in the
+ database through any other means.
+ """
+
+ assert issubclass(clazz, models.Model), "_cached_get_or_create needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ created = False
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.create(**kwargs)
+ created = True
+
+ return (vars(self)[dictname][key], created)
+
+
+ def _cached_get(self, clazz, **kwargs):
+ """ This is a memory-cached get. We assume that the objects will not change in the database between gets.
+ """
+ assert issubclass(clazz, models.Model), "_cached_get needs to get the class as first argument"
+
+ key = ORMWrapper._build_key(**kwargs)
+ dictname = "objects_%s" % clazz.__name__
+
+ if not dictname in vars(self).keys():
+ vars(self)[dictname] = {}
+
+ if not key in vars(self)[dictname].keys():
+ vars(self)[dictname][key] = clazz.objects.get(**kwargs)
+
+ return vars(self)[dictname][key]
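
For context, a minimal standalone sketch of the caching idea behind _build_key() and _cached_get_or_create(): one dictionary per model class, keyed by the sorted kwargs, so a repeated lookup never goes back to the database. FakeManager below is a hypothetical stand-in for a Django manager and is not part of this patch.

    class FakeManager(object):
        def __init__(self):
            self.created = []
        def create(self, **kwargs):
            # record the insert and return a new "row" object
            self.created.append(kwargs)
            return dict(kwargs)

    def build_key(**kwargs):
        # mirrors ORMWrapper._build_key: a stable key from the sorted kwargs
        return "0" + "".join("-%s" % kwargs[k] for k in sorted(kwargs))

    def cached_get_or_create(cache, manager, **kwargs):
        key = build_key(**kwargs)
        if key not in cache:
            cache[key] = manager.create(**kwargs)
            return cache[key], True       # created a new row
        return cache[key], False          # served from the in-memory cache

    tasks = FakeManager()
    task_cache = {}                       # one cache dict per model class
    obj1, created1 = cached_get_or_create(task_cache, tasks, build=1, task_name="do_compile")
    obj2, created2 = cached_get_or_create(task_cache, tasks, build=1, task_name="do_compile")
    assert created1 and not created2 and obj1 is obj2 and len(tasks.created) == 1
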
def create_build_object(self, build_info, brbe):
assert 'machine' in build_info
@@ -65,13 +120,18 @@ class ORMWrapper(object):
build_name=build_info['build_name'],
bitbake_version=build_info['bitbake_version'])
+ logger.debug(1, "buildinfohelper: build is created %s" % build)
if brbe is not None:
+ logger.debug(1, "buildinfohelper: brbe is %s" % brbe)
from bldcontrol.models import BuildEnvironment, BuildRequest
br, be = brbe.split(":")
+
buildrequest = BuildRequest.objects.get(pk = br)
+ buildrequest.build = build
+ buildrequest.save()
+
build.project_id = buildrequest.project_id
build.save()
-
return build
def create_target_objects(self, target_info):
@@ -83,7 +143,7 @@ class ORMWrapper(object):
tgt_object = Target.objects.create( build = target_info['build'],
target = tgt_name,
is_image = False,
- );
+ )
targets.append(tgt_object)
return targets
@@ -103,8 +163,7 @@ class ORMWrapper(object):
build.outcome = outcome
build.save()
- def update_target_object(self, target, license_manifest_path):
-
+ def update_target_set_license_manifest(self, target, license_manifest_path):
target.license_manifest_path = license_manifest_path
target.save()
@@ -113,39 +172,47 @@ class ORMWrapper(object):
assert 'recipe' in task_information
assert 'task_name' in task_information
- task_object, created = Task.objects.get_or_create(
- build=task_information['build'],
- recipe=task_information['recipe'],
- task_name=task_information['task_name'],
- )
-
- if must_exist and created:
- task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
- task_object.delete()
- raise NotExisting("Task object created when expected to exist", task_information)
+ # we use must_exist info for database look-up optimization
+ task_object, created = self._cached_get_or_create(Task,
+ build=task_information['build'],
+ recipe=task_information['recipe'],
+ task_name=task_information['task_name']
+ )
+ if created and must_exist:
+ task_information['debug'] = "build id %d, recipe id %d" % (task_information['build'].pk, task_information['recipe'].pk)
+ raise NotExisting("Task object created when expected to exist", task_information)
+ object_changed = False
for v in vars(task_object):
if v in task_information.keys():
- vars(task_object)[v] = task_information[v]
+ if vars(task_object)[v] != task_information[v]:
+ vars(task_object)[v] = task_information[v]
+ object_changed = True
- # update setscene-related information
- if 1 == Task.objects.related_setscene(task_object).count():
- if task_object.outcome == Task.OUTCOME_COVERED:
- task_object.outcome = Task.OUTCOME_CACHED
+ # update setscene-related information if the task was just created
+ if created and task_object.outcome == Task.OUTCOME_COVERED and 1 == Task.objects.related_setscene(task_object).count():
+ task_object.outcome = Task.OUTCOME_CACHED
+ object_changed = True
outcome_task_setscene = Task.objects.get(task_executed=True, build = task_object.build,
recipe = task_object.recipe, task_name=task_object.task_name+"_setscene").outcome
if outcome_task_setscene == Task.OUTCOME_SUCCESS:
task_object.sstate_result = Task.SSTATE_RESTORED
+ object_changed = True
elif outcome_task_setscene == Task.OUTCOME_FAILED:
task_object.sstate_result = Task.SSTATE_FAILED
+ object_changed = True
# mark down duration if we have a start time and a current time
if 'start_time' in task_information.keys() and 'end_time' in task_information.keys():
duration = task_information['end_time'] - task_information['start_time']
task_object.elapsed_time = duration
+ object_changed = True
+ del task_information['start_time']
+ del task_information['end_time']
- task_object.save()
+ if object_changed:
+ task_object.save()
return task_object
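
A minimal sketch of the object_changed pattern used above: copy fields from the incoming dict onto the object and call save() only when a value actually differed. Record is a hypothetical stand-in for a model instance, not part of this patch.

    class Record(object):
        def __init__(self, **fields):
            self.__dict__.update(fields)
            self.saves = 0
        def save(self):
            self.saves += 1

    def update_if_changed(obj, info):
        changed = False
        for name in list(vars(obj)):
            if name in info and vars(obj)[name] != info[name]:
                vars(obj)[name] = info[name]
                changed = True
        if changed:
            obj.save()            # one write, and only when needed
        return changed

    task = Record(outcome=0, elapsed_time=None)
    assert update_if_changed(task, {'elapsed_time': 2.5}) and task.saves == 1
    assert not update_if_changed(task, {'elapsed_time': 2.5}) and task.saves == 1
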
@@ -153,20 +220,19 @@ class ORMWrapper(object):
assert 'layer_version' in recipe_information
assert 'file_path' in recipe_information
-
- recipe_object, created = Recipe.objects.get_or_create(
- layer_version=recipe_information['layer_version'],
- file_path=recipe_information['file_path'])
-
- if must_exist and created:
- recipe_object.delete()
+ recipe_object, created = self._cached_get_or_create(Recipe, layer_version=recipe_information['layer_version'],
+ file_path=recipe_information['file_path'])
+ if created and must_exist:
raise NotExisting("Recipe object created when expected to exist", recipe_information)
+ object_changed = False
for v in vars(recipe_object):
if v in recipe_information.keys():
+ object_changed = True
vars(recipe_object)[v] = recipe_information[v]
- recipe_object.save()
+ if object_changed:
+ recipe_object.save()
return recipe_object
@@ -185,19 +251,34 @@ class ORMWrapper(object):
priority = layer_version_information['priority']
)
+ self.layer_version_objects.append(layer_version_object)
+
return layer_version_object
- def get_update_layer_object(self, layer_information):
+ def get_update_layer_object(self, layer_information, brbe):
assert 'name' in layer_information
assert 'local_path' in layer_information
assert 'layer_index_url' in layer_information
- layer_object, created = Layer.objects.get_or_create(
+ if brbe is None:
+ layer_object, created = Layer.objects.get_or_create(
name=layer_information['name'],
local_path=layer_information['local_path'],
layer_index_url=layer_information['layer_index_url'])
+ return layer_object
+ else:
+ # we are under managed mode; we must match the layer used in the Project Layer
+ from bldcontrol.models import BuildEnvironment, BuildRequest
+ br, be = brbe.split(":")
+
+ buildrequest = BuildRequest.objects.get(pk = br)
+
+ # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution
+            # but we can only match on the layer name, so the worst that can happen is a mis-identification of the layer, not a total failure
+ layer_object = buildrequest.project.projectlayer_set.get(layercommit__layer__name=layer_information['name']).layercommit.layer
+
+ return layer_object
- return layer_object
def save_target_file_information(self, build_obj, target_obj, filedata):
assert isinstance(build_obj, Build)
@@ -229,7 +310,7 @@ class ORMWrapper(object):
parent_path = "/".join(path.split("/")[:len(path.split("/")) - 1])
if len(parent_path) == 0:
parent_path = "/"
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj = Target_File.objects.create(
target = target_obj,
path = path,
@@ -263,7 +344,7 @@ class ORMWrapper(object):
permission = permission,
owner = user,
group = group)
- parent_obj = Target_File.objects.get(target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
+ parent_obj = self._cached_get(Target_File, target = target_obj, path = parent_path, inodetype = Target_File.ITYPE_DIRECTORY)
tf_obj.directory = parent_obj
tf_obj.save()
@@ -318,8 +399,7 @@ class ORMWrapper(object):
searchname = pkgpnmap[p]['OPKGN']
packagedict[p]['object'], created = Package.objects.get_or_create( build = build_obj, name = searchname )
- if created:
- # package was not build in the current build, but
+ if created or packagedict[p]['object'].size == -1: # save the data anyway we can, not just if it was not created here; bug [YOCTO #6887]
# fill in everything we can from the runtime-reverse package data
try:
packagedict[p]['object'].recipe = recipes[pkgpnmap[p]['PN']]
@@ -333,11 +413,14 @@ class ORMWrapper(object):
packagedict[p]['object'].size = int(pkgpnmap[p]['PKGSIZE'])
# no files recorded for this package, so save files info
+ packagefile_objects = []
for targetpath in pkgpnmap[p]['FILES_INFO']:
targetfilesize = pkgpnmap[p]['FILES_INFO'][targetpath]
- Package_File.objects.create( package = packagedict[p]['object'],
+ packagefile_objects.append(Package_File( package = packagedict[p]['object'],
path = targetpath,
- size = targetfilesize)
+ size = targetfilesize))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
except KeyError as e:
errormsg += " stpi: Key error, package %s key %s \n" % ( p, e )
@@ -347,6 +430,7 @@ class ORMWrapper(object):
Target_Installed_Package.objects.create(target = target_obj, package = packagedict[p]['object'])
+ packagedeps_objs = []
for p in packagedict:
for (px,deptype) in packagedict[p]['depends']:
if deptype == 'depends':
@@ -354,10 +438,13 @@ class ORMWrapper(object):
elif deptype == 'recommends':
tdeptype = Package_Dependency.TYPE_TRECOMMENDS
- Package_Dependency.objects.create( package = packagedict[p]['object'],
+ packagedeps_objs.append(Package_Dependency( package = packagedict[p]['object'],
depends_on = packagedict[px]['object'],
dep_type = tdeptype,
- target = target_obj);
+ target = target_obj))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
if (len(errormsg) > 0):
raise Exception(errormsg)
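
The bulk_create() conversions in this file all follow the same pattern: collect rows in a plain list and write them in a single batch instead of one INSERT per object. A standalone sketch, with FakeTable as a hypothetical stand-in for a Django manager:

    class FakeTable(object):
        def __init__(self):
            self.rows = []
            self.round_trips = 0
        def create(self, **row):          # one round trip per row
            self.rows.append(row)
            self.round_trips += 1
        def bulk_create(self, rows):      # one round trip for the whole batch
            self.rows.extend(rows)
            self.round_trips += 1

    files_info = {'/usr/bin/app': 1024, '/etc/app.conf': 128}   # made-up FILES_INFO data
    package_files = FakeTable()
    pending = [{'path': p, 'size': files_info[p]} for p in files_info]
    if len(pending):
        package_files.bulk_create(pending)
    assert package_files.round_trips == 1 and len(package_files.rows) == 2
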
@@ -366,7 +453,17 @@ class ORMWrapper(object):
target_image_file = Target_Image_File.objects.create( target = target_obj,
file_name = file_name,
file_size = file_size)
- target_image_file.save()
+
+ def save_artifact_information(self, build_obj, file_name, file_size):
+ # we skip the image files from other builds
+ if Target_Image_File.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ # do not update artifacts found in other builds
+ if BuildArtifact.objects.filter(file_name = file_name).count() > 0:
+ return
+
+ BuildArtifact.objects.create(build = build_obj, file_name = file_name, file_size = file_size)
def create_logmessage(self, log_information):
assert 'build' in log_information
@@ -408,10 +505,13 @@ class ORMWrapper(object):
bp_object.save()
# save any attached file information
+ packagefile_objects = []
for path in package_info['FILES_INFO']:
- fo = Package_File.objects.create( package = bp_object,
+ packagefile_objects.append(Package_File( package = bp_object,
path = path,
- size = package_info['FILES_INFO'][path] )
+ size = package_info['FILES_INFO'][path] ))
+ if len(packagefile_objects):
+ Package_File.objects.bulk_create(packagefile_objects)
def _po_byname(p):
pkg, created = Package.objects.get_or_create(build = build_obj, name = p)
@@ -420,39 +520,44 @@ class ORMWrapper(object):
pkg.save()
return pkg
+ packagedeps_objs = []
# save soft dependency information
if 'RDEPENDS' in package_info and package_info['RDEPENDS']:
for p in bb.utils.explode_deps(package_info['RDEPENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RDEPENDS))
if 'RPROVIDES' in package_info and package_info['RPROVIDES']:
for p in bb.utils.explode_deps(package_info['RPROVIDES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RPROVIDES))
if 'RRECOMMENDS' in package_info and package_info['RRECOMMENDS']:
for p in bb.utils.explode_deps(package_info['RRECOMMENDS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RRECOMMENDS))
if 'RSUGGESTS' in package_info and package_info['RSUGGESTS']:
for p in bb.utils.explode_deps(package_info['RSUGGESTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RSUGGESTS))
if 'RREPLACES' in package_info and package_info['RREPLACES']:
for p in bb.utils.explode_deps(package_info['RREPLACES']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RREPLACES))
if 'RCONFLICTS' in package_info and package_info['RCONFLICTS']:
for p in bb.utils.explode_deps(package_info['RCONFLICTS']):
- Package_Dependency.objects.get_or_create( package = bp_object,
- depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS)
+ packagedeps_objs.append(Package_Dependency( package = bp_object,
+ depends_on = _po_byname(p), dep_type = Package_Dependency.TYPE_RCONFLICTS))
+
+ if len(packagedeps_objs) > 0:
+ Package_Dependency.objects.bulk_create(packagedeps_objs)
return bp_object
def save_build_variables(self, build_obj, vardump):
assert isinstance(build_obj, Build)
+ helptext_objects = []
for k in vardump:
- desc = vardump[k]['doc'];
+ desc = vardump[k]['doc']
if desc is None:
var_words = [word for word in k.split('_')]
root_var = "_".join([word for word in var_words if word.isupper()])
@@ -460,25 +565,31 @@ class ORMWrapper(object):
desc = vardump[root_var]['doc']
if desc is None:
desc = ''
- if desc:
- helptext_obj = HelpText.objects.create(build=build_obj,
+ if len(desc):
+ helptext_objects.append(HelpText(build=build_obj,
area=HelpText.VARIABLE,
key=k,
- text=desc)
+ text=desc))
if not bool(vardump[k]['func']):
- value = vardump[k]['v'];
+ value = vardump[k]['v']
if value is None:
value = ''
variable_obj = Variable.objects.create( build = build_obj,
variable_name = k,
variable_value = value,
description = desc)
+
+ varhist_objects = []
for vh in vardump[k]['history']:
if not 'documentation.conf' in vh['file']:
- VariableHistory.objects.create( variable = variable_obj,
+ varhist_objects.append(VariableHistory( variable = variable_obj,
file_name = vh['file'],
line_number = vh['line'],
- operation = vh['op'])
+ operation = vh['op']))
+ if len(varhist_objects):
+ VariableHistory.objects.bulk_create(varhist_objects)
+
+ HelpText.objects.bulk_create(helptext_objects)
class MockEvent: pass # sometimes we mock an event, declare it here
@@ -500,6 +611,7 @@ class BuildInfoHelper(object):
self.has_build_history = has_build_history
self.tmp_dir = self.server.runCommand(["getVariable", "TMPDIR"])[0]
self.brbe = self.server.runCommand(["getVariable", "TOASTER_BRBE"])[0]
+ logger.debug(1, "buildinfohelper: Build info helper inited %s" % vars(self))
def _configure_django(self):
@@ -544,19 +656,44 @@ class BuildInfoHelper(object):
assert path.startswith("/")
assert 'build' in self.internal_state
- def _slkey(layer_version):
- assert isinstance(layer_version, Layer_Version)
- return len(layer_version.layer.local_path)
+ if self.brbe is None:
+ def _slkey_interactive(layer_version):
+ assert isinstance(layer_version, Layer_Version)
+ return len(layer_version.layer.local_path)
+
+ # Heuristics: we always match recipe to the deepest layer path in the discovered layers
+ for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_slkey_interactive):
+ # we can match to the recipe file path
+ if path.startswith(lvo.layer.local_path):
+ return lvo
- # Heuristics: we always match recipe to the deepest layer path that
- # we can match to the recipe file path
- for bl in sorted(Layer_Version.objects.filter(build = self.internal_state['build']), reverse=True, key=_slkey):
- if (path.startswith(bl.layer.local_path)):
- return bl
+ else:
+ br_id, be_id = self.brbe.split(":")
+ from bldcontrol.bbcontroller import getBuildEnvironmentController
+ from bldcontrol.models import BuildRequest
+ bc = getBuildEnvironmentController(pk = be_id)
+
+ def _slkey_managed(layer_version):
+ return len(bc.getGitCloneDirectory(layer_version.giturl, layer_version.commit) + layer_version.dirpath)
+
+ # Heuristics: we match the path to where the layers have been checked out
+ for brl in sorted(BuildRequest.objects.get(pk = br_id).brlayer_set.all(), reverse = True, key = _slkey_managed):
+ localdirname = os.path.join(os.path.join(bc.be.sourcedir, bc.getGitCloneDirectory(brl.giturl, brl.commit)), brl.dirpath)
+ if path.startswith(localdirname):
+ #logger.warn("-- managed: matched path %s with layer %s " % (path, localdirname))
+ # we matched the BRLayer, but we need the layer_version that generated this br
+ for lvo in self.orm_wrapper.layer_version_objects:
+ if brl.name == lvo.layer.name:
+ return lvo
+
+ #if we get here, we didn't read layers correctly; dump whatever information we have on the error log
+ logger.error("Could not match layer version for recipe path %s : %s" % (path, self.orm_wrapper.layer_version_objects))
+
+ #mockup the new layer
+ unknown_layer, created = Layer.objects.get_or_create(name="__FIXME__unidentified_layer", local_path="/", layer_index_url="")
+ unknown_layer_version_obj, created = Layer_Version.objects.get_or_create(layer = unknown_layer, build = self.internal_state['build'])
- #TODO: if we get here, we didn't read layers correctly
- assert False
- return None
+ return unknown_layer_version_obj
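
The interactive branch above keeps the existing heuristic: the deepest layer whose local_path is a prefix of the recipe path wins. A standalone sketch of that heuristic, using plain path strings in place of Layer_Version objects (the example paths are made up):

    def match_layer(recipe_path, layer_paths):
        # longest local_path first, so nested layers beat their parents
        for lp in sorted(layer_paths, key=len, reverse=True):
            if recipe_path.startswith(lp):
                return lp
        return None   # callers fall back to an "unidentified layer" record

    layers = ['/srv/poky/meta', '/srv/poky/meta-yocto', '/srv/poky/meta-yocto/meta-yocto-bsp']
    recipe = '/srv/poky/meta-yocto/meta-yocto-bsp/recipes-bsp/formfactor/formfactor_0.0.bb'
    assert match_layer(recipe, layers) == '/srv/poky/meta-yocto/meta-yocto-bsp'
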
def _get_recipe_information_from_taskfile(self, taskfile):
localfilepath = taskfile.split(":")[-1]
@@ -599,13 +736,22 @@ class BuildInfoHelper(object):
################################
## external available methods to store information
+ @staticmethod
+ def _get_data_from_event(event):
+ evdata = None
+ if '_localdata' in vars(event):
+ evdata = event._localdata
+ elif 'data' in vars(event):
+ evdata = event.data
+ else:
+ raise Exception("Event with neither _localdata or data properties")
+ return evdata
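
A minimal sketch of the accessor pattern in _get_data_from_event(): the payload may be attached as event._localdata or as event.data, presumably depending on how the event was delivered, so both spellings are accepted. The mock event classes below are hypothetical:

    class LocalEvent(object):
        def __init__(self, payload):
            self._localdata = payload

    class RemoteEvent(object):
        def __init__(self, payload):
            self.data = payload

    def get_data(event):
        if '_localdata' in vars(event):
            return event._localdata
        elif 'data' in vars(event):
            return event.data
        raise Exception("Event with neither _localdata nor data properties")

    payload = {'pkgdata': {}, 'imgdata': {}}
    assert get_data(LocalEvent(payload)) is get_data(RemoteEvent(payload))
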
def store_layer_info(self, event):
- assert '_localdata' in vars(event)
- layerinfos = event._localdata
+ layerinfos = BuildInfoHelper._get_data_from_event(event)
self.internal_state['lvs'] = {}
for layer in layerinfos:
- self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer])] = layerinfos[layer]['version']
+ self.internal_state['lvs'][self.orm_wrapper.get_update_layer_object(layerinfos[layer], self.brbe)] = layerinfos[layer]['version']
def store_started_build(self, event):
@@ -617,10 +763,13 @@ class BuildInfoHelper(object):
self.internal_state['build'] = build_obj
# save layer version information for this build
- for layer_obj in self.internal_state['lvs']:
- self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
+ if not 'lvs' in self.internal_state:
+ logger.error("Layer version information not found; Check if the bitbake server was configured to inherit toaster.bbclass.")
+ else:
+ for layer_obj in self.internal_state['lvs']:
+ self.orm_wrapper.get_update_layer_version_object(build_obj, layer_obj, self.internal_state['lvs'][layer_obj])
- del self.internal_state['lvs']
+ del self.internal_state['lvs']
# create target information
target_information = {}
@@ -630,20 +779,27 @@ class BuildInfoHelper(object):
self.internal_state['targets'] = self.orm_wrapper.create_target_objects(target_information)
# Save build configuration
- self.orm_wrapper.save_build_variables(build_obj, self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0])
+ data = self.server.runCommand(["getAllKeysWithFlags", ["doc", "func"]])[0]
+ self.orm_wrapper.save_build_variables(build_obj, data)
return self.brbe
-
def update_target_image_file(self, event):
image_fstypes = self.server.runCommand(["getVariable", "IMAGE_FSTYPES"])[0]
+ evdata = BuildInfoHelper._get_data_from_event(event)
+
for t in self.internal_state['targets']:
if t.is_image == True:
- output_files = list(event._localdata.viewkeys())
+ output_files = list(evdata.viewkeys())
for output in output_files:
- if t.target in output and output.split('.rootfs.')[1] in image_fstypes:
- self.orm_wrapper.save_target_image_file_information(t, output, event._localdata[output])
+ if t.target in output and 'rootfs' in output and not output.endswith(".manifest"):
+ self.orm_wrapper.save_target_image_file_information(t, output, evdata[output])
+
+ def update_artifact_image_file(self, event):
+ evdata = BuildInfoHelper._get_data_from_event(event)
+ for artifact_path in evdata.keys():
+ self.orm_wrapper.save_artifact_information(self.internal_state['build'], artifact_path, evdata[artifact_path])
def update_build_information(self, event, errors, warnings, taskfailures):
if 'build' in self.internal_state:
@@ -651,12 +807,12 @@ class BuildInfoHelper(object):
def store_license_manifest_path(self, event):
- deploy_dir = event._localdata['deploy_dir']
- image_name = event._localdata['image_name']
- path = deploy_dir + "/licenses/" + image_name + "/"
+ deploy_dir = BuildInfoHelper._get_data_from_event(event)['deploy_dir']
+ image_name = BuildInfoHelper._get_data_from_event(event)['image_name']
+ path = deploy_dir + "/licenses/" + image_name + "/license.manifest"
for target in self.internal_state['targets']:
if target.target in image_name:
- self.orm_wrapper.update_target_object(target, path)
+ self.orm_wrapper.update_target_set_license_manifest(target, path)
def store_started_task(self, event):
@@ -700,7 +856,7 @@ class BuildInfoHelper(object):
def store_tasks_stats(self, event):
- for (taskfile, taskname, taskstats, recipename) in event._localdata:
+ for (taskfile, taskname, taskstats, recipename) in BuildInfoHelper._get_data_from_event(event):
localfilepath = taskfile.split(":")[-1]
assert localfilepath.startswith("/")
@@ -715,6 +871,8 @@ class BuildInfoHelper(object):
task_information['task_name'] = taskname
task_information['cpu_usage'] = taskstats['cpu_usage']
task_information['disk_io'] = taskstats['disk_io']
+ if 'elapsed_time' in taskstats:
+ task_information['elapsed_time'] = taskstats['elapsed_time']
task_obj = self.orm_wrapper.get_update_task_object(task_information, True) # must exist
def update_and_store_task(self, event):
@@ -772,7 +930,7 @@ class BuildInfoHelper(object):
def store_missed_state_tasks(self, event):
- for (fn, taskname, taskhash, sstatefile) in event._localdata['missed']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['missed']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -790,7 +948,7 @@ class BuildInfoHelper(object):
self.orm_wrapper.get_update_task_object(task_information)
- for (fn, taskname, taskhash, sstatefile) in event._localdata['found']:
+ for (fn, taskname, taskhash, sstatefile) in BuildInfoHelper._get_data_from_event(event)['found']:
identifier = fn + taskname + "_setscene"
recipe_information = self._get_recipe_information_from_taskfile(fn)
@@ -806,15 +964,14 @@ class BuildInfoHelper(object):
def store_target_package_data(self, event):
- assert '_localdata' in vars(event)
# for all image targets
for target in self.internal_state['targets']:
if target.is_image:
try:
- pkgdata = event._localdata['pkgdata']
- imgdata = event._localdata['imgdata'][target.target]
+ pkgdata = BuildInfoHelper._get_data_from_event(event)['pkgdata']
+ imgdata = BuildInfoHelper._get_data_from_event(event)['imgdata'][target.target]
self.orm_wrapper.save_target_package_information(self.internal_state['build'], target, imgdata, pkgdata, self.internal_state['recipes'])
- filedata = event._localdata['filedata'][target.target]
+ filedata = BuildInfoHelper._get_data_from_event(event)['filedata'][target.target]
self.orm_wrapper.save_target_file_information(self.internal_state['build'], target, filedata)
except KeyError:
# we must have not got the data for this image, nothing to save
@@ -850,14 +1007,29 @@ class BuildInfoHelper(object):
recipe_info = {}
recipe_info['name'] = pn
- recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
recipe_info['layer_version'] = layer_version_obj
- recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
- recipe_info['license'] = event._depgraph['pn'][pn]['license']
- recipe_info['description'] = event._depgraph['pn'][pn]['description']
- recipe_info['section'] = event._depgraph['pn'][pn]['section']
- recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
- recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
+ if 'version' in event._depgraph['pn'][pn]:
+ recipe_info['version'] = event._depgraph['pn'][pn]['version'].lstrip(":")
+
+ if 'summary' in event._depgraph['pn'][pn]:
+ recipe_info['summary'] = event._depgraph['pn'][pn]['summary']
+
+ if 'license' in event._depgraph['pn'][pn]:
+ recipe_info['license'] = event._depgraph['pn'][pn]['license']
+
+ if 'description' in event._depgraph['pn'][pn]:
+ recipe_info['description'] = event._depgraph['pn'][pn]['description']
+
+ if 'section' in event._depgraph['pn'][pn]:
+ recipe_info['section'] = event._depgraph['pn'][pn]['section']
+
+ if 'homepage' in event._depgraph['pn'][pn]:
+ recipe_info['homepage'] = event._depgraph['pn'][pn]['homepage']
+
+ if 'bugtracker' in event._depgraph['pn'][pn]:
+ recipe_info['bugtracker'] = event._depgraph['pn'][pn]['bugtracker']
+
recipe_info['file_path'] = file_name
recipe = self.orm_wrapper.get_update_recipe_object(recipe_info)
recipe.is_image = False
@@ -879,20 +1051,22 @@ class BuildInfoHelper(object):
# save recipe dependency
# buildtime
+ recipedeps_objects = []
for recipe in event._depgraph['depends']:
try:
target = self.internal_state['recipes'][recipe]
for dep in event._depgraph['depends'][recipe]:
dependency = self.internal_state['recipes'][dep]
- Recipe_Dependency.objects.get_or_create( recipe = target,
- depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS)
+ recipedeps_objects.append(Recipe_Dependency( recipe = target,
+ depends_on = dependency, dep_type = Recipe_Dependency.TYPE_DEPENDS))
except KeyError as e:
if e not in assume_provided and not str(e).startswith("virtual/"):
errormsg += " stpd: KeyError saving recipe dependency for %s, %s \n" % (recipe, e)
+ Recipe_Dependency.objects.bulk_create(recipedeps_objects)
# save all task information
def _save_a_task(taskdesc):
- spec = re.split(r'\.', taskdesc);
+ spec = re.split(r'\.', taskdesc)
pn = ".".join(spec[0:-1])
taskname = spec[-1]
e = event
@@ -909,6 +1083,7 @@ class BuildInfoHelper(object):
tasks[taskdesc] = _save_a_task(taskdesc)
# create dependencies between tasks
+ taskdeps_objects = []
for taskdesc in event._depgraph['tdepends']:
target = tasks[taskdesc]
for taskdep in event._depgraph['tdepends'][taskdesc]:
@@ -917,29 +1092,31 @@ class BuildInfoHelper(object):
dep = _save_a_task(taskdep)
else:
dep = tasks[taskdep]
- Task_Dependency.objects.get_or_create( task = target, depends_on = dep )
+ taskdeps_objects.append(Task_Dependency( task = target, depends_on = dep ))
+ Task_Dependency.objects.bulk_create(taskdeps_objects)
if (len(errormsg) > 0):
raise Exception(errormsg)
def store_build_package_information(self, event):
- assert '_localdata' in vars(event)
- package_info = event._localdata
+ package_info = BuildInfoHelper._get_data_from_event(event)
self.orm_wrapper.save_build_package_information(self.internal_state['build'],
package_info,
self.internal_state['recipes'],
)
- def _store_build_done(self):
+ def _store_build_done(self, errorcode):
br_id, be_id = self.brbe.split(":")
from bldcontrol.models import BuildEnvironment, BuildRequest
be = BuildEnvironment.objects.get(pk = be_id)
be.lock = BuildEnvironment.LOCK_LOCK
be.save()
br = BuildRequest.objects.get(pk = br_id)
- br.state = BuildRequest.REQ_COMPLETED
- br.build = self.internal_state['build']
+ if errorcode == 0:
+ br.state = BuildRequest.REQ_COMPLETED
+ else:
+ br.state = BuildRequest.REQ_FAILED
br.save()
@@ -947,8 +1124,19 @@ class BuildInfoHelper(object):
mockevent = MockEvent()
mockevent.levelno = format.ERROR
mockevent.msg = text
+ mockevent.pathname = '-- None'
+ mockevent.lineno = -1
+ self.store_log_event(mockevent)
+
+ def store_log_exception(self, text, backtrace = ""):
+ mockevent = MockEvent()
+ mockevent.levelno = -1
+ mockevent.msg = text
+ mockevent.pathname = backtrace
+ mockevent.lineno = -1
self.store_log_event(mockevent)
+
def store_log_event(self, event):
if event.levelno < format.WARNING:
return
@@ -963,18 +1151,20 @@ class BuildInfoHelper(object):
self.internal_state['backlog'].append(event)
else: # we're under Toaster control, post the errors to the build request
from bldcontrol.models import BuildRequest, BRError
- br, be = brbe.split(":")
+ br, be = self.brbe.split(":")
buildrequest = BuildRequest.objects.get(pk = br)
brerror = BRError.objects.create(req = buildrequest, errtype="build", errmsg = event.msg)
return
if 'build' in self.internal_state and 'backlog' in self.internal_state:
+ # if we have a backlog of events, do our best to save them here
if len(self.internal_state['backlog']):
tempevent = self.internal_state['backlog'].pop()
- print " Saving stored event ", tempevent
+ logger.debug(1, "buildinfohelper: Saving stored event %s " % tempevent)
self.store_log_event(tempevent)
else:
+ logger.error("buildinfohelper: Events not saved: %s" % self.internal_state['backlog'])
del self.internal_state['backlog']
log_information = {}
@@ -983,6 +1173,8 @@ class BuildInfoHelper(object):
log_information['level'] = LogMessage.ERROR
elif event.levelno == format.WARNING:
log_information['level'] = LogMessage.WARNING
+ elif event.levelno == -1: # toaster self-logging
+ log_information['level'] = -1
else:
log_information['level'] = LogMessage.INFO
@@ -991,10 +1183,16 @@ class BuildInfoHelper(object):
log_information['lineno'] = event.lineno
self.orm_wrapper.create_logmessage(log_information)
- def close(self):
+ def close(self, errorcode):
if self.brbe is not None:
- buildinfohelper._store_build_done()
+ self._store_build_done(errorcode)
if 'backlog' in self.internal_state:
- for event in self.internal_state['backlog']:
- print "NOTE: Unsaved log: ", event.msg
+ if 'build' in self.internal_state:
+ # we save missed events in the database for the current build
+ tempevent = self.internal_state['backlog'].pop()
+ self.store_log_event(tempevent)
+ else:
+                # we have no build, and we still have events; something amazingly wrong happened
+ for event in self.internal_state['backlog']:
+ logger.error("UNSAVED log: %s", event.msg)