aboutsummaryrefslogtreecommitdiffstats
path: root/classes
diff options
context:
space:
mode:
Diffstat (limited to 'classes')
-rw-r--r--classes/blackduck-upload.bbclass27
-rw-r--r--classes/bom.bbclass212
-rw-r--r--classes/fossology-python.bbclass127
-rw-r--r--classes/fossology-rest.bbclass688
-rw-r--r--classes/scancode-tk.bbclass34
-rw-r--r--classes/spdx-common.bbclass174
6 files changed, 812 insertions, 450 deletions
diff --git a/classes/blackduck-upload.bbclass b/classes/blackduck-upload.bbclass
index cad3ad5..75759bd 100644
--- a/classes/blackduck-upload.bbclass
+++ b/classes/blackduck-upload.bbclass
@@ -17,13 +17,14 @@ inherit copyleft_filter
inherit spdx-common
do_upload[dirs] = "${SPDX_TOPDIR}"
+do_bd_upload[network] = "1"
WAIT_TIME ?= "20"
python () {
-
+ pn = d.getVar('PN')
#If not for target, won't creat spdx.
- if bb.data.inherits_class('nopackages', d):
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
pn = d.getVar('PN')
@@ -46,17 +47,13 @@ python () {
return
# We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
+ if d.getVar('BPN') in ['gcc', 'libgcc'] \
+ and not pn.startswith('gcc-source'):
bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
return
temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
- info = {}
- info['workdir'] = d.getVar('WORKDIR') or ""
- info['pn'] = d.getVar( 'PN') or ""
- info['pv'] = d.getVar( 'PV') or ""
-
manifest_dir = d.getVar('SPDX_DEPLOY_DIR') or ""
if not os.path.exists( manifest_dir ):
bb.utils.mkdirhier( manifest_dir )
@@ -64,9 +61,9 @@ python () {
def hasTask(task):
return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
- if d.getVar('PACKAGES'):
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
# Some recipes do not have any packaging tasks
- if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb"):
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
d.appendVarFlag('do_bd_upload', 'depends', ' synopsys-native:do_populate_sysroot')
d.appendVarFlag('do_bd_upload', 'depends', ' %s:do_spdx_creat_tarball' % pn)
d.appendVarFlag('do_synopsys_detect', 'depends', ' %s:do_bd_upload' % pn)
@@ -77,17 +74,15 @@ python () {
python do_bd_upload(){
import logging, shutil,time
- if bb.data.inherits_class('nopackages', d):
+ pn = d.getVar( 'PN')
+    #If not for target, won't create spdx.
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logging.basicConfig(level=logging.INFO)
- info = {}
- info['pn'] = d.getVar( 'PN') or ""
- info['pv'] = d.getVar( 'PV') or ""
-
token = d.getVar('TOKEN')
spdx_outdir = d.getVar('SPDX_OUTDIR')
bb.note("Begin to upload : " + spdx_outdir)
@@ -169,7 +164,7 @@ do_synopsys_detect () {
echo "Upload OSS to blackduck server."
}
addtask do_spdx_creat_tarball after do_patch
-addtask do_bd_upload after do_patch
+addtask do_bd_upload after do_spdx_creat_tarball
addtask do_synopsys_detect
do_build[recrdeptask] += "do_synopsys_detect"
do_populate_sdk[recrdeptask] += "do_synopsys_detect"
diff --git a/classes/bom.bbclass b/classes/bom.bbclass
new file mode 100644
index 0000000..a4c645f
--- /dev/null
+++ b/classes/bom.bbclass
@@ -0,0 +1,212 @@
+# This class integrates real-time license scanning, generation of SPDX standard
+# output and verifying license info during the building process.
+# It is a combination of efforts from the OE-Core, SPDX and bom projects.
+#
+# For more information on the following :
+# https://github.com/kubernetes-sigs/bom
+#
+# For more information on SPDX:
+# http://www.spdx.org
+# install bom on your host:https://github.com/kubernetes-sigs/bom
+HOSTTOOLS += "bom"
+
+COPYLEFT_RECIPE_TYPES ?= 'target nativesdk'
+inherit copyleft_filter
+inherit spdx-common
+HOSTTOOLS += "bom"
+
+do_get_report[dirs] = "${SPDX_OUTDIR}"
+
+CREATOR_TOOL = "bom.bbclass in meta-spdxscanner"
+
+python () {
+ pn = d.getVar('PN')
+    #If not for target, won't create spdx.
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
+ return
+
+ assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ if pn in assume_provided:
+ for p in d.getVar("PROVIDES").split():
+ if p != pn:
+ pn = p
+ break
+
+ # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving source here.
+ if pn.startswith('glibc-locale'):
+ return
+ if (d.getVar('PN') == "libtool-cross"):
+ return
+ if (d.getVar('PN') == "libgcc-initial"):
+ return
+ if (d.getVar('PN') == "shadow-sysroot"):
+ return
+
+ # We just archive gcc-source for all the gcc related recipes
+ if d.getVar('BPN') in ['gcc', 'libgcc'] \
+ and not pn.startswith('gcc-source'):
+ bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
+ return
+
+ def hasTask(task):
+ return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
+
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+
+ info = {}
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+
+ if (d.getVar('BPN') == "perf"):
+ info['pv'] = d.getVar("KERNEL_VERSION").split("-")[0]
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if d.getVar('SAVE_SPDX_ACHIVE'):
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_spdx_creat_tarball' % pn)
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
+
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ sstatefile = os.path.join(spdx_outdir, spdx_name )
+ if os.path.exists(info['outfile']):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ return
+ if os.path.exists( sstatefile ):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ create_manifest(info,sstatefile)
+ return
+
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb"):
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
+ d.appendVarFlag('do_get_report', 'depends', ' %s:do_spdx_get_src' % pn)
+ bb.build.addtask('do_spdx_get_src', 'do_configure', 'do_patch', d)
+ bb.build.addtask('do_get_report', 'do_configure', 'do_patch', d)
+ bb.build.addtask('do_spdx', 'do_configure', 'do_get_report', d)
+}
+
+python do_get_report(){
+
+ import os, sys, json, shutil
+
+    #If not for target, won't create spdx.
+ if bb.data.inherits_class('nopackages', d):
+ return
+
+ bb.note("Begin to get report!")
+
+ pn = d.getVar('PN')
+
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
+ spdx_temp_dir = os.path.join(spdx_workdir, "temp")
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+
+ cur_ver_code = get_ver_code(spdx_workdir).split()[0]
+ info = {}
+ info['workdir'] = (d.getVar('WORKDIR') or "")
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PV') or "").replace('-', '+')
+ info['package_download_location'] = (d.getVar( 'SRC_URI') or "")
+ if info['package_download_location'] != "":
+ info['package_download_location'] = info['package_download_location'].split()[0]
+ info['spdx_version'] = (d.getVar('SPDX_VERSION') or '')
+ info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ spdx_file = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ if os.path.exists(info['outfile']):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ return
+ if os.path.exists( spdx_file ):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ create_manifest(info,spdx_file)
+ return
+ info['data_license'] = (d.getVar('DATA_LICENSE') or '')
+ info['creator'] = {}
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL') or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION') or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE') or "")
+ info['package_summary'] = (d.getVar('SUMMARY') or "")
+ info['package_summary'] = info['package_summary'].replace("\n","")
+ info['package_summary'] = info['package_summary'].replace("'"," ")
+ info['package_contains'] = (d.getVar('CONTAINED') or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK') or "")
+ info['modified'] = "false"
+ info['external_refs'] = get_external_refs(d)
+ info['purpose'] = get_pkgpurpose(d)
+ info['release_date'] = (d.getVar('REALASE_DATE') or "")
+ info['build_time'] = get_build_date(d)
+ info['depends_on'] = get_depends_on(d)
+ info['pkg_spdx_id'] = get_spdxid_pkg(d)
+ srcuri = d.getVar("SRC_URI", False).split()
+ length = len("file://")
+ for item in srcuri:
+ if item.startswith("file://"):
+ item = item[length:]
+ if item.endswith(".patch") or item.endswith(".diff"):
+ info['modified'] = "true"
+ d.setVar('WORKDIR', d.getVar('SPDX_WORKDIR', True))
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+ invoke_bom(info['sourcedir'],spdx_file)
+ bb.note("info['sourcedir'] = " + info['sourcedir'])
+ write_cached_spdx(info,spdx_file,cur_ver_code)
+ create_manifest(info,spdx_file)
+}
+
+def invoke_bom(OSS_src_dir, spdx_file):
+ import subprocess
+ import string
+ import json
+ import codecs
+ import logging
+
+ logger = logging.getLogger()
+ logger.setLevel(logging.INFO)
+ logging.basicConfig(level=logging.INFO)
+
+ path = os.getenv('PATH')
+ bom_cmd = bb.utils.which(os.getenv('PATH'), "bom")
+ bom_cmd = bom_cmd + " generate -d " + OSS_src_dir + " --output=" + spdx_file
+ bb.note("bom_cmd = " + bom_cmd)
+ print(bom_cmd)
+ try:
+ subprocess.check_output(bom_cmd,
+ stderr=subprocess.STDOUT,
+ shell=True)
+ except subprocess.CalledProcessError as e:
+ bb.fatal("Could not invoke bom Command "
+ "'%s' returned %d:\n%s" % (bom_cmd, e.returncode, e.output))
+
+SSTATETASKS += "do_spdx"
+python do_spdx_setscene () {
+ sstate_setscene(d)
+}
+addtask do_spdx_setscene
+do_spdx () {
+ echo "Create spdx file."
+}
+addtask do_spdx_get_src after do_patch
+addtask do_get_report after do_spdx_get_src
+addtask do_spdx
+do_build[recrdeptask] += "do_spdx"
+do_populate_sdk[recrdeptask] += "do_spdx"
+
diff --git a/classes/fossology-python.bbclass b/classes/fossology-python.bbclass
index 4e4a2e6..524c5f6 100644
--- a/classes/fossology-python.bbclass
+++ b/classes/fossology-python.bbclass
@@ -37,11 +37,11 @@ python () {
create_folder_lock = Lock()
+ pn = d.getVar('PN')
#If not for target, won't creat spdx.
- if bb.data.inherits_class('nopackages', d):
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
- pn = d.getVar('PN')
assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
if pn in assume_provided:
for p in d.getVar("PROVIDES").split():
@@ -61,22 +61,43 @@ python () {
return
# We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ if d.getVar('BPN') in ['gcc', 'libgcc'] \
+ and not pn.startswith('gcc-source'):
+ bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
return
- spdx_outdir = d.getVar('SPDX_OUTDIR')
-
- info = {}
- info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
+ def hasTask(task):
+ return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
if not os.path.exists( manifest_dir ):
bb.utils.mkdirhier( manifest_dir )
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ info = {}
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+
+ if (d.getVar('BPN') == "perf"):
+ info['pv'] = d.getVar("KERNEL_VERSION").split("-")[0]
+
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if d.getVar('SAVE_SPDX_ACHIVE'):
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_spdx_creat_tarball' % pn)
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
+
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ sstatefile = os.path.join(spdx_outdir, spdx_name )
if os.path.exists(info['outfile']):
bb.note(info['pn'] + "spdx file has been exist, do nothing")
return
@@ -84,13 +105,10 @@ python () {
bb.note(info['pn'] + "spdx file has been exist, do nothing")
create_manifest(info,sstatefile)
return
-
- def hasTask(task):
- return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
- if d.getVar('PACKAGES'):
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
# Some recipes do not have any packaging tasks
- if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb"):
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
d.appendVarFlag('do_foss_upload', 'depends', ' %s:do_spdx_creat_tarball' % pn)
d.appendVarFlag('do_schedule_jobs', 'depends', ' %s:do_foss_upload' % pn)
d.appendVarFlag('do_get_report', 'depends', ' %s:do_schedule_jobs' % pn)
@@ -137,8 +155,18 @@ python do_foss_upload(){
bb.warn(pn + " has already been uploaded, don't upload again.")
}
def get_upload(d, folder, foss):
+ from fossology.exceptions import FossologyApiError
+
filename = get_upload_file_name(d)
- upload_list, _ = foss.list_uploads(page_size=1, all_pages=True)
+ try:
+ upload_list, _ = foss.list_uploads(page_size=1, all_pages=True)
+ except FossologyApiError as error:
+ time.sleep(10)
+ try:
+ upload_list, _ = foss.list_uploads(page_size=1, all_pages=True)
+ except FossologyApiError as error:
+ bb.error(error.message)
+
upload = None
bb.note("Check tarball: %s ,has been uploaded?" % filename)
for upload in upload_list:
@@ -198,8 +226,7 @@ def create_folder(d, foss, token, folder_name):
create_folder_lock.release()
if folder.name != folder_name:
bb.error("Folder %s couldn't be created" % folder_name)
- else:
- return folder
+ return folder
python do_schedule_jobs(){
import os
@@ -207,8 +234,9 @@ python do_schedule_jobs(){
import time
import logging
+ pn = d.getVar( 'PN')
#If not for target, won't creat spdx.
- if bb.data.inherits_class('nopackages', d):
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
logger = logging.getLogger()
@@ -225,15 +253,25 @@ python do_schedule_jobs(){
info = {}
info['workdir'] = (d.getVar('WORKDIR') or "")
info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+ if (d.getVar('BPN') == "perf"):
+ info['pv'] = d.getVar("KERNEL_VERSION").split("-")[0]
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
if not os.path.exists( manifest_dir ):
bb.utils.mkdirhier( manifest_dir )
spdx_outdir = d.getVar('SPDX_OUTDIR')
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ sstatefile = os.path.join(spdx_outdir, spdx_name)
if os.path.exists(info['outfile']):
bb.note(info['pn'] + "spdx file has been exist, do nothing")
return
@@ -245,7 +283,6 @@ python do_schedule_jobs(){
fossology_server = d.getVar('FOSSOLOGY_SERVER')
token = d.getVar('TOKEN')
foss = Fossology(fossology_server, token, "fossy")
- pn = d.getVar('PN')
if d.getVar('FOLDER_NAME', False):
folder_name = d.getVar('FOLDER_NAME')
@@ -349,8 +386,9 @@ python do_get_report(){
report_id = None
report = None
+ pn = d.getVar('PN')
#If not for target, won't creat spdx.
- if bb.data.inherits_class('nopackages', d):
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
logger = logging.getLogger()
@@ -362,7 +400,6 @@ python do_get_report(){
fossology_server = d.getVar('FOSSOLOGY_SERVER')
token = d.getVar('TOKEN')
foss = Fossology(fossology_server, token, "fossy")
- pn = d.getVar('PN')
if d.getVar('FOLDER_NAME', False):
folder_name = d.getVar('FOLDER_NAME')
@@ -380,15 +417,27 @@ python do_get_report(){
cur_ver_code = get_ver_code(spdx_workdir).split()[0]
info = {}
+
info['workdir'] = (d.getVar('WORKDIR') or "")
info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+ if (d.getVar('BPN') == "perf"):
+ info['pv'] = d.getVar("KERNEL_VERSION").split("-")[0]
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
+
info['package_download_location'] = (d.getVar( 'SRC_URI') or "")
if info['package_download_location'] != "":
info['package_download_location'] = info['package_download_location'].split()[0]
info['spdx_version'] = (d.getVar('SPDX_VERSION') or '')
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- spdx_file = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ spdx_file = os.path.join(spdx_outdir, spdx_name )
if os.path.exists(info['outfile']):
bb.note(info['pn'] + "spdx file has been exist, do nothing")
return
@@ -407,6 +456,12 @@ python do_get_report(){
info['package_contains'] = (d.getVar('CONTAINED') or "")
info['package_static_link'] = (d.getVar('STATIC_LINK') or "")
info['modified'] = "false"
+ info['external_refs'] = get_external_refs(d)
+ info['purpose'] = get_pkgpurpose(d)
+ info['release_date'] = (d.getVar('REALASE_DATE') or "")
+ info['build_time'] = get_build_date(d)
+ info['depends_on'] = get_depends_on(d)
+ info['pkg_spdx_id'] = get_spdxid_pkg(d)
srcuri = d.getVar("SRC_URI", False).split()
length = len("file://")
for item in srcuri:
@@ -434,7 +489,7 @@ python do_get_report(){
while i < 20:
i += 1
try:
- report = foss.download_report(report_id)
+ report, name = foss.download_report(report_id, wait_time=wait_time*2)
except TryAgain:
bb.warn("SPDX file is still not ready, try again.")
time.sleep(wait_time)
@@ -446,10 +501,12 @@ python do_get_report(){
bb.error("Fail to download report.")
break
- report = str(report).lstrip("('")
- report = report.rstrip("')")
- with open(spdx_file, "w+") as file:
- file.write(report)
+ with open(spdx_file, "wb") as file:
+ written = file.write(report)
+ assert written == len(report)
+ logger.info(
+ f"Report written to file: report_name {name} written to {spdx_file}"
+ )
file.close()
subprocess.call(r"sed -i -e 's#\\n#\n#g' %s" % spdx_file, shell=True)
@@ -496,4 +553,4 @@ addtask do_get_report after do_schedule_jobs
addtask do_spdx
do_build[recrdeptask] += "do_spdx"
do_populate_sdk[recrdeptask] += "do_spdx"
-
+do_get_report[depends] = "cve-update-nvd2-native:do_fetch"
diff --git a/classes/fossology-rest.bbclass b/classes/fossology-rest.bbclass
index 458fc56..53c41dd 100644
--- a/classes/fossology-rest.bbclass
+++ b/classes/fossology-rest.bbclass
@@ -12,72 +12,94 @@
# 1) Make sure fossdriver has beed installed in your host
# 2) By default,spdx files will be output to the path which is defined as[SPDX_DEPLOY_DIR]
# in ./meta/conf/spdx-dosocs.conf.
-COPYLEFT_RECIPE_TYPES ?= 'target nativesdk'
-inherit copyleft_filter
-
inherit spdx-common
-FOSSOLOGY_SERVER ?= "http://127.0.0.1:8081/repo"
+do_spdx[network] = "1"
+do_get_report[network] = "1"
+
+FOSSOLOGY_SERVER ?= "http://127.0.0.1:8081/repo"
+FOLDER_NAME ?= "Software Repository"
#upload OSS into No.1 folder of fossology
-FOLDER_ID = "1"
+FOLDER_ID ?= "1"
HOSTTOOLS_NONFATAL += "curl"
CREATOR_TOOL = "fossology-rest.bbclass in meta-spdxscanner"
-NO_PROXY ?= "127.0.0.1"
-
-do_get_report[network] = "1"
-
# If ${S} isn't actually the top-level source directory, set SPDX_S to point at
# the real top-level directory.
SPDX_S ?= "${S}"
+addtask do_spdx before do_build after do_patch
+python do_spdx () {
+ import os, sys, shutil
+ pn = d.getVar('PN', True)
-python () {
-
- if bb.data.inherits_class('nopackages', d):
+ pn = d.getVar('PN')
+    #If not for target, won't create spdx.
+ if bb.data.inherits_class('nopackages', d) and not pn.startswith('gcc-source'):
return
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
+ assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split()
if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
+ for p in d.getVar("PROVIDES", True).split():
if p != pn:
pn = p
break
-
- # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
- # so avoid archiving source here.
+ # The following: do_fetch, do_unpack and do_patch tasks have been deleted,
+ # so avoid archiving do_spdx here.
if pn.startswith('glibc-locale'):
return
- if (d.getVar('PN') == "libtool-cross"):
+ #if (d.getVar('BPN') == "linux-yocto"):
+ # return
+ if (d.getVar('PN', True) == "libtool-cross"):
return
- if (d.getVar('PN') == "libgcc-initial"):
+ if (d.getVar('PN', True) == "libgcc-initial"):
return
- if (d.getVar('PN') == "shadow-sysroot"):
+ if (d.getVar('PN', True) == "shadow-sysroot"):
return
# We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ if d.getVar('BPN') in ['gcc', 'libgcc'] \
+ and not pn.startswith('gcc-source'):
+ bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
return
+ def hasTask(task):
+ return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
+ d.appendVarFlag('do_get_report', 'depends', ' %s:do_spdx_creat_tarball' % pn)
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
+ bb.build.addtask('do_get_report', 'do_configure', 'do_patch', d)
+ bb.build.addtask('do_spdx', 'do_configure', 'do_get_report', d)
- spdx_outdir = d.getVar('SPDX_OUTDIR')
- spdx_workdir = d.getVar('SPDX_WORKDIR')
- spdx_temp_dir = os.path.join(spdx_workdir, "temp")
- temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
-
- info = {}
- info['workdir'] = (d.getVar('WORKDIR') or "")
- info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
if not os.path.exists( manifest_dir ):
bb.utils.mkdirhier( manifest_dir )
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
+ info = {}
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if d.getVar('SAVE_SPDX_ACHIVE'):
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_spdx_creat_tarball' % pn)
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
+
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ sstatefile = os.path.join(spdx_outdir, spdx_name )
if os.path.exists(info['outfile']):
bb.note(info['pn'] + "spdx file has been exist, do nothing")
return
@@ -86,276 +108,83 @@ python () {
create_manifest(info,sstatefile)
return
- def hasTask(task):
- return bool(d.getVarFlag(task, "task", False)) and not bool(d.getVarFlag(task, "noexec", False))
-
- if d.getVar('PACKAGES'):
- # Some recipes do not have any packaging tasks
- if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb"):
- d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
- bb.build.addtask('do_get_report', 'do_configure', 'do_patch' , d)
- bb.build.addtask('do_spdx', 'do_configure', 'do_get_report', d)
-}
-
-python do_get_report () {
- import os, sys, shutil
-
- if bb.data.inherits_class('nopackages', d):
- return
-
- pn = d.getVar('PN')
- assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
- if pn in assume_provided:
- for p in d.getVar("PROVIDES").split():
- if p != pn:
- pn = p
- break
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
- # The following: do_fetch, do_unpack and do_patch tasks have been deleted,
- # so avoid archiving do_spdx here.
- if pn.startswith('glibc-locale'):
- return
- if (d.getVar('PN') == "libtool-cross"):
- return
- if (d.getVar('PN') == "libgcc-initial"):
- return
- if (d.getVar('PN') == "shadow-sysroot"):
- return
-
spdx_outdir = d.getVar('SPDX_OUTDIR')
- spdx_workdir = d.getVar('SPDX_WORKDIR')
- spdx_temp_dir = os.path.join(spdx_workdir, "temp")
- temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
-
- info = {}
- info['workdir'] = (d.getVar('WORKDIR', True) or "")
- info['pn'] = (d.getVar( 'PN', True ) or "")
- info['pv'] = (d.getVar( 'PV', True ) or "")
- info['package_download_location'] = (d.getVar( 'SRC_URI', True ) or "")
- if info['package_download_location'] != "":
- info['package_download_location'] = info['package_download_location'].split()[0]
- info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
- info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
- info['creator'] = {}
- info['creator']['Tool'] = (d.getVar('CREATOR_TOOL', True) or '')
- info['license_list_version'] = (d.getVar('LICENSELISTVERSION', True) or '')
- info['package_homepage'] = (d.getVar('HOMEPAGE', True) or "")
- info['package_summary'] = (d.getVar('SUMMARY', True) or "")
- info['package_summary'] = info['package_summary'].replace("\n","")
- info['package_summary'] = info['package_summary'].replace("'"," ")
- info['package_contains'] = (d.getVar('CONTAINED', True) or "")
- info['package_static_link'] = (d.getVar('STATIC_LINK', True) or "")
- info['modified'] = "false"
- info['token'] = (d.getVar('TOKEN', True) or "")
-
- srcuri = d.getVar("SRC_URI", False).split()
- length = len("file://")
- for item in srcuri:
- if item.startswith("file://"):
- item = item[length:]
- if item.endswith(".patch") or item.endswith(".diff"):
- info['modified'] = "true"
-
- manifest_dir = (d.getVar('SPDX_DEPLOY_DIR', True) or "")
- if not os.path.exists( manifest_dir ):
- bb.utils.mkdirhier( manifest_dir )
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
- info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
- sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
-
- spdx_get_src(d)
-
- bb.note('SPDX: Archiving the patched source...')
- if os.path.isdir(spdx_temp_dir):
- for f_dir, f in list_files(spdx_temp_dir):
- temp_file = os.path.join(spdx_temp_dir,f_dir,f)
- shutil.copy(temp_file, temp_dir)
- shutil.rmtree(spdx_temp_dir)
- d.setVar('WORKDIR', spdx_workdir)
- info['sourcedir'] = spdx_workdir
- git_path = "%s/git/.git" % info['sourcedir']
- if os.path.exists(git_path):
- remove_dir_tree(git_path)
- tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
-
- ## get everything from cache. use it to decide if
- ## something needs to be rerun
- if not os.path.exists(spdx_outdir):
- bb.utils.mkdirhier(spdx_outdir)
- cur_ver_code = get_ver_code(spdx_workdir).split()[0]
- ## Get spdx file
- bb.note(' run fossology rest api ...... ')
- if not os.path.isfile(tar_name):
- bb.warn(info['pn'] + "has no source, do nothing")
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ sstatefile = os.path.join(spdx_outdir, spdx_name )
+ if os.path.exists(info['outfile']):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
return
- folder_id = get_folder_id(d)
- if invoke_rest_api(d, tar_name, sstatefile, folder_id) == False:
- bb.warn(info['pn'] + ": Get spdx file fail, please check fossology server.")
- remove_file(tar_name)
- return False
- if get_cached_spdx(sstatefile) != None:
- write_cached_spdx( info,sstatefile,cur_ver_code )
- ## CREATE MANIFEST(write to outfile )
+ if os.path.exists( sstatefile ):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
create_manifest(info,sstatefile)
- else:
- bb.warn(info['pn'] + ': Can\'t get the spdx file ' + '. Please check fossology server.')
- remove_file(tar_name)
+ return
+
+ if d.getVar('PACKAGES') or pn.startswith('gcc-source'):
+ # Some recipes do not have any packaging tasks
+ if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb") or pn.startswith('gcc-source'):
+ d.appendVarFlag('do_get_report', 'depends', ' %s:do_spdx_creat_tarball' % pn)
+ d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
+ bb.build.addtask('do_get_report', 'do_configure', 'do_patch', d)
+ bb.build.addtask('do_spdx', 'do_configure', 'do_get_report', d)
}
-def get_folder_id_by_name(d, folder_name):
+def has_upload(d, tar_file, folder_id):
import os
import subprocess
import json
+ i = 0
- no_proxy = (d.getVar('NO_PROXY', True) or "")
-
- server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
- if server_url == "":
- bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
- raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
-
- token = (d.getVar('TOKEN', True) or "")
- if token == "":
- bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
- raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
-
- rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/folders" \
- + " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy " + no_proxy
- bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
- try:
- all_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
- except subprocess.CalledProcessError as e:
- bb.error(d.getVar('PN', True) + ": Get folder list failed: \n%s" % e.output.decode("utf-8"))
- return False
- all_folder = str(all_folder, encoding = "utf-8")
- bb.note("all_folder list= " + all_folder)
- all_folder = json.loads(all_folder)
- bb.note("len of all_folder = ")
- bb.note(str(len(all_folder)))
- if len(all_folder) == 0:
- bb.note("Can not get folder list.")
- return False
- bb.note("all_folder[0][name] = ")
- bb.note(all_folder[0]["name"])
- for i in range(0, len(all_folder)):
- if all_folder[i]["name"] == folder_name:
- bb.note("Find " + folder_name + "in fossology server ")
- return all_folder[i]["id"]
- return False
-
-def create_folder(d, folder_name):
- import os
- import subprocess
-
- no_proxy = (d.getVar('NO_PROXY', True) or "")
- server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
- if server_url == "":
- bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
- raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
-
- token = (d.getVar('TOKEN', True) or "")
- if token == "":
- bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
- raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
-
- rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/folders" \
- + " -H \'parentFolder: 1\'" \
- + " -H \'folderName: " + folder_name + "\'" \
- + " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy " + no_proxy
- bb.note("Invoke rest_api_cmd = " + rest_api_cmd)
- try:
- add_folder = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
- except subprocess.CalledProcessError as e:
- bb.error(d.getVar('PN', True) + ": Added folder failed: \n%s" % e.output.decode("utf-8"))
- return False
-
- add_folder = str(add_folder, encoding = "utf-8")
- bb.note("add_folder = ")
- bb.note(add_folder)
- add_folder = add_folder.replace("null", "None")
- add_folder = eval(add_folder)
- if str(add_folder["code"]) == "201":
- bb.note("add_folder = " + folder_name)
- return add_folder["message"]
- elif str(add_folder["code"]) == "200":
- bb.note("Folder : " + folder_name + "has been created.")
- return get_folder_id_by_name(d, folder_name)
- else:
- bb.error(d.getVar('PN', True) + ": Added folder failed, please check your fossology server.")
- return False
-
-def get_folder_id(d):
-
- if d.getVar('FOLDER_NAME', False):
- folder_name = d.getVar('FOLDER_NAME')
- folder_id = create_folder(d, folder_name)
- else:
- folder_id = (d.getVar('FOLDER_ID', True) or "1")
-
- bb.note("Folder Id = " + str(folder_id))
- return str(folder_id)
-
-def has_upload(d, tar_file, folder_id):
- import os
- import subprocess
-
(work_dir, file_name) = os.path.split(tar_file)
- no_proxy = (d.getVar('NO_PROXY', True) or "")
+
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
- folder_name = d.getVar('FOLDER_NAME')
-
token = (d.getVar('TOKEN', True) or "")
if token == "":
bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
- rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/uploads" \
- + " -H \'folderName: " + folder_name + "\'" \
+ rest_api_cmd = "curl -Ss -X GET " + server_url + "/api/v1/search" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy " + no_proxy
+ + " -H \"filename: " + file_name + "\"" \
+ + " --noproxy 127.0.0.1"
bb.note("Invoke rest_api_cmd = " + rest_api_cmd )
try:
- upload_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ upload_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
except subprocess.CalledProcessError as e:
bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
- return False
-
- upload_output = str(upload_output, encoding = "utf-8")
- upload_output = upload_output.replace("null", "None")
- upload_output = eval(upload_output)
+ return -1
bb.note("upload_output = ")
- print(upload_output)
- bb.note("len of upload_output = ")
+ bb.note(str(upload_output))
+ bb.note("has_upload: len of upload_output = ")
+ upload_output = json.loads(upload_output)
bb.note(str(len(upload_output)))
if len(upload_output) == 0:
- bb.note("The upload of fossology is 0.")
- return False
- bb.note("upload_output[0][uploadname] = ")
- bb.note(upload_output[0]["uploadname"])
- bb.note("len of upload_output = ")
- bb.note(str(len(upload_output)))
- for i in range(0, len(upload_output)):
- if upload_output[i]["uploadname"] == file_name and str(upload_output[i]["folderid"]) == str(folder_id):
- bb.warn("Find " + file_name + " in fossology server. So, will not upload again.")
- return upload_output[i]["id"]
- return False
-
-def upload(d, tar_file, folder):
+ bb.note(file_name + "hasn't been uploaded yet.")
+ return -1
+ while i < len(upload_output):
+ if (upload_output[0]["upload"]["folderid"] == folder_id):
+ bb.note(file_name + "has been uploaded, uploadId = :" + str(upload_output[0]["upload"]["id"]))
+ return upload_output[0]["upload"]["id"]
+ i = i+1
+ bb.note(file_name + "hasn't been uploaded yet.")
+ return -1
+
+def upload(d, tar_file, folder_id):
import os
import subprocess
+ import json
delaytime = 50
i = 0
- no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -367,40 +196,78 @@ def upload(d, tar_file, folder):
raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/uploads" \
- + " -H \"folderId: " + folder + "\"" \
+ + " -H \'folderId: " + folder_id + "\'" \
+ " -H \"Authorization: Bearer " + token + "\"" \
+ " -H \'uploadDescription: created by REST\'" \
+ " -H \'public: public\'" \
+ + " -H \'uploadType:file\'" \
+ " -H \'Content-Type: multipart/form-data\'" \
+ " -F \'fileInput=@\"" + tar_file + "\";type=application/octet-stream\'" \
- + " --noproxy " + no_proxy
+ + " --noproxy 127.0.0.1"
bb.note("Upload : Invoke rest_api_cmd = " + rest_api_cmd )
- while i < 10:
+ while i < 1:
time.sleep(delaytime)
try:
upload = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
- bb.error(d.getVar('PN', True) + ": Upload failed: \n%s" % e.output.decode("utf-8"))
+ bb.error("Upload failed: \n%s" % e.output.decode("utf-8"))
return False
- upload = str(upload, encoding = "utf-8")
bb.note("Upload = ")
- bb.note(upload)
- upload = upload.replace("null", "None")
+ bb.note(str(upload))
upload = eval(upload)
- if str(upload["code"]) == "201":
+ if upload["code"] == 201:
return upload["message"]
i += 1
- bb.warn(d.getVar('PN', True) + ": Upload is fail, please check your fossology server.")
+ bb.note("Upload is fail, please check your fossology server.")
return False
+def has_analysis(d, file_name, upload_id):
+ import os
+ import subprocess
+ import json
+ delaytime = 100
+ i = 0
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/jobs?upload=" + str(upload_id) \
+ + " -H \"Authorization: Bearer " + token + "\""
+ bb.note("get analysis status : Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ analysis_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ bb.note("upload_output = ")
+ bb.note(str(analysis_output))
+ bb.note("has_upload: len of upload_output = ")
+ analysis_output = json.loads(analysis_output)
+ bb.note(str(len(analysis_output)))
+ if len(analysis_output) == 0:
+ bb.note(file_name + " hasn't been analysis yet.")
+ return False
+ bb.note(analysis_output[0]["status"])
+ if analysis_output[0]["status"] == "Completed":
+ bb.note(file_name + "has been analysis.")
+ return True
+ else:
+ bb.note(file_name + "has not been analysis.")
+ return False
+
def analysis(d, folder_id, upload_id):
import os
import subprocess
- delaytime = 50
+ delaytime = 100
i = 0
-
- bb.note("Begin to analysis.")
- no_proxy = (d.getVar('NO_PROXY', True) or "")
+
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -412,46 +279,44 @@ def analysis(d, folder_id, upload_id):
raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/jobs" \
- + " -H \"folderId: " + str(folder_id) + "\"" \
- + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \'folderId: " + str(folder_id) + "\'" \
+ + " -H \'uploadId: " + str(upload_id) + "\'" \
+ " -H \"Authorization: Bearer " + token + "\"" \
+ " -H \'Content-Type: application/json\'" \
+ " --data \'{\"analysis\": {\"bucket\": true,\"copyright_email_author\": true,\"ecc\": true, \"keyword\": true,\"mime\": true,\"monk\": true,\"nomos\": true,\"package\": true},\"decider\": {\"nomos_monk\": true,\"bulk_reused\": true,\"new_scanner\": true}}\'" \
- + " --noproxy " + no_proxy
+ + " --noproxy 127.0.0.1"
+
bb.note("Analysis : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
try:
time.sleep(delaytime)
- analysis = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ analysis = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
except subprocess.CalledProcessError as e:
bb.error("Analysis failed: \n%s" % e.output.decode("utf-8"))
return False
time.sleep(delaytime)
- analysis = str(analysis, encoding = "utf-8")
bb.note("analysis = ")
- bb.note(analysis)
- analysis = analysis.replace("null", "None")
+ bb.note(str(analysis))
analysis = eval(analysis)
- if str(analysis["code"]) == "201":
+ if analysis["code"] == 201:
return analysis["message"]
- elif str(analysis["code"]) == "404":
- bb.warn(d.getVar('PN', True) + ": analysis is still not complete.")
+ elif analysis["code"] == 404:
+ bb.note("analysis is still not complete.")
time.sleep(delaytime*2)
else:
return False
i += 1
- bb.warn(d.getVar('PN', True) + ": Analysis is fail, will try again.")
- bb.warn(d.getVar('PN', True) + ": Analysis is fail, please check your fossology server.")
+ bb.note("Analysis is fail, will try again.")
+ bb.note("Analysis is fail, please check your fossology server.")
return False
def trigger(d, folder_id, upload_id):
import os
import subprocess
- delaytime = 50
+ import json
+ delaytime = 100
i = 0
- bb.note("Trigger to create spdx file.")
- no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -461,44 +326,39 @@ def trigger(d, folder_id, upload_id):
if token == "":
bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
-
rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " -H \"uploadId: " + str(upload_id) + "\"" \
+ + " -H \'uploadId: " + str(upload_id) + "\'" \
+ " -H \'reportFormat: spdx2tv\'" \
- + " --noproxy " + no_proxy
+ + " --noproxy 127.0.0.1"
bb.note("trigger : Invoke rest_api_cmd = " + rest_api_cmd )
while i < 10:
time.sleep(delaytime)
try:
- trigger = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
+ trigger = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
except subprocess.CalledProcessError as e:
- bb.error(d.getVar('PN', True) + ": Trigger failed: \n%s" % e.output.decode("utf-8"))
+ bb.error("Trigger failed: \n%s" % e.output.decode("utf-8"))
return False
time.sleep(delaytime)
- trigger = str(trigger, encoding = "utf-8")
- trigger = trigger.replace("null", "None")
- trigger = eval(trigger)
- bb.note("trigger id = ")
- bb.note(str(trigger["message"]))
- if str(trigger["code"]) == "201":
+ bb.note("trigger = ")
+ bb.note(str(trigger))
+ trigger = json.loads(trigger)
+ if trigger["code"] == 201:
return trigger["message"].split("/")[-1]
i += 1
time.sleep(delaytime * 2)
- bb.warn(d.getVar('PN', True) + ": Trigger is fail, will try again.")
- bb.warn(d.getVar('PN', True) + ": Trigger is fail, please check your fossology server.")
+ bb.note("Trigger is fail, will try again.")
+ bb.note("Trigger is fail, please check your fossology server.")
return False
def get_spdx(d, report_id, spdx_file):
import os
import subprocess
import time
- delaytime = 50
- complete = False
+ delaytime = 100
+ empty = True
i = 0
- bb.note("Begin to download spdx file.")
- no_proxy = (d.getVar('NO_PROXY', True) or "")
server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
if server_url == "":
bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
@@ -511,18 +371,17 @@ def get_spdx(d, report_id, spdx_file):
rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/report/" + report_id \
+ " -H \'accept: text/plain\'" \
+ " -H \"Authorization: Bearer " + token + "\"" \
- + " --noproxy " + no_proxy
+ + " --noproxy 127.0.0.1"
bb.note("get_spdx : Invoke rest_api_cmd = " + rest_api_cmd )
- while i < 10:
+ while i < 3:
time.sleep(delaytime)
file = open(spdx_file,'wt')
try:
- p = subprocess.Popen(rest_api_cmd, shell=True, universal_newlines=True, stdout=file).wait()
+ p = subprocess.Popen(rest_api_cmd, shell=True, universal_newlines=True, stdout=file)
except subprocess.CalledProcessError as e:
- bb.error("Get spdx failed: \n%s. Please check fossology server." % e.output.decode("utf-8"))
- file.close()
- os.remove(spdx_file)
+ bb.error("Get spdx failed: \n%s" % e.output.decode("utf-8"))
return False
+ ret_code = p.wait()
file.flush()
time.sleep(delaytime)
file.close()
@@ -532,51 +391,159 @@ def get_spdx(d, report_id, spdx_file):
line = file.readline()
while line:
if "LicenseID:" in line:
- complete = True
+ empty = False
break
line = file.readline()
file.close()
- if complete == False:
- bb.warn("license info not complete, try agin.")
- else:
- return True
+ if empty == True:
+ bb.note("Hasn't get license info.")
+ return True
else:
- bb.warn(d.getVar('PN', True) + ": Get the first line is " + first_line + ". Try agin")
+ bb.note("Get the first line is " + first_line)
+ bb.note("spdx is not correct, will try again.")
+ file.close()
os.remove(spdx_file)
-
- file.close()
i += 1
- delaytime = delaytime + 20
- time.sleep(delaytime)
-
- file.close()
- bb.warn(d.getVar('PN', True) + ": SPDX file maybe have something wrong, please confirm.")
+ time.sleep(delaytime*2)
+ bb.note("Get spdx failed, Please check your fossology server.")
-def invoke_rest_api(d, tar_file, spdx_file, folder_id):
+def get_folder_id(d):
import os
- import time
+ import subprocess
+ import json
+ delaytime = 100
i = 0
-
- bb.note("invoke fossology REST API : tar_file = %s " % tar_file)
+
+ server_url = (d.getVar('FOSSOLOGY_SERVER', True) or "")
+ if server_url == "":
+ bb.note("Please set fossology server URL by setting FOSSOLOGY_SERVER!\n")
+ raise OSError(errno.ENOENT, "No setting of FOSSOLOGY_SERVER")
+
+ token = (d.getVar('TOKEN', True) or "")
+ if token == "":
+ bb.note("Please set token of fossology server by setting TOKEN!\n" + srcPath)
+ raise OSError(errno.ENOENT, "No setting of TOKEN comes from fossology server.")
+
+ folder_name = (d.getVar('FOLDER_NAME', True) or "")
+ if folder_name == "":
+ bb.note("Please set FOLDER_NAME !\n")
+ raise OSError(errno.ENOENT, "No setting of FOLDER_NAME.")
+
+ rest_api_cmd = "curl -k -s -S -X POST " + server_url + "/api/v1/folders" \
+ + " -H \'parentFolder: 1\'" \
+ + " -H \"folderName: " + folder_name + "\"" \
+ + " -H \"Authorization: Bearer " + token + "\""
+ bb.note("POST folder status : Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ folder_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ folder_output = json.loads(folder_output)
+ bb.note(str(folder_output))
+ bb.note(str(len(folder_output)))
+ if len(folder_output) < 1:
+ bb.error("create folder fail.")
+ return 0
+ if (folder_output["code"] == 201 or folder_output["code"] == 200):
+ rest_api_cmd = "curl -k -s -S -X GET " + server_url + "/api/v1/folders" \
+ + " -H \"Authorization: Bearer " + token + "\""
+ bb.note("GET folder status : Invoke rest_api_cmd = " + rest_api_cmd )
+ try:
+ folder_output = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8")
+ except subprocess.CalledProcessError as e:
+ bb.error("curl failed: \n%s" % e.output.decode("utf-8"))
+ return False
+ folder_output = json.loads(folder_output)
+ bb.note(str(folder_output))
+ bb.note(str(len(folder_output)))
+ while i < len(folder_output):
+ if (folder_output[i]["name"] == folder_name):
+ bb.note("The id of " + folder_name + "is : " + str(folder_output[i]["id"]))
+ return folder_output[i]["id"]
+ i = i+1
+ else:
+ bb.error("Creat folder failed. Please check fossology server.")
+ return 0
+
+ bb.error("Not find created folder. Please try again.")
+ return 0
+
+python do_get_report(){
+ import os, sys, json, shutil, time
+ import logging
+
+ i = 0
+
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
+ spdx_temp_dir = os.path.join(spdx_workdir, "temp")
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+ cur_ver_code = get_ver_code(spdx_workdir).split()[0]
+ info = {}
+
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+
+ pn = d.getVar('PN')
+ info['workdir'] = (d.getVar('WORKDIR') or "")
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "").replace('-', '+')
+ info['pr'] = (d.getVar( 'PR') or "")
+ if (d.getVar('BPN') == "perf"):
+ info['pv'] = d.getVar("KERNEL_VERSION").split("-")[0]
+ if 'AUTOINC' in info['pv']:
+ info['pv'] = info['pv'].replace("AUTOINC", "0")
+
+ if pn.startswith('gcc-source'):
+ spdx_name = "gcc-" + info['pv'] + "-" + info['pr'] + ".spdx"
+ else:
+ spdx_name = info['pn'] + "-" + info['pv'] + "-" + info['pr'] + ".spdx"
+
+ info['package_download_location'] = (d.getVar( 'SRC_URI') or "")
+ if info['package_download_location'] != "":
+ info['package_download_location'] = info['package_download_location'].split()[0]
+ info['spdx_version'] = (d.getVar('SPDX_VERSION') or '')
+ info['outfile'] = os.path.join(manifest_dir, spdx_name )
+ spdx_file = os.path.join(spdx_outdir, spdx_name )
+ if os.path.exists(info['outfile']):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ return
+ if os.path.exists( spdx_file ):
+ bb.note(info['pn'] + "spdx file has been exist, do nothing")
+ create_manifest(info,spdx_file)
+ return
+ info['data_license'] = (d.getVar('DATA_LICENSE') or '')
+ info['creator'] = {}
+ info['creator']['Tool'] = (d.getVar('CREATOR_TOOL') or '')
+ info['license_list_version'] = (d.getVar('LICENSELISTVERSION') or '')
+ info['package_homepage'] = (d.getVar('HOMEPAGE') or "")
+ info['package_summary'] = (d.getVar('SUMMARY') or "")
+ info['package_summary'] = info['package_summary'].replace("\n","")
+ info['package_summary'] = info['package_summary'].replace("'"," ")
+ info['package_contains'] = (d.getVar('CONTAINED') or "")
+ info['package_static_link'] = (d.getVar('STATIC_LINK') or "")
+ info['modified'] = "false"
+ srcuri = d.getVar("SRC_URI", False).split()
+ length = len("file://")
+ for item in srcuri:
+ if item.startswith("file://"):
+ item = item[length:]
+ if item.endswith(".patch") or item.endswith(".diff"):
+ info['modified'] = "true"
+ tar_file = get_tarball_name(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
+
+ folder_id = str(get_folder_id(d))
upload_id = has_upload(d, tar_file, folder_id)
- if upload_id == False:
- bb.note("This OSS has not been scanned. So upload it to fossology server.")
+ if upload_id == -1:
+ bb.note("Upload it to fossology server.")
upload_id = upload(d, tar_file, folder_id)
if upload_id == False:
return False
- else:
- report_id = trigger(d, folder_id, upload_id)
- if report_id == False:
- bb.note(d.getVar('PN', True) + ": Although has uploaded,trigger fail. Maybe hasn't analysised.")
- else:
- spdx2tv = get_spdx(d, report_id, spdx_file)
- if spdx2tv == False:
- bb.note(d.getVar('PN', True) + ": Although has uploaded,get report fail. Maybe hasn't analysised.")
- else:
- return True
-
- if analysis(d, folder_id, upload_id) == False:
- return False
+ if has_analysis(d, folder_id, upload_id) == False:
+ if analysis(d, folder_id, upload_id) == False:
+ return False
while i < 10:
i += 1
report_id = trigger(d, folder_id, upload_id)
@@ -584,23 +551,16 @@ def invoke_rest_api(d, tar_file, spdx_file, folder_id):
return False
spdx2tv = get_spdx(d, report_id, spdx_file)
if spdx2tv == False:
- bb.warn(d.getVar('PN', True) + ": get_spdx is unnormal. Will try again!")
+ bb.note("get_spdx is unnormal. Will try again!")
else:
return True
- bb.warn("get_spdx of %s is unnormal. Please confirm!")
+ print("get_spdx of %s is unnormal. Please check your fossology server!")
return False
-
-SSTATETASKS += "do_spdx"
-python do_spdx_setscene () {
- sstate_setscene(d)
-}
-addtask do_spdx_setscene
-do_spdx () {
- echo "Create spdx file."
}
-addtask do_get_report after do_patch
-addtask do_spdx
+addtask do_spdx_creat_tarball after do_patch
+addtask do_get_report after do_spdx_creat_tarball
+addtask do_spdx before do_package after do_get_report
do_build[recrdeptask] += "do_spdx"
do_populate_sdk[recrdeptask] += "do_spdx"
diff --git a/classes/scancode-tk.bbclass b/classes/scancode-tk.bbclass
index 9fa5f69..7ec5835 100644
--- a/classes/scancode-tk.bbclass
+++ b/classes/scancode-tk.bbclass
@@ -10,19 +10,13 @@
COPYLEFT_RECIPE_TYPES ?= 'target nativesdk'
inherit copyleft_filter
-
inherit spdx-common
+HOSTTOOLS += "scancode"
do_get_report[dirs] = "${SPDX_OUTDIR}"
CREATOR_TOOL = "scancode-tk.bbclass in meta-spdxscanner"
-export EXTRACTCODE_LIBARCHIVE_PATH = "${STAGING_LIBDIR_NATIVE}/libarchive.so"
-export EXTRACTCODE_7Z_PATH = "${STAGING_BINDIR_NATIVE}/7z"
-export TYPECODE_LIBMAGIC_PATH = "${STAGING_LIBDIR_NATIVE}/libmagic.so"
-export TYPECODE_LIBMAGIC_DB_PATH = "${STAGING_DATADIR_NATIVE}/magic.mgc"
-
-
python () {
#If not for target, won't creat spdx.
if bb.data.inherits_class('nopackages', d):
@@ -56,7 +50,7 @@ python () {
info = {}
info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
+ info['pv'] = (d.getVar( 'PV') or "").replace('-', '+')
manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
if not os.path.exists( manifest_dir ):
@@ -78,7 +72,6 @@ python () {
if d.getVar('PACKAGES'):
# Some recipes do not have any packaging tasks
if hasTask("do_package_write_rpm") or hasTask("do_package_write_ipk") or hasTask("do_package_write_deb"):
- d.appendVarFlag('do_get_report', 'depends', ' scancode-toolkit-native:do_populate_sysroot')
d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
d.appendVarFlag('do_get_report', 'depends', ' %s:do_spdx_get_src' % pn)
d.appendVarFlag('do_spdx', 'depends', ' %s:do_get_report' % pn)
@@ -112,7 +105,7 @@ python do_get_report(){
info = {}
info['workdir'] = (d.getVar('WORKDIR') or "")
info['pn'] = (d.getVar( 'PN') or "")
- info['pv'] = (d.getVar( 'PV') or "")
+ info['pv'] = (d.getVar( 'PV') or "").replace('-', '+')
info['package_download_location'] = (d.getVar( 'SRC_URI') or "")
if info['package_download_location'] != "":
info['package_download_location'] = info['package_download_location'].split()[0]
@@ -137,6 +130,13 @@ python do_get_report(){
info['package_contains'] = (d.getVar('CONTAINED') or "")
info['package_static_link'] = (d.getVar('STATIC_LINK') or "")
info['modified'] = "false"
+ info['external_refs'] = get_external_refs(d)
+ info['purpose'] = get_pkgpurpose(d)
+ info['release_date'] = (d.getVar('REALASE_DATE') or "")
+ info['build_time'] = get_build_date(d)
+ info['depends_on'] = get_depends_on(d)
+ info['pkg_spdx_id'] = get_spdxid_pkg(d)
+
srcuri = d.getVar("SRC_URI", False).split()
length = len("file://")
for item in srcuri:
@@ -149,26 +149,22 @@ python do_get_report(){
git_path = "%s/git/.git" % info['sourcedir']
if os.path.exists(git_path):
remove_dir_tree(git_path)
- invoke_scancode(info['sourcedir'],spdx_file)
+ invoke_scancode(d, info['sourcedir'],spdx_file)
write_cached_spdx(info,spdx_file,cur_ver_code)
create_manifest(info,spdx_file)
}
-def invoke_scancode(OSS_src_dir, spdx_file):
+def invoke_scancode(d, OSS_src_dir, spdx_file):
import subprocess
import string
import json
import codecs
- import logging
-
- logger = logging.getLogger()
- logger.setLevel(logging.INFO)
- logging.basicConfig(level=logging.INFO)
+ processes = d.getVar("BB_NUMBER_THREADS")
path = os.getenv('PATH')
- scancode_cmd = bb.utils.which(os.getenv('PATH'), "scancode")
- scancode_cmd = scancode_cmd + " -lpci --spdx-tv " + spdx_file + " " + OSS_src_dir
+ scancode_cmd = "scancode -lpci --max-in-memory 0 --processes " + processes + " --spdx-tv " + spdx_file + " " + OSS_src_dir
+ bb.note ("scancode_cmd = " + scancode_cmd)
print(scancode_cmd)
try:
subprocess.check_output(scancode_cmd,
diff --git a/classes/spdx-common.bbclass b/classes/spdx-common.bbclass
index cb97c5b..d131836 100644
--- a/classes/spdx-common.bbclass
+++ b/classes/spdx-common.bbclass
@@ -31,9 +31,15 @@ def excluded_package(d, pn):
if p != pn:
pn = p
break
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- #bb.debug(1, 'spdx: There is a bug in the scan of %s, skip it.' % pn)
+ # We just archive gcc-source for all the gcc related recipes
+ if d.getVar('BPN') in ['gcc', 'libgcc'] \
+ and not pn.startswith('gcc-source'):
+ bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn)
return True
+ # TARGET_SYS in ARCHIVER_ARCH will break the stamp for gcc-source in multiconfig
+ if pn.startswith('gcc-source'):
+ d.setVar('ARCHIVER_ARCH', "allarch")
+
# The following: do_fetch, do_unpack and do_patch tasks have been deleted,
# so avoid archiving do_spdx here.
# -native is for the host aka during the build
@@ -65,12 +71,6 @@ def get_tar_name(d, suffix):
get the name of tarball
"""
- # Make sure we are only creating a single tarball for gcc sources
- #if (d.getVar('SRC_URI') == ""):
- # return
- # For the kernel archive, srcdir may just be a link to the
- # work-shared location. Use os.path.realpath to make sure
- # that we archive the actual directory and not just the link.
if suffix:
filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
else:
@@ -84,12 +84,6 @@ def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
"""
import tarfile, shutil
- # Make sure we are only creating a single tarball for gcc sources
- #if (d.getVar('SRC_URI') == ""):
- # return
- # For the kernel archive, srcdir may just be a link to the
- # work-shared location. Use os.path.realpath to make sure
- # that we archive the actual directory and not just the link.
srcdir = os.path.realpath(srcdir)
bb.utils.mkdirhier(ar_outdir)
@@ -100,6 +94,19 @@ def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
tar.add(srcdir, arcname=os.path.basename(srcdir), filter=exclude_useless_paths)
tar.close()
shutil.rmtree(srcdir)
+
+ info = {}
+ info['pn'] = (d.getVar( 'PN') or "")
+ info['pv'] = (d.getVar( 'PKGV') or "")
+ info['pr'] = (d.getVar( 'PR') or "")
+
+ if d.getVar('SAVE_SPDX_ACHIVE'):
+ manifest_dir = (d.getVar('SPDX_DEPLOY_DIR') or "")
+ if not os.path.exists( manifest_dir ):
+ bb.utils.mkdirhier( manifest_dir )
+ info['outfile'] = os.path.join(manifest_dir, filename)
+ create_manifest(info,tarname)
+
return tarname
def get_tarball_name(d, srcdir, suffix, ar_outdir):
@@ -142,6 +149,11 @@ def spdx_get_src(d):
bb.utils.mkdirhier(src_dir)
if bb.data.inherits_class('kernel',d):
share_src = d.getVar('STAGING_KERNEL_DIR')
+ if pn.startswith('gcc-source'):
+ gcc_source_path = d.getVar('TMPDIR') + "/work-shared"
+ gcc_pv = d.getVar('PV')
+ gcc_pr = d.getVar('PR')
+ share_src = gcc_source_path + "/gcc-" + gcc_pv + "-" + gcc_pr + "/gcc-" + gcc_pv + "/"
cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
@@ -228,9 +240,15 @@ def find_infoinlicensefile(sstatefile):
continue
license = line_spdx.split(": ")[1]
license = license.split("\n")[0]
+ bb.note("file path = " + file_path)
file_path = file_path.split("\n")[0]
+ bb.note("file path = " + file_path)
path_list = file_path.split('/')
- if len(file_path.split('/')) < 5:
+ if len(file_path.split('/')) < 3:
+ file_path_simple = file_path.split('/',1)[1]
+ elif len(file_path.split('/')) < 4:
+ file_path_simple = file_path.split('/',2)[2]
+ elif len(file_path.split('/')) < 5:
file_path_simple = file_path.split('/',3)[3]
else:
file_path_simple = file_path.split('/',4)[4]
@@ -248,6 +266,7 @@ def find_infoinlicensefile(sstatefile):
## Add necessary information into spdx file
def write_cached_spdx( info,sstatefile, ver_code ):
import subprocess
+ import re
infoinlicensefile=""
@@ -256,11 +275,22 @@ def write_cached_spdx( info,sstatefile, ver_code ):
key_word + replace_info + "#' "
return dest_sed_cmd
+ def sed_replace_aline(dest_sed_cmd,origin_line,dest_line):
+ dest_sed_cmd = dest_sed_cmd + "-e 's#^" + origin_line + ".*#" + \
+ dest_line + "#' "
+ return dest_sed_cmd
+
def sed_insert(dest_sed_cmd,key_word,new_line):
dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
+ r"/a\\" + new_line + "' "
return dest_sed_cmd
+ def sed_insert_front(dest_sed_cmd,key_word,new_line):
+ dest_sed_cmd = dest_sed_cmd + "-e '/^" + key_word \
+ + r"/i\\" + new_line + "' "
+ return dest_sed_cmd
+
+
## Delet ^M in doc format
subprocess.call("sed -i -e 's#\r##g' %s" % sstatefile, shell=True)
@@ -278,7 +308,10 @@ def write_cached_spdx( info,sstatefile, ver_code ):
sed_cmd = sed_replace(sed_cmd,"Creator: Tool: ",info['creator']['Tool'])
## Package level information
+ sed_cmd = sed_replace_aline(sed_cmd, "SPDXVersion: SPDX-2.2", "SPDXVersion: SPDX-2.3")
sed_cmd = sed_replace(sed_cmd, "PackageName: ", info['pn'])
+ sed_cmd = sed_replace_aline(sed_cmd, "SPDXID: SPDXRef-", "SPDXID: SPDXRef-" + info['pkg_spdx_id'])
+ sed_cmd = sed_replace(sed_cmd, "Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-", info['pkg_spdx_id'])
sed_cmd = sed_insert(sed_cmd, "PackageName: ", "PackageVersion: " + info['pv'])
sed_cmd = sed_replace(sed_cmd, "PackageDownloadLocation: ",info['package_download_location'])
sed_cmd = sed_insert(sed_cmd, "PackageDownloadLocation: ", "PackageHomePage: " + info['package_homepage'])
@@ -291,6 +324,13 @@ def write_cached_spdx( info,sstatefile, ver_code ):
sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " CONTAINS " + contain)
for static_link in info['package_static_link'].split( ):
sed_cmd = sed_insert(sed_cmd, "PackageComment:"," \\n\\n## Relationships\\nRelationship: " + info['pn'] + " STATIC_LINK " + static_link)
+ sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "BuiltDate: " + info['build_time'])
+ sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "ReleaseDate: " + info['release_date'])
+ sed_cmd = sed_insert(sed_cmd, "PackageVerificationCode: ", "PrimaryPackagePurpose: " + info['purpose'])
+ depends = info['depends_on']
+ for depend in re.split(r'\s*[,\s\n\r]\s*', depends):
+ sed_cmd = sed_insert_front(sed_cmd, "PackageCopyrightText: ", "Relationship: SPDXRef-" + info['pn'] + " DEPENDS_ON SPDXRef-" + depend)
+ bb.note("sed_cmd = " + sed_cmd)
sed_cmd = sed_cmd + sstatefile
subprocess.call("%s" % sed_cmd, shell=True)
@@ -301,6 +341,9 @@ def write_cached_spdx( info,sstatefile, ver_code ):
sed_cmd = sed_insert(sed_cmd, "ModificationRecord: ", oneline_infoinlicensefile)
sed_cmd = sed_cmd + sstatefile
subprocess.call("%s" % sed_cmd, shell=True)
+
+ with open(sstatefile, encoding="utf-8", mode="a") as file:
+ file.write(info['external_refs'])
def is_work_shared(d):
pn = d.getVar('PN')
@@ -353,7 +396,7 @@ def get_ver_code(dirname):
try:
stats = os.stat(os.path.join(dirname,f_dir,f))
except OSError as e:
- bb.warn( "Stat failed" + str(e) + "\n")
+ bb.note( "Stat failed" + str(e) + "\n")
continue
chksums.append(hash_file(os.path.join(dirname,f_dir,f)))
ver_code_string = ''.join(chksums).lower()
@@ -382,5 +425,104 @@ python do_spdx_creat_tarball(){
}
# For scancode-tk.bbclass, just
python do_spdx_get_src(){
+ import shutil
+
+ spdx_outdir = d.getVar('SPDX_OUTDIR')
+
+ spdx_workdir = d.getVar('SPDX_WORKDIR')
+ spdx_temp_dir = os.path.join(spdx_workdir, "temp")
+ temp_dir = os.path.join(d.getVar('WORKDIR'), "temp")
+ bb.utils.mkdirhier(spdx_workdir)
+
spdx_get_src(d)
+
+ if os.path.isdir(spdx_temp_dir):
+ for f_dir, f in list_files(spdx_temp_dir):
+ temp_file = os.path.join(spdx_temp_dir,f_dir,f)
+ shutil.copy(temp_file, temp_dir)
+ bb.note("temp_dir = " + spdx_temp_dir)
}
+
+# For SPDX 2.3
+def get_external_refs(d):
+ from oe.cve_check import get_patched_cves
+ external_refs = "##------------------------- \n"
+ external_refs += "## Security Information \n"
+ external_refs += "##------------------------- \n"
+ external_refs += "\"externalRefs\" : ["
+ unpatched_cves = []
+ nvd_link = "https://nvd.nist.gov/vuln/detail/"
+ with bb.utils.fileslocked([d.getVar("CVE_CHECK_DB_FILE_LOCK")], shared=True):
+ if os.path.exists(d.getVar("CVE_CHECK_DB_FILE")):
+ try:
+ patched_cves = get_patched_cves(d)
+ except FileNotFoundError:
+ bb.fatal("Failure in searching patches")
+ ignored, patched, unpatched, status = check_cves(d, patched_cves)
+ if patched or unpatched or (d.getVar("CVE_CHECK_COVERAGE") == "1" and status):
+ cve_data = get_cve_info(d, patched + unpatched + ignored)
+ #cve_write_data(d, patched, unpatched, ignored, cve_data, status)
+ else:
+ bb.note("No CVE database found, skipping CVE check")
+ return " "
+ if not patched+unpatched+ignored:
+ return " "
+
+ for cve in sorted(cve_data):
+ is_patched = cve in patched
+ is_ignored = cve in ignored
+
+ status = "unpatched"
+ if is_ignored:
+ status = "ignored"
+ elif is_patched:
+ status = "fix"
+ else:
+ # default value of status is Unpatched
+ unpatched_cves.append(cve)
+ external_refs += "{\n"
+ external_refs += "\"referenceCategory\" : \"SECURITY\",\n"
+ external_refs += "\"referenceLocator\" : \"https://nvd.nist.gov/vuln/detail/%s\",\n" % cve
+ external_refs += "\"referenceType\" : \"%s\"\n" % status
+ external_refs += "},"
+
+ external_refs += "]"
+ #bb.warn("external_refs = " + external_refs)
+ return external_refs
+
+def get_pkgpurpose(d):
+ section = d.getVar("SECTION")
+ if section in "libs":
+ return "LIBRARY"
+ else:
+ return "APPLICATION "
+
+def get_build_date(d):
+ from datetime import datetime, timezone
+
+ build_time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+ return build_time
+
+def get_depends_on(d):
+ import re
+
+ depends = re.split(r'\s*[\s]\s*',d.getVar("DEPENDS"))
+ depends_spdx = ""
+ for depend in depends:
+ bb.note("depend = " + depend)
+ if depend.endswith("-native"):
+ bb.note("Don't show *-native in depends relationship.\n")
+ else:
+ depends_spdx += depend + ","
+ depends_spdx = depends_spdx.strip(',')
+ return depends_spdx
+
+
+def get_spdxid_pkg(d):
+ if d.getVar("PROVIDES"):
+ pid = d.getVar("PROVIDES")
+ else:
+ pid = d.getVar("PN")
+ bb.note("SPDX ID of pkg = " + pid)
+ return pid
+