aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorLei Maohui <leimaohui@cn.fujitsu.com>2019-12-03 09:01:12 +0900
committerLei Maohui <leimaohui@cn.fujitsu.com>2019-12-03 13:40:24 +0900
commit71cdeedcb911058fc2cdf2fd3339cabee6c562f2 (patch)
tree831da94b8bcf54c3daf26f900d3bad47d80d2f29
parent4cc6e9928473f6a82270ce0b0fafbd65bb6a6ad1 (diff)
downloadmeta-spdxscanner-71cdeedcb911058fc2cdf2fd3339cabee6c562f2.tar.gz
meta-spdxscanner-71cdeedcb911058fc2cdf2fd3339cabee6c562f2.tar.bz2
meta-spdxscanner-71cdeedcb911058fc2cdf2fd3339cabee6c562f2.zip
Fix the following issues.
- Rename scancode.bbclass to scancode-tk.bbclass to avoid the conflict with meta-oe/class/scancode.bbclass. - Added kernel support. Signed-off-by: Lei Maohui <leimaohui@cn.fujitsu.com>
-rw-r--r--classes/fossdriver-host.bbclass12
-rw-r--r--classes/fossology-rest.bbclass54
-rw-r--r--classes/scancode-tk.bbclass (renamed from classes/scancode.bbclass)29
-rw-r--r--classes/spdx-common.bbclass38
4 files changed, 69 insertions, 64 deletions
diff --git a/classes/fossdriver-host.bbclass b/classes/fossdriver-host.bbclass
index 82eee75..0b168a6 100644
--- a/classes/fossdriver-host.bbclass
+++ b/classes/fossdriver-host.bbclass
@@ -38,8 +38,6 @@ python do_spdx () {
# so avoid archiving source here.
if pn.startswith('glibc-locale'):
return
- if (d.getVar('BPN') == "linux-yocto"):
- return
if (d.getVar('PN') == "libtool-cross"):
return
if (d.getVar('PN') == "libgcc-initial"):
@@ -47,6 +45,9 @@ python do_spdx () {
if (d.getVar('PN') == "shadow-sysroot"):
return
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+ bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ return
# We just archive gcc-source for all the gcc related recipes
if d.getVar('BPN') in ['gcc', 'libgcc']:
@@ -108,8 +109,12 @@ python do_spdx () {
for f_dir, f in list_files(spdx_temp_dir):
temp_file = os.path.join(spdx_temp_dir,f_dir,f)
shutil.copy(temp_file, temp_dir)
- shutil.rmtree(spdx_temp_dir)
+
d.setVar('WORKDIR', spdx_workdir)
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
## get everything from cache. use it to decide if
## something needs to be rerun
@@ -128,6 +133,7 @@ python do_spdx () {
create_manifest(info,sstatefile)
else:
bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your.')
+ remove_file(tar_name)
}
diff --git a/classes/fossology-rest.bbclass b/classes/fossology-rest.bbclass
index 9011298..0dd5b6f 100644
--- a/classes/fossology-rest.bbclass
+++ b/classes/fossology-rest.bbclass
@@ -36,13 +36,14 @@ python do_spdx () {
if p != pn:
pn = p
break
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+ bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ return
# The following: do_fetch, do_unpack and do_patch tasks have been deleted,
# so avoid archiving do_spdx here.
if pn.startswith('glibc-locale'):
return
- #if (d.getVar('BPN') == "linux-yocto"):
- # return
if (d.getVar('PN') == "libtool-cross"):
return
if (d.getVar('PN') == "libgcc-initial"):
@@ -50,12 +51,6 @@ python do_spdx () {
if (d.getVar('PN') == "shadow-sysroot"):
return
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
-
spdx_outdir = d.getVar('SPDX_OUTDIR')
spdx_workdir = d.getVar('SPDX_WORKDIR')
spdx_temp_dir = os.path.join(spdx_workdir, "temp")
@@ -113,22 +108,27 @@ python do_spdx () {
for f_dir, f in list_files(spdx_temp_dir):
temp_file = os.path.join(spdx_temp_dir,f_dir,f)
shutil.copy(temp_file, temp_dir)
- shutil.rmtree(spdx_temp_dir)
- #d.setVar('WORKDIR', spdx_workdir)
- tar_name = spdx_create_tarball(d, spdx_workdir, 'patched', spdx_outdir)
+ # shutil.rmtree(spdx_temp_dir)
+ d.setVar('WORKDIR', spdx_workdir)
+ info['sourcedir'] = spdx_workdir
+ git_path = "%s/git/.git" % info['sourcedir']
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
+ tar_name = spdx_create_tarball(d, d.getVar('WORKDIR'), 'patched', spdx_outdir)
+
## get everything from cache. use it to decide if
## something needs to be rerun
if not os.path.exists(spdx_outdir):
bb.utils.mkdirhier(spdx_outdir)
cur_ver_code = get_ver_code(spdx_workdir).split()[0]
## Get spdx file
- bb.note(' run fossdriver ...... ')
+ bb.note(' run fossology rest api ...... ')
if not os.path.isfile(tar_name):
bb.warn(info['pn'] + "has no source, do nothing")
return
folder_id = (d.getVar('FOLDER_ID', True) or "")
if invoke_rest_api(d, tar_name, sstatefile, folder_id) == False:
- bb.warn("Get spdx file fail, please check your fossology.")
+ bb.warn(info['pn'] + ": Get spdx file fail, please check your fossology.")
remove_file(tar_name)
return False
if get_cached_spdx(sstatefile) != None:
@@ -136,7 +136,7 @@ python do_spdx () {
## CREATE MANIFEST(write to outfile )
create_manifest(info,sstatefile)
else:
- bb.warn('Can\'t get the spdx file ' + info['pn'] + '. Please check your.')
+ bb.warn(info['pn'] + ': Can\'t get the spdx file ' + '. Please check your.')
remove_file(tar_name)
}
@@ -217,7 +217,7 @@ def upload(d, tar_file, folder):
try:
upload = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
- bb.error("Upload failed: \n%s" % e.output.decode("utf-8"))
+ bb.error(d.getVar('PN', True) + ": Upload failed: \n%s" % e.output.decode("utf-8"))
return False
upload = str(upload, encoding = "utf-8")
bb.note("Upload = ")
@@ -226,7 +226,7 @@ def upload(d, tar_file, folder):
if str(upload["code"]) == "201":
return upload["message"]
i += 1
- bb.warn("Upload is fail, please check your fossology server.")
+ bb.warn(d.getVar('PN', True) + ": Upload is fail, please check your fossology server.")
return False
def analysis(d, folder_id, upload_id):
@@ -268,13 +268,13 @@ def analysis(d, folder_id, upload_id):
if str(analysis["code"]) == "201":
return analysis["message"]
elif str(analysis["code"]) == "404":
- bb.warn("analysis is still not complete.")
+ bb.warn(d.getVar('PN', True) + ": analysis is still not complete.")
time.sleep(delaytime*2)
else:
return False
i += 1
- bb.warn("Analysis is fail, will try again.")
- bb.warn("Analysis is fail, please check your fossology server.")
+ bb.warn(d.getVar('PN', True) + ": Analysis is fail, will try again.")
+ bb.warn(d.getVar('PN', True) + ": Analysis is fail, please check your fossology server.")
return False
def trigger(d, folder_id, upload_id):
@@ -304,7 +304,7 @@ def trigger(d, folder_id, upload_id):
try:
trigger = subprocess.check_output(rest_api_cmd, stderr=subprocess.STDOUT, shell=True)
except subprocess.CalledProcessError as e:
- bb.error("Trigger failed: \n%s" % e.output.decode("utf-8"))
+ bb.error(d.getVar('PN', True) + ": Trigger failed: \n%s" % e.output.decode("utf-8"))
return False
time.sleep(delaytime)
trigger = str(trigger, encoding = "utf-8")
@@ -315,8 +315,8 @@ def trigger(d, folder_id, upload_id):
return trigger["message"].split("/")[-1]
i += 1
time.sleep(delaytime * 2)
- bb.warn("Trigger is fail, will try again.")
- bb.warn("Trigger is fail, please check your fossology server.")
+ bb.warn(d.getVar('PN', True) + ": Trigger is fail, will try again.")
+ bb.warn(d.getVar('PN', True) + ": Trigger is fail, please check your fossology server.")
return False
def get_spdx(d, report_id, spdx_file):
@@ -369,13 +369,13 @@ def get_spdx(d, report_id, spdx_file):
else:
return True
else:
- bb.warn("Get the first line is " + first_line)
- bb.warn("spdx is not correct, will try again.")
+ bb.warn(d.getVar('PN', True) + ": Get the first line is " + first_line)
+ bb.warn(d.getVar('PN', True) + ": spdx is not correct, will try again.")
file.close()
os.remove(spdx_file)
i += 1
time.sleep(delaytime*2)
- bb.warn("Get spdx failed, Please check your fossology server.")
+ bb.warn(d.getVar('PN', True) + ": Get spdx failed, Please check your fossology server.")
def invoke_rest_api(d, tar_file, spdx_file, folder_id):
import os
@@ -399,9 +399,9 @@ def invoke_rest_api(d, tar_file, spdx_file, folder_id):
return False
spdx2tv = get_spdx(d, report_id, spdx_file)
if spdx2tv == False:
- bb.warn("get_spdx is unnormal. Will try again!")
+ bb.warn(d.getVar('PN', True) + ": get_spdx is unnormal. Will try again!")
else:
return True
- print("get_spdx of %s is unnormal. Please check your fossology server!")
+ bb.warn("get_spdx of %s is unnormal. Please confirm!")
return False
diff --git a/classes/scancode.bbclass b/classes/scancode-tk.bbclass
index 4c362ce..9fbbb48 100644
--- a/classes/scancode.bbclass
+++ b/classes/scancode-tk.bbclass
@@ -12,15 +12,15 @@
# 1) By default,spdx files will be output to the path which is defined as[SPDX_DEPLOY_DIR]
# 2) By default, SPDX_DEPLOY_DIR is tmp/deploy
#
+
inherit spdx-common
SPDXEPENDENCY += "scancode-toolkit-native:do_populate_sysroot"
CREATOR_TOOL = "cancode.bbclass in meta-spdxscanner"
-python do_spdx () {
+python do_spdx(){
import os, sys, json, shutil
-
pn = d.getVar('PN')
assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split()
if pn in assume_provided:
@@ -28,13 +28,13 @@ python do_spdx () {
if p != pn:
pn = p
break
-
+ if d.getVar('BPN') in ['gcc', 'libgcc']:
+ bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
+ return
# glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted,
# so avoid archiving source here.
if pn.startswith('glibc-locale'):
return
- if (d.getVar('BPN') == "linux-yocto"):
- return
if (d.getVar('PN') == "libtool-cross"):
return
if (d.getVar('PN') == "libgcc-initial"):
@@ -42,12 +42,6 @@ python do_spdx () {
if (d.getVar('PN') == "shadow-sysroot"):
return
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
-
spdx_outdir = d.getVar('SPDX_OUTDIR')
spdx_workdir = d.getVar('SPDX_WORKDIR')
spdx_temp_dir = os.path.join(spdx_workdir, "temp")
@@ -85,7 +79,6 @@ python do_spdx () {
bb.utils.mkdirhier( manifest_dir )
info['outfile'] = os.path.join(manifest_dir, info['pn'] + "-" + info['pv'] + ".spdx" )
sstatefile = os.path.join(spdx_outdir, info['pn'] + "-" + info['pv'] + ".spdx" )
-
# if spdx has been exist
if os.path.exists(info['outfile']):
bb.note(info['pn'] + "spdx file has been exist, do nothing")
@@ -94,26 +87,26 @@ python do_spdx () {
bb.note(info['pn'] + "spdx file has been exist, do nothing")
create_manifest(info,sstatefile)
return
-
spdx_get_src(d)
-
+
bb.note('SPDX: Archiving the patched source...')
if os.path.isdir(spdx_temp_dir):
for f_dir, f in list_files(spdx_temp_dir):
temp_file = os.path.join(spdx_temp_dir,f_dir,f)
shutil.copy(temp_file, temp_dir)
- shutil.rmtree(spdx_temp_dir)
+ #shutil.rmtree(spdx_temp_dir)
if not os.path.exists(spdx_outdir):
bb.utils.mkdirhier(spdx_outdir)
cur_ver_code = get_ver_code(spdx_workdir).split()[0]
## Get spdx file
- bb.note(' run ScanCode ...... ')
+ bb.note(' run scanCode ...... ')
d.setVar('WORKDIR', d.getVar('SPDX_WORKDIR', True))
info['sourcedir'] = spdx_workdir
- git_path = "%s/.git" % info['sourcedir']
+ git_path = "%s/git/.git" % info['sourcedir']
if os.path.exists(git_path):
remove_dir_tree(git_path)
invoke_scancode(info['sourcedir'],sstatefile)
+ bb.warn("source dir = " + info['sourcedir'])
if get_cached_spdx(sstatefile) != None:
write_cached_spdx( info,sstatefile,cur_ver_code )
## CREATE MANIFEST(write to outfile )
@@ -144,5 +137,3 @@ def invoke_scancode( OSS_src_dir, spdx_file):
except subprocess.CalledProcessError as e:
bb.fatal("Could not invoke scancode Command "
"'%s' returned %d:\n%s" % (scancode_cmd, e.returncode, e.output))
-
-EXPORT_FUNCTIONS do_spdx
diff --git a/classes/spdx-common.bbclass b/classes/spdx-common.bbclass
index 0e5b5ff..208023d 100644
--- a/classes/spdx-common.bbclass
+++ b/classes/spdx-common.bbclass
@@ -10,7 +10,6 @@ SPDXEPENDENCY += " lzip-native:do_populate_sysroot"
SPDXEPENDENCY += " xz-native:do_populate_sysroot"
SPDXEPENDENCY += " unzip-native:do_populate_sysroot"
SPDXEPENDENCY += " xz-native:do_populate_sysroot"
-SPDXEPENDENCY += " nodejs-native:do_populate_sysroot"
SPDXEPENDENCY += " quilt-native:do_populate_sysroot"
SPDXEPENDENCY += " tar-native:do_populate_sysroot"
@@ -27,24 +26,22 @@ LICENSELISTVERSION = "2.6"
# the real top-level directory.
SPDX_S ?= "${S}"
-addtask do_spdx before do_unpack after do_fetch
+addtask do_spdx before do_configure after do_patch
def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
"""
create the tarball from srcdir
"""
import tarfile, shutil
+
# Make sure we are only creating a single tarball for gcc sources
#if (d.getVar('SRC_URI') == ""):
# return
-
# For the kernel archive, srcdir may just be a link to the
# work-shared location. Use os.path.realpath to make sure
# that we archive the actual directory and not just the link.
srcdir = os.path.realpath(srcdir)
- build_dir = os.path.join(srcdir, "build")
- if os.path.exists(build_dir):
- shutil.rmtree(build_dir)
+
bb.utils.mkdirhier(ar_outdir)
if suffix:
filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix)
@@ -56,7 +53,7 @@ def spdx_create_tarball(d, srcdir, suffix, ar_outdir):
tar = tarfile.open(tarname, 'w:gz')
tar.add(srcdir, arcname=os.path.basename(srcdir))
tar.close()
- shutil.rmtree(srcdir)
+ #shutil.rmtree(srcdir)
return tarname
# Run do_unpack and do_patch
@@ -65,12 +62,7 @@ def spdx_get_src(d):
spdx_workdir = d.getVar('SPDX_WORKDIR')
spdx_sysroot_native = d.getVar('STAGING_DIR_NATIVE')
pn = d.getVar('PN')
-
- # We just archive gcc-source for all the gcc related recipes
- if d.getVar('BPN') in ['gcc', 'libgcc']:
- bb.debug(1, 'spdx: There is bug in scan of %s is, do nothing' % pn)
- return
-
+
# The kernel class functions require it to be on work-shared, so we dont change WORKDIR
if not is_work_shared(d):
# Change the WORKDIR to make do_unpack do_patch run in another dir.
@@ -84,10 +76,26 @@ def spdx_get_src(d):
bb.utils.mkdirhier(d.getVar('B'))
bb.build.exec_func('do_unpack', d)
+ # Copy source of kernel to spdx_workdir
+ if is_work_shared(d):
+ d.setVar('WORKDIR', spdx_workdir)
+ d.setVar('STAGING_DIR_NATIVE', spdx_sysroot_native)
+ src_dir = spdx_workdir + "/" + d.getVar('PN')+ "-" + d.getVar('PV') + "-" + d.getVar('PR')
+ bb.utils.mkdirhier(src_dir)
+ if bb.data.inherits_class('kernel',d):
+ share_src = d.getVar('STAGING_KERNEL_DIR')
+ cmd_copy_share = "cp -rf " + share_src + "/* " + src_dir + "/"
+ cmd_copy_kernel_result = os.popen(cmd_copy_share).read()
+ bb.note("cmd_copy_kernel_result = " + cmd_copy_kernel_result)
+
+ git_path = src_dir + "/.git"
+ if os.path.exists(git_path):
+ remove_dir_tree(git_path)
# Make sure gcc and kernel sources are patched only once
if not (d.getVar('SRC_URI') == "" or is_work_shared(d)):
bb.build.exec_func('do_patch', d)
+
# Some userland has no source.
if not os.path.exists( spdx_workdir ):
bb.utils.mkdirhier(spdx_workdir)
@@ -125,13 +133,13 @@ def write_cached_spdx( info,sstatefile, ver_code ):
return dest_sed_cmd
## Document level information
- sed_cmd = r"sed -i -e 's#\r$##g' "
+ sed_cmd = r"sed -i -e 's#\r$##' "
spdx_DocumentComment = "<text>SPDX for " + info['pn'] + " version " \
+ info['pv'] + "</text>"
sed_cmd = sed_replace(sed_cmd,"DocumentComment",spdx_DocumentComment)
## Creator information
- sed_cmd = sed_replace(sed_cmd,"Creator: ",info['creator']['Tool'])
+ sed_cmd = sed_replace(sed_cmd,"Creator: Tool: ",info['creator']['Tool'])
## Package level information
sed_cmd = sed_replace(sed_cmd, "PackageName: ", info['pn'])