Diffstat (limited to 'meta/lib/patchtest/tests')
-rw-r--r--  meta/lib/patchtest/tests/__init__.py                        |   0
-rw-r--r--  meta/lib/patchtest/tests/base.py                            | 239
-rw-r--r--  meta/lib/patchtest/tests/pyparsing/common.py                |  26
-rw-r--r--  meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py        |  18
-rw-r--r--  meta/lib/patchtest/tests/pyparsing/parse_shortlog.py        |  14
-rw-r--r--  meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py   |  22
-rw-r--r--  meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py |  24
-rw-r--r--  meta/lib/patchtest/tests/test_mbox.py                       | 159
-rw-r--r--  meta/lib/patchtest/tests/test_metadata.py                   | 197
-rw-r--r--  meta/lib/patchtest/tests/test_patch.py                      | 103
-rw-r--r--  meta/lib/patchtest/tests/test_python_pylint.py              |  65
11 files changed, 867 insertions(+), 0 deletions(-)
diff --git a/meta/lib/patchtest/tests/__init__.py b/meta/lib/patchtest/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/meta/lib/patchtest/tests/__init__.py
diff --git a/meta/lib/patchtest/tests/base.py b/meta/lib/patchtest/tests/base.py
new file mode 100644
index 0000000000..424e61b5be
--- /dev/null
+++ b/meta/lib/patchtest/tests/base.py
@@ -0,0 +1,239 @@
+# Base class to be used by all test cases defined in the suite
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import unittest
+import logging
+import json
+import unidiff
+from data import PatchTestInput
+import mailbox
+import collections
+import sys
+import os
+import re
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pyparsing'))
+
+logger = logging.getLogger('patchtest')
+debug=logger.debug
+info=logger.info
+warn=logger.warn
+error=logger.error
+
+Commit = collections.namedtuple('Commit', ['author', 'subject', 'commit_message', 'shortlog', 'payload'])
+
+class PatchtestOEError(Exception):
+ """Exception for handling patchtest-oe errors"""
+ def __init__(self, message, exitcode=1):
+ super().__init__(message)
+ self.exitcode = exitcode
+
+class Base(unittest.TestCase):
+ # if a unit test fails, the failure message will contain at least the following JSON: {"id": <testid>}
+
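+ # endcommit_messages_regex marks where the commit message body ends: at an
+ # "(From <repo>-<branch> rev: ...)" line, a Signed-off-by tag or the "---" separator;
+ # patchmetadata_regex matches unified-diff metadata lines: "--- a/...", "+++ b/..." and "@@ -l,n +l,n @@ ..." hunk headers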
+ endcommit_messages_regex = re.compile(r'\(From \w+-\w+ rev:|(?<!\S)Signed-off-by|(?<!\S)---\n')
+ patchmetadata_regex = re.compile(r'-{3} \S+|\+{3} \S+|@{2} -\d+,\d+ \+\d+,\d+ @{2} \S+')
+
+
+ @staticmethod
+ def msg_to_commit(msg):
+ payload = msg.get_payload()
+ return Commit(subject=msg['subject'].replace('\n', ' ').replace('  ', ' '),
+ author=msg.get('From'),
+ shortlog=Base.shortlog(msg['subject']),
+ commit_message=Base.commit_message(payload),
+ payload=payload)
+
+ @staticmethod
+ def commit_message(payload):
+ commit_message = payload.__str__()
+ match = Base.endcommit_messages_regex.search(payload)
+ if match:
+ commit_message = payload[:match.start()]
+ return commit_message
+
+ @staticmethod
+ def shortlog(shlog):
+ # remove possible prefix (between brackets) before colon
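+ # e.g. '[PATCH] busybox: upgrade to 1.36.1' becomes 'busybox: upgrade to 1.36.1'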
+ start = shlog.find(']', 0, shlog.find(':'))
+ # also strip newlines and surrounding whitespace
+ return shlog[start + 1:].replace('\n', '').strip()
+
+ @classmethod
+ def setUpClass(cls):
+
+ # General objects: mailbox.mbox and patchset
+ cls.mbox = mailbox.mbox(PatchTestInput.repo.patch)
+
+ # Patch may be malformed, so try parsing it
+ cls.unidiff_parse_error = ''
+ cls.patchset = None
+ try:
+ cls.patchset = unidiff.PatchSet.from_filename(PatchTestInput.repo.patch, encoding=u'UTF-8')
+ except unidiff.UnidiffParseError as upe:
+ cls.patchset = []
+ cls.unidiff_parse_error = str(upe)
+
+ # Easy to iterate list of commits
+ cls.commits = []
+ for msg in cls.mbox:
+ if msg['subject'] and msg.get_payload():
+ cls.commits.append(Base.msg_to_commit(msg))
+
+ cls.setUpClassLocal()
+
+ @classmethod
+ def tearDownClass(cls):
+ cls.tearDownClassLocal()
+
+ @classmethod
+ def setUpClassLocal(cls):
+ pass
+
+ @classmethod
+ def tearDownClassLocal(cls):
+ pass
+
+ def fail(self, issue, fix=None, commit=None, data=None):
+ """ Convert to a JSON string failure data"""
+ value = {'id': self.id(),
+ 'issue': issue}
+
+ if fix:
+ value['fix'] = fix
+ if commit:
+ value['commit'] = {'subject': commit.subject,
+ 'shortlog': commit.shortlog}
+
+ # extend return value with other useful info
+ if data:
+ value['data'] = data
+
+ return super(Base, self).fail(json.dumps(value))
+
+ def skip(self, issue, data=None):
+ """ Convert the skip string to JSON"""
+ value = {'id': self.id(),
+ 'issue': issue}
+
+ # extend return value with other useful info
+ if data:
+ value['data'] = data
+
+ return super(Base, self).skipTest(json.dumps(value))
+
+ def shortid(self):
+ return self.id().split('.')[-1]
+
+ def __str__(self):
+ return json.dumps({'id': self.id()})
+
+class Metadata(Base):
+ @classmethod
+ def setUpClassLocal(cls):
+ cls.tinfoil = cls.setup_tinfoil()
+
+ # get info about added/modified/removed recipes
+ cls.added, cls.modified, cls.removed = cls.get_metadata_stats(cls.patchset)
+
+ @classmethod
+ def tearDownClassLocal(cls):
+ cls.tinfoil.shutdown()
+
+ @classmethod
+ def setup_tinfoil(cls, config_only=False):
+ """Initialize tinfoil api from bitbake"""
+
+ # import relevant libraries
+ try:
+ scripts_path = os.path.join(PatchTestInput.repodir, 'scripts', 'lib')
+ if scripts_path not in sys.path:
+ sys.path.insert(0, scripts_path)
+ import scriptpath
+ scriptpath.add_bitbake_lib_path()
+ import bb.tinfoil
+ except ImportError:
+ raise PatchtestOEError('Could not import tinfoil module')
+
+ orig_cwd = os.path.abspath(os.curdir)
+
+ # Load tinfoil
+ tinfoil = None
+ try:
+ builddir = os.environ.get('BUILDDIR')
+ if not builddir:
+ logger.warn('Bitbake environment not loaded?')
+ return tinfoil
+ os.chdir(builddir)
+ tinfoil = bb.tinfoil.Tinfoil()
+ tinfoil.prepare(config_only=config_only)
+ except bb.tinfoil.TinfoilUIException as te:
+ if tinfoil:
+ tinfoil.shutdown()
+ raise PatchtestOEError('Could not properly prepare tinfoil (TinfoilUIException)')
+ except Exception as e:
+ if tinfoil:
+ tinfoil.shutdown()
+ raise e
+ finally:
+ os.chdir(orig_cwd)
+
+ return tinfoil
+
+ @classmethod
+ def get_metadata_stats(cls, patchset):
+ """Get lists of added, modified and removed metadata files"""
+
+ def find_pn(data, path):
+ """Find the PN from data"""
+ pn = None
+ pn_native = None
+ for _path, _pn in data:
+ if path in _path:
+ if 'native' in _pn:
+ # store the native PN but look for the non-native one first
+ pn_native = _pn
+ else:
+ pn = _pn
+ break
+ else:
+ # return the native PN if one was found earlier
+ if pn_native:
+ return pn_native
+
+ # on renames (usually upgrades), we need to compare (FILE) base names,
+ # because the unidiff library does not provide the new filename, just the modified one,
+ # and the tinfoil datastore, once the patch is merged, will contain the new filename
+ path_basename = path.split('_')[0]
+ for _path, _pn in data:
+ _path_basename = _path.split('_')[0]
+ if path_basename == _path_basename:
+ pn = _pn
+ return pn
+
+ if not cls.tinfoil:
+ cls.tinfoil = cls.setup_tinfoil()
+
+ added_paths, modified_paths, removed_paths = [], [], []
+ added, modified, removed = [], [], []
+
+ # get metadata filename additions, modification and removals
+ for patch in patchset:
+ if patch.path.endswith('.bb') or patch.path.endswith('.bbappend') or patch.path.endswith('.inc'):
+ if patch.is_added_file:
+ added_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+ elif patch.is_modified_file:
+ modified_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+ elif patch.is_removed_file:
+ removed_paths.append(os.path.join(os.path.abspath(PatchTestInput.repodir), patch.path))
+
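+ # pkg_fn maps each recipe file path to its PN in the tinfoil recipe cache;
+ # find_pn() resolves the patched paths collected above to recipe names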
+ data = cls.tinfoil.cooker.recipecaches[''].pkg_fn.items()
+
+ added = [find_pn(data,path) for path in added_paths]
+ modified = [find_pn(data,path) for path in modified_paths]
+ removed = [find_pn(data,path) for path in removed_paths]
+
+ return [a for a in added if a], [m for m in modified if m], [r for r in removed if r]
diff --git a/meta/lib/patchtest/tests/pyparsing/common.py b/meta/lib/patchtest/tests/pyparsing/common.py
new file mode 100644
index 0000000000..cbce4c38bc
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/common.py
@@ -0,0 +1,26 @@
+# common pyparsing variables
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import pyparsing
+
+# general
+colon = pyparsing.Literal(":")
+start = pyparsing.LineStart()
+end = pyparsing.LineEnd()
+at = pyparsing.Literal("@")
+lessthan = pyparsing.Literal("<")
+greaterthan = pyparsing.Literal(">")
+opensquare = pyparsing.Literal("[")
+closesquare = pyparsing.Literal("]")
+inappropriate = pyparsing.CaselessLiteral("Inappropriate")
+submitted = pyparsing.CaselessLiteral("Submitted")
+
+# word related
+nestexpr = pyparsing.nestedExpr(opener='[', closer=']')
+inappropriateinfo = pyparsing.Literal("Inappropriate") + nestexpr
+submittedinfo = pyparsing.Literal("Submitted") + nestexpr
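+# e.g. inappropriateinfo matches 'Inappropriate [not upstreamable]' and submittedinfo
+# matches 'Submitted [where]', with the bracketed text captured by nestexpr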
+word = pyparsing.Word(pyparsing.alphas)
+worddot = pyparsing.Word(pyparsing.alphas+".")
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py
new file mode 100644
index 0000000000..f7fb82ec2b
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_cve_tags.py
@@ -0,0 +1,18 @@
+# CVE tag pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import pyparsing
+import common
+
+name = pyparsing.Regex(r'\S+.*(?= <)')
+username = pyparsing.OneOrMore(common.worddot)
+domain = pyparsing.OneOrMore(common.worddot)
+cve = pyparsing.Regex(r'CVE\-\d{4}\-\d+')
+cve_mark = pyparsing.Literal("CVE:")
+
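+# e.g. cve_tag matches a commit-message line starting with 'CVE: CVE-2016-1234',
+# and patch_cve_tag matches the same tag on an added ('+') patch line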
+cve_tag = pyparsing.AtLineStart(cve_mark + cve)
+patch_cve_tag = pyparsing.AtLineStart("+" + cve_mark + cve)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py
new file mode 100644
index 0000000000..30d3ab35b3
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_shortlog.py
@@ -0,0 +1,14 @@
+# subject pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+# NOTE: this is an oversimplified grammar for the mbox's summary (shortlog) line
+
+import pyparsing
+import common
+
+target = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables.replace(':','')))
+summary = pyparsing.OneOrMore(pyparsing.Word(pyparsing.printables))
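+# the shortlog grammar below parses e.g. 'busybox: upgrade to 1.36.1'
+# with target 'busybox' and summary 'upgrade to 1.36.1'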
+shortlog = common.start + target + common.colon + summary + common.end
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py
new file mode 100644
index 0000000000..692ebec3ff
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_signed_off_by.py
@@ -0,0 +1,22 @@
+# signed-off-by pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import pyparsing
+import common
+
+name = pyparsing.Regex(r'\S+.*(?= <)')
+username = pyparsing.OneOrMore(common.worddot)
+domain = pyparsing.OneOrMore(common.worddot)
+
+# taken from https://pyparsing-public.wikispaces.com/Helpful+Expressions
+email = pyparsing.Regex(r"(?P<user>[A-Za-z0-9._%+-]+)@(?P<hostname>[A-Za-z0-9.-]+)\.(?P<domain>[A-Za-z]{2,})")
+
+email_enclosed = common.lessthan + email + common.greaterthan
+
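+# the definitions below match e.g. 'Signed-off-by: Jane Doe <jane.doe@example.com>';
+# patch_signed_off_by matches the same tag when it appears as an added ('+') patch line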
+signed_off_by_mark = pyparsing.Literal("Signed-off-by:")
+signed_off_by = pyparsing.AtLineStart(signed_off_by_mark + name + email_enclosed)
+patch_signed_off_by = pyparsing.AtLineStart("+" + signed_off_by_mark + name + email_enclosed)
diff --git a/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py
new file mode 100644
index 0000000000..bc6c427c4c
--- /dev/null
+++ b/meta/lib/patchtest/tests/pyparsing/parse_upstream_status.py
@@ -0,0 +1,24 @@
+# upstream-status pyparsing definition
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+
+import common
+import pyparsing
+
+upstream_status_literal_valid_status = ["Pending", "Backport", "Denied", "Inappropriate", "Submitted"]
+upstream_status_nonliteral_valid_status = ["Pending", "Backport", "Denied", "Inappropriate [reason]", "Submitted [where]"]
+
+upstream_status_valid_status = pyparsing.Or(
+ [pyparsing.Literal(status) for status in upstream_status_literal_valid_status]
+)
+
+upstream_status_mark = pyparsing.Literal("Upstream-Status")
+inappropriate_status_mark = common.inappropriate
+submitted_status_mark = common.submitted
+
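+# e.g. 'Upstream-Status: Pending' matches upstream_status,
+# 'Upstream-Status: Inappropriate [oe specific]' matches upstream_status_inappropriate_info and
+# 'Upstream-Status: Submitted [oe-core mailing list]' matches upstream_status_submitted_info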
+upstream_status = common.start + upstream_status_mark + common.colon + upstream_status_valid_status
+upstream_status_inappropriate_info = common.start + upstream_status_mark + common.colon + common.inappropriateinfo
+upstream_status_submitted_info = common.start + upstream_status_mark + common.colon + common.submittedinfo
diff --git a/meta/lib/patchtest/tests/test_mbox.py b/meta/lib/patchtest/tests/test_mbox.py
new file mode 100644
index 0000000000..0b623b7d17
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_mbox.py
@@ -0,0 +1,159 @@
+# Checks related to the patch's mbox and commit message (author, shortlog, Signed-off-by, target mailing list, etc.)
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+import collections
+import parse_shortlog
+import parse_signed_off_by
+import pyparsing
+import subprocess
+from data import PatchTestInput
+
+def headlog():
+ output = subprocess.check_output(
+ "cd %s; git log --pretty='%%h#%%aN#%%cD:#%%s' -1" % PatchTestInput.repodir,
+ universal_newlines=True,
+ shell=True
+ )
+ return output.split('#')
+
+class TestMbox(base.Base):
+
+ auh_email = 'auh@auh.yoctoproject.org'
+
+ invalids = [pyparsing.Regex("^Upgrade Helper.+"),
+ pyparsing.Regex(auh_email),
+ pyparsing.Regex("uh@not\.set"),
+ pyparsing.Regex("\S+@example\.com")]
+
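+ # rexp_detect spots any '[YOCTO ...]' reference in the commit message, while
+ # rexp_validation enforces the expected '[YOCTO #<bug number>]' format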
+ rexp_detect = pyparsing.Regex(r'\[\s?YOCTO.*\]')
+ rexp_validation = pyparsing.Regex(r'\[(\s?YOCTO\s?#\s?(\d+)\s?,?)+\]')
+ revert_shortlog_regex = pyparsing.Regex(r'Revert\s+".*"')
+ signoff_prog = parse_signed_off_by.signed_off_by
+ maxlength = 90
+
+ # base paths of main yocto project sub-projects
+ paths = {
+ 'oe-core': ['meta-selftest', 'meta-skeleton', 'meta', 'scripts'],
+ 'bitbake': ['bitbake'],
+ 'documentation': ['documentation'],
+ 'poky': ['meta-poky','meta-yocto-bsp'],
+ 'oe': ['meta-gpe', 'meta-gnome', 'meta-efl', 'meta-networking', 'meta-multimedia','meta-initramfs', 'meta-ruby', 'contrib', 'meta-xfce', 'meta-filesystems', 'meta-perl', 'meta-webserver', 'meta-systemd', 'meta-oe', 'meta-python']
+ }
+
+ # scripts folder is a mix of oe-core and poky, most is oe-core code except:
+ poky_scripts = ['scripts/yocto-bsp', 'scripts/yocto-kernel', 'scripts/yocto-layer', 'scripts/lib/bsp']
+
+ Project = collections.namedtuple('Project', ['name', 'listemail', 'gitrepo', 'paths'])
+
+ bitbake = Project(name='Bitbake', listemail='bitbake-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/bitbake/', paths=paths['bitbake'])
+ doc = Project(name='Documentation', listemail='yocto@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/yocto-docs/', paths=paths['documentation'])
+ poky = Project(name='Poky', listemail='poky@yoctoproject.org', gitrepo='http://git.yoctoproject.org/cgit/cgit.cgi/poky/', paths=paths['poky'])
+ oe = Project(name='oe', listemail='openembedded-devel@lists.openembedded.org', gitrepo='http://git.openembedded.org/meta-openembedded/', paths=paths['oe'])
+
+
+ def test_signed_off_by_presence(self):
+ for commit in TestMbox.commits:
+ # skip patches that revert older commits; these do not require the tag
+ if self.revert_shortlog_regex.search_string(commit.shortlog):
+ continue
+ if not self.signoff_prog.search_string(commit.payload):
+ self.fail('Mbox is missing Signed-off-by. Add it manually or with "git commit --amend -s"',
+ commit=commit)
+
+ def test_shortlog_format(self):
+ for commit in TestMbox.commits:
+ shortlog = commit.shortlog
+ if not shortlog.strip():
+ self.skip('Empty shortlog, no reason to execute shortlog format test')
+ else:
+ # no reason to re-check on revert shortlogs
+ if shortlog.startswith('Revert "'):
+ continue
+ try:
+ parse_shortlog.shortlog.parseString(shortlog)
+ except pyparsing.ParseException as pe:
+ self.fail('Commit shortlog (first line of commit message) should follow the format "<target>: <summary>"',
+ commit=commit)
+
+ def test_shortlog_length(self):
+ for commit in TestMbox.commits:
+ # no reason to re-check on revert shortlogs
+ shortlog = commit.shortlog
+ if shortlog.startswith('Revert "'):
+ continue
+ l = len(shortlog)
+ if l > self.maxlength:
+ self.fail('Edit shortlog so that it is %d characters or less (currently %d characters)' % (self.maxlength, l),
+ commit=commit)
+
+ def test_series_merge_on_head(self):
+ self.skip("Merge test is disabled for now")
+ if PatchTestInput.repo.branch != "master":
+ self.skip("Skipping merge test since patch is not intended for master branch. Target detected is %s" % PatchTestInput.repo.branch)
+ if not PatchTestInput.repo.ismerged:
+ commithash, author, date, shortlog = headlog()
+ self.fail('Series does not apply on top of target branch %s' % PatchTestInput.repo.branch,
+ data=[('Targeted branch', '%s (currently at %s)' % (PatchTestInput.repo.branch, commithash))])
+
+ def test_target_mailing_list(self):
+ """In case of merge failure, check for other targeted projects"""
+ if PatchTestInput.repo.ismerged:
+ self.skip('Series merged, no reason to check other mailing lists')
+
+ # a meta-* project may be indicated in the message subject; if this is the case, just fail
+ # TODO: there may be other projects without a meta- prefix that we also need to detect
+ project_regex = pyparsing.Regex(r"\[(?P<project>meta-.+)\]")
+ for commit in TestMbox.commits:
+ match = project_regex.search_string(commit.subject)
+ if match:
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ commit=commit)
+
+ for patch in self.patchset:
+ folders = patch.path.split('/')
+ base_path = folders[0]
+ for project in [self.bitbake, self.doc, self.oe, self.poky]:
+ if base_path in project.paths:
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ data=[('Suggested ML', '%s [%s]' % (project.listemail, project.gitrepo)),
+ ('Patch\'s path:', patch.path)])
+
+ # check for poky's scripts code
+ if base_path.startswith('scripts'):
+ for poky_file in self.poky_scripts:
+ if patch.path.startswith(poky_file):
+ self.fail('Series sent to the wrong mailing list or some patches from the series correspond to different mailing lists',
+ data=[('Suggested ML', '%s [%s]' % (self.poky.listemail, self.poky.gitrepo)),('Patch\'s path:', patch.path)])
+
+ def test_mbox_format(self):
+ if self.unidiff_parse_error:
+ self.fail('Series has malformed diff lines. Create the series again using git-format-patch and ensure it applies using git am',
+ data=[('Diff line',self.unidiff_parse_error)])
+
+ def test_commit_message_presence(self):
+ for commit in TestMbox.commits:
+ if not commit.commit_message.strip():
+ self.fail('Please include a commit message on your patch explaining the change', commit=commit)
+
+ def test_bugzilla_entry_format(self):
+ for commit in TestMbox.commits:
+ if not self.rexp_detect.search_string(commit.commit_message):
+ self.skip("No bug ID found")
+ elif not self.rexp_validation.search_string(commit.commit_message):
+ self.fail('Bugzilla issue ID is not correctly formatted - specify it with format: "[YOCTO #<bugzilla ID>]"', commit=commit)
+
+ def test_author_valid(self):
+ for commit in self.commits:
+ for invalid in self.invalids:
+ if invalid.search_string(commit.author):
+ self.fail('Invalid author %s. Resend the series with a valid patch author' % commit.author, commit=commit)
+
+ def test_non_auh_upgrade(self):
+ for commit in self.commits:
+ if self.auh_email in commit.payload:
+ self.fail('Invalid author %s. Resend the series with a valid patch author' % self.auh_email, commit=commit)
diff --git a/meta/lib/patchtest/tests/test_metadata.py b/meta/lib/patchtest/tests/test_metadata.py
new file mode 100644
index 0000000000..f5dbcf01ed
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_metadata.py
@@ -0,0 +1,197 @@
+# Checks related to the patch's metadata variables (LICENSE, LIC_FILES_CHKSUM, SRC_URI, SUMMARY, CVE_STATUS)
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+import os
+import pyparsing
+from data import PatchTestInput, PatchTestDataStore
+
+class TestMetadata(base.Metadata):
+ metadata_lic = 'LICENSE'
+ invalid_license = 'PATCHTESTINVALID'
+ metadata_chksum = 'LIC_FILES_CHKSUM'
+ license_var = 'LICENSE'
+ closed = 'CLOSED'
+ lictag_re = pyparsing.AtLineStart("License-Update:")
+ lic_chksum_added = pyparsing.AtLineStart("+" + metadata_chksum)
+ lic_chksum_removed = pyparsing.AtLineStart("-" + metadata_chksum)
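+ # lic_chksum_added/lic_chksum_removed match LIC_FILES_CHKSUM lines added or removed
+ # by the patch; lictag_re matches the 'License-Update:' tag in the commit message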
+ add_mark = pyparsing.Regex('\\+ ')
+ max_length = 200
+ metadata_src_uri = 'SRC_URI'
+ md5sum = 'md5sum'
+ sha256sum = 'sha256sum'
+ git_regex = pyparsing.Regex('^git\\:\\/\\/.*')
+ metadata_summary = 'SUMMARY'
+ cve_check_ignore_var = 'CVE_CHECK_IGNORE'
+ cve_status_var = 'CVE_STATUS'
+
+ def test_license_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ # TODO: this is a workaround so we can parse a recipe that does not
+ # contain the LICENSE variable: set a default (invalid) LICENSE via
+ # auto.conf, then remove that line (or the file) at the end
+ auto_conf = os.path.join(os.environ.get('BUILDDIR'), 'conf', 'auto.conf')
+ open_flag = 'w'
+ if os.path.exists(auto_conf):
+ open_flag = 'a'
+ with open(auto_conf, open_flag) as fd:
+ for pn in self.added:
+ fd.write('LICENSE ??= "%s"\n' % self.invalid_license)
+
+ no_license = False
+ for pn in self.added:
+ rd = self.tinfoil.parse_recipe(pn)
+ license = rd.getVar(self.metadata_lic)
+ if license == self.invalid_license:
+ no_license = True
+ break
+
+ # remove auto.conf line or the file itself
+ if open_flag == 'w':
+ os.remove(auto_conf)
+ else:
+ fd = open(auto_conf, 'r')
+ lines = fd.readlines()
+ fd.close()
+ with open(auto_conf, 'w') as fd:
+ fd.write(''.join(lines[:-1]))
+
+ if no_license:
+ self.fail('Recipe does not have the LICENSE field set.')
+
+ def test_lic_files_chksum_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ for pn in self.added:
+ rd = self.tinfoil.parse_recipe(pn)
+ pathname = rd.getVar('FILE')
+ # we are not interested in images
+ if '/images/' in pathname:
+ continue
+ lic_files_chksum = rd.getVar(self.metadata_chksum)
+ if rd.getVar(self.license_var) == self.closed:
+ continue
+ if not lic_files_chksum:
+ self.fail('%s is missing in newly added recipe' % self.metadata_chksum)
+
+ def test_lic_files_chksum_modified_not_mentioned(self):
+ if not self.modified:
+ self.skip('No modified recipes, skipping test')
+
+ for patch in self.patchset:
+ # for the moment, we are just interested in metadata
+ if patch.path.endswith('.patch'):
+ continue
+ payload = str(patch)
+ if (self.lic_chksum_added.search_string(payload) or self.lic_chksum_removed.search_string(payload)):
+ # if any patch in the series touches the checksum, a License-Update tag must be present in some commit message
+ for commit in self.commits:
+ if self.lictag_re.search_string(commit.commit_message):
+ break
+ else:
+ self.fail('LIC_FILES_CHKSUM changed without "License-Update:" tag and description in commit message')
+
+ def test_max_line_length(self):
+ for patch in self.patchset:
+ # for the moment, we are just interested in metadata
+ if patch.path.endswith('.patch'):
+ continue
+ payload = str(patch)
+ for line in payload.splitlines():
+ if self.add_mark.search_string(line):
+ current_line_length = len(line[1:])
+ if current_line_length > self.max_length:
+ self.fail('Patch line too long (current length %s, maximum is %s)' % (current_line_length, self.max_length),
+ data=[('Patch', patch.path), ('Line', '%s ...' % line[0:80])])
+
+ def pretest_src_uri_left_files(self):
+ # these tests just make sense on patches that can be merged
+ if not PatchTestInput.repo.canbemerged:
+ self.skip('Patch cannot be merged')
+ if not self.modified:
+ self.skip('No modified recipes, skipping pretest')
+
+ # get the proper metadata values
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri)
+
+ def test_src_uri_left_files(self):
+ # these tests just make sense on patches that can be merged
+ if not PatchTestInput.repo.canbemerged:
+ self.skip('Patch cannot be merged')
+ if not self.modified:
+ self.skip('No modified recipes, skipping test')
+
+ # get the proper metadata values
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)] = rd.getVar(self.metadata_src_uri)
+
+ for pn in self.modified:
+ pretest_src_uri = PatchTestDataStore['pre%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()
+ test_src_uri = PatchTestDataStore['%s-%s-%s' % (self.shortid(), self.metadata_src_uri, pn)].split()
+
+ pretest_files = set([os.path.basename(patch) for patch in pretest_src_uri if patch.startswith('file://')])
+ test_files = set([os.path.basename(patch) for patch in test_src_uri if patch.startswith('file://')])
+
+ # check if files were removed
+ if len(test_files) < len(pretest_files):
+
+ # get removals from patchset
+ filesremoved_from_patchset = set()
+ for patch in self.patchset:
+ if patch.is_removed_file:
+ filesremoved_from_patchset.add(os.path.basename(patch.path))
+
+ # get the deleted files from the SRC_URI
+ filesremoved_from_usr_uri = pretest_files - test_files
+
+ # finally, get those patches removed at SRC_URI and not removed from the patchset
+ # TODO: we are not taking renames into account, so the test may raise false positives
+ not_removed = filesremoved_from_usr_uri - filesremoved_from_patchset
+ if not_removed:
+ self.fail('Patches not removed from tree. Remove them and amend the submitted mbox',
+ data=[('Patch', f) for f in not_removed])
+
+ def test_summary_presence(self):
+ if not self.added:
+ self.skip('No added recipes, skipping test')
+
+ for pn in self.added:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ summary = rd.getVar(self.metadata_summary)
+
+ # "${PN} version ${PN}-${PR}" is the default, so fail if default
+ if summary.startswith('%s version' % pn):
+ self.fail('%s is missing in newly added recipe' % self.metadata_summary)
+
+ def test_cve_check_ignore(self):
+ # Skip if no recipes were modified or if the target branch is older than
+ # nanbield, the release in which CVE_CHECK_IGNORE was first deprecated.
+ if not self.modified or PatchTestInput.repo.branch == "kirkstone" or PatchTestInput.repo.branch == "dunfell":
+ self.skip('No modified recipes or older target branch, skipping test')
+ for pn in self.modified:
+ # we are not interested in images
+ if 'core-image' in pn:
+ continue
+ rd = self.tinfoil.parse_recipe(pn)
+ cve_check_ignore = rd.getVar(self.cve_check_ignore_var)
+
+ if cve_check_ignore is not None:
+ self.fail('%s is deprecated and should be replaced by %s' % (self.cve_check_ignore_var, self.cve_status_var))
diff --git a/meta/lib/patchtest/tests/test_patch.py b/meta/lib/patchtest/tests/test_patch.py
new file mode 100644
index 0000000000..d7187a0cb1
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_patch.py
@@ -0,0 +1,103 @@
+# Checks related to newly added patch files: Upstream-Status, Signed-off-by and CVE tags
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import base
+import os
+import parse_signed_off_by
+import parse_upstream_status
+import pyparsing
+
+class TestPatch(base.Base):
+
+ re_cve_pattern = pyparsing.Regex(r"CVE\-\d{4}\-\d+")
+ re_cve_payload_tag = pyparsing.Regex(r"\+CVE:(\s+CVE\-\d{4}\-\d+)+")
+ upstream_status_regex = pyparsing.AtLineStart("+" + "Upstream-Status")
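+ # re_cve_payload_tag matches an added 'CVE: CVE-YYYY-NNNN' line in the patch payload;
+ # upstream_status_regex matches an added 'Upstream-Status' line in a new patch file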
+
+ @classmethod
+ def setUpClassLocal(cls):
+ cls.newpatches = []
+ # get just those relevant patches: new software patches
+ for patch in cls.patchset:
+ if patch.path.endswith('.patch') and patch.is_added_file:
+ cls.newpatches.append(patch)
+
+ cls.mark = str(parse_signed_off_by.signed_off_by_mark).strip('"')
+
+ # match the Signed-off-by mark with a '+' preceding it, i.e. added by the patch
+ cls.prog = parse_signed_off_by.patch_signed_off_by
+
+ def setUp(self):
+ if self.unidiff_parse_error:
+ self.skip('Parse error %s' % self.unidiff_parse_error)
+
+ self.valid_status = ', '.join(parse_upstream_status.upstream_status_nonliteral_valid_status)
+ self.standard_format = 'Upstream-Status: <Valid status>'
+
+ # we are only interested in series that introduce CVE patches; discard other
+ # possibilities: modifications to existing CVE patches, fixes applied directly to
+ # the recipe, upgrades that already include the CVE fix, etc.
+ new_cves = [p for p in self.patchset if p.path.endswith('.patch') and p.is_added_file]
+ if not new_cves:
+ self.skip('No new CVE patches introduced')
+
+ def test_upstream_status_presence_format(self):
+ if not TestPatch.newpatches:
+ self.skip("There are no new software patches, no reason to test Upstream-Status presence/format")
+
+ for newpatch in TestPatch.newpatches:
+ payload = newpatch.__str__()
+ if not self.upstream_status_regex.search_string(payload):
+ self.fail('Added patch file is missing Upstream-Status: <Valid status> in the commit message',
+ data=[('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+ for line in payload.splitlines():
+ if self.patchmetadata_regex.match(line):
+ continue
+ if self.upstream_status_regex.search_string(line):
+ if parse_upstream_status.inappropriate_status_mark.searchString(line):
+ try:
+ parse_upstream_status.upstream_status_inappropriate_info.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is Inappropriate, but no reason was provided',
+ data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Inappropriate [reason]')])
+ elif parse_upstream_status.submitted_status_mark.searchString(line):
+ try:
+ parse_upstream_status.upstream_status_submitted_info.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is Submitted, but it is not mentioned where',
+ data=[('Current', pe.pstr), ('Standard format', 'Upstream-Status: Submitted [where]')])
+ else:
+ try:
+ parse_upstream_status.upstream_status.parseString(line.lstrip('+'))
+ except pyparsing.ParseException as pe:
+ self.fail('Upstream-Status is in incorrect format',
+ data=[('Current', pe.pstr), ('Standard format', self.standard_format), ('Valid status', self.valid_status)])
+
+ def test_signed_off_by_presence(self):
+ if not TestPatch.newpatches:
+ self.skip("There are no new software patches, no reason to test %s presence" % PatchSignedOffBy.mark)
+
+ for newpatch in TestPatch.newpatches:
+ payload = newpatch.__str__()
+ for line in payload.splitlines():
+ if self.patchmetadata_regex.match(line):
+ continue
+ if TestPatch.prog.search_string(payload):
+ break
+ else:
+ self.fail('A patch file has been added without a Signed-off-by tag: \'%s\'' % os.path.basename(newpatch.path))
+
+ def test_cve_tag_format(self):
+ for commit in TestPatch.commits:
+ if self.re_cve_pattern.search_string(commit.shortlog) or self.re_cve_pattern.search_string(commit.commit_message):
+ tag_found = False
+ for line in commit.payload.splitlines():
+ if self.re_cve_payload_tag.search_string(line):
+ tag_found = True
+ break
+ if not tag_found:
+ self.fail('Missing or incorrectly formatted CVE tag in patch file. Correct or include the CVE tag in the patch with format: "CVE: CVE-YYYY-XXXX"',
+ commit=commit)
diff --git a/meta/lib/patchtest/tests/test_python_pylint.py b/meta/lib/patchtest/tests/test_python_pylint.py
new file mode 100644
index 0000000000..ef315e591c
--- /dev/null
+++ b/meta/lib/patchtest/tests/test_python_pylint.py
@@ -0,0 +1,65 @@
+# Checks on the Python code touched by the patch, using pylint
+#
+# Copyright (C) 2016 Intel Corporation
+#
+# SPDX-License-Identifier: GPL-2.0-only
+
+import base
+from io import StringIO
+from data import PatchTestInput
+from pylint.reporters.text import TextReporter
+import pylint.lint as lint
+
+
+class PyLint(base.Base):
+ pythonpatches = []
+ pylint_pretest = {}
+ pylint_test = {}
+ pylint_options = " -E --disable='E0611, E1101, F0401, E0602' --msg-template='L:{line} F:{module} I:{msg}'"
+
+ @classmethod
+ def setUpClassLocal(cls):
+ # get just those patches touching python files
+ cls.pythonpatches = []
+ for patch in cls.patchset:
+ if patch.path.endswith('.py'):
+ if not patch.is_removed_file:
+ cls.pythonpatches.append(patch)
+
+ def setUp(self):
+ if self.unidiff_parse_error:
+ self.skip('Python-unidiff parse error')
+ if not PyLint.pythonpatches:
+ self.skip('No python related patches, skipping test')
+
+ def pretest_pylint(self):
+ for pythonpatch in self.pythonpatches:
+ if pythonpatch.is_modified_file:
+ pylint_output = StringIO()
+ reporter = TextReporter(pylint_output)
+ lint.Run([self.pylint_options, pythonpatch.path], reporter=reporter, exit=False)
+ for line in pylint_output.getvalue().splitlines():
+ if not '*' in line:
+ if line.strip():
+ self.pylint_pretest[line.strip().split(' ',1)[0]] = line.strip().split(' ',1)[1]
+
+ def test_pylint(self):
+ for pythonpatch in self.pythonpatches:
+ # a condition checking whether a file is renamed or not
+ # unidiff doesn't support this yet
+ if pythonpatch.target_file != pythonpatch.path:
+ path = pythonpatch.target_file[2:]
+ else:
+ path = pythonpatch.path
+ pylint_output = StringIO()
+ reporter = TextReporter(pylint_output)
+ lint.Run([self.pylint_options, path], reporter=reporter, exit=False)
+ for line in pylint_output.getvalue().splitlines():
+ if not '*' in line:
+ if line.strip():
+ self.pylint_test[line.strip().split(' ',1)[0]] = line.strip().split(' ',1)[1]
+
+ for issue in self.pylint_test:
+ if self.pylint_test[issue] not in self.pylint_pretest.values():
+ self.fail('Errors in your Python code were encountered. Please check your code with a linter and resubmit',
+ data=[('Output', 'Please, fix the listed issues:'), ('', issue + ' ' + self.pylint_test[issue])])