aboutsummaryrefslogtreecommitdiffstats
path: root/bin
diff options
context:
space:
mode:
authorDavid Reyna <David.Reyna@windriver.com>2020-01-12 17:44:16 -0800
committerDavid Reyna <David.Reyna@windriver.com>2020-01-12 17:44:16 -0800
commit889781b5a04a7c9226e45161105ad4d6a95ad1e9 (patch)
tree57cdbc001a43f457ce66370fd5ae88f4d67377d1 /bin
parentab29b17e353d961a0736e678794cee4931e68422 (diff)
downloadsrtool-889781b5a04a7c9226e45161105ad4d6a95ad1e9.zip
srtool-889781b5a04a7c9226e45161105ad4d6a95ad1e9.tar.gz
srtool-889781b5a04a7c9226e45161105ad4d6a95ad1e9.tar.bz2
Bug 13734 - cumulative deployment features and fixes
srtool: cumulative deployment features and fixes High level new features: * Publishing support to external/public databases * Ability to label products as "active", "inactive", "under development" Inactive (EOL) products appear but * Do not affect status propagation * Do not auto-create defects Development product status is not exported to public database * Extend NIST download range to 2002..2019 * Added MITRE downloads to provide RESERVED tracking * Extended audit history tracking and meta-data * Delete CVE records * Ability to do "OR" searches (default is "AND") Example: "CVE-2019-20095 OR CVE-2019-20096 OR CVE-2019-19977" * Automated defect creation (Jira) If selected, creates customer defect for selected and active products Reuse existing defect if present for given product * Many small sorting, readability, edge case fixes Backups: * Add meta-data stamp file for each backup * Save daily backups with day name instead of day number * Preserve file dates when making copies to backup * Add list command Automated Updates: * Fix report format * Add trial run test Utilities: * Add 13 new database fix up procedures Some are one-shot historical fixes, some are learned validation checks Database Schema: * Add "SRTool" class to wrap shared enumerations (e.g. 
Priority) * Add "Update" class to tag and track audit trail objects * Change Priority naming to match CVE model instead of JIRA * Add srt_created/srt_updated to CVE/Vul/Inv/Notify for improved updating and auditing * Add to Defect the SRT versions of Status, Priority, Outcome To distinguish these from the customer's defect system's values Common Tools: * Fix new CVE auto-scoring to skip CVE's already scored (though still NEW) * Add automated propagation of Defects/Investigations status to parent Vulnerabilities See "srtool_common.py" for rule details CVEs: * Add MITRE as an automatic upstream source This is to specifically capture all of the "RESERVED" CVE enumerations which will not appear in the NIST databases, and have the CVE records in place for internal investigations and transitions to "public" status. * Spell out the command arguments in the NIST data source files for greater legibility * Change Priority naming to match CVE instead of JIRA * Add parallel status states for "inactive" products This specifically blocks state propagation from inactive objects to active objects NIST management script: * Refactor file for greater clarity * Reorder methods to reflect workflow order * Fully spell out names of objects * Remove temporary holding class "CVE" in favor of dictionary objects * Debugging enhancements * Incremental update commands for stepped debugging For example, ability to fetch/update specific CVE(s) * Additional debugging flags [YOCTO #13734] Signed-off-by: David Reyna <David.Reyna@windriver.com>
Diffstat (limited to 'bin')
-rwxr-xr-xbin/acme/datasource.json_sample5
-rwxr-xr-xbin/acme/srtool_acme.py1
-rwxr-xr-xbin/common/srtool_backup.py98
-rwxr-xr-xbin/common/srtool_common.py675
-rwxr-xr-xbin/common/srtool_update.py17
-rwxr-xr-xbin/common/srtool_utils.py931
-rwxr-xr-xbin/debian/srtool_debian.py2
-rwxr-xr-xbin/dev_tools/history.py254
-rwxr-xr-xbin/dev_tools/update_status.sh43
-rwxr-xr-xbin/mitre/datasource_2010.json18
-rwxr-xr-xbin/mitre/datasource_2011.json18
-rwxr-xr-xbin/mitre/datasource_2012.json18
-rwxr-xr-xbin/mitre/datasource_2013.json18
-rwxr-xr-xbin/mitre/datasource_2014.json18
-rwxr-xr-xbin/mitre/datasource_2015.json4
-rwxr-xr-xbin/mitre/datasource_2016.json4
-rwxr-xr-xbin/mitre/datasource_2017.json4
-rwxr-xr-xbin/mitre/datasource_2018.json4
-rwxr-xr-xbin/mitre/datasource_2019.json4
-rwxr-xr-xbin/mitre/srtool_mitre.py39
-rw-r--r--bin/nist/datasource.json3
-rwxr-xr-xbin/nist/datasource_2002.json19
-rwxr-xr-xbin/nist/datasource_2003.json19
-rwxr-xr-xbin/nist/datasource_2004.json19
-rwxr-xr-xbin/nist/datasource_2005.json19
-rwxr-xr-xbin/nist/datasource_2006.json19
-rwxr-xr-xbin/nist/datasource_2007.json19
-rwxr-xr-xbin/nist/datasource_2008.json19
-rwxr-xr-xbin/nist/datasource_2009.json19
-rwxr-xr-xbin/nist/datasource_2010.json18
-rwxr-xr-xbin/nist/datasource_2011.json18
-rwxr-xr-xbin/nist/datasource_2012.json18
-rwxr-xr-xbin/nist/datasource_2013.json18
-rwxr-xr-xbin/nist/datasource_2014.json18
-rwxr-xr-xbin/nist/datasource_2015.json6
-rwxr-xr-xbin/nist/datasource_2016.json6
-rwxr-xr-xbin/nist/datasource_2017.json6
-rwxr-xr-xbin/nist/datasource_2018.json6
-rwxr-xr-xbin/nist/datasource_2019.json6
-rwxr-xr-xbin/nist/datasource_2020.json18
-rwxr-xr-xbin/nist/srtool_nist.py1142
-rwxr-xr-xbin/redhat/srtool_redhat.py2
-rwxr-xr-xbin/srt2
-rwxr-xr-xbin/yp/datasource.json5
-rwxr-xr-xbin/yp/srtool_yp.py1
-rwxr-xr-xbin/yp/yocto-project-products.json63
46 files changed, 3107 insertions, 576 deletions
diff --git a/bin/acme/datasource.json_sample b/bin/acme/datasource.json_sample
index dc1d018..98ef1de 100755
--- a/bin/acme/datasource.json_sample
+++ b/bin/acme/datasource.json_sample
@@ -33,6 +33,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact ACME"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/acme/srtool_acme.py b/bin/acme/srtool_acme.py
index 1aa1b91..f815ae1 100755
--- a/bin/acme/srtool_acme.py
+++ b/bin/acme/srtool_acme.py
@@ -124,6 +124,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/common/srtool_backup.py b/bin/common/srtool_backup.py
index b37e2d0..1b93637 100755
--- a/bin/common/srtool_backup.py
+++ b/bin/common/srtool_backup.py
@@ -40,6 +40,8 @@ from common.srt_schema import ORM
verbose = False
srtDbName = 'srt.sqlite'
+BACKUP_DIR = 'backups'
+BACKUP_PREFIX = 'backup_'
#################################
# Common routines
@@ -57,20 +59,35 @@ def _log(msg):
f1.close()
#################################
+# Set backup database stamp file
+#
+
+def backup_stamp(backup_dir):
+ if not os.path.isdir(backup_dir):
+ print("ERROR: no such directory '%s'" % backup_dir)
+ exit(1)
+ statinfo = os.stat(os.path.join(backup_dir, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(backup_dir,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("* Set Timestamp:%s" % mod_timestamp.strftime('%Y-%m-%d|%H:%M:%S|%A, %B %d %Y'))
+
+#################################
# Backup the database and data files
#
def backup_db(is_daily):
today = datetime.today()
weeknum = today.strftime("%W")
- weekday = today.isoweekday()
+ weekday = today.strftime("%A") #today.isoweekday()
year = today.strftime("%Y")
# Where are we backing up to
if is_daily:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s" % (weekday))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_DIR,BACKUP_PREFIX,weekday))
else:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s_%s" % (year,weeknum))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (BACKUP_DIR,BACKUP_PREFIX,year,weeknum))
# Make sure directory exists
try:
os.makedirs(backup_dir)
@@ -82,25 +99,79 @@ def backup_db(is_daily):
print("*** Backup dir='%s' ***" % backup_dir)
print("* Copy database")
- cmd = 'cp %s %s' % (os.path.join(script_pathname,srtDbName),os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s %s' % (os.path.join(script_pathname,srtDbName),backup_dir)
print(cmd)
os.system(cmd)
# Copy data but skip cache dir (no deep copy)
print("* Copy data files")
- cmd = 'cp %s/data/* %s/data' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s/data/* %s/data' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
# Copy attachments
print("* Copy attachment files")
- cmd = 'cp -r %s/downloads %s' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -r -p %s/downloads %s' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
+ # Set stamp file
+ backup_stamp(backup_dir)
+
+#######################################################################
+# list
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(script_pathname, 'backups')):
+ prefix = '1Week' if not directory[len(BACKUP_PREFIX)].isalpha() else '2Day'
+ directory = os.path.join(script_pathname, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = '3Now'
+ directory = script_pathname
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+ # Sort by time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
+def list(db_list=False):
+ stamps = backup_list()
+ for stamp in stamps:
+ # Insert a separator between the date and the time
+ stamp[2] = stamp[2].replace(' ','|',1)
+ if db_list:
+ print("%s|%s|%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+ else:
+ snap_date,snap_time,snap_day = stamp[2].split('|')
+ print("%-4s,%-16s,%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+
+#################################
+# Init stamps
+#
+
+def init_stamps():
+ stamps = backup_list()
+ for stamp in stamps:
+ stamp_prefix, stamp_directory, stamp_line = stamp
+ backup_stamp(stamp_directory)
+
#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
@@ -111,7 +182,14 @@ def main(argv):
parser.add_argument('--backup-db', '-b', action='store_const', const='backup', dest='command', help='Backup the database, save to year_weeknum dir')
parser.add_argument('--backup-db-daily', '-d', action='store_const', const='backup-daily', dest='command', help='Backup the database, save to weekday dir')
+ parser.add_argument('--init-stamps', '-I', action='store_const', const='init-stamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--init-dir-stamp', '-D', dest='init_dir_stamp', help='Initialize a specific backup directory timestamp')
+
+ parser.add_argument('--list-backups', '-l', action='store_const', const='list', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--list-backups-db', '-L', action='store_const', const='list-db', dest='command', help='Dump the backup directory timestamps')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
@@ -136,6 +214,14 @@ def main(argv):
except Exception as e:
print ("DATABASE BACKUP FAILED ... %s" % e)
master_log.write("SRTOOL:%s:DATABASE BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
+ elif 'list' == args.command:
+ list()
+ elif 'list-db' == args.command:
+ list(True)
+ elif 'init-stamps' == args.command:
+ init_stamps()
+ elif args.command.init_dir_stamp:
+ backup_stamp(args.command.init_dir_stamp)
else:
print("Command not found")
master_log.close()
diff --git a/bin/common/srtool_common.py b/bin/common/srtool_common.py
index 13b5893..d9fbd34 100755
--- a/bin/common/srtool_common.py
+++ b/bin/common/srtool_common.py
@@ -51,6 +51,7 @@ except:
verbose = False
cmd_skip = 0
cmd_count = 0
+cmd_test = False
srtDbName = 'srt.sqlite'
packageKeywordsFile = 'data/package_keywords.csv'
@@ -85,6 +86,12 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
+def get_tag_key(tag,key,default=None):
+ d = json.loads(tag)
+ if key in d:
+ return d[key]
+ return default
+
#################################
# Load the package keyword source into the database
#
@@ -293,7 +300,8 @@ def score_new_cves(cve_filter):
# Scan the open CVEs
if 'NEW' == cve_filter:
- sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s');" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
+# sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s') AND score_date IS NULL;" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
+ sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NULL;" % (ORM.STATUS_NEW)
cur.execute(sql)
elif cve_filter.startswith('CVE-'):
cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
@@ -318,22 +326,24 @@ def score_new_cves(cve_filter):
record_count = 0
write_count = 0
ds_count = 0
+ is_change = False
time_now = datetime.now()
for i,cve in enumerate(cur):
cve_name = cve[ORM.CVE_NAME]
- if cve[ORM.CVE_SCORE_DATE]:
- #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
- # If there is any score_date, then nothing to do here
- continue
-
+# if cve[ORM.CVE_SCORE_DATE]:
+# #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
+# # If there is any score_date, then nothing to do here
+# continue
+#
# Progress indicator support
if 0 == i % 10:
print('%04d: %20s\r' % (i,cve_name), end='')
- if (0 == i % 200) and not cmd_skip:
+ if (0 == i % 200) and (not cmd_skip) and is_change:
conn.commit()
print("%4d: COMMIT" % i)
sleep(2)
+ is_change = False
# Development/debug support
if cmd_skip:
if i < cmd_skip:
@@ -351,21 +361,24 @@ def score_new_cves(cve_filter):
recommend,recommend_list = compute_recommends(cve)
cve_packages = ''
if recommend_list:
-
# Go ahead and create/attach packages to CVEs
cve_packages = attach_packages(cur_write, cve, recommend_list)
- #cve_packages = cve[ORM.CVE_PACKAGES]
+ else:
+ cve_packages = cve[ORM.CVE_PACKAGES]
- sql = ''' UPDATE orm_cve
- SET recommend = ?,
- recommend_list = ?,
- packages = ?,
- score_date = ?
- WHERE id = ?'''
- cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT), cve[ORM.CVE_ID]))
- write_count += 1
+ # Always set score_date since it has been evaluated
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?,
+ recommend_list = ?,
+ packages = ?,
+ score_date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT), cve[ORM.CVE_ID]))
+ write_count += 1
+ is_change = True
- if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
+# if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
# Attach all matching CVE sources
for ds_obj in ds_list:
@@ -379,8 +392,9 @@ def score_new_cves(cve_filter):
ds_count += 1
print("%30sADDED [%4d]: %20s <- %20s\r" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='')
- conn.commit()
- print("COMMIT")
+ if is_change:
+ conn.commit()
+ print("COMMIT")
print("\nUpdated CVEs=%d, Added alternate sources=%d" % (write_count,ds_count))
#################################
@@ -412,6 +426,430 @@ def init_notify_categories(filename):
conn.close()
#################################
+# Update cumulative Cve/Vulnerability/Investigation status
+#
+# * Scan the respective child Vulnerabilities/Investigations/Defects, and
+# sum them into cumulative status for parent
+# * Rules for Status:
+# If any child is VULNERABLE, then the parent is VULNERABLE
+# else if any child is INVESTIGATE, then the parent is INVESTIGATE
+# else if any child is NEW, then the parent is INVESTIGATE
+# else the parent is NOT_VULNERABLE
+# * Exceptions:
+# Children that are 'ORM.STATUS_HISTORICAL' or 'ORM.STATUS_NEW_RESERVED' have no vote
+# If there are no children nor any children with votes, then the status is left unchanged
+# * Rules for Priority:
+# If any child has a higher priority, that priority is used
+#
+
+def _update_cve_status(cur,cve,srtool_today,update_skip_history):
+ if verbose: print("Cve:%s:%s" % (cve[ORM.CVE_NAME],ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if cve[ORM.CVE_STATUS_LOCK]:
+ # return
+
+ # Get the CVE's Vulnerabilities
+ cve_priority = cve[ORM.CVE_PRIORITY]
+ cve_status = None
+ vote_count = 0
+ cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ # Compute Status
+ status = vulnerability[ORM.VULNERABILITY_STATUS]
+ if verbose: print(" %s,%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and cve_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if cve_priority < vulnerability[ORM.VULNERABILITY_PRIORITY]:
+ cve_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == cve_status:
+ cve_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if cve[ORM.CVE_STATUS] != cve_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+ if cve[ORM.CVE_PRIORITY] < cve_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(cve[ORM.CVE_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(cve_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change CVE:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_cve SET status=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (cve_status,cve_priority,srtool_today,cve[ORM.CVE_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from vulnerabilities')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve[ORM.CVE_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_cve_status(cve_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ if 'all' == cve_list:
+ cves = cur.execute("SELECT * FROM orm_cve").fetchall()
+ else:
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status:count=%d" % (len(cves)))
+
+ i = 0
+ for cve in cves:
+
+ # Leave "New" CVEs to Triage
+ if ORM.STATUS_NEW == cve[ORM.CVE_STATUS]:
+ continue
+
+ _update_cve_status(cur,cve,srtool_today,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,cve[ORM.CVE_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ cur.close()
+ conn.commit()
+ conn.close()
+
+# Indexes into the product table cache
+PRODUCT_DICT_KEY = 0
+PRODUCT_DICT_TAG = 1
+
+def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history):
+ if verbose: print("Vulnerability:%s:%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if vulnerability[ORM.VULNERABILITY_STATUS_LOCK]:
+ # return
+
+ # Get the Vulnerability's Investigations
+ vulnerability_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+ vulnerability_status = None
+ vote_count = 0
+ vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability[ORM.VULNERABILITY_ID]).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+
+ # For now, only calculate the "Public Status", so skip non-supported products
+ product_mode = get_tag_key(product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_TAG],'mode')
+ if 'support' != product_mode:
+ if verbose: print(" SKIP:Product %s is mode=%s" % (product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_KEY],product_mode))
+ continue
+
+ # Compute Status
+ status = investigation[ORM.INVESTIGATION_STATUS]
+ if verbose: print(" %s,%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and vulnerability_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if vulnerability_priority < investigation[ORM.INVESTIGATION_PRIORITY]:
+ vulnerability_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == vulnerability_status:
+ vulnerability_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if vulnerability[ORM.VULNERABILITY_STATUS] != vulnerability_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+ if vulnerability[ORM.VULNERABILITY_PRIORITY] < vulnerability_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(vulnerability_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Vulnerability:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_vulnerability SET status=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (vulnerability_status,vulnerability_priority,srtool_today,vulnerability[ORM.VULNERABILITY_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from investigations')
+ sql = '''INSERT INTO orm_vulnerabilityhistory (vulnerability_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (vulnerability[ORM.VULNERABILITY_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_vulnerability_status(vulnerability_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ # Pre-gather and cache the product information
+ product_dict = {}
+ products = cur.execute("SELECT * FROM orm_product").fetchall()
+ for product in products:
+ product_dict[ product[ORM.PRODUCT_ID] ] = [product[ORM.PRODUCT_KEY],product[ORM.PRODUCT_PRODUCT_TAGS]]
+
+ if 'all' == vulnerability_list:
+ vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability").fetchall()
+ else:
+ vulnerability_paren_list = str(vulnerability_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list)
+ vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list).fetchall()
+
+ i = 0
+ for vulnerability in vulnerabilities:
+ _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,vulnerability[ORM.VULNERABILITY_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ cur.close()
+ conn.commit()
+ conn.close()
+
+
+def _update_investigation_status(cur,investigation,srtool_today,update_skip_history):
+ if verbose: print("Investigation:%s:%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if investigation[ORM.INVESTIGATION_STATUS_LOCK]:
+ # return
+
+ # Get the Investigation's Defects
+ investigation_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+ investigation_status = None
+ vote_count = 0
+ inv2defs = cur.execute("SELECT * FROM orm_investigationtodefect where investigation_id = '%s'" % investigation[ORM.INVESTIGATION_ID]).fetchall()
+ for inv2def in inv2defs:
+ defect_id = inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
+ defect = cur.execute("SELECT * FROM orm_defect where id = '%s'" % defect_id).fetchone()
+ # Compute Status
+ status = defect[ORM.DEFECT_SRT_STATUS]
+ if verbose: print(" %s,%s" % (defect[ORM.DEFECT_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and investigation_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if investigation_priority < defect[ORM.DEFECT_SRT_PRIORITY]:
+ investigation_priority = defect[ORM.DEFECT_SRT_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == investigation_status:
+ investigation_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+
+ investigation_outcome = None
+ for inv2def in inv2defs:
+ outcome = defect[ORM.DEFECT_SRT_OUTCOME]
+ if (ORM.OUTCOME_OPEN == outcome) or (ORM.OUTCOME_OPEN == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_OPEN
+ continue
+ if (ORM.OUTCOME_FIXED == outcome) or (ORM.OUTCOME_FIXED == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_FIXED
+ continue
+ # ORM.OUTCOME_CLOSED
+ # ORM.OUTCOME_NOT_FIX
+ investigation_outcome = outcome
+
+ if not investigation_outcome:
+ investigation_outcome = investigation[ORM.INVESTIGATION_OUTCOME]
+
+
+ ### TO_DO: DOUBLE CHECK
+ if False:
+ ### WIND_RIVER_EXTENSION_BEGIN ###
+ # FIXUP: Status: overwrite if new is Fixed and old isn't "VULNERABLE"
+ update_fixup = ('Fixed' == jira_resolution) and (ORM.STATUS_VULNERABLE != cve[ORM.CVE_STATUS])
+ ### WIND_RIVER_EXTENSION_END ###
+
+
+
+ # Update status
+ history_update = []
+ if investigation[ORM.INVESTIGATION_STATUS] != investigation_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+ if investigation[ORM.INVESTIGATION_OUTCOME] != investigation_outcome:
+ history_update.append(ORM.UPDATE_OUTCOME % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_OUTCOME],ORM.OUTCOME_STR),
+ ORM.get_orm_string(investigation_outcome,ORM.OUTCOME_STR)))
+ if investigation[ORM.INVESTIGATION_PRIORITY] < investigation_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(investigation_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Investigation:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_investigation SET status=?, outcome=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (investigation_status,investigation_outcome,investigation_priority,srtool_today,investigation[ORM.INVESTIGATION_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from defects')
+ sql = '''INSERT INTO orm_investigationhistory (investigation_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (investigation[ORM.INVESTIGATION_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_investigation_status(investigation_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ if 'all' == investigation_list:
+ investigations = cur.execute("SELECT * FROM orm_investigation").fetchall()
+ else:
+ investigation_paren_list = str(investigation_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list)
+ investigations = cur.execute("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list).fetchall()
+
+ i = 0
+ for investigation in investigations:
+ _update_investigation_status(cur,investigation,srtool_today,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,investigation[ORM.INVESTIGATION_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ cur.close()
+ conn.commit()
+ conn.close()
+
+# This routine is intended for incremental cumulative status updates
+def update_cve_status_tree(cve_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+ if 'all' == cve_list:
+ # global cumulative update
+ update_investigation_status('all', update_skip_history)
+ update_vulnerability_status('all', update_skip_history)
+ update_cve_status('all', update_skip_history)
+ return
+
+ # Perform a deep update on the CVEs, their vulnerabilities, and their investigations
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status_tree:count=%d" % (len(cves)))
+
+ i = 0
+ cve_list = []
+ for cve in cves:
+ cve_list.append(cve[ORM.CVE_NAME])
+ vulnerability_list = []
+ investigation_list = []
+
+ cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ vulnerability_list.append(vulnerability[ORM.VULNERABILITY_NAME])
+
+ vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability_id).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+ investigation_list.append(investigation[ORM.INVESTIGATION_NAME])
+
+ # Update the CVE's children status
+ update_investigation_status(','.join(investigation_list), update_skip_history)
+ update_vulnerability_status(','.join(vulnerability_list), update_skip_history)
+
+ # Children are updated, now update the CVEs
+ update_cve_status(','.join(cve_list), update_skip_history)
+ cur.close()
+ conn.close()
+
+#################################
# Generate database schema offsets
#
#
@@ -428,12 +866,23 @@ def gen_schema_header():
print("ERROR(%d): %s" % (e.returncode, e.output))
return
+ # Fetch USER_SRTOOL_ID
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ USER_SRTOOL_NAME = 'SRTool'
+ user = cur.execute("SELECT * FROM users_srtuser where username = '%s'" % USER_SRTOOL_NAME).fetchone()
+ USER_SRTOOL_ID = user[0] # Hardcoded 'ORM.USERS_SRTUSER_ID'
+ conn.close()
+
with open(os.path.join(srtool_basepath,'bin/common/srt_schema.py'), 'w') as fd:
fd.write("# SRTool database table schema indexes\n")
fd.write("# Generated by: './bin/common/srtool_common.py --generate-schema-header'\n")
fd.write("# Should be run after any schema changes to sync commandline tools\n")
fd.write("\n")
fd.write("class ORM():\n")
+ fd.write(" USER_SRTOOL_NAME = '%s'\n" % USER_SRTOOL_NAME)
+ fd.write(" USER_SRTOOL_ID = %d\n" % USER_SRTOOL_ID)
+
for line in output.decode("utf-8").splitlines():
match = create_re.match(line)
if not match:
@@ -450,14 +899,18 @@ def gen_schema_header():
#print("%s_%s = %d" % (table.upper(),name.upper(),i))
fd.write(" %s_%s = %d\n" % (table.upper(),name.upper(),i))
+ #
+ # Common SRTool Status Mappings
+ #
+
fd.write("\n # Shared Constants\n")
fd.write(" %s_%s = %d\n" % ('PRIORITY','UNDEFINED',0))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MINOR' ,1))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,2))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,3))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,4))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','CRITICAL' ,4))
fd.write(" %s = '%s'\n" % ('PRIORITY_STR', \
- 'Undefined,Minor,Low,Medium,High' \
+ 'UNDEFINED,Low,Medium,High,Critical' \
))
fd.write(" %s_%s = %d\n" % ('STATUS','HISTORICAL' ,0))
@@ -466,8 +919,12 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE' ,3))
fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE' ,4))
fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE',5))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NEW_INACTIVE' ,6))
+ fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE_INACTIVE' ,7))
+ fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE_INACTIVE' ,8))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE_INACTIVE',9))
fd.write(" %s = '%s'\n" % ('STATUS_STR', \
- 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable' \
+ 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable,(New),(Investigate),(Vulnerable),(Not Vulnerable)' \
))
fd.write(" %s_%s = %d\n" % ('PUBLISH','UNPUBLISHED',0))
@@ -488,6 +945,10 @@ def gen_schema_header():
'Open,Closed,Fixed,Not_Fix' \
))
+ #
+ # External Defect Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DEFECT','UNRESOLVED' ,0))
fd.write(" %s_%s = %d\n" % ('DEFECT','RESOLVED' ,1))
fd.write(" %s_%s = %d\n" % ('DEFECT','FIXED' ,2))
@@ -500,12 +961,39 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('DEFECT','CANNOT_REPRODUCE' ,9))
fd.write(" %s_%s = %d\n" % ('DEFECT','DONE' ,10))
fd.write(" %s_%s = '%s'\n" % ('DEFECT','RESOLUTION_STR', \
- 'Unresolved,Resolved,Fixed,Will_Not_Fix,Withdrawn,Rejected,Duplicate,Not_Applicable,Replaced_By_Requirement,Cannot_Reproduce,Done' \
+ 'Unresolved,Resolved,Fixed,Will Not Fix,Withdrawn,Rejected,Duplicate,Not Applicable,Replaced By Requirement,Cannot Reproduce,Done' \
))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','UNDEFINED',0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','CRITICAL' ,4))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','PRIORITY_STR', \
+ 'UNDEFINED,P4,P3,P2,P1' \
+ ))
+
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_OPEN' ,0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_IN_PROGRESS' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_ON_HOLD' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CHECKED_IN' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_RESOLVED' ,4))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CLOSED' ,5))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','STATUS_STR', \
+ 'Open,In progress,On Hold,Checked In,Resolved,Closed' \
+ ))
+
+ #
+ # Package Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('PACKAGE','FOR' ,0))
fd.write(" %s_%s = %d\n" % ('PACKAGE','AGAINST' ,1))
+ #
+ # Data source Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DATASOURCE','MINUTELY' ,0))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','HOURLY' ,1))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','DAILY' ,2))
@@ -519,6 +1007,55 @@ def gen_schema_header():
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATE_FORMAT','%Y-%m-%d'))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATETIME_FORMAT','%Y-%m-%d %H:%M:%S'))
+ #
+ # Update class Mappings
+ #
+
+ fd.write("\n\n")
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','UPDATE_STR','UPDATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','CREATE_STR','CREATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_USER','User'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_TRIAGE','Triage'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_CVE','CVE'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_DEFECT','Defect'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NEW_NAME','New_Name(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIORITY','Priority(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','STATUS','Status(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V3','Severity_V3(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V2','Severity_V2(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DESCRIPTION','Description()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','LASTMODIFIEDDATE','LastModifiedDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','OUTCOME','Outcome(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','RELEASE','Release(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NOTE','User_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIVATE_NOTE','Private_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','TAG','Tag()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_STATE','Publish_State(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_DATE','Publish_Date(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ACKNOWLEDGE_DATE','AcknowledgeDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_CVE','Attach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_CVE','Detach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_VUL','Attach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_VUL','Detach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_INV','Attach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_INV','Detach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DEV','Attach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DEV','Detach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DOC','Attach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DOC','Detach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_USER_NOTIFY','Attach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_USER_NOTIFY','Detach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_ACCESS','Attach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_ACCESS','Detach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_PRODUCT','Attach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_PRODUCT','Detach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_NEW','Mark_New()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_UPDATED','Mark_Updated()'))
+
+ #
+ # Helper routine to map values to string names
+ #
+
fd.write("\n\n")
fd.write(" # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')\n")
fd.write(" @staticmethod\n")
@@ -536,63 +1073,15 @@ def gen_schema_header():
fd.write("\n")
#################################
-# fixups
-#
-
-# Recompute all of the CVE name_sort fields
-def fix_name_sort():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve')
- for i,cve in enumerate(cur):
- name_sort = get_name_sort(cve[ORM.CVE_NAME])
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET name_sort = ?
- WHERE id = ?'''
- cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
- conn.commit()
-
-# Reset empty CVE recommend fields to the proper integer zero
-def fix_cve_recommend():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
- i = 0
- for cve in cur:
- i += 1
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET recommend = ?
- WHERE id = ?'''
- cur_write.execute(sql, (0, cve[ORM.CVE_ID],))
- print("CVE RECOMMEND FIX COUNT=%d" % i)
- conn.commit()
-
-#################################
# main loop
#
def main(argv):
global verbose
+ global update_skip_history
global cmd_skip
global cmd_count
+ global cmd_test
# setup
parser = argparse.ArgumentParser(description='srtool_common.py: manage SRTool common source data')
@@ -600,15 +1089,24 @@ def main(argv):
parser.add_argument('--init-notify-categories', '-n', action='store_const', const='init_notify_categories', dest='command', help='Initialize notify categories')
parser.add_argument('--score-new-cves', '-s', dest='score_new_cves', help='Score CVEs for triage [NEW|CVE-1234]')
parser.add_argument('--generate-schema-header', '-g', action='store_const', const='gen_schema_header', dest='command', help='Generate database schema header')
+
+
+ parser.add_argument('--update-cve-status-tree', '-S', dest='update_cve_status_tree', help="Update CVEs and their children's cumulative status")
+ parser.add_argument('--update-investigation-status', '-I', dest='update_investigation_status', help='Update Investigation cumulative status')
+ parser.add_argument('--update-vulnerability-status', '-V', dest='update_vulnerability_status', help='Update Vulnerability cumulative status')
+ parser.add_argument('--update-cve-status', '-C', dest='update_cve_status', help='Update CVE cumulative status')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
- parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalulate the CVE name sort values')
- parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
args = parser.parse_args()
verbose = args.verbose
+ update_skip_history = args.update_skip_history
+ cmd_test = args.test
cmd_skip = 0
if None != args.skip:
cmd_skip = int(args.skip)
@@ -618,6 +1116,9 @@ def main(argv):
if get_override('SRTDBG_MINIMAL_DB'):
cmd_count = 40
+ if verbose:
+ print('srtool_common %s' % args)
+
if 'init_package_keywords' == args.command:
init_package_keywords(packageKeywordsFile)
elif 'init_notify_categories' == args.command:
@@ -626,12 +1127,16 @@ def main(argv):
score_new_cves(args.score_new_cves)
elif 'gen_schema_header' == args.command:
gen_schema_header()
- ### TO-DO: TEMPORARY WORKAROUND
- fix_cve_recommend()
- elif 'fix_name_sort' == args.command:
- fix_name_sort()
- elif 'fix_cve_recommend' == args.command:
- fix_cve_recommend()
+
+ elif args.update_cve_status_tree:
+ update_cve_status_tree(args.update_cve_status_tree, update_skip_history)
+ elif args.update_cve_status:
+ update_cve_status(args.update_cve_status, update_skip_history)
+ elif args.update_vulnerability_status:
+ update_vulnerability_status(args.update_vulnerability_status, update_skip_history)
+ elif args.update_investigation_status:
+ update_investigation_status(args.update_investigation_status, update_skip_history)
+
else:
print("Command not found")
diff --git a/bin/common/srtool_update.py b/bin/common/srtool_update.py
index 1ec6c0a..92f4479 100755
--- a/bin/common/srtool_update.py
+++ b/bin/common/srtool_update.py
@@ -27,6 +27,7 @@ import sqlite3
import json
import time
from datetime import datetime, timedelta
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -85,7 +86,7 @@ def get_tag_key(tag,key,default=''):
# ONDEMAND = 5 "{}" # only on demand
# ONSTARTUP = 6 "{}" # on every SRTool start up
-def run_updates(force_all,name_filter,is_trial):
+def run_updates(force_all,name_filter,update_skip_history,is_trial):
conn = sqlite3.connect(srtDbName)
cur = conn.cursor()
@@ -95,7 +96,7 @@ def run_updates(force_all,name_filter,is_trial):
if verbose:
print("SRTool Update: time_now = %s" % time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
status_str = "============================================================\n"
- status_str += "Update: Date=%s,Filter='%s',Force=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all)
+ status_str += "Update: Date=%s,Filter='%s',Force=%s,Skip_History=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all,update_skip_history)
#get sources that have update command
sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
@@ -198,6 +199,8 @@ def run_updates(force_all,name_filter,is_trial):
update_command = source[ORM.DATASOURCE_UPDATE]
if force_all:
update_command += " --force"
+ if update_skip_history:
+ update_command += " --update-skip-history"
if update_command.startswith('./'):
update_command = os.path.join(script_pathname, update_command)
os.system("echo 'Update:%s,%s' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
@@ -235,7 +238,7 @@ def list():
cur = conn.cursor()
cur_write = conn.cursor()
- format_str = "%16s %7s %14s %10s %28s %s"
+ format_str = "%16s %7s %14s %10s %28s '%s'"
print("SRTool Update List:")
print(format_str % ('Data','Source','Name','Frequency','Offset','Description'))
@@ -251,7 +254,7 @@ def list():
if verbose:
print('')
- run_updates(False,'all',True)
+ run_updates(False,'all',True,True)
#################################
# Start 'cron' job for updates
@@ -274,7 +277,7 @@ def cron_start():
extra_line = False
while True:
# Run the updates
- run_updates(False,'all',False)
+ run_updates(False,'all',False,False)
# Toggle an extra line in the log to make updates obvious
if extra_line:
extra_line = False
@@ -318,6 +321,7 @@ def main(argv):
parser.add_argument('--name-filter', '-n', dest='name_filter', help='Filter for datasource name')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
@@ -337,12 +341,13 @@ def main(argv):
elif 'run-updates' == args.command:
try:
print("BEGINNING UPDATING DATASOURCES... this MAY take a long time")
- run_updates(args.force,name_filter,args.is_trial)
+ run_updates(args.force,name_filter,args.update_skip_history,args.is_trial)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES:\t\t\t...\t\t\tSUCCESS\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
print("FINISHED UPDATING ALL DATASOURCES\n")
except Exception as e:
print("FAILED UPDATING ALL DATASOURCES (%s)" % e)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES\t\t\t...\t\t\tFAILED ... %s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), e))
+ traceback.print_exc(file=sys.stdout)
elif args.configure_ds_update:
try:
print("CHANGING UPDATE CONFIGURATION FOR %s" % args.configure_ds_update[0])
diff --git a/bin/common/srtool_utils.py b/bin/common/srtool_utils.py
index 8c13f3a..ac65d42 100755
--- a/bin/common/srtool_utils.py
+++ b/bin/common/srtool_utils.py
@@ -25,6 +25,9 @@ import os
import sys
import argparse
import sqlite3
+from datetime import datetime, date
+import time
+import re
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -35,6 +38,7 @@ from common.srt_schema import ORM
verbose = False
cmd_skip = 0
cmd_count = 0
+force = False
srtDbName = 'srt.sqlite'
@@ -141,9 +145,6 @@ def remove_app_sources(master_app):
# Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
def fix_new_reserved():
- global cmd_skip
- global cmd_count
-
conn = sqlite3.connect(srtDbName)
cur = conn.cursor()
cur_write = conn.cursor()
@@ -171,6 +172,7 @@ def fix_new_reserved():
reserved_pos = cve[ORM.CVE_DESCRIPTION].find('** RESERVED **')
if (0 <= reserved_pos) and (20 > reserved_pos):
print("STATUS_NEW_RESERVED:%s:%s:%s" % (cve[ORM.CVE_STATUS],cve[ORM.CVE_NAME],cve[ORM.CVE_DESCRIPTION][:40]))
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
sql = ''' UPDATE orm_cve
SET status = ?
WHERE id = ?'''
@@ -180,12 +182,881 @@ def fix_new_reserved():
conn.commit()
#################################
+# fix_new_tags
+#
+
+# Fix the None "cve.tags" fields
+def fix_new_tags():
+    """One-shot fixup: replace NULL/None 'orm_cve.tags' values with ''.
+
+    Iterates every CVE record; any record whose 'tags' column is falsy is
+    rewritten as an empty string.  Honors the module-level cmd_skip/cmd_count
+    debug controls.  Prints a final "scanned,fixed" count.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    # Separate write cursor so UPDATEs do not disturb the active read loop
+    cur_write = conn.cursor()
+
+    cur.execute('SELECT * FROM orm_cve')
+    i = 0        # records scanned
+    j = 0        # records fixed
+    for cve in cur:
+        i += 1
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+        # Periodic commit keeps the write-ahead state bounded on long runs
+        if (0 == i % 200):
+            conn.commit()
+        # Development/debug support
+        if cmd_skip:
+            if i < cmd_skip:
+                continue
+        if cmd_count:
+            if (i - cmd_skip) > cmd_count:
+                print("Count return: %s,%s" % (i,cmd_count))
+                break
+
+        if not cve[ORM.CVE_TAGS]:
+            # NOTE: we do not touch 'cve.srt_updated' for this background change
+            sql = ''' UPDATE orm_cve
+                      SET tags = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, ('', cve[ORM.CVE_ID],))
+            j += 1
+    print("\nCVE COUNT=%5d,%5d" % (i,j))
+    conn.commit()
+
+#################################
+# fixup fix_name_sort
+#
+
+# Recompute all of the CVE name_sort fields
+def fix_name_sort():
+    """Fixup: recompute 'orm_cve.name_sort' for every CVE record.
+
+    Uses get_name_sort() (defined elsewhere in this file) to derive the
+    sortable form of each CVE name and writes it back.  Commits every 200
+    records and once more at the end.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    # Separate write cursor so UPDATEs do not disturb the active read loop
+    cur_write = conn.cursor()
+
+    cur.execute('SELECT * FROM orm_cve')
+    for i,cve in enumerate(cur):
+        name_sort = get_name_sort(cve[ORM.CVE_NAME])
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
+        if (0 == i % 200):
+            conn.commit()
+
+        # NOTE: we do not touch 'cve.srt_updated' for this background change
+        sql = ''' UPDATE orm_cve
+                  SET name_sort = ?
+                  WHERE id = ?'''
+        cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
+    conn.commit()
+
+#################################
+# fixup fix_cve_recommend
+#
+
+# Reset empty CVE recommend fields to the proper integer zero
+def fix_cve_recommend():
+    """Fixup: normalize records selected by recommend = "".
+
+    Two repairs per matching CVE record:
+      * lastModifiedDate == '0' (a mis-write) is reset to ''
+      * a falsy 'recommend' is reset to the integer 0
+    Only rows where at least one repair applies are written back.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    # Separate write cursor so UPDATEs do not disturb the active read loop
+    cur_write = conn.cursor()
+
+    cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
+    i = 0
+    fix_count = 0
+    for cve in cur:
+        i += 1
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+
+        #
+        # Fix miss-write to lastModifiedDate, missing integer for recommend
+        #
+
+        fix = False
+
+        lastModifiedDate = cve[ORM.CVE_LASTMODIFIEDDATE]
+        if '0' == lastModifiedDate:
+            lastModifiedDate = ''
+            fix = True
+
+        recommend = cve[ORM.CVE_RECOMMEND]
+        if not recommend:
+            recommend = 0
+            fix = True
+
+        # NOTE: we do not touch 'cve.srt_updated' for this background change
+        if fix:
+            sql = ''' UPDATE orm_cve
+                      SET recommend = ?, lastModifiedDate = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (recommend, lastModifiedDate, cve[ORM.CVE_ID],))
+
+            fix_count += 1
+            # Commit roughly every 200 fixes to bound pending writes
+            if (199 == fix_count % 200):
+                conn.commit()
+
+    print("CVE RECOMMEND FIX COUNT=%d of %d" % (fix_count,i))
+    if fix_count:
+        conn.commit()
+    conn.close()
+
+#################################
+# fixup fix_srt_dates
+#
+
+# Reset older 'date' values as 'datetime' values
+
+def _fix_datetime(value,default):
+    """Normalize a stored date string to a datetime-bearing value.
+
+    Returns 'default' when 'value' is empty or does not start with a digit;
+    returns 'value' unchanged when it already contains ':' (assumed to be a
+    full datetime string); otherwise parses 'value' as '%Y-%m-%d'.
+    NOTE(review): the return type is mixed — a str in the first two branches,
+    a datetime object in the last; callers appear to rely on sqlite coercing
+    both — confirm.
+    """
+    if (not value) or (not value[0].isdigit()):
+        return(default)
+    elif ':' in value:
+        return(value)
+    else:
+        return(datetime.strptime(value, '%Y-%m-%d'))
+
+def _fix_date(value,default):
+    """Collapse a stored datetime string to a date-only string.
+
+    Returns (changed, value):
+      * (False, default) when 'value' is empty or does not start with a digit
+      * (False, value)   when 'value' has no ':' (already date-only)
+      * (True, 'YYYY-MM-DD') when 'value' was a full datetime — fractional
+        seconds are stripped by the regex before parsing
+    """
+    if (not value) or (not value[0].isdigit()):
+        return(False,default)
+    elif not ':' in value:
+        return(False,value)
+    else:
+        # Drop fractional seconds (e.g. '...:59.123456') before strptime
+        value = re.sub('\..*','',value)
+        dt = datetime.strptime(value,ORM.DATASOURCE_DATETIME_FORMAT)
+        return(True,dt.strftime(ORM.DATASOURCE_DATE_FORMAT))
+
+
+def fix_srt_datetime(scope):
+    """Fixup: normalize srt_created/srt_updated/date fields table by table.
+
+    'scope' selects which table(s) to repair:
+      'd'  defects        'i'  investigations    'v'  vulnerabilities
+      'c'  CVEs           'ch' CVE history       'vh' vulnerability history
+      'ih' investigation history
+      'all' everything; 'history' the three history tables only.
+    Record objects are repaired with _fix_datetime(); history 'date' columns
+    are collapsed to date-only strings with _fix_date().  Commits are batched
+    every ~200 rows; cmd_skip/cmd_count debug controls are honored.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    # Separate write cursor so UPDATEs do not disturb the active read loops
+    cur_write = conn.cursor()
+
+    if ('d' == scope) or ('all' == scope):
+        cur.execute('SELECT * FROM orm_defect')
+        i = 0
+        is_change_count = 0
+        for defect in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: %20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+            if (0 == i % 200):
+                conn.commit()
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if (i - cmd_skip) > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            # Fall back to the defect's external update date when srt_updated is unusable
+            defect_srt_updated = _fix_datetime(defect[ORM.DEFECT_SRT_UPDATED],defect[ORM.DEFECT_DATE_UPDATED])
+            if defect_srt_updated == defect[ORM.DEFECT_SRT_UPDATED]:
+                continue
+
+            sql = ''' UPDATE orm_defect
+                      SET srt_updated = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (defect_srt_updated, defect[ORM.DEFECT_ID],))
+            is_change_count += 1
+        print("DEFECT DATE FIX COUNT=%d/%d" % (is_change_count,i))
+        conn.commit()
+
+    # INVESTIGATION DATE FIX COUNT=1089363, real 12m20.041s = 1472 recs/sec
+    if ('i' == scope) or ('all' == scope):
+        cur.execute('SELECT * FROM orm_investigation')
+        i = 0
+        is_change_count = 0
+        for investigation in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: %20s\r' % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+            if (0 == i % 200):
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if (i - cmd_skip) > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            srt_updated = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_UPDATED],None)
+            srt_created = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_CREATED],None)
+            # A None here means the stored value was unparseable — hard stop
+            if (not srt_updated) or (not srt_created):
+                print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,investigation[ORM.INVESTIGATION_ID],investigation[ORM.INVESTIGATION_SRT_UPDATED],investigation[ORM.INVESTIGATION_SRT_CREATED]))
+                exit(1)
+            if (srt_updated == investigation[ORM.INVESTIGATION_SRT_UPDATED]) and (srt_created == investigation[ORM.INVESTIGATION_SRT_CREATED]):
+                continue
+
+            sql = ''' UPDATE orm_investigation
+                      SET srt_updated = ?, srt_created = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (srt_updated, srt_created, investigation[ORM.INVESTIGATION_ID],))
+            is_change_count += 1
+        print("INVESTIGATION DATE FIX COUNT=%d/%d" % (is_change_count,i))
+        conn.commit()
+
+    # VULNERABILITY DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+    if ('v' == scope) or ('all' == scope):
+        cur.execute('SELECT * FROM orm_vulnerability')
+        i = 0
+        is_change_count = 0
+        for vulnerability in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: %20s\r' % (i,vulnerability[ORM.VULNERABILITY_NAME]), end='')
+            if (0 == i % 200):
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if (i - cmd_skip) > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            srt_updated = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_UPDATED],None)
+            srt_created = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_CREATED],None)
+            # A None here means the stored value was unparseable — hard stop
+            if (not srt_updated) or (not srt_created):
+                print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,vulnerability[ORM.VULNERABILITY_ID],vulnerability[ORM.VULNERABILITY_SRT_UPDATED],vulnerability[ORM.VULNERABILITY_SRT_CREATED]))
+                exit(1)
+            if (srt_updated == vulnerability[ORM.VULNERABILITY_SRT_UPDATED]) and (srt_created == vulnerability[ORM.VULNERABILITY_SRT_CREATED]):
+                continue
+
+            sql = ''' UPDATE orm_vulnerability
+                      SET srt_updated = ?, srt_created = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (srt_updated, srt_created, vulnerability[ORM.VULNERABILITY_ID],))
+            is_change_count += 1
+        print("VULNERABILITY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+        conn.commit()
+
+    # CVE DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+    # NOTE: only ACK dates need fixing, received bad apha content from srtool_mitre
+    if ('c' == scope) or ('all' == scope):
+        cur.execute('SELECT * FROM orm_cve')
+        i = 0
+        # Sparse updates
+        is_change = False
+        is_change_count = 0
+        for cve in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+            if (0 == i % 200) and is_change:
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+                is_change = False
+
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if is_change_count > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            is_change = False
+
+            if cve[ORM.CVE_ACKNOWLEDGE_DATE]:
+                acknowledge_date = _fix_datetime(cve[ORM.CVE_ACKNOWLEDGE_DATE],'alpha')
+                # If the default 'alpha' happens, then date had bad format and must go away
+                if ('alpha' == acknowledge_date) or (acknowledge_date != cve[ORM.CVE_ACKNOWLEDGE_DATE]):
+                    acknowledge_date = None
+                    is_change = True
+
+            srt_updated = _fix_datetime(cve[ORM.CVE_SRT_UPDATED],None)
+            srt_created = _fix_datetime(cve[ORM.CVE_SRT_CREATED],None)
+            if (not srt_updated) or (not srt_created):
+                print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,cve[ORM.CVE_ID],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_SRT_CREATED]))
+                exit(1)
+            if (srt_updated != cve[ORM.CVE_SRT_UPDATED]) or (srt_created != cve[ORM.CVE_SRT_CREATED]):
+                is_change = True
+
+            # Anything to do?
+            if not is_change:
+                continue
+
+            is_change_count += 1
+            # NOTE(review): if ORM.CVE_ACKNOWLEDGE_DATE was falsy above,
+            # 'acknowledge_date' was never bound on this iteration and the
+            # execute below would raise NameError when only the srt dates
+            # changed — verify against live data.
+            sql = ''' UPDATE orm_cve
+                      SET srt_updated = ?, srt_created = ?, acknowledge_date = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (srt_updated, srt_created, acknowledge_date, cve[ORM.CVE_ID],))
+            # NOTE(review): is_change_count is also incremented before the
+            # execute above — the printed fix count appears to double-count;
+            # confirm intent.
+            is_change_count += 1
+        print("CVE DATE FIX COUNT=%d/%d" % (is_change_count,i))
+        conn.commit()
+
+    # Fix CVE History
+    if scope in ('ch','all','history'):
+        cur.execute('SELECT * FROM orm_cvehistory')
+        i = 0
+        # Sparse updates
+        is_change = False
+        is_change_count = 0
+        for cve_history in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: \r' % (i), end='')
+            if (0 == i % 200) and is_change:
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+                is_change = False
+
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if is_change_count > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            updated,history_date = _fix_date(cve_history[ORM.CVEHISTORY_DATE],'')
+            if not updated:
+                continue
+
+            is_change = True
+            sql = ''' UPDATE orm_cvehistory
+                      SET date = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (history_date, cve_history[ORM.CVEHISTORY_ID],))
+            is_change_count += 1
+
+        # Commit all remaining changes
+        if is_change:
+            conn.commit()
+        print("CVE HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+    # Fix Vulnerability History
+    if scope in ('vh','all','history'):
+        cur.execute('SELECT * FROM orm_vulnerabilityhistory')
+        i = 0
+        # Sparse updates
+        is_change = False
+        is_change_count = 0
+        for vulnerabilityhistory in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: \r' % (i), end='')
+            if (0 == i % 200) and is_change:
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+                is_change = False
+
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if is_change_count > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            updated,history_date = _fix_date(vulnerabilityhistory[ORM.VULNERABILITYHISTORY_DATE],'')
+            if not updated:
+                continue
+
+            is_change = True
+            sql = ''' UPDATE orm_vulnerabilityhistory
+                      SET date = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (history_date, vulnerabilityhistory[ORM.VULNERABILITYHISTORY_ID],))
+            is_change_count += 1
+
+        # Commit all remaining changes
+        if is_change:
+            conn.commit()
+        print("VULNERABILITY HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+    # Fix Investigation History
+    if scope in ('ih','all','history'):
+        cur.execute('SELECT * FROM orm_investigationhistory')
+        i = 0
+        # Sparse updates
+        is_change = False
+        is_change_count = 0
+        for investigation_history in cur:
+            i += 1
+
+            # Progress indicator support
+            if 0 == i % 10:
+                print('%05d: \r' % (i), end='')
+            if (0 == i % 200) and is_change:
+                conn.commit()
+                time.sleep(0.1) # give time for Sqlite to sync
+                is_change = False
+
+            # Development/debug support
+            if cmd_skip:
+                if i < cmd_skip:
+                    continue
+            if cmd_count:
+                if is_change_count > cmd_count:
+                    print("Count return: %s,%s" % (i,cmd_count))
+                    break
+
+            updated,history_date = _fix_date(investigation_history[ORM.INVESTIGATIONHISTORY_DATE],'')
+            if not updated:
+                continue
+
+            is_change = True
+            sql = ''' UPDATE orm_investigationhistory
+                      SET date = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (history_date, investigation_history[ORM.INVESTIGATIONHISTORY_ID],))
+            is_change_count += 1
+
+        # Commit all remaining changes
+        if is_change:
+            conn.commit()
+        print("INVESTIGATION HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+#################################
+# fixup fix_cve_srt_create
+#
+
+# Reset CVE srt_create to NIST release dates
+def fix_reset_nist_to_create(cve_prefix):
+    """Fixup: reset srt_created/srt_updated to the NIST published/modified dates.
+
+    Applies to CVE records whose name matches 'cve_prefix%' (SQL LIKE).
+    Falls back to the existing srt_* value when the NIST date is missing or
+    too short to parse; exits hard on a malformed NIST date.
+    NOTE(review): 'cve_prefix' is concatenated directly into the SQL — fine
+    for this internal command-line tool, but a parameterized LIKE would be
+    safer if the prefix ever comes from untrusted input.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    cur_write = conn.cursor()
+
+    # Parse a NIST '%Y-%m-%d' date; return 'default' when absent/too short
+    def date_nist2srt(nist_date,default,cve_name,i):
+        if not nist_date or (4 > len(nist_date)):
+            return default
+        try:
+            return(datetime.strptime(nist_date, '%Y-%m-%d'))
+        except Exception as e:
+            print("\n\ndate_nist2srt:%s,%s,%s,%s" % (cve_name,e,cve_name,i))
+            exit(1)
+        return default
+
+    cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"')
+
+    i = 0
+    for cve in cur:
+        i += 1
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+        if (0 == i % 200):
+            conn.commit()
+            print('')
+        # Development/debug support
+        if cmd_skip and (i < cmd_skip): continue
+        if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+        nist_released = date_nist2srt(cve[ORM.CVE_PUBLISHEDDATE],cve[ORM.CVE_SRT_CREATED],cve[ORM.CVE_NAME],i)
+        nist_modified = date_nist2srt(cve[ORM.CVE_LASTMODIFIEDDATE],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_NAME],i)
+
+        sql = ''' UPDATE orm_cve
+                  SET srt_created = ?, srt_updated = ?
+                  WHERE id = ?'''
+        cur_write.execute(sql, (nist_released, nist_modified, cve[ORM.CVE_ID],))
+    print("CVE DATE FIX COUNT=%d" % i)
+    conn.commit()
+
+#################################
+# fixup fix_missing_create_dates
+#
+
+# Reset CVE None creation dates to 2019-01-01, out of the way of reports
+def fix_missing_create_dates():
+    """Fixup: backfill missing/date-only srt_created and srt_updated fields.
+
+    A field is considered broken when it is empty or contains no ':'
+    (i.e. is not a full datetime).  Broken fields are set to the sentinel
+    2019-01-01 so they sort out of the way of reports.  Honors the
+    cmd_skip/cmd_count debug controls; commits once at the end.
+    """
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    cur_write = conn.cursor()
+
+    # Sentinel creation date, deliberately before current report windows
+    fix_date = datetime.strptime('Jan 1 2019', '%b %d %Y')
+    fix_count = 0
+
+    cur.execute('SELECT * FROM orm_cve')
+    i = 0
+    for cve in cur:
+        i += 1
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+        # Periodic commit intentionally disabled here; single commit at end
+        if (0 == i % 200):
+#            conn.commit()
+            #print('')
+            pass
+        # Development/debug support
+        if cmd_skip:
+            if i < cmd_skip:
+                continue
+        if cmd_count:
+            if (i - cmd_skip) > cmd_count:
+                print("Count return: %s,%s" % (i,cmd_count))
+                break
+
+        fix = False
+        if not cve[ORM.CVE_SRT_CREATED] or (0 > cve[ORM.CVE_SRT_CREATED].find(':')):
+            srt_created = fix_date
+            fix = True
+        else:
+            srt_created = cve[ORM.CVE_SRT_CREATED]
+            #srt_created = datetime.strptime(cve[ORM.CVE_SRT_CREATED],'%Y-%m-%d')
+        if not cve[ORM.CVE_SRT_UPDATED] or (0 > cve[ORM.CVE_SRT_UPDATED].find(':')):
+            srt_updated = fix_date
+            fix = True
+        else:
+            srt_updated = cve[ORM.CVE_SRT_UPDATED]
+            #srt_updated = datetime.strptime(cve[ORM.CVE_SRT_UPDATED],'%Y-%m-%d')
+
+        if fix:
+            sql = ''' UPDATE orm_cve
+                      SET srt_created = ?, srt_updated = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (srt_created, srt_updated, cve[ORM.CVE_ID],))
+            fix_count += 1
+    print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+    conn.commit()
+
+#################################
+# fixup fix_public_reserved
+#
+
+# Reset CVE 'New-Reserved' if now public from NIST
+def fix_public_reserved():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cve WHERE status = "%s"' % ORM.STATUS_NEW_RESERVED)
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s %d\r' % (i,cve[ORM.CVE_NAME],cve[ORM.CVE_STATUS]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if cve[ORM.CVE_CVSSV3_BASESCORE] or cve[ORM.CVE_CVSSV2_BASESCORE]:
+ sql = ''' UPDATE orm_cve
+ SET status = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (ORM.STATUS_NEW, cve[ORM.CVE_ID],))
+ fix_count += 1
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fix_remove_bulk_cve_history
+#
+
+# Remove a specific/accidental set of bulk CVE history updates intended to be background.  NOTE(review): defined below as fix_foo() but main() dispatches fix_remove_bulk_cve_history() -- the names must match or that command raises NameError.
+def fix_foo():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+ print("FOO=%s\n\n" % ORM.STATUS_NEW_RESERVED)
+
+ cur.execute('SELECT * FROM orm_cvehistory WHERE date LIKE "2019-03-2%"')
+
+ i = 0
+ for cvehistory in cur:
+ i += 1
+
+ # Progress indicator support
+ if 9 == i % 10:
+# print('%05d: %20s %s \r' % (i,cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ if not (cvehistory[ORM.CVEHISTORY_DATE] in ('2019-03-28','2019-03-27')):
+ continue
+ if not (cvehistory[ORM.CVEHISTORY_COMMENT].startswith("UPDATE(CVE):")):
+ continue
+
+ cur_cve.execute('SELECT * FROM orm_cve WHERE id = "%s"' % cvehistory[ORM.CVEHISTORY_CVE_ID])
+ cve = cur_cve.fetchone()
+ if not (cve[ORM.CVE_NAME].startswith("CVE-200")):
+ continue
+
+ if 19 == fix_count % 20:
+ print("%4d) CVE=%s,CH_Comment=%s,CH_Date=%s" % (fix_count,cve[ORM.CVE_NAME],cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]))
+
+ mydata = cur_del.execute("DELETE FROM orm_cvehistory WHERE id=?", (cvehistory[ORM.CVEHISTORY_ID],))
+ fix_count += 1
+
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fix_defects_to_products
+#
+
+# Report any defect whose product_id does not reference an existing product
+def fix_defects_to_products():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+ # Find all products
+ products = {}
+ cur.execute('SELECT * FROM orm_product')
+ for product in cur:
+ id = product[ORM.PRODUCT_ID]
+ name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
+ products[id] = name
+ print("[%2d] %s" % (id,name))
+
+ # Test product field for all defects
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 99 == i % 100:
+ print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ product_id = defect[ORM.DEFECT_PRODUCT_ID]
+ if not product_id in products:
+ print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+
+# print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# find_multiple_defects
+#
+
+def find_multiple_defects():
+
+ conn = sqlite3.connect(srtDbName)
+ cur_i2d = conn.cursor()
+ cur_inv = conn.cursor()
+ cur_def = conn.cursor()
+
+ cur_inv.execute('SELECT * FROM orm_investigation')
+ count = 0
+ for i,investigation in enumerate(cur_inv):
+ if 0 == i % 100:
+ print("%4d) V=%-30s\r" % (i,investigation[ORM.VULNERABILITY_NAME]), end='')
+
+ cur_i2d.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = "%s"' % investigation[ORM.INVESTIGATION_ID])
+ i2d_list = cur_i2d.fetchall()
+ if 1 < len(i2d_list):
+ count += 1
+ for k,i2d in enumerate(i2d_list):
+ cur_def.execute('SELECT * FROM orm_defect WHERE id = "%s"' % i2d[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
+ defect = cur_def.fetchone()
+ if defect[ORM.DEFECT_NAME].startswith("LIN10"):
+ if 0 == k:
+ print("[%02d] Multiple defects for investigation '%s':" % (count,investigation[ORM.INVESTIGATION_NAME]))
+ print(" [%02d] %s: %s (%s)" % (k+1,defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_SUMMARY],ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)))
+ conn.close()
+
+#################################
+# find_duplicate_names
+#
+
+def find_duplicate_names():
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+
+ cur.execute('SELECT * FROM orm_cve')
+ cve_dict = {}
+ for i,cve in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) C=%-30s\r" % (i,cve[ORM.CVE_NAME]), end='')
+
+ if not cve[ORM.CVE_NAME] in cve_dict:
+ cve_dict[cve[ORM.CVE_NAME]] = cve[ORM.CVE_ID]
+ else:
+ print("\nERROR:Multiple cve names '%s'" % cve[ORM.CVE_NAME])
+ print(" a) id=%d" % cve_dict[cve[ORM.CVE_NAME]])
+ print(" b) id=%d" % cve[ORM.CVE_ID])
+ cve_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_vulnerability')
+ vul_dict = {}
+ for i,vulnerability in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) V=%-30s\r" % (i,vulnerability[ORM.VULNERABILITY_NAME]), end='')
+
+ if not vulnerability[ORM.VULNERABILITY_NAME] in vul_dict:
+ vul_dict[vulnerability[ORM.VULNERABILITY_NAME]] = vulnerability[ORM.VULNERABILITY_ID]
+ else:
+ print("\nERROR:Multiple vulnerability names '%s'" % vulnerability[ORM.VULNERABILITY_NAME])
+ print(" a) id=%d" % vul_dict[vulnerability[ORM.VULNERABILITY_NAME]])
+ print(" b) id=%d" % vulnerability[ORM.VULNERABILITY_ID])
+ vul_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_investigation')
+ inv_dict = {}
+ for i,investigation in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) I=%-30s\r" % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+
+ if not investigation[ORM.INVESTIGATION_NAME] in inv_dict:
+ inv_dict[investigation[ORM.INVESTIGATION_NAME]] = investigation[ORM.INVESTIGATION_ID]
+ else:
+ print("\nERROR:Multiple investigation names '%s'" % investigation[ORM.INVESTIGATION_NAME])
+ print(" a) id=%d" % inv_dict[investigation[ORM.INVESTIGATION_NAME]])
+ print(" b) id=%d" % investigation[ORM.INVESTIGATION_ID])
+ inv_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_defect')
+ dev_dict = {}
+ for i,defect in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) D=%-30s\r" % (i,defect[ORM.DEFECT_NAME]), end='')
+
+ if not defect[ORM.DEFECT_NAME] in dev_dict:
+ dev_dict[defect[ORM.DEFECT_NAME]] = defect[ORM.DEFECT_ID]
+ else:
+ print("\nERROR:Multiple defect names '%s'" % defect[ORM.DEFECT_NAME])
+ print(" a) id=%d" % dev_dict[defect[ORM.DEFECT_NAME]])
+ print(" b) id=%d" % defect[ORM.DEFECT_ID])
+ dev_dict = {}
+ print('')
+
+ conn.close()
+
+#################################
+# find_bad_links
+#
+
+def find_bad_links():
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_del = conn.cursor()
+
+ #
+ print('\n=== CVE Source Check ===\n')
+ #
+
+ cur.execute('SELECT * FROM orm_cvesource')
+ is_change = False
+ for i,cs in enumerate(cur):
+ cveid = cs[ORM.CVESOURCE_CVE_ID]
+ srcid = cs[ORM.CVESOURCE_DATASOURCE_ID]
+ if 0 == i % 100:
+ print("%4d) CVE=%6d,SRC=%6d\r" % (cs[ORM.CVESOURCE_ID],cveid,srcid), end='')
+ error = False
+ if (1 > cveid): error = True
+ if (1 > srcid): error = True
+
+ if error:
+ print("ERROR: [%4d] CVE=%6d,SRC=%6d" % (cs[ORM.CVESOURCE_ID],cveid,srcid))
+ if force:
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ cur_del.execute(sql, (cs[ORM.CVESOURCE_ID],))
+ is_change = True
+
+ print('')
+ if is_change:
+ conn.commit()
+
+ #
+ print('\n=== Defect to Product Check ===\n')
+ #
+
+ # Find all products
+ products = {}
+ cur.execute('SELECT * FROM orm_product')
+ for product in cur:
+ id = product[ORM.PRODUCT_ID]
+ name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
+ products[id] = name
+ print("[%2d] %s" % (id,name))
+
+ # Test product field for all defects
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 99 == i % 100:
+ print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ product_id = defect[ORM.DEFECT_PRODUCT_ID]
+ if not product_id in products:
+ print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+
+ conn.close()
+
+
+#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
global cmd_count
+ global force
# setup
parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')
@@ -194,13 +1065,28 @@ def main(argv):
parser.add_argument('--settings', '-S', action='store_const', const='settings', dest='command', help='Show the SRT Settings')
parser.add_argument('--remove-app-sources', dest='remove_app_sources', help='Remove data sources for a previous app')
+ parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalulate the CVE name sort values')
+ parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
+ parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
+ parser.add_argument('--fix-new-tags', action='store_const', const='fix_new_tags', dest='command', help='Reset new cve.tags')
+ parser.add_argument('--fix-srt-datetime', dest='fix_srt_datetime', help='Fix SRT dates to datetimes [all|c|v|i|d|history|ch|vh|ih|dh]')
+ parser.add_argument('--fix-reset-nist-to-create', dest='fix_reset_nist_to_create', help='Bulk reset CVE [prefix*] srt_create dates to NIST release dates')
+ parser.add_argument('--fix-missing-create-dates', action='store_const', const='fix_missing_create_dates', dest='command', help='Reset CVE srt_create dates to NIST release dates')
+ parser.add_argument('--fix-public-reserved', action='store_const', const='fix_public_reserved', dest='command', help='Reset CVE NEW_RESERVED if now public')
+ parser.add_argument('--fix-remove-bulk-cve-history', action='store_const', const='fix_remove_bulk_cve_history', dest='command', help='foo')
+
+ parser.add_argument('--find-multiple-defects', action='store_const', const='find_multiple_defects', dest='command', help='foo')
+ parser.add_argument('--find-duplicate-names', action='store_const', const='find_duplicate_names', dest='command', help='foo')
+
+ parser.add_argument('--fix-defects-to-products', action='store_const', const='fix_defects_to_products', dest='command', help='foo')
+ parser.add_argument('--find-bad-links', action='store_const', const='find_bad_links', dest='command', help='Find bad links, e.g. "orm_cvesource" (with "-f" to fix)')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
- parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
-
args = parser.parse_args()
master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
@@ -210,6 +1096,7 @@ def main(argv):
cmd_skip = int(args.skip)
if None != args.count:
cmd_count = int(args.count)
+ force = args.force
if args.sources:
if args.sources.startswith('s'):
@@ -226,10 +1113,40 @@ def main(argv):
sources('reset')
elif 'settings' == args.command:
settings()
- elif 'fix_new_reserved' == args.command:
- fix_new_reserved()
+
elif args.remove_app_sources:
remove_app_sources(args.remove_app_sources)
+
+ elif 'fix_name_sort' == args.command:
+ fix_name_sort()
+ elif 'fix_cve_recommend' == args.command:
+ fix_cve_recommend()
+ elif 'fix_new_reserved' == args.command:
+ fix_new_reserved()
+ elif 'fix_new_tags' == args.command:
+ fix_new_tags()
+ elif args.fix_srt_datetime:
+ fix_srt_datetime(args.fix_srt_datetime)
+ elif args.fix_reset_nist_to_create:
+ fix_reset_nist_to_create(args.fix_reset_nist_to_create)
+ elif 'fix_missing_create_dates' == args.command:
+ fix_missing_create_dates()
+ elif 'fix_public_reserved' == args.command:
+ fix_public_reserved()
+ elif 'fix_remove_bulk_cve_history' == args.command:
+ fix_remove_bulk_cve_history()
+ elif 'fix_defects_to_products' == args.command:
+ fix_defects_to_products()
+
+
+ elif 'find_multiple_defects' == args.command:
+ find_multiple_defects()
+ elif 'find_duplicate_names' == args.command:
+ find_duplicate_names()
+ elif 'find_bad_links' == args.command:
+ find_bad_links()
+
+
else:
print("Command not found")
master_log.close()
diff --git a/bin/debian/srtool_debian.py b/bin/debian/srtool_debian.py
index a8d8b3d..094deda 100755
--- a/bin/debian/srtool_debian.py
+++ b/bin/debian/srtool_debian.py
@@ -203,7 +203,9 @@ def main(argv):
# parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/dev_tools/history.py b/bin/dev_tools/history.py
new file mode 100755
index 0000000..9079874
--- /dev/null
+++ b/bin/dev_tools/history.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+import argparse
+from datetime import datetime, date, timedelta
+import sqlite3
+import re
+import subprocess
+
+# load the srt.sqlite schema indexes
+if os.path.isdir('bin'):
+ dir_path = 'bin'
+else:
+ dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+
+# Setup:
+verbose = False
+is_trial = False
+
+#######################################################################
+# Helper Routines
+# stamp = ['d|W',directory,timestamp]
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(srtool_basepath, 'backups')):
+ prefix = 'W' if 10 < len(directory) else 'd'
+ directory = os.path.join(srtool_basepath, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = 'n'
+ directory = srtool_basepath
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+ # Sort by time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
+def run_command(cmnd):
+ print("Command:%s" % cmnd)
+ if not is_trial:
+ p = subprocess.Popen(cmnd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ for line in p.stdout.readlines():
+ if 0 < line.find(b'\r'):
+ continue
+ print(line)
+ retval = p.wait()
+
+#######################################################################
+# init_timestamps
+#
+
+def init_timestamps():
+
+ backup_dir = os.path.join(srtool_basepath, 'backups')
+ for directory in os.listdir(backup_dir):
+ directory = os.path.join(backup_dir, directory)
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(directory,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("DIR=%s,%s" % (directory,mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')))
+
+
+#######################################################################
+# list_history
+#
+
+def list_history():
+ stamps = backup_list()
+ for stamp in stamps:
+ print("DIR=%s,%-14s,%s" % (stamp[0],os.path.basename(stamp[1]),stamp[2]))
+
+#######################################################################
+# trace
+#
+
+def trace(item):
+ stamps = backup_list()
+ for stamp in stamps:
+ srtDbName = os.path.join(stamp[1],'srt.sqlite')
+ #print("db=%s" % srtDbName)
+
+ stamp_date = re.sub(' .*','',stamp[2])
+ stamp_day = re.sub('.*\| ','',stamp[2])
+ stamp_day = re.sub(',.*','',stamp_day)
+ stamp_text = '%s,%-9s %8s' % (stamp[0],stamp_day,stamp_date)
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+ if item.startswith('CVE-'):
+ cur.execute('SELECT * FROM orm_cve WHERE name = "%s"' % item)
+ for cve in cur:
+ status = ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)
+ print("%s] %-16s, %s, %s %s , %s %s " % (stamp_text, cve[ORM.CVE_NAME], status, cve[ORM.CVE_CVSSV3_BASESCORE],cve[ORM.CVE_CVSSV3_BASESEVERITY],cve[ORM.CVE_CVSSV2_BASESCORE],cve[ORM.CVE_CVSSV2_SEVERITY]))
+
+ conn.close()
+
+#######################################################################
+# replay_nist
+#
+
+def replay_nist():
+ stamps = backup_list()
+
+ # Read base database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ index = input("Which backup? ")
+ if not index:
+ return
+ try:
+ index = int(index)
+ except:
+ print("Not a number '%s'" % index)
+ return
+ if (index>=1) and (index<len(stamps)):
+ print("You selected base:%s " % stamps[index-1][2])
+ else:
+ print("Out of range '%d'" % index)
+ return
+
+ # Read replay database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ replay_index = input("Which backup? ")
+ if not replay_index:
+ return
+ try:
+ replay_index = int(replay_index)
+ except:
+ print("Not a number '%s'" % replay_index)
+ return
+ if (replay_index>=1) and (replay_index<len(stamps)):
+ print("You selected replay:%s " % stamps[replay_index-1][2])
+ else:
+ print("Out of range '%d'" % replay_index)
+ return
+
+ # Stop the SRTool server
+ cmnd = './bin/srt_stop.sh'
+ run_command(cmnd)
+
+ # Create restore backup
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if not os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (srtDbName,restore_db)
+ run_command(cmnd)
+
+ # Copy in the replay database
+ cmnd = 'cp %s/srt.sqlite .' % stamps[index-1][1]
+ run_command(cmnd)
+
+ # Replay the NIST data
+# cmnd = "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=%s/data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta --force --force-cache" % stamps[replay_index-1][1]
+ cmnd = "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=%s/data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta --force --force-cache" % stamps[replay_index-1][1]
+
+ run_command(cmnd)
+
+ # Restart the SRTool server
+ cmnd = './bin/srt_start.sh'
+ run_command(cmnd)
+
+#######################################################################
+# restore
+#
+
+def restore():
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (restore_db,srtDbName)
+ run_command(cmnd)
+ else:
+ print("No restore database found")
+
+#######################################################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global is_trial
+
+ parser = argparse.ArgumentParser(description='history.py: manage the history database')
+ parser.add_argument('--init-timestamps', '-I', action='store_const', const='init_timestamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--list-history', '-l', action='store_const', const='list_history', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--trace', '-t', dest='trace', help='Trace an item')
+
+ parser.add_argument('--replay-nist', '-r', action='store_const', const='replay_nist', dest='command', help='Replay NIST update')
+ parser.add_argument('--restore', '-R', action='store_const', const='restore', dest='command', help='Restore database')
+
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
+ parser.add_argument('--trial', '-T', action='store_true', dest='is_trial', help='Verbose output')
+
+ args = parser.parse_args()
+ verbose = args.verbose
+ is_trial = args.is_trial
+
+ if 'init_timestamps' == args.command:
+ init_timestamps()
+ elif 'list_history' == args.command:
+ list_history()
+ elif args.trace:
+ trace(args.trace)
+ elif 'replay_nist' == args.command:
+ replay_nist()
+ elif 'restore' == args.command:
+ restore()
+
+
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.abspath(sys.argv[0]))
+ main(sys.argv[1:])
diff --git a/bin/dev_tools/update_status.sh b/bin/dev_tools/update_status.sh
new file mode 100755
index 0000000..243626a
--- /dev/null
+++ b/bin/dev_tools/update_status.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+
+#
+# Helper routine to see if any active update commands are executing
+# in addition to showing that the background updater is running.
+#
+# Sample result:
+# $ ./update_status.sh
+# 18149 python3 /opt/srt/bin/common/srtool_update.py --cron-start
+# Update:2019-03-16 12:29:21,bin/common/srtool_common.py --score-new-cves NEW --count=100
+# Done:2019-03-16 12:29:49,bin/common/srtool_common.py --score-new-cves NEW --count=100
+#
+# An "Update" without a "Done" is a running task
+#
+
+# Test if the background updater is running
+if [ -f .srtupdate.pid ] ; then
+ pid=`cat .srtupdate.pid`
+ updater=`ps -e -o pid,cmd | grep $pid | grep -v grep | grep cron`
+else
+ echo "No updater pid file found"
+ updater=""
+fi
+if [ -z "$updater" ] ; then
+ echo "!!! WARNING: UPDATER IS NOT RUNNING !!!"
+ cat .srtupdate.task
+ exit 1
+else
+ echo "UPDATER:$updater"
+fi
+
+# Test if there is an open update in progress
+cat .srtupdate.task
+is_start=`grep "^Update" .srtupdate.task | grep -v "<cron_start>"`
+is_stop=`grep "^Done" .srtupdate.task`
+if [ -z "$is_stop" ] ; then
+ echo "!!! UPDATE JOB RUNNING !!!"
+ exit 1
+else
+ echo "UPDATE PAUSED BETWEEN JOBS."
+ exit 0
+fi
+
diff --git a/bin/mitre/datasource_2010.json b/bin/mitre/datasource_2010.json
new file mode 100755
index 0000000..547de7a
--- /dev/null
+++ b/bin/mitre/datasource_2010.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2010",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2010",
+ "cve_filter" : "CVE-2010",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2010.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2011.json b/bin/mitre/datasource_2011.json
new file mode 100755
index 0000000..2138154
--- /dev/null
+++ b/bin/mitre/datasource_2011.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2011",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2011",
+ "cve_filter" : "CVE-2011",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2011.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2012.json b/bin/mitre/datasource_2012.json
new file mode 100755
index 0000000..49f3256
--- /dev/null
+++ b/bin/mitre/datasource_2012.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2012",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2012",
+ "cve_filter" : "CVE-2012",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2012.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2013.json b/bin/mitre/datasource_2013.json
new file mode 100755
index 0000000..d18fe73
--- /dev/null
+++ b/bin/mitre/datasource_2013.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2013",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2013",
+ "cve_filter" : "CVE-2013",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2013.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2014.json b/bin/mitre/datasource_2014.json
new file mode 100755
index 0000000..fc469f9
--- /dev/null
+++ b/bin/mitre/datasource_2014.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2014",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2014",
+ "cve_filter" : "CVE-2014",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2014.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2015.json b/bin/mitre/datasource_2015.json
index 0ce89f1..e91f7bd 100755
--- a/bin/mitre/datasource_2015.json
+++ b/bin/mitre/datasource_2015.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2015.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2016.json b/bin/mitre/datasource_2016.json
index 36ca814..5fba94b 100755
--- a/bin/mitre/datasource_2016.json
+++ b/bin/mitre/datasource_2016.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2016.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2017.json b/bin/mitre/datasource_2017.json
index 2b326bf..9047fd5 100755
--- a/bin/mitre/datasource_2017.json
+++ b/bin/mitre/datasource_2017.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2017.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2018.json b/bin/mitre/datasource_2018.json
index ebb6eff..567c46b 100755
--- a/bin/mitre/datasource_2018.json
+++ b/bin/mitre/datasource_2018.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2018.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2019.json b/bin/mitre/datasource_2019.json
index 7113aa9..f106f88 100755
--- a/bin/mitre/datasource_2019.json
+++ b/bin/mitre/datasource_2019.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2019.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/srtool_mitre.py b/bin/mitre/srtool_mitre.py
index 3c6af89..3928e51 100755
--- a/bin/mitre/srtool_mitre.py
+++ b/bin/mitre/srtool_mitre.py
@@ -113,15 +113,16 @@ def get_cve_default_status(is_init,publishedDate,description):
if is_init:
# Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
#print("INIT status: %s versus %s" % (init_new_date,publishedDate))
- if not publishedDate or (publishedDate > init_new_date):
- # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
- reserved_pos = description.find('** RESERVED **')
- if (0 <= reserved_pos) and (20 > reserved_pos):
- return ORM.STATUS_NEW_RESERVED
- else:
+# if not publishedDate or (publishedDate > init_new_date):
+# # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
+# reserved_pos = description.find('** RESERVED **')
+# if (0 <= reserved_pos) and (20 > reserved_pos):
+# return ORM.STATUS_NEW_RESERVED
+# else:
+ if True:
return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+# else:
+# return ORM.STATUS_HISTORICAL
else:
return ORM.STATUS_NEW
@@ -276,6 +277,7 @@ def append_cve_database(is_init,file_xml):
cur_write = conn.cursor()
cur_ds = conn.cursor()
datasource_id = 0
+ srtool_today = datetime.today()
i = 0
for child in root:
@@ -317,12 +319,19 @@ def append_cve_database(is_init,file_xml):
# Get the default CVE status
status = get_cve_default_status(is_init,summary['Published'],summary['Description'])
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(),''))
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, srt_created, packages)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+            cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', '', summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(), datetime.now(),''))
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
cve_id = cur.lastrowid
print("MITRE:ADDED %20s\r" % cve_name)
+ # Also create CVE history entry
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from MITRE')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve_id,update_comment,srtool_today,ORM.USER_SRTOOL_NAME,) )
+
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
if not cur_ds.execute(sql, (cve_id,datasource_id)).fetchone():
@@ -405,13 +414,16 @@ def main(argv):
# setup
parser = argparse.ArgumentParser(description='srtool_mitre.py: manage Mitre CVE data')
- parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file')
+ parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file, add CVEs')
parser.add_argument('--update', '-u', action='store_const', const='update_mitre', dest='command', help='Update the Mitre source CVE file')
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
+ parser.add_argument('--download-only', action='store_const', const='download_mitre', dest='command', help='Download the Mitre source CVE file only')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
parser.add_argument('--dump', '-D', action='store_const', const='dump', dest='command', help='test dump data')
parser.add_argument('--dump2', '-2', action='store_const', const='dump2', dest='command', help='test dump data')
@@ -449,12 +461,15 @@ def main(argv):
print("ERROR: missing --url_file parameter")
exit(1)
+    # Currently no difference between the initialize and update actions
if 'init_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(True,args.cve_file)
elif 'update_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(False,args.cve_file)
+ elif 'download_mitre' == args.command:
+ init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
else:
print("Command not found")
diff --git a/bin/nist/datasource.json b/bin/nist/datasource.json
index 45210e4..de52a6b 100644
--- a/bin/nist/datasource.json
+++ b/bin/nist/datasource.json
@@ -20,9 +20,10 @@
"source" : "nist",
"name" : "NIST",
"description" : "NIST Modified Data",
+ "attributes" : "PREVIEW-SOURCE",
"cve_filter" : "",
"init" : "",
- "update" : "bin/nist/srtool_nist.py -i --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-modified.json %command%",
"update_frequency" : "2",
"_comment_" : "Update at 7:00 am",
diff --git a/bin/nist/datasource_2002.json b/bin/nist/datasource_2002.json
new file mode 100755
index 0000000..f4e62d3
--- /dev/null
+++ b/bin/nist/datasource_2002.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2002",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2002",
+ "cve_filter" : "CVE-2002",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2002.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2003.json b/bin/nist/datasource_2003.json
new file mode 100755
index 0000000..8bcf620
--- /dev/null
+++ b/bin/nist/datasource_2003.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2003",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2003",
+ "cve_filter" : "CVE-2003",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2003.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2004.json b/bin/nist/datasource_2004.json
new file mode 100755
index 0000000..3839e12
--- /dev/null
+++ b/bin/nist/datasource_2004.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2004",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2004",
+ "cve_filter" : "CVE-2004",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2004.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2005.json b/bin/nist/datasource_2005.json
new file mode 100755
index 0000000..a3acfaa
--- /dev/null
+++ b/bin/nist/datasource_2005.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2005",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2005",
+ "cve_filter" : "CVE-2005",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2005.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2006.json b/bin/nist/datasource_2006.json
new file mode 100755
index 0000000..6f3c508
--- /dev/null
+++ b/bin/nist/datasource_2006.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2006",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2006",
+ "cve_filter" : "CVE-2006",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2006.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2007.json b/bin/nist/datasource_2007.json
new file mode 100755
index 0000000..5ea0094
--- /dev/null
+++ b/bin/nist/datasource_2007.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2007",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2007",
+ "cve_filter" : "CVE-2007",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2007.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2008.json b/bin/nist/datasource_2008.json
new file mode 100755
index 0000000..891d304
--- /dev/null
+++ b/bin/nist/datasource_2008.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2008",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2008",
+ "cve_filter" : "CVE-2008",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2008.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2009.json b/bin/nist/datasource_2009.json
new file mode 100755
index 0000000..2bebc34
--- /dev/null
+++ b/bin/nist/datasource_2009.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2009",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2009",
+ "cve_filter" : "CVE-2009",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2009.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2010.json b/bin/nist/datasource_2010.json
new file mode 100755
index 0000000..21030e4
--- /dev/null
+++ b/bin/nist/datasource_2010.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2010",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2010",
+ "cve_filter" : "CVE-2010",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2010.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2011.json b/bin/nist/datasource_2011.json
new file mode 100755
index 0000000..5b0bb05
--- /dev/null
+++ b/bin/nist/datasource_2011.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2011",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2011",
+ "cve_filter" : "CVE-2011",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2011.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2012.json b/bin/nist/datasource_2012.json
new file mode 100755
index 0000000..69d40ba
--- /dev/null
+++ b/bin/nist/datasource_2012.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2012",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2012",
+ "cve_filter" : "CVE-2012",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2012.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2013.json b/bin/nist/datasource_2013.json
new file mode 100755
index 0000000..2f2d313
--- /dev/null
+++ b/bin/nist/datasource_2013.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2013",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2013",
+ "cve_filter" : "CVE-2013",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2013.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2014.json b/bin/nist/datasource_2014.json
new file mode 100755
index 0000000..619197c
--- /dev/null
+++ b/bin/nist/datasource_2014.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2014",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2014",
+ "cve_filter" : "CVE-2014",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2014.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2015.json b/bin/nist/datasource_2015.json
index ccca2f3..7600aac 100755
--- a/bin/nist/datasource_2015.json
+++ b/bin/nist/datasource_2015.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2015.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2015.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2016.json b/bin/nist/datasource_2016.json
index 9c87ef9..55244a2 100755
--- a/bin/nist/datasource_2016.json
+++ b/bin/nist/datasource_2016.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2016.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2016.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2017.json b/bin/nist/datasource_2017.json
index 40695ef..2c68ed8 100755
--- a/bin/nist/datasource_2017.json
+++ b/bin/nist/datasource_2017.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2017.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2017.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2018.json b/bin/nist/datasource_2018.json
index cf87ca2..03d0961 100755
--- a/bin/nist/datasource_2018.json
+++ b/bin/nist/datasource_2018.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2018.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2018.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2019.json b/bin/nist/datasource_2019.json
index f331552..269f77b 100755
--- a/bin/nist/datasource_2019.json
+++ b/bin/nist/datasource_2019.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2019.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2019.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2020.json b/bin/nist/datasource_2020.json
new file mode 100755
index 0000000..e4bb63d
--- /dev/null
+++ b/bin/nist/datasource_2020.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2020",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2020",
+ "cve_filter" : "CVE-2020",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2020.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index 3711614..c7a61dc 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -21,8 +21,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
### Usage Examples (run from top level directory)
-# Updating a specific NIST feed: ./bin/srtool.py -u "NIST JSON Data 2017"
-# Updating with the NIST incremental feed: ./bin/srtool.py -U
+# Updating a specific NIST feed: ./bin/nist/srtool_nist.py -u "NIST JSON Data 2017"
+# Updating with the NIST incremental feed: ./bin/nist/srtool_nist.py -U
import os
import sys
@@ -33,6 +33,7 @@ import json
from datetime import datetime, date, timedelta
import pytz
from urllib.request import urlopen, URLError
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -44,15 +45,30 @@ lookupTable = []
cveIndex = {}
db_change = False
+count_read = 0
+count_create = 0
+count_update = 0
+
+ACTION_INIT = 'Initialize'
+ACTION_UPDATE = 'Update'
+ACTION_INCREMENT = 'Increment'
+ACTION_DOWNLOAD = 'Download'
+ACTION_UPDATE_CVE = 'Update_Cve'
+
srtDbName = 'srt.sqlite'
srtErrorLog = 'srt_errors.txt'
verbose = False
-
-nist_cve_url_base = 'https://static.nvd.nist.gov/feeds/json/cve/1.0'
-nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.0'
+force_update = False
+force_cache = False
+update_skip_history = False
+cmd_skip = 0
+cmd_count = 0
+
+nist_cve_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
+nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
nist_cache_dir = 'data/cache/nist'
-#################################
+#######################################################################
# Helper methods
#
@@ -78,6 +94,17 @@ def srt_error_log(msg):
f1.write("|" + msg + "|\n" )
f1.close()
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
def get_name_sort(cve_name):
try:
a = cve_name.split('-')
@@ -86,10 +113,159 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
-# Newly discovered or updated CVEs default to NEW for triage
-# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+#######################################################################
+# CVE_ItemToSummary: Translate a CVE_Item JSON node to a dictionary
+
def do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key):
    """Serialize the CPE entries under cpe_or_node[key] into the flat
    'vulnerable,cpe23Uri,cpeMatchString,versionEndIncluding|' encoding.

    Missing optional fields ('cpeMatchString', 'versionEndIncluding')
    are emitted as empty strings.  'name' and 'and_enum' are accepted
    for signature parity with the caller but are not used here.
    """
    segments = []
    for cpe in cpe_or_node[key]:
        match_string = cpe.get('cpeMatchString', '')
        version_end = cpe.get('versionEndIncluding', '')
        segments.append('%s,%s,%s,%s|' % (cpe['vulnerable'], cpe['cpe23Uri'], match_string, version_end))
    return ''.join(segments)
+
def nist_scan_configuration_or(cpe_or_node, name, and_enum):
    """Wrap an OR-node of a NIST configuration tree in '[or]|...|[/or]|'.

    Scans both the legacy 'cpe' key and the current 'cpe_match' key,
    delegating entry serialization to do_nist_scan_configuration_or().
    When verbose and neither key is present, a warning is printed and
    written to the error log.
    """
    segments = ['[or]|']
    matched = 0
    for key, note in (('cpe', "NOTE:NIST_SCAN_CONFIGURATION_OR:cpe"),
                      ('cpe_match', "NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")):
        if key in cpe_or_node:
            if verbose:
                print(note)
            segments.append(do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key))
            matched += 1
    segments.append('[/or]|')

    if verbose and not matched:
        warning = "WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s" % cpe_or_node
        print(warning)
        srt_error_log(warning)
    return ''.join(segments)
+
def fixscore(score):
    """Normalize a CVSS score to a sortable two-decimal string.

    Falsy input (None, '', 0) yields the empty string.
    """
    return '%02.2f' % float(score) if score else ''
+
def CVE_ItemToSummary(CVE_Item,header_only=False):
    """Flatten a NIST NVD 1.1 'CVE_Item' JSON node into a summary dict.

    Every field is assured to be defined (at least as an empty string)
    so downstream SQL inserts and key scans never hit a missing key.
    When 'header_only' is True the CPE and reference serialization is
    skipped and 'cpe_list'/'ref_list' are returned empty (the CVE table
    only needs the header; CVE details need the rest).
    """
    summary = {}

    # Header info
    summary['name'] = CVE_Item['cve']['CVE_data_meta']['ID']
    summary['cve_data_type'] = CVE_Item['cve']['data_type']
    summary['cve_data_format'] = CVE_Item['cve']['data_format']
    summary['cve_data_version'] = CVE_Item['cve']['data_version']

    summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
    # Feed dates look like '2020-01-02T14:15Z'; keep only the date part
    summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
    summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
    summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % summary['name']
    summary['url_title'] = 'NIST Link'

    # cvssV3 (optional in the feed; default every field to '')
    is_v3 = ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact'])
    baseMetricV3 = CVE_Item['impact']['baseMetricV3'] if is_v3 else ''
    summary['cvssV3_baseScore'] = baseMetricV3['cvssV3']['baseScore'] if is_v3 else ''
    summary['cvssV3_baseSeverity'] = baseMetricV3['cvssV3']['baseSeverity'] if is_v3 else ''
    summary['cvssV3_vectorString'] = baseMetricV3['cvssV3']['vectorString'] if is_v3 else ''
    summary['cvssV3_exploitabilityScore'] = baseMetricV3['exploitabilityScore'] if is_v3 else ''
    summary['cvssV3_impactScore'] = baseMetricV3['impactScore'] if is_v3 else ''
    summary['cvssV3_attackVector'] = baseMetricV3['cvssV3']['attackVector'] if is_v3 else ''
    summary['cvssV3_attackComplexity'] = baseMetricV3['cvssV3']['attackComplexity'] if is_v3 else ''
    summary['cvssV3_privilegesRequired'] = baseMetricV3['cvssV3']['privilegesRequired'] if is_v3 else ''
    summary['cvssV3_userInteraction'] = baseMetricV3['cvssV3']['userInteraction'] if is_v3 else ''
    summary['cvssV3_scope'] = baseMetricV3['cvssV3']['scope'] if is_v3 else ''
    summary['cvssV3_confidentialityImpact'] = baseMetricV3['cvssV3']['confidentialityImpact'] if is_v3 else ''
    summary['cvssV3_integrityImpact'] = baseMetricV3['cvssV3']['integrityImpact'] if is_v3 else ''
    summary['cvssV3_availabilityImpact'] = baseMetricV3['cvssV3']['availabilityImpact'] if is_v3 else ''

    # cvssV2 (optional in the feed)
    is_v2 = ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact'])
    baseMetricV2 = CVE_Item['impact']['baseMetricV2'] if is_v2 else ''
    summary['cvssV2_baseScore'] = baseMetricV2['cvssV2']['baseScore'] if is_v2 else ''
    summary['cvssV2_severity'] = baseMetricV2['severity'] if is_v2 else ''
    summary['cvssV2_vectorString'] = baseMetricV2['cvssV2']['vectorString'] if is_v2 else ''
    summary['cvssV2_exploitabilityScore'] = baseMetricV2['exploitabilityScore'] if is_v2 else ''
    # BUGFIX: previously copied 'exploitabilityScore' into the impact field
    summary['cvssV2_impactScore'] = baseMetricV2['impactScore'] if is_v2 else ''
    summary['cvssV2_accessVector'] = baseMetricV2['cvssV2']['accessVector'] if is_v2 else ''
    summary['cvssV2_accessComplexity'] = baseMetricV2['cvssV2']['accessComplexity'] if is_v2 else ''
    summary['cvssV2_authentication'] = baseMetricV2['cvssV2']['authentication'] if is_v2 else ''
    summary['cvssV2_confidentialityImpact'] = baseMetricV2['cvssV2']['confidentialityImpact'] if is_v2 else ''
    summary['cvssV2_integrityImpact'] = baseMetricV2['cvssV2']['integrityImpact'] if is_v2 else ''

    # SRTool specific meta data
    summary['priority'] = '0'
    summary['status'] = '0'
    summary['comments'] = ''
    summary['comments_private'] = ''
    summary['tags'] = ''
    summary['public'] = '1' # Always true since NIST is public source
    summary['recommend'] = '0'
    summary['recommend_list'] = ''
    summary['publish_state'] = ORM.PUBLISH_UNPUBLISHED
    summary['publish_date'] = ''
    summary['acknowledge_date'] = ''
    summary['packages'] = ''

    # Fix score to sortable string value (use the shared helper rather
    # than duplicating its format inline; '' passes through unchanged)
    summary['cvssV3_baseScore'] = fixscore(summary['cvssV3_baseScore'])
    summary['cvssV2_baseScore'] = fixscore(summary['cvssV2_baseScore'])

    # The CVE table only needs the header, CVE details needs the rest
    if header_only:
        summary['cpe_list'] = ''
        summary['ref_list'] = ''
        return summary

    configurations = CVE_Item['configurations']
    is_first_and = True
    summary['cpe_list'] = ''
    for i, config in enumerate(configurations['nodes']):
        summary['cpe_list'] += '[config]|'
        summary['cpe_list'] += '[and]|'
        if "AND" == config['operator']:
            # create AND record
            # NOTE(review): is_first_and is never cleared (the clearing
            # line is commented out), so this branch never emits the
            # extra '[/and]|[and]|' pair — preserved as-is, confirm intent
            if not is_first_and:
                summary['cpe_list'] += '[/and]|'
                summary['cpe_list'] += '[and]|'
            #is_first_and = False
            if 'children' in config:
                for j, cpe_or_node in enumerate(config['children']):
                    if "OR" == cpe_or_node['operator']:
                        summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, summary['name'], j)
                    else:
                        print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
        elif "OR" == config['operator']:
            summary['cpe_list'] += nist_scan_configuration_or(config, summary['name'], 0)
        else:
            print("ERROR CONFIGURE:OP?:%s" % config['operator'])
        summary['cpe_list'] += '[/and]|'
        summary['cpe_list'] += '[/config]|'

    # Tab-separated reference triples, '|'-joined: url, tags, refsource
    summary['ref_list'] = ''
    for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
        summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'])

    return summary
+
+#######################################################################
+# get_cve_default_status: bootstrap initial CVE states
+# Newly discovered or updated CVEs default to NEW for triage
+# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+
init_new_date = None
-def get_cve_default_status(is_init,publishedDate):
+def get_cve_default_status(action,publishedDate):
global init_new_date
if None == init_new_date:
@@ -108,31 +284,387 @@ def get_cve_default_status(is_init,publishedDate):
#print("\nPreset new data = %s" % init_new_date.strftime("%Y-%m-%d"))
init_new_date = init_new_date.strftime("%Y-%m-%d")
- if is_init:
+ if ACTION_INIT == action:
# Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
#print("INIT status: %s > %s" % (publishedDate, init_new_date))
- if not publishedDate or (publishedDate > init_new_date):
+# if not publishedDate or (publishedDate > init_new_date):
+ if True:
return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+# else:
+# return ORM.STATUS_HISTORICAL
else:
return ORM.STATUS_NEW
+#######################################################################
+# cwe and cve2cwe
+#
+# Generates and executes appropriate SQLite query for a new CWE
+# returns CWE_ID
+
### NOTE: the INSERT path leaves the commit to the caller;
### the UPDATE path commits immediately.
def sql_cwe_query(conn, value):
    """Look up a CWE by name, creating the record when absent.

    New CWEs are inserted with vulnerable_count=1 (no commit); existing
    ones get their vulnerable_count bumped (committed).  Returns the
    CWE record's primary key either way.
    """
    ID_COLUMN = 0
    VULNERABLE_COUNT_COLUMN = 6
    cursor = conn.cursor()
    existing = cursor.execute('''SELECT * FROM orm_cwetable WHERE name=?''', (value,)).fetchone()
    if existing is None:
        cursor.execute(
            '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)''',
            (value,))
        new_id = cursor.lastrowid
        cursor.close()
        return new_id
    cursor.execute(
        ''' UPDATE orm_cwetable
            SET vulnerable_count = ?
            WHERE id = ?''',
        (existing[VULNERABLE_COUNT_COLUMN] + 1, existing[ID_COLUMN]))
    conn.commit()
    cursor.close()
    return existing[ID_COLUMN]
+
# Ensure a CVE-to-CWE relation row exists.
# NOTE: commits immediately when a new relation is inserted;
# a no-op (relation already present) does not commit.
def sql_cve2cwe_query(conn, cve_id, cwe_id):
    """Insert the (cve_id, cwe_id) link into orm_cvetocwe if missing."""
    cursor = conn.cursor()
    link = cursor.execute(
        '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?''',
        (cve_id, cwe_id)).fetchone()
    if link is None:
        cursor.execute('''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)''', (cve_id, cwe_id))
        conn.commit()
    cursor.close()
+
+#######################################################################
+#
+# Generates and executes appropriate SQLite query for CVE depending on situation
+# new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
+# returns (CVE_ID, BOOL) tuple, True if insert or update executed
+#
+
+### THIS DOES NOT CALL CONNECTION.COMMIT()
def sql_cve_query(action, conn, summary, log):
    """Insert or update the orm_cve record described by 'summary'.

    action:  one of the ACTION_* constants; passed to
             get_cve_default_status() when creating a new record
    conn:    open SQLite connection — this function does NOT commit;
             the caller batches commits
    summary: dict produced by CVE_ItemToSummary()
    log:     open log file, or None to suppress per-record logging

    Returns (cve_id, is_change): the record's primary key and whether
    an INSERT or UPDATE was actually executed.  Three cases:
    new CVE -> INSERT; modified CVE (or force_update) -> UPDATE;
    otherwise no change and the record is skipped.
    """
    global count_create
    global count_update

    is_change = False
    cur = conn.cursor()
    sql = '''SELECT * FROM orm_cve WHERE name=?'''
    cve_current = cur.execute(sql, (summary['name'],)).fetchone()
    cve_id = -1
    srtool_today = datetime.today()
    if cve_current is None:
        # Case 1: brand-new CVE record
        count_create += 1

        # Get the default CVE status
        summary['status'] = get_cve_default_status(action,summary['publish_date'])

        # Column list and value tuple are kept in parallel order; the
        # INSERT statement is generated from them below
        sql_elements = [
            'name',
            'name_sort',
            'priority',
            'status',
            'comments',
            'comments_private',
            'tags',
            'cve_data_type',
            'cve_data_format',
            'cve_data_version',
            'public',
            'publish_state',
            'publish_date',
            'acknowledge_date',
            'description',
            'publishedDate',
            'lastModifiedDate',
            'recommend',
            'recommend_list',
            'cvssV3_baseScore',
            'cvssV3_baseSeverity',
            'cvssV2_baseScore',
            'cvssV2_severity',
            'packages',
            'srt_updated',
            'srt_created',
            ]
        sql_qmarks = []
        for i in range(len(sql_elements)):
            sql_qmarks.append('?')
        sql_values = (
            summary['name'],
            get_name_sort(summary['name']),
            summary['priority'],
            summary['status'],
            summary['comments'],
            summary['comments_private'],
            summary['tags'],
            summary['cve_data_type'],
            summary['cve_data_format'],
            summary['cve_data_version'],
            summary['public'],
            summary['publish_state'],
            summary['publish_date'],
            summary['acknowledge_date'],
            summary['description'],
            summary['publishedDate'],
            summary['lastModifiedDate'],
            summary['recommend'],
            summary['recommend_list'],
            summary['cvssV3_baseScore'],
            summary['cvssV3_baseSeverity'],
            summary['cvssV2_baseScore'],
            summary['cvssV2_severity'],
            summary['packages'],
            srtool_today,
            srtool_today
            )

        #print('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
        cur.execute('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)

        is_change = True
        cve_id = cur.lastrowid
        if log: log.write("\tINSERTED '%s'\n" % summary['name'])

        # Also create CVE history entry
        update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from NIST')
        sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
        cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )

    elif (cve_current[ORM.CVE_LASTMODIFIEDDATE] < summary['lastModifiedDate']) or force_update:
        # Case 2: upstream record is newer (string date compare), or a
        # forced refresh was requested
        count_update += 1

        cve_id = cve_current[ORM.CVE_ID]

        # If CVE was 'reserved', promote to "new'
        if cve_current[ORM.CVE_STATUS] in (ORM.STATUS_NEW_RESERVED,):
            summary['status'] = ORM.STATUS_NEW
        else:
            summary['status'] = cve_current[ORM.CVE_STATUS]

        # If CVE is "new', reset score date so that it will be rescanned
        if summary['status'] == ORM.STATUS_NEW:
            summary['score_date'] = None
        else:
            summary['score_date'] = cve_current[ORM.CVE_SCORE_DATE]

        ### TO-DO
        ### Capture CPE changes
        ###

        # Update the CVE record; srt_updated is left untouched when the
        # caller asked to skip history tracking
        srt_updated = srtool_today if not update_skip_history else cve_current[ORM.CVE_SRT_UPDATED]
        sql = ''' UPDATE orm_cve
                  SET recommend = ?,
                      recommend_list = ?,
                      cve_data_type = ?,
                      cve_data_format = ?,
                      cve_data_version = ?,
                      status = ?,
                      description = ?,
                      publishedDate = ?,
                      lastModifiedDate = ?,
                      cvssV3_baseScore = ?,
                      cvssV3_baseSeverity = ?,
                      cvssV2_baseScore = ?,
                      cvssV2_severity = ?,
                      score_date = ?,
                      srt_updated = ?
                  WHERE id = ?'''
        sql_values = (
            summary['recommend'],
            summary['recommend_list'],
            summary['cve_data_type'],
            summary['cve_data_format'],
            summary['cve_data_version'],
            summary['status'],
            summary['description'],
            summary['publishedDate'],
            summary['lastModifiedDate'],
            summary['cvssV3_baseScore'],
            summary['cvssV3_baseSeverity'],
            summary['cvssV2_baseScore'],
            summary['cvssV2_severity'],
            summary['score_date'],
            srt_updated,
            cve_id)
        cur.execute(sql, sql_values)
        is_change = True

        if log: log.write("\tUPDATED '%s'\n" % summary['name'])
        #print('UPDATED: %s (%s)' % (sql,sql_values))

        # Prepare the history comment describing what actually changed
        if not update_skip_history:
            history_update = []
            if (cve_current[ORM.CVE_CVSSV3_BASESCORE].strip() != summary['cvssV3_baseScore'].strip() ) or \
               (cve_current[ORM.CVE_CVSSV3_BASESEVERITY].strip() != summary['cvssV3_baseSeverity'].strip()):
                history_update.append(ORM.UPDATE_SEVERITY_V3 % (
                    "%s %s" % (cve_current[ORM.CVE_CVSSV3_BASESCORE],cve_current[ORM.CVE_CVSSV3_BASESEVERITY]),
                    "%s %s" % (summary['cvssV3_baseScore'],summary['cvssV3_baseSeverity'])))
            if (cve_current[ORM.CVE_CVSSV2_BASESCORE].strip() != summary['cvssV2_baseScore'].strip()) or \
               (cve_current[ORM.CVE_CVSSV2_SEVERITY].strip() != summary['cvssV2_severity'].strip() ):
                history_update.append(ORM.UPDATE_SEVERITY_V2 % (
                    "%s %s" % (cve_current[ORM.CVE_CVSSV2_BASESCORE],cve_current[ORM.CVE_CVSSV2_SEVERITY]),
                    "%s %s" % (summary['cvssV2_baseScore'],summary['cvssV2_severity'])))
            if cve_current[ORM.CVE_DESCRIPTION].strip() != summary['description'].strip():
                history_update.append(ORM.UPDATE_DESCRIPTION)
            if cve_current[ORM.CVE_LASTMODIFIEDDATE] != summary['lastModifiedDate']:
                history_update.append(ORM.UPDATE_LASTMODIFIEDDATE % (cve_current[ORM.CVE_LASTMODIFIEDDATE],summary['lastModifiedDate']))
            if history_update:
                # Add update to history
                update_comment = "%s%s" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_CVE,';'.join(history_update))
                sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
                cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )

        ### TO-DO
        ### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
        ###

    else:
        # Case 3: record exists and is current — nothing to do
        is_change = False
        if log: log.write("\tSKIPPED '%s'\n" % summary['name'])
    cur.close()
    return (cve_id, is_change)
+
+#######################################################################
+# nist_json: parses JSON, creates CVE object, and updates database as necessary. Commits to database on success
+#
+# Will EITHER create new record in orm_cve if cve does not exist OR overwrite
+# every field if existing cve out-of-date OR ignore cve
+# Requires json to be formatted with NIST Json schema:
+# https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema
+
def nist_json(action, summary_json_url, datasource, datasource_file, log, date_new):
    """Parse a NIST JSON feed and create/update CVE records.

    Will EITHER create a new record in orm_cve if the CVE does not
    exist OR overwrite its fields if the existing CVE is out-of-date
    OR leave it untouched.  Commits in batches of 200 and once at the
    end.  Requires the NIST JSON schema:
      https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema

    action:          ACTION_* constant; ACTION_DOWNLOAD only refreshes
                     the local feed file and returns
    summary_json_url: URL of the gzipped JSON feed
    datasource:      orm_datasource row for this feed
    datasource_file: local cache path for the decompressed feed
    log:             open log file for progress records
    date_new:        upstream modification time (UTC) used to decide
                     whether the cached file is stale

    Raises Exception when any CVE record fails to import.
    """
    import gzip
    global count_read

    conn = sqlite3.connect(srtDbName)
    cur = conn.cursor()

    # If this is a volatile preview source:
    #  (a) Fetch the existing CveSource matches into a list
    #  (b) Remove found matches from that list
    #  (c) Delete remaining obsolete CveSource entries
    preview_dict = {}
    if "PREVIEW-SOURCE" in datasource[ORM.DATASOURCE_ATTRIBUTES]:
        sql = '''SELECT * FROM orm_cvesource WHERE datasource_id=? '''
        for d2c in cur.execute(sql, (datasource[ORM.DATASOURCE_ID],)):
            preview_dict[d2c[ORM.CVESOURCE_CVE_ID]] = d2c[ORM.CVESOURCE_ID]

    # If we have already cached a current version of the NIST file, read
    # from it directly.  The value 'date_new' is in UTC, so convert the
    # fetched file date for the comparison.
    if (not force_cache) and ((not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True))):
        # Fetch and/or refresh upstream CVE file
        response = urlopen(summary_json_url)
        dct = json.loads(gzip.decompress(response.read()).decode('utf-8')) #uncompress and decode json.gz

        # Save datasource feed to "data"
        # (fix: 'with' closes the handle, which was previously leaked)
        with open(datasource_file, 'w+') as datasource_file_fd:
            datasource_file_fd.write(json.dumps(dct))
    else:
        # Use cached CVE file
        with open(datasource_file) as json_data:
            dct = json.load(json_data)

    # Download the upstream CVE source file only
    if ACTION_DOWNLOAD == action:
        # Fix: release database resources before the early return
        cur.close()
        conn.close()
        return

    CVE_Items = dct['CVE_Items']
    total = len(CVE_Items)

    cache_path = os.path.join(srtool_basepath, nist_cache_dir)
    # Begin parsing each cve in the JSON data
    for i, CVE_Item in enumerate(CVE_Items):
        count_read += 1

        # Development support
        if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
            break

        try:
            # Translate a CVE_Item JSON node
            summary = CVE_ItemToSummary(CVE_Item)

            # Indicate progress
            print('[%4d]%30s\r' % ((i * 100)/ total, summary['name']), end='', flush=True)

            # If cve exists in cache, delete it
            cve_path = os.path.join(cache_path, '%s.json' % summary['name'])
            if (os.path.isfile(cve_path)):
                os.remove(cve_path)

            # Check if cve object need to be uploaded to database (cases: new cve, modified cve, or no changes)
            # if true, apply changes. Else ignore and continue
            cve_id, is_change = sql_cve_query(action, conn, summary, log)

            # Remove this found CVE from the preview check list, if present
            preview_dict.pop(cve_id,None)

            # If CVE updates, must check and update associated records (CWEs, references, and CVE2CWE).
            # sql_cwe_query and sql_cve2cwe_query require a valid CVE record primary key,
            # therefore they must always run after sql_cve_query.
            if is_change:
                problem_list = CVE_Item['cve']['problemtype']['problemtype_data']
                for problem_Item in problem_list:
                    description_list = problem_Item['description']
                    for description_Item in description_list:
                        value = description_Item['value']
                        cwe_id = sql_cwe_query(conn, value)
                        sql_cve2cwe_query(conn, cve_id, cwe_id)

                # Add this data source to the CVE
                sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
                exists = cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID])).fetchone()
                if exists is None:
                    sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
                    cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID]))

            # Safety commit as we go
            if 199 == (i % 200):
                conn.commit()
                print('')

        except Exception as e:
            print(traceback.format_exc())
            print("UPDATE FAILED")
            cur.close()
            conn.close()
            raise Exception("Failed to import CVEs %s: %s" % (datasource_file, e))
    print()
    log.write("total number of CVEs checked: %s\n" % total)

    # Now delete any un-matched obsolete CveSource entries
    for old_cve_id in preview_dict.keys():
        sql = 'DELETE FROM orm_cvesource WHERE id=?'
        cur.execute(sql, (preview_dict[old_cve_id],))

    conn.commit()
    cur.close()
    conn.close()
+
+#######################################################################
# check for updates and apply if any
#
# Change orm_datasource schema to make LastModifiedDate a datetime object
# datetime and urllib imports may be in an inappropriate location (top of file currently)
+#
+# Gets CVE-Modified feed, determines if we are out of date, and applies updates if true
+# tracks history in update_log.txt
-#gets CVE-Modified feed, determines if we are out of date, and applies updates if true
-#tracks history in update_log.txt
-#incremental argument is boolean that idicates if bulk updating or incremental updating.
-def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, incremental, force_update):
+def update_nist(action,datasource_description, url_file, url_meta, cve_file):
nist_cve_url = '%s/%s' % (nist_cve_url_base,url_file)
nist_meta_url = '%s/%s' % (nist_meta_url_base,url_meta)
- nist_file = os.path.join(srtool_basepath,cve_file)
+ nist_file = os.path.join(srtool_basepath,cve_file) if not cve_file.startswith('/') else cve_file
#update log (1=Monday, 7= Sunday)
today = datetime.today()
@@ -170,14 +702,14 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
else:
date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
- log.write("BEGINNING NIST %s\n" % ('INITS' if is_init else 'UPDATES'))
+ log.write("BEGINNING NIST %s\n" % action)
#determine if we are out of date and apply updates if true
if (date_new > date_past) or force_update:
pre_update_time = datetime.now() #used for logging purposes only
- nist_json(is_init,nist_cve_url, ds[ORM.DATASOURCE_ID], nist_file, log, date_new, incremental)
- log.write("began %s: %s\n" % ( 'init' if is_init else 'updates', str(pre_update_time) ))
- log.write("finished %s: %s\n" % ( 'init' if is_init else 'updates', str(datetime.now()) ))
+ nist_json(action,nist_cve_url, ds, nist_file, log, date_new)
+ log.write("began %s: %s\n" % ( action, str(pre_update_time) ))
+ log.write("finished %s: %s\n" % ( action, str(datetime.now()) ))
log.write("=============================================================================\n")
log.write("\n")
@@ -186,11 +718,11 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
c.execute(sql, (str(date_new),))
conn.commit()
else:
- log.write("No %s needed\n" % ('init' if is_init else 'update'))
+ log.write("No %s needed\n" % action)
log.write("Checked: %s\n" % datetime.now())
log.write("=============================================================================\n")
log.write("\n")
- print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
+ print("NO %s NEEDED" % action)
# Reset datasource's lastModifiedDate as today
sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
@@ -208,7 +740,7 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
f.close()
except URLError as e:
- raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
+ raise Exception("Failed to open %s: %s" % (nist_meta_url, e))
log.close()
c.close()
conn.close()
@@ -223,269 +755,8 @@ def file_date(filename,utc=False):
file_datetime = file_datetime+(utc_now-now)
return file_datetime
-#parses JSON, creates CVE object, and updates database as necessary. Commits to database on success
-#will EITHER create new record in orm_cve if cve does not exist OR overwrite every field if existing cve out-of-date OR ignore cve
-#requires json to be formatted with NIST Json schema (https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema)
-def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, date_new, incremental):
- import traceback
- import gzip
-
- # If we have already cached a current version of the NIST file, read from it directly
-
- # The value 'date_new' is in UTC, so convert the fetched file date
- if (not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True)):
- # Fetch and/or refresh upstream CVE file
- response = urlopen(summary_json_url)
- dct = json.loads(gzip.decompress(response.read()).decode('utf-8')) #uncompress and decode json.gz
-
- #save datasource feed to "data"
- datasource_file_fd = open(datasource_file, 'w+')
- datasource_file_fd.write(json.dumps(dct))
- else:
- # Use cached CVE file
- with open(datasource_file) as json_data:
- dct = json.load(json_data)
-
- conn = sqlite3.connect(srtDbName)
- c = conn.cursor()
-
- CVE_Items = dct['CVE_Items']
- total = len(CVE_Items)
- v = Cve()
-
- cache_path = os.path.join(srtool_basepath, nist_cache_dir)
- #begin parsing each cve in the JSON data
- for i, CVE_Item in enumerate(CVE_Items):
- # Development support
- if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
- break
-
- references = CVE_Item['cve']['references']['reference_data']
- CVE_data_meta = CVE_Item['cve']['CVE_data_meta']['ID']
-
- #if cve exists in cache, delete it
- cve_path = os.path.join(cache_path, CVE_data_meta + ".json")
- if (os.path.isfile(cve_path)):
- os.remove(cve_path)
-
- #print('.', end='', flush=True)
- print('[%4d]%30s\r' % ((i * 100)/ total, CVE_data_meta), end='', flush=True)
- try:
- v.name = CVE_data_meta
-
- v.cve_data_type = CVE_Item['cve']['data_type']
- v.cve_data_format = CVE_Item['cve']['data_format']
- v.cve_data_version = CVE_Item['cve']['data_version']
-
- v.description = CVE_Item['cve']['description']['description_data'][0]['value']
- v.publishedDate = re.sub('T.*','',CVE_Item['publishedDate'])
- v.lastModifiedDate = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- v.public = True # Always true since NIST is public source
-
- # We do not know yet if this has been published to the SRTool management
- v.publish_state = ORM.PUBLISH_UNPUBLISHED
- v.publish_date = ''
-
- if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
- baseMetricV3 = CVE_Item['impact']['baseMetricV3']
- v.cvssV3_baseScore = baseMetricV3['cvssV3']['baseScore']
- v.cvssV3_baseSeverity = baseMetricV3['cvssV3']['baseSeverity']
- if ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact']):
- baseMetricV2 = CVE_Item['impact']['baseMetricV2']
- v.cvssV2_baseScore = baseMetricV2['cvssV2']['baseScore']
-
- #check if cve object `v` need to be uploaded to database (cases: new cve, modified cve, or no changes)
- #if true, apply changes. Else ignore and continue
- v_id, is_change = sql_cve_query(conn, v, is_init,log)
-
-
- #if incremental update and CVE changed, save json copy of the cve to cache
- if incremental and is_change:
- file = open(cve_path, 'w+')
- file.write(json.dumps(CVE_Item))
-
- #if CVE `v` updates, must check and update associated records (CWEs, references, and CVE2CWE)
- #sql_cwe_query, and sql_cve2cwe_query require valid CVE record primary key at some point during their execution, therefore must always be after call to sql_cve_query
- if is_change:
- problem_list = CVE_Item['cve']['problemtype']['problemtype_data']
- for problem_Item in problem_list:
- description_list = problem_Item['description']
- for description_Item in description_list:
- value = description_Item['value']
- cwe_id = sql_cwe_query(conn, value)
- sql_cve2cwe_query(conn, v_id, cwe_id)
-
- # Add this data source to the CVE
- sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- exists = c.execute(sql, (v_id,datasource_id)).fetchone()
- if exists is None:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- c.execute(sql, (v_id,datasource_id))
-
- except Exception as e:
- print(traceback.format_exc())
- print("UPDATE FAILED")
- c.close()
- conn.close()
- return
- print()
- log.write("total number of CVEs checked: %s\n" % total)
- conn.commit()
- c.close()
- conn.close()
-
-#################################
-# cve class
-#
-class Cve():
- # index - primary key
- id = -1
-
- name = ''
-
- priority = 0
- status = ORM.STATUS_HISTORICAL
-
- comments = ''
- comments_private = ''
-
- cve_data_type = ''
- cve_data_format = ''
- cve_data_version = ''
-
- public = False
- publish_state = ORM.PUBLISH_UNPUBLISHED
- publish_date = ''
-
- description = ''
- publishedDate = ''
- lastModifiedDate = ''
- problemtype = ''
-
- # cpe_list = ''
-
- cvssV3_baseScore = ''
- cvssV3_baseSeverity = ''
- # cvssV3_vectorString = ''
- # cvssV3_exploitabilityScore = ''
- # cvssV3_impactScore = ''
- # cvssV3_attackVector = ''
- # cvssV3_attackComplexity = ''
- # cvssV3_privilegesRequired = ''
- # cvssV3_userInteraction = ''
- # cvssV3_scope = ''
- # cvssV3_confidentialityImpact = ''
- # cvssV3_integrityImpact = ''
- # cvssV3_availabilityImpact = ''
-
- cvssV2_baseScore = ''
- cvssV2_severity = ''
- # cvssV2_vectorString = ''
- # cvssV2_exploitabilityScore = ''
- # cvssV2_impactScore = ''
- # cvssV2_accessVector = ''
- # cvssV2_accessComplexity = ''
- # cvssV2_authentication = ''
- # cvssV2_confidentialityImpact = ''
- # cvssV2_integrityImpact = ''
-
- recommend = 0
- recommend_list = ''
-
-#generates and executes appropriate SQLite query for CVE depending on situation
-#new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
-#returns (CVE_ID, BOOL) tuple, True if insert or update executed
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cve_query(conn, cve, is_init, log):
- is_change = False
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cve WHERE name=?'''
- exists = cur.execute(sql, (cve.name,)).fetchone()
- cve_id = -1
- if exists is None:
- # Get the default CVE status
- status = get_cve_default_status(is_init,cve.publishedDate)
-
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve.name, get_name_sort(cve.name), cve.priority, status, cve.comments, cve.comments_private, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, 1, cve.publish_state, cve.publish_date, cve.description, cve.publishedDate, cve.lastModifiedDate, cve.recommend, cve.recommend_list, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, datetime.now(),''))
- is_change = True
- cve_id = cur.lastrowid
- log.write("\tINSERTED '%s'\n" % cve.name)
-
- elif exists[ORM.CVE_LASTMODIFIEDDATE] < cve.lastModifiedDate:
- sql = ''' UPDATE orm_cve
- SET recommend = ?,
- recommend_list = ?,
- cve_data_type = ?,
- cve_data_format = ?,
- cve_data_version = ?,
- description = ?,
- lastModifiedDate = ?,
- cvssV3_baseScore = ?,
- cvssV3_baseSeverity = ?,
- cvssV2_baseScore = ?,
- cvssV2_severity = ?
- WHERE id = ?'''
- cur.execute(sql, (cve.recommend, cve.recommend_list, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, cve.description, cve.lastModifiedDate, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, exists[0]))
- is_change = True
- log.write("\tUPDATED '%s'\n" % cve.name)
- cve_id = exists[ORM.CVE_ID]
-
- ### TO-DO
- ### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
- ###
-
- else:
- is_change = False
- log.write("\tSKIPPED '%s'\n" % cve.name)
- cur.close()
- return (cve_id, is_change)
-
-
-#################################
-# cwe and cve2cwe
-#
-
-#generates and executes appropriate SQLite query for a new CWE
-#returns CWE_ID
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cwe_query(conn, value):
- CWE_ID = 0
- CWE_VULNERABLE_COUNT = 6
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cwetable WHERE name=?'''
- cwe = cur.execute(sql, (value,)).fetchone()
- if cwe is None:
- sql = '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)'''
- cur.execute(sql, (value,))
- cwe_id = cur.lastrowid
- cur.close()
- return cwe_id
- else:
- sql = ''' UPDATE orm_cwetable
- SET vulnerable_count = ?
- WHERE id = ?'''
- cur.execute(sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
- conn.commit()
- cur.close()
- return cwe[CWE_ID]
-
-#generates and executes appropriate SQLite query for new CVE to CWE relation
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cve2cwe_query(conn, cve_id, cwe_id):
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?'''
- cve2cwe = cur.execute(sql, (cve_id, cwe_id)).fetchone()
- if cve2cwe is None:
- sql = '''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id, cwe_id))
- conn.commit()
- cur.close()
-
-
-#################################
-# main loop
+#######################################################################
+# fetch_cve: extract and return the meta data for a specific CVE
#
def fetch_cve(cve_name,cve_source_file):
@@ -503,8 +774,9 @@ def fetch_cve(cve_name,cve_source_file):
print("Description=ERROR reading CVE summary file '%s':%s" % (cve_cache_path,e))
return
elif cve_source_file:
+ nist_file = os.path.join(srtool_basepath,cve_source_file) if not cve_source_file.startswith('/') else cve_source_file
try:
- f = open(os.path.join(srtool_basepath, cve_source_file), 'r')
+ f = open(nist_file, 'r')
source_dct = json.load(f)
for item in source_dct["CVE_Items"]:
if not 'cve' in item:
@@ -534,135 +806,196 @@ def fetch_cve(cve_name,cve_source_file):
print("description=There is no CVE record for %s in the loaded NIST public CVE database." % cve_name)
return
- summary = {}
+ # Translate a CVE_Item JSON node
+ summary = CVE_ItemToSummary(CVE_Item)
- summary['name'] = cve_name
- summary['cve_data_type'] = CVE_Item['cve']['data_type']
- summary['cve_data_format'] = CVE_Item['cve']['data_format']
- summary['cve_data_version'] = CVE_Item['cve']['data_version']
+ # Return the results
+ for key in summary.keys():
+ print('%s=%s' % (key,summary[key]))
- summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
- summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
- summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % cve_name
- summary['url_title'] = 'NIST Link'
+#######################################################################
+# update_cve_list: Update CVE records for a list of CVEs
+#
+# This can be used for forcing the instantiation and/or update
+# for specific CVEs on demand, for example instantiating CVEs found in
+# the defect system that may be from older NIST years which are registered
+# as data sources that are on-demand only
+#
- if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
- baseMetricV3 = CVE_Item['impact']['baseMetricV3']
- summary['cvssV3_baseScore'] = baseMetricV3['cvssV3']['baseScore']
- summary['cvssV3_baseSeverity'] = baseMetricV3['cvssV3']['baseSeverity']
- summary['cvssV3_vectorString'] = baseMetricV3['cvssV3']['vectorString']
- summary['cvssV3_exploitabilityScore'] = baseMetricV3['exploitabilityScore']
- summary['cvssV3_impactScore'] = baseMetricV3['impactScore']
- summary['cvssV3_attackVector'] = baseMetricV3['cvssV3']['attackVector']
- summary['cvssV3_attackComplexity'] = baseMetricV3['cvssV3']['attackComplexity']
- summary['cvssV3_privilegesRequired'] = baseMetricV3['cvssV3']['privilegesRequired']
- summary['cvssV3_userInteraction'] = baseMetricV3['cvssV3']['userInteraction']
- summary['cvssV3_scope'] = baseMetricV3['cvssV3']['scope']
- summary['cvssV3_confidentialityImpact'] = baseMetricV3['cvssV3']['confidentialityImpact']
- summary['cvssV3_integrityImpact'] = baseMetricV3['cvssV3']['integrityImpact']
- summary['cvssV3_availabilityImpact'] = baseMetricV3['cvssV3']['availabilityImpact']
- if ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact']):
- baseMetricV2 = CVE_Item['impact']['baseMetricV2']
- summary['cvssV2_baseScore'] = baseMetricV2['cvssV2']['baseScore']
- summary['cvssV2_severity'] = baseMetricV2['severity']
- summary['cvssV2_vectorString'] = baseMetricV2['cvssV2']['vectorString']
- summary['cvssV2_exploitabilityScore'] = baseMetricV2['exploitabilityScore']
- summary['cvssV2_impactScore'] = baseMetricV2['exploitabilityScore']
- summary['cvssV2_accessVector'] = baseMetricV2['cvssV2']['accessVector']
- summary['cvssV2_accessComplexity'] = baseMetricV2['cvssV2']['accessComplexity']
- summary['cvssV2_authentication'] = baseMetricV2['cvssV2']['authentication']
- summary['cvssV2_confidentialityImpact'] = baseMetricV2['cvssV2']['confidentialityImpact']
- summary['cvssV2_integrityImpact'] = baseMetricV2['cvssV2']['integrityImpact']
+def update_cve_list(action,cve_list,conn=None):
- configurations = CVE_Item['configurations']
- is_first_and = True
- summary['cpe_list'] = ''
- for i, config in enumerate(configurations['nodes']):
- summary['cpe_list'] += '[config]|'
- summary['cpe_list'] += '[and]|'
- if "AND" == config['operator']:
- # create AND record
- if not is_first_and:
- summary['cpe_list'] += '[/and]|'
- summary['cpe_list'] += '[and]|'
- #is_first_and = False
- if 'children' in config:
- for j, cpe_or_node in enumerate(config['children']):
- if "OR" == cpe_or_node['operator']:
- summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, cve_name, j)
- else:
- print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
- elif "OR" == config['operator']:
- summary['cpe_list'] += nist_scan_configuration_or(config, cve_name, 0)
- else:
- print("ERROR CONFIGURE:OP?:%s" % config['operator'])
- summary['cpe_list'] += '[/and]|'
- summary['cpe_list'] += '[/config]|'
+ # Set up database connection
+ do_close = False
+ if not conn:
+ conn = sqlite3.connect(srtDbName)
+ do_close = True
+ cur = conn.cursor()
- summary['ref_list'] = ''
- for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
- summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'])
+ # Gather the CVE prefix to lookup commands
+ sql = "SELECT * FROM orm_datasource"
+ cur.execute(sql)
+ datasource_table = []
+ for datasource in cur:
+ if 'nist' != datasource[ORM.DATASOURCE_SOURCE]:
+ # Only consider NIST datasources
+ continue
+ datasource_table.append([datasource[ORM.DATASOURCE_CVE_FILTER], datasource[ORM.DATASOURCE_LOOKUP], datasource[ORM.DATASOURCE_ID]])
+
+ update = False
+ fd = None
+ source_dct = []
+ for datasource in datasource_table:
+
+ # Simple caching
+ if fd:
+ fd.close()
+ fd = None
+ source_dct = []
+ has_matches = False
+ # Find at least one CVE that is in this datasource
+ for cve_name in cve_list.split(','):
+ if (not datasource[0]) or cve_name.startswith(datasource[0]):
+ has_matches = True
+ if not has_matches:
+ continue
+ # Find the CVEs in this datasource
+
+ # bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2002.json %command%
+ cve_source_file = re.sub(r".*=", "", datasource[1])
+ cve_source_file = re.sub(r" .*", "", cve_source_file)
+ if verbose: print("NIST_SOURCE:%s %s" % (cve_source_file,cve_name))
+ try:
+ if not fd:
+ # Simple caching
+ fd = open(os.path.join(srtool_basepath, cve_source_file), 'r')
+ source_dct = json.load(fd)
+ for item in source_dct["CVE_Items"]:
+ if not 'cve' in item:
+ continue
+ if not 'CVE_data_meta' in item['cve']:
+ continue
+ if not 'ID' in item['cve']['CVE_data_meta']:
+ continue
+ for cve_name in cve_list.split(','):
+ if item['cve']['CVE_data_meta']['ID'] == cve_name:
+ if verbose: print(" NIST_TRANSLATE:%s %s" % (cve_source_file,cve_name))
+
+ # Translate the CVE content
+ summary = CVE_ItemToSummary(item,True)
+ # Commit the CVE content
+ cve_id, is_change = sql_cve_query(action, conn, summary, None)
+ if is_change:
+ update = True
+
+ # Add NIST datasource to CVE
+ sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=?'''
+ cve2ds = cur.execute(sql, (cve_id, datasource[2],)).fetchone()
+ if not cve2ds:
+ sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ cur.execute(sql, (cve_id,datasource[2],))
+ # Remember this match in case it gets preempted
+
+ if verbose: print(" NIST_QUERIED:%s %s" % (cve_source_file,cve_name))
- # Return the results
- for key in summary.keys():
- print('%s=%s' % (key,summary[key]))
+ except Exception as e:
+ print("Description=ERROR CVE list load '%s':%s" % (cve_source_file,e))
+ print(traceback.format_exc())
+ return
-def do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key):
- cpe_list = ''
- for cpe in cpe_or_node[key]:
- cpe23Uri = cpe['cpe23Uri']
- if 'cpeMatchString' in cpe:
- cpeMatchString = cpe['cpeMatchString']
- else:
- cpeMatchString = ''
- if 'versionEndIncluding' in cpe:
- versionEndIncluding = cpe['versionEndIncluding']
- else:
- versionEndIncluding = ''
- cpe_list += '%s,%s,%s,%s|' % (cpe['vulnerable'],cpe23Uri,cpeMatchString,versionEndIncluding)
- return cpe_list
+ if update:
+ conn.commit()
+ cur.close()
+ if do_close:
+ conn.close()
-def nist_scan_configuration_or(cpe_or_node, name, and_enum):
- cpe_list = '[or]|'
- found = 0
- if 'cpe' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe')
- found += 1
- if 'cpe_match' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe_match')
- found += 1
- cpe_list += '[/or]|'
+def update_existing_cves(action,cve_prefix):
+ # Set up database connection
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
- if verbose and (not found):
- print("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s" % cpe_or_node)
- return cpe_list
+ # Gather the CVE prefix to lookup commands
+ sql = 'SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"'
+ cur.execute(sql)
+ cve_table = []
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ if verbose: print("FOUND:%s" % cve[ORM.CVE_NAME])
+ cve_table.append(cve[ORM.CVE_NAME])
+
+ if 19 == (i % 20):
+ print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
+ update_cve_list(action,','.join(cve_table),conn)
+ cve_table = []
+
+ if cve_table:
+ print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
+ update_cve_list(action,','.join(cve_table),conn)
+ cur.close()
+ conn.close()
-#################################
+
+#######################################################################
# main loop
#
def main(argv):
global verbose
+ global force_update
+ global force_cache
+ global update_skip_history
+ global cmd_skip
+ global cmd_count
+
parser = argparse.ArgumentParser(description='srtool_cve.py: manage the CVEs within SRTool database')
parser.add_argument('--init_nist', '-I', action='store_const', const='init_nist', dest='command', help='Initialize nvd.nist.gov/vuln/data-feeds for a specified datasource')
parser.add_argument('--update_nist', '-n', action='store_const', const='update_nist', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates on a specified datasource')
+ parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
+ parser.add_argument('--download-only', action='store_const', const='download_nist', dest='command', help='Download the NIST source CVE file(s), load CVEs on demand only')
+ parser.add_argument('--update-cve-list', '-l', dest='update_cve_list', help='Update list of CVEs to database')
+ parser.add_argument('--update-existing-cves', '-L', dest='update_existing_cves', help='Update list of existing CVEs to database')
+ parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--url-meta', dest='url_meta', help='CVE URL meta extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
- parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
- parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
- parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Force update')
+    parser.add_argument('--force-cache', action='store_true', dest='force_cache', help='Force use of cached source files')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
args = parser.parse_args()
+
verbose = args.verbose
+ force_update = args.force_update
+ force_cache = args.force_cache
+ update_skip_history = args.update_skip_history
+ cmd_skip = 0
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ cmd_count = 0
+ if None != args.count:
+ cmd_count = int(args.count)
#srt_error_log("DEBUG:srtool_nist:%s" % args)
+ # Update CVE list
+ if args.update_cve_list:
+ update_cve_list(ACTION_UPDATE_CVE,args.update_cve_list)
+ return
+ elif args.update_existing_cves:
+ update_existing_cves(ACTION_UPDATE_CVE,args.update_existing_cves)
+ return
+
# Required parameters to continue
if not args.cve_file:
print("ERROR: missing --cve_file parameter")
@@ -693,26 +1026,39 @@ def main(argv):
ret = 0
if ('init_nist' == args.command) or ('update_nist' == args.command):
- is_init = ('init_nist' == args.command)
+ if ('init_nist' == args.command):
+ action = ACTION_INIT
+ else:
+ action = ACTION_UPDATE
try:
- print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % ('INIT' if is_init else 'UPDATES'))
- update_nist(is_init, args.source, args.url_file, args.url_meta, args.cve_file, False, args.force_update)
- master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, "INIT'ED" if is_init else 'UPDATED'))
- print("DATABASE %s FINISHED\n" % ('INIT' if is_init else 'UPDATE'))
+ print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % action)
+ update_nist(action, args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:%s Done:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, action))
+ print("DATABASE %s FINISHED\n" % action)
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE %s FAILED ... %s" % ('INIT' if is_init else 'UPDATE',e))
+ print("DATABASE %s FAILED ... %s" % (action,e))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
ret = 1
elif 'update_nist_incremental' == args.command:
try:
- print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
- update_nist(False,args.source, args.url_file, args.url_meta, args.cve_file, True, args.force_update)
+ print ("BEGINNING NIST INCREMENTAL UPDATE PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_INCREMENT,args.source, args.url_file, args.url_meta, args.cve_file)
master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
print("DATABASE UPDATE FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
print("DATABASE INCREMENT FAILED ... %s" % e)
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
ret = 1
+ elif 'download_nist' == args.command:
+        print ("BEGINNING NIST DOWNLOAD PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_DOWNLOAD,args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
+        print("DATABASE DOWNLOAD FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
else:
ret = 1
print("Command not found")
diff --git a/bin/redhat/srtool_redhat.py b/bin/redhat/srtool_redhat.py
index f570965..9b629cb 100755
--- a/bin/redhat/srtool_redhat.py
+++ b/bin/redhat/srtool_redhat.py
@@ -180,7 +180,9 @@ def main(argv):
parser = argparse.ArgumentParser(description='srtool_redhat.py: manage Red Hat CVE data')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/srt b/bin/srt
index ac358d4..4cff6fd 100755
--- a/bin/srt
+++ b/bin/srt
@@ -23,7 +23,7 @@ Usage 1: bin/srt start|stop [webport=<address:port>]
Optional arguments:
[webport] Set the SRTool server port (default: localhost:8000)
[noautoupdate] Disable the auto update server
-Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|checksettings|collectstatic|...]
+Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|makemigrations|checksettings|collectstatic|...]
"
databaseCheck()
diff --git a/bin/yp/datasource.json b/bin/yp/datasource.json
index 9748695..abb2e66 100755
--- a/bin/yp/datasource.json
+++ b/bin/yp/datasource.json
@@ -37,6 +37,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact Yocto Project"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/yp/srtool_yp.py b/bin/yp/srtool_yp.py
index 338d446..1438b59 100755
--- a/bin/yp/srtool_yp.py
+++ b/bin/yp/srtool_yp.py
@@ -115,6 +115,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/yp/yocto-project-products.json b/bin/yp/yocto-project-products.json
index b968874..ea15411 100755
--- a/bin/yp/yocto-project-products.json
+++ b/bin/yp/yocto-project-products.json
@@ -1,54 +1,47 @@
{
"Product_Items" : [
- {
- "order" : "1",
- "key" : "Warrior",
- "name" : "Yocto Project Linux",
- "version" : "2.7",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"warrior\"}",
- "product_tags" : "{\"key\":\"warrior\"}"
- },
+
+ {
+ "order" : "1",
+ "key" : "master",
+ "name" : "Yocto Project Linux",
+ "version" : "dev",
+ "profile" : "",
+ "cpe" : "cpe:2.3:o:yoctoproject:*:*:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"master\"}",
+ "product_tags" : "{\"key\":\"master\",\"public_status\":\"no\",\"mode\":\"develop\"}"
+ },
+
+
{
"order" : "2",
- "key" : "Thud",
+ "key" : "Zeus",
"name" : "Yocto Project Linux",
- "version" : "2.6",
+ "version" : "3.0",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:3.0:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"zeus\"}",
+ "product_tags" : "{\"key\":\"zeus\",\"mode\":\"support\"}"
},
{
"order" : "3",
- "key" : "Sumo",
+ "key" : "Warrior",
"name" : "Yocto Project Linux",
- "version" : "2.5",
+ "version" : "2.7",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.5:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"warrior\"}",
+ "product_tags" : "{\"key\":\"warrior\",\"mode\":\"support\"}"
},
{
"order" : "4",
- "key" : "Rocko",
- "name" : "Yocto Project Linux",
- "version" : "2.4",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.4:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"rocko\"}",
- "product_tags" : "{\"key\":\"rocko\"}"
- },
- {
- "order" : "5",
- "key" : "Pyro",
+ "key" : "Thud",
"name" : "Yocto Project Linux",
- "version" : "2.3",
+ "version" : "2.6",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.3:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"pyro\"}",
- "product_tags" : "{\"key\":\"pyro\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"thud\"}",
+ "product_tags" : "{\"key\":\"thud\",\"mode\":\"support\"}"
}
]
}