-rwxr-xr-x  bin/acme/datasource.json_sample | 5
-rwxr-xr-x  bin/acme/srtool_acme.py | 1
-rwxr-xr-x  bin/common/srtool_backup.py | 98
-rwxr-xr-x  bin/common/srtool_common.py | 675
-rwxr-xr-x  bin/common/srtool_update.py | 17
-rwxr-xr-x  bin/common/srtool_utils.py | 931
-rwxr-xr-x  bin/debian/srtool_debian.py | 2
-rwxr-xr-x  bin/dev_tools/history.py | 254
-rwxr-xr-x  bin/dev_tools/update_status.sh | 43
-rwxr-xr-x  bin/mitre/datasource_2010.json | 18
-rwxr-xr-x  bin/mitre/datasource_2011.json | 18
-rwxr-xr-x  bin/mitre/datasource_2012.json | 18
-rwxr-xr-x  bin/mitre/datasource_2013.json | 18
-rwxr-xr-x  bin/mitre/datasource_2014.json | 18
-rwxr-xr-x  bin/mitre/datasource_2015.json | 4
-rwxr-xr-x  bin/mitre/datasource_2016.json | 4
-rwxr-xr-x  bin/mitre/datasource_2017.json | 4
-rwxr-xr-x  bin/mitre/datasource_2018.json | 4
-rwxr-xr-x  bin/mitre/datasource_2019.json | 4
-rwxr-xr-x  bin/mitre/srtool_mitre.py | 39
-rw-r--r--  bin/nist/datasource.json | 3
-rwxr-xr-x  bin/nist/datasource_2002.json | 19
-rwxr-xr-x  bin/nist/datasource_2003.json | 19
-rwxr-xr-x  bin/nist/datasource_2004.json | 19
-rwxr-xr-x  bin/nist/datasource_2005.json | 19
-rwxr-xr-x  bin/nist/datasource_2006.json | 19
-rwxr-xr-x  bin/nist/datasource_2007.json | 19
-rwxr-xr-x  bin/nist/datasource_2008.json | 19
-rwxr-xr-x  bin/nist/datasource_2009.json | 19
-rwxr-xr-x  bin/nist/datasource_2010.json | 18
-rwxr-xr-x  bin/nist/datasource_2011.json | 18
-rwxr-xr-x  bin/nist/datasource_2012.json | 18
-rwxr-xr-x  bin/nist/datasource_2013.json | 18
-rwxr-xr-x  bin/nist/datasource_2014.json | 18
-rwxr-xr-x  bin/nist/datasource_2015.json | 6
-rwxr-xr-x  bin/nist/datasource_2016.json | 6
-rwxr-xr-x  bin/nist/datasource_2017.json | 6
-rwxr-xr-x  bin/nist/datasource_2018.json | 6
-rwxr-xr-x  bin/nist/datasource_2019.json | 6
-rwxr-xr-x  bin/nist/datasource_2020.json | 18
-rwxr-xr-x  bin/nist/srtool_nist.py | 1142
-rwxr-xr-x  bin/redhat/srtool_redhat.py | 2
-rwxr-xr-x  bin/srt | 2
-rwxr-xr-x  bin/yp/datasource.json | 5
-rwxr-xr-x  bin/yp/srtool_yp.py | 1
-rwxr-xr-x  bin/yp/yocto-project-products.json | 63
-rwxr-xr-x  data/notify-categories.json | 4
-rw-r--r--  lib/orm/management/commands/lsupdates.py | 3
-rw-r--r--  lib/orm/migrations/0001_initial.py | 92
-rwxr-xr-x  lib/orm/migrations/0003_modified.py | 61
-rwxr-xr-x  lib/orm/migrations/0004_defect_status.py | 35
-rwxr-xr-x  lib/orm/migrations/0005_publish_report.py | 34
-rwxr-xr-x  lib/orm/migrations/0006_reconcile.py | 410
-rw-r--r--  lib/orm/models.py | 677
-rw-r--r--  lib/srtgui/api.py | 379
-rw-r--r--  lib/srtgui/reports.py | 492
-rw-r--r--  lib/srtgui/tables.py | 809
-rw-r--r--  lib/srtgui/templates/base.html | 4
-rw-r--r--  lib/srtgui/templates/basetable_top.html | 5
-rw-r--r--  lib/srtgui/templates/cve.html | 25
-rw-r--r--  lib/srtgui/templates/cves-select-toastertable.html | 120
-rw-r--r--  lib/srtgui/templates/defect.html | 54
-rw-r--r--  lib/srtgui/templates/detail_search_header.html | 3
-rwxr-xr-x  lib/srtgui/templates/history-cve-toastertable.html | 73
-rwxr-xr-x  lib/srtgui/templates/history-defect-toastertable.html | 73
-rwxr-xr-x  lib/srtgui/templates/history-investigation-toastertable.html | 73
-rwxr-xr-x  lib/srtgui/templates/history-vulnerability-toastertable.html | 73
-rw-r--r--  lib/srtgui/templates/investigation.html | 45
-rwxr-xr-x  lib/srtgui/templates/maintenance.html | 78
-rw-r--r--  lib/srtgui/templates/management.html | 47
-rwxr-xr-x  lib/srtgui/templates/notifications-toastertable.html | 2
-rwxr-xr-x  lib/srtgui/templates/publish-cve-toastertable.html | 162
-rwxr-xr-x  lib/srtgui/templates/publish-defect-toastertable.html | 168
-rwxr-xr-x  lib/srtgui/templates/publish-list-toastertable.html | 162
-rw-r--r--  lib/srtgui/templates/publish.html | 319
-rw-r--r--  lib/srtgui/templates/report.html | 41
-rwxr-xr-x  lib/srtgui/templates/srtool_metadata_include.html | 36
-rw-r--r--  lib/srtgui/templates/toastertable.html | 1
-rw-r--r--  lib/srtgui/templates/vulnerability.html | 44
-rw-r--r--  lib/srtgui/templatetags/projecttags.py | 18
-rw-r--r--  lib/srtgui/urls.py | 26
-rw-r--r--  lib/srtgui/views.py | 973
-rwxr-xr-x  lib/srtmain/management/commands/update.py | 41
-rwxr-xr-x  lib/users/models.py | 19
84 files changed, 8087 insertions, 1277 deletions
diff --git a/bin/acme/datasource.json_sample b/bin/acme/datasource.json_sample
index dc1d0188..98ef1def 100755
--- a/bin/acme/datasource.json_sample
+++ b/bin/acme/datasource.json_sample
@@ -33,6 +33,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact ACME"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/acme/srtool_acme.py b/bin/acme/srtool_acme.py
index 1aa1b911..f815ae17 100755
--- a/bin/acme/srtool_acme.py
+++ b/bin/acme/srtool_acme.py
@@ -124,6 +124,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/common/srtool_backup.py b/bin/common/srtool_backup.py
index b37e2d08..1b93637c 100755
--- a/bin/common/srtool_backup.py
+++ b/bin/common/srtool_backup.py
@@ -40,6 +40,8 @@ from common.srt_schema import ORM
verbose = False
srtDbName = 'srt.sqlite'
+BACKUP_DIR = 'backups'
+BACKUP_PREFIX = 'backup_'
#################################
# Common routines
@@ -57,20 +59,35 @@ def _log(msg):
f1.close()
#################################
+# Set backup database stamp file
+#
+
+def backup_stamp(backup_dir):
+ if not os.path.isdir(backup_dir):
+ print("ERROR: no such directory '%s'" % backup_dir)
+ exit(1)
+ statinfo = os.stat(os.path.join(backup_dir, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(backup_dir,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("* Set Timestamp:%s" % mod_timestamp.strftime('%Y-%m-%d|%H:%M:%S|%A, %B %d %Y'))
+
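For reference, backup_stamp() records the database's modification time as a single line in timestamp.txt, in both machine- and human-readable form. A minimal sketch of reading one back (path and timestamp values are illustrative; backup_list() below does the real parsing):

    # Illustrative: a stamp line looks like "2019-07-05 09:30:00 | Friday, July 05 2019"
    with open('backups/backup_Friday/timestamp.txt') as f:
        stamp = f.read().strip()
    date_time, day_name = [part.strip() for part in stamp.split('|', 1)]
    # date_time -> '2019-07-05 09:30:00', day_name -> 'Friday, July 05 2019'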
+#################################
# Backup the database and data files
#
def backup_db(is_daily):
today = datetime.today()
weeknum = today.strftime("%W")
- weekday = today.isoweekday()
+ weekday = today.strftime("%A") #today.isoweekday()
year = today.strftime("%Y")
# Where are we backing up to
if is_daily:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s" % (weekday))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_DIR,BACKUP_PREFIX,weekday))
else:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s_%s" % (year,weeknum))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (BACKUP_DIR,BACKUP_PREFIX,year,weeknum))
# Make sure directory exists
try:
os.makedirs(backup_dir)
@@ -82,25 +99,79 @@ def backup_db(is_daily):
print("*** Backup dir='%s' ***" % backup_dir)
print("* Copy database")
- cmd = 'cp %s %s' % (os.path.join(script_pathname,srtDbName),os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s %s' % (os.path.join(script_pathname,srtDbName),backup_dir)
print(cmd)
os.system(cmd)
# Copy data but skip cache dir (no deep copy)
print("* Copy data files")
- cmd = 'cp %s/data/* %s/data' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s/data/* %s/data' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
# Copy attachments
print("* Copy attachment files")
- cmd = 'cp -r %s/downloads %s' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -r -p %s/downloads %s' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
+ # Set stamp file
+ backup_stamp(backup_dir)
+
+#######################################################################
+# list
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(script_pathname, 'backups')):
+ prefix = '1Week' if not directory[len(BACKUP_PREFIX)].isalpha() else '2Day'
+ directory = os.path.join(script_pathname, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = '3Now'
+ directory = script_pathname
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+ # Sort by time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
+def list(db_list=False):
+ stamps = backup_list()
+ for stamp in stamps:
+ # Insert a separator between the date and the time
+ stamp[2] = stamp[2].replace(' ','|',1)
+ if db_list:
+ print("%s|%s|%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+ else:
+ snap_date,snap_time,snap_day = stamp[2].split('|')
+ print("%-4s,%-16s,%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+
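The '1Week'/'2Day'/'3Now' prefixes above exist only so that sort_key() groups entries by backup class before sorting by timestamp; a self-contained sketch of that ordering with made-up entries:

    # sort_key() concatenates the class prefix and the timestamp string
    entries = [
        ['2Day',  'backups/backup_Friday',  '2019-07-05 09:30:00 | Friday, July 05 2019'],
        ['1Week', 'backups/backup_2019_26', '2019-06-28 09:30:00 | Friday, June 28 2019'],
        ['3Now',  '/opt/srt',               '2019-07-05 10:15:00 | Friday, July 05 2019'],
    ]
    entries.sort(key=lambda e: e[0] + e[2])
    # -> weekly backups first, then daily backups, then the live database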
+#################################
+# Init stamps
+#
+
+def init_stamps():
+ stamps = backup_list()
+ for stamp in stamps:
+ stamp_prefix, stamp_directory, stamp_line = stamp
+ backup_stamp(stamp_directory)
+
#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
@@ -111,7 +182,14 @@ def main(argv):
parser.add_argument('--backup-db', '-b', action='store_const', const='backup', dest='command', help='Backup the database, save to year_weeknum dir')
parser.add_argument('--backup-db-daily', '-d', action='store_const', const='backup-daily', dest='command', help='Backup the database, save to weekday dir')
+ parser.add_argument('--init-stamps', '-I', action='store_const', const='init-stamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--init-dir-stamp', '-D', dest='init_dir_stamp', help='Initialize a specific backup directory timestamp')
+
+ parser.add_argument('--list-backups', '-l', action='store_const', const='list', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--list-backups-db', '-L', action='store_const', const='list-db', dest='command', help='Dump the backup directory timestamps')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
@@ -136,6 +214,14 @@ def main(argv):
except Exception as e:
print ("DATABASE BACKUP FAILED ... %s" % e)
master_log.write("SRTOOL:%s:DATABASE BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
+ elif 'list' == args.command:
+ list()
+ elif 'list-db' == args.command:
+ list(True)
+ elif 'init-stamps' == args.command:
+ init_stamps()
+ elif args.init_dir_stamp:
+ backup_stamp(args.init_dir_stamp)
else:
print("Command not found")
master_log.close()
diff --git a/bin/common/srtool_common.py b/bin/common/srtool_common.py
index 13b5893d..d9fbd341 100755
--- a/bin/common/srtool_common.py
+++ b/bin/common/srtool_common.py
@@ -51,6 +51,7 @@ except:
verbose = False
cmd_skip = 0
cmd_count = 0
+cmd_test = False
srtDbName = 'srt.sqlite'
packageKeywordsFile = 'data/package_keywords.csv'
@@ -85,6 +86,12 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
+def get_tag_key(tag,key,default=None):
+ d = json.loads(tag)
+ if key in d:
+ return d[key]
+ return default
+
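get_tag_key() treats a record's tag column as a small JSON dictionary. A quick illustration with a made-up product tag value (the 'mode' key is the one queried below when computing cumulative vulnerability status):

    tag = '{"mode": "support"}'            # illustrative orm_product.product_tags value
    get_tag_key(tag, 'mode')               # -> 'support'
    get_tag_key(tag, 'missing', 'default') # -> 'default'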
#################################
# Load the package keyword source into the database
#
@@ -293,7 +300,8 @@ def score_new_cves(cve_filter):
# Scan the open CVEs
if 'NEW' == cve_filter:
- sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s');" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
+# sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s') AND score_date IS NULL;" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
+ sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NULL;" % (ORM.STATUS_NEW)
cur.execute(sql)
elif cve_filter.startswith('CVE-'):
cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
@@ -318,22 +326,24 @@ def score_new_cves(cve_filter):
record_count = 0
write_count = 0
ds_count = 0
+ is_change = False
time_now = datetime.now()
for i,cve in enumerate(cur):
cve_name = cve[ORM.CVE_NAME]
- if cve[ORM.CVE_SCORE_DATE]:
- #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
- # If there is any score_date, then nothing to do here
- continue
-
+# if cve[ORM.CVE_SCORE_DATE]:
+# #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
+# # If there is any score_date, then nothing to do here
+# continue
+#
# Progress indicator support
if 0 == i % 10:
print('%04d: %20s\r' % (i,cve_name), end='')
- if (0 == i % 200) and not cmd_skip:
+ if (0 == i % 200) and (not cmd_skip) and is_change:
conn.commit()
print("%4d: COMMIT" % i)
sleep(2)
+ is_change = False
# Development/debug support
if cmd_skip:
if i < cmd_skip:
@@ -351,21 +361,24 @@ def score_new_cves(cve_filter):
recommend,recommend_list = compute_recommends(cve)
cve_packages = ''
if recommend_list:
-
# Go ahead and create/attach packages to CVEs
cve_packages = attach_packages(cur_write, cve, recommend_list)
- #cve_packages = cve[ORM.CVE_PACKAGES]
+ else:
+ cve_packages = cve[ORM.CVE_PACKAGES]
- sql = ''' UPDATE orm_cve
- SET recommend = ?,
- recommend_list = ?,
- packages = ?,
- score_date = ?
- WHERE id = ?'''
- cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT), cve[ORM.CVE_ID]))
- write_count += 1
+ # Always set score_date since it has been evaluated
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?,
+ recommend_list = ?,
+ packages = ?,
+ score_date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT), cve[ORM.CVE_ID]))
+ write_count += 1
+ is_change = True
- if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
+# if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
# Attach all matching CVE sources
for ds_obj in ds_list:
@@ -379,8 +392,9 @@ def score_new_cves(cve_filter):
ds_count += 1
print("%30sADDED [%4d]: %20s <- %20s\r" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='')
- conn.commit()
- print("COMMIT")
+ if is_change:
+ conn.commit()
+ print("COMMIT")
print("\nUpdated CVEs=%d, Added alternate sources=%d" % (write_count,ds_count))
#################################
@@ -412,6 +426,430 @@ def init_notify_categories(filename):
conn.close()
#################################
+# Update cumulative Cve/Vulnerability/Investigation status
+#
+# * Scan the respective child Vulnerabilities/Investigations/Defects, and
+# sum them into cumulative status for parent
+# * Rules for Status:
+# If any child is VULNERABLE, then the parent is VULNERABLE
+# else if any child is INVESTIGATE, then the parent is INVESTIGATE
+# else if any child is NEW, then the parent is INVESTIGATE
+# else the parent is NOT_VULNERABLE
+# * Exceptions:
+# Children that are 'ORM.STATUS_HISTORICAL' or 'ORM.STATUS_NEW_RESERVED' have no vote
+# If there are no children, or no child casts a vote, then the status is left unchanged
+# * Rules for Priority:
+# If any child has a higher priority, that priority is used
+#
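The rules above reduce to a small fold over the child statuses. A minimal sketch of just the status vote, assuming the generated ORM.STATUS_* constants (the functions below also fold in priority, history records, and commit batching):

    def cumulative_status(child_statuses, current_status):
        result, votes = None, 0
        for status in child_statuses:
            if ORM.STATUS_VULNERABLE == status:
                return ORM.STATUS_VULNERABLE
            if status in (ORM.STATUS_INVESTIGATE, ORM.STATUS_NEW):
                result = ORM.STATUS_INVESTIGATE
                votes += 1
            elif ORM.STATUS_NOT_VULNERABLE == status:
                votes += 1                     # tentative "not vulnerable" vote
            # Historical, New-Reserved, and inactive states do not vote
        if 0 == votes:
            return current_status              # no votes: leave status unchanged
        return result if result else ORM.STATUS_NOT_VULNERABLE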
+
+def _update_cve_status(cur,cve,srtool_today,update_skip_history):
+ if verbose: print("Cve:%s:%s" % (cve[ORM.CVE_NAME],ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if cve[ORM.CVE_STATUS_LOCK]:
+ # return
+
+ # Get the CVE's Vulnerabilities
+ cve_priority = cve[ORM.CVE_PRIORITY]
+ cve_status = None
+ vote_count = 0
+ cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ # Compute Status
+ status = vulnerability[ORM.VULNERABILITY_STATUS]
+ if verbose: print(" %s,%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and cve_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active: Historical, New-Reserved; Inactive: (New), (Investigate), (Vulnerable), (Not Vulnerable)
+ continue
+ # Compute Priority
+ if cve_priority < vulnerability[ORM.VULNERABILITY_PRIORITY]:
+ cve_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == cve_status:
+ cve_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if cve[ORM.CVE_STATUS] != cve_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+ if cve[ORM.CVE_PRIORITY] < cve_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(cve[ORM.CVE_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(cve_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change CVE:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_cve SET status=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (cve_status,cve_priority,srtool_today,cve[ORM.CVE_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from vulnerabilities')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve[ORM.CVE_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_cve_status(cve_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ if 'all' == cve_list:
+ cves = cur.execute("SELECT * FROM orm_cve").fetchall()
+ else:
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status:count=%d" % (len(cves)))
+
+ i = 0
+ for cve in cves:
+
+ # Leave "New" CVEs to Triage
+ if ORM.STATUS_NEW == cve[ORM.CVE_STATUS]:
+ continue
+
+ _update_cve_status(cur,cve,srtool_today,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,cve[ORM.CVE_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ cur.close()
+ conn.commit()
+ conn.close()
+
+# Indexes into the product table cache
+PRODUCT_DICT_KEY = 0
+PRODUCT_DICT_TAG = 1
+
+def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history):
+ if verbose: print("Vulnerability:%s:%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if vulnerability[ORM.VULNERABILITY_STATUS_LOCK]:
+ # return
+
+ # Get the Vulnerability's Investigations
+ vulnerability_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+ vulnerability_status = None
+ vote_count = 0
+ vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability[ORM.VULNERABILITY_ID]).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+
+ # For now, only calculate the "Public Status", so skip non-supported products
+ product_mode = get_tag_key(product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_TAG],'mode')
+ if 'support' != product_mode:
+ if verbose: print(" SKIP:Product %s is mode=%s" % (product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_KEY],product_mode))
+ continue
+
+ # Compute Status
+ status = investigation[ORM.INVESTIGATION_STATUS]
+ if verbose: print(" %s,%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and vulnerability_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active: Historical, New-Reserved; Inactive: (New), (Investigate), (Vulnerable), (Not Vulnerable)
+ continue
+ # Compute Priority
+ if vulnerability_priority < investigation[ORM.INVESTIGATION_PRIORITY]:
+ vulnerability_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == vulnerability_status:
+ vulnerability_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if vulnerability[ORM.VULNERABILITY_STATUS] != vulnerability_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+ if vulnerability[ORM.VULNERABILITY_PRIORITY] < vulnerability_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(vulnerability_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Vulnerability:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_vulnerability SET status=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (vulnerability_status,vulnerability_priority,srtool_today,vulnerability[ORM.VULNERABILITY_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from investigations')
+ sql = '''INSERT INTO orm_vulnerabilityhistory (vulnerability_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (vulnerability[ORM.VULNERABILITY_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_vulnerability_status(vulnerability_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ # Pre-gather and cache the product information
+ product_dict = {}
+ products = cur.execute("SELECT * FROM orm_product").fetchall()
+ for product in products:
+ product_dict[ product[ORM.PRODUCT_ID] ] = [product[ORM.PRODUCT_KEY],product[ORM.PRODUCT_PRODUCT_TAGS]]
+
+ if 'all' == vulnerability_list:
+ vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability").fetchall()
+ else:
+ vulnerability_paren_list = str(vulnerability_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list)
+ vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list).fetchall()
+
+ i = 0
+ for vulnerability in vulnerabilities:
+ _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,vulnerability[ORM.VULNERABILITY_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ cur.close()
+ conn.commit()
+ conn.close()
+
+
+def _update_investigation_status(cur,investigation,srtool_today,update_skip_history):
+ if verbose: print("Investigation:%s:%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if investigation[ORM.INVESTIGATION_STATUS_LOCK]:
+ # return
+
+ # Get the Investigation's Defects
+ investigation_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+ investigation_status = None
+ vote_count = 0
+ inv2defs = cur.execute("SELECT * FROM orm_investigationtodefect where investigation_id = '%s'" % investigation[ORM.INVESTIGATION_ID]).fetchall()
+ for inv2def in inv2defs:
+ defect_id = inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
+ defect = cur.execute("SELECT * FROM orm_defect where id = '%s'" % defect_id).fetchone()
+ # Compute Status
+ status = defect[ORM.DEFECT_SRT_STATUS]
+ if verbose: print(" %s,%s" % (defect[ORM.DEFECT_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and investigation_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active: Historical, New-Reserved; Inactive: (New), (Investigate), (Vulnerable), (Not Vulnerable)
+ continue
+ # Compute Priority
+ if investigation_priority < defect[ORM.DEFECT_SRT_PRIORITY]:
+ investigation_priority = defect[ORM.DEFECT_SRT_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == investigation_status:
+ investigation_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+
+ investigation_outcome = None
+ for inv2def in inv2defs:
+ defect = cur.execute("SELECT * FROM orm_defect where id = '%s'" % inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]).fetchone()
+ outcome = defect[ORM.DEFECT_SRT_OUTCOME]
+ if (ORM.OUTCOME_OPEN == outcome) or (ORM.OUTCOME_OPEN == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_OPEN
+ continue
+ if (ORM.OUTCOME_FIXED == outcome) or (ORM.OUTCOME_FIXED == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_FIXED
+ continue
+ # ORM.OUTCOME_CLOSED
+ # ORM.OUTCOME_NOT_FIX
+ investigation_outcome = outcome
+
+ if not investigation_outcome:
+ investigation_outcome = investigation[ORM.INVESTIGATION_OUTCOME]
+
+
+ ### TO_DO: DOUBLE CHECK
+ if False:
+ ### WIND_RIVER_EXTENSION_BEGIN ###
+ # FIXUP: Status: overwrite if new is Fixed and old isn't "VULNERABLE"
+ update_fixup = ('Fixed' == jira_resolution) and (ORM.STATUS_VULNERABLE != cve[ORM.CVE_STATUS])
+ ### WIND_RIVER_EXTENSION_END ###
+
+
+
+ # Update status
+ history_update = []
+ if investigation[ORM.INVESTIGATION_STATUS] != investigation_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+ if investigation[ORM.INVESTIGATION_OUTCOME] != investigation_outcome:
+ history_update.append(ORM.UPDATE_OUTCOME % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_OUTCOME],ORM.OUTCOME_STR),
+ ORM.get_orm_string(investigation_outcome,ORM.OUTCOME_STR)))
+ if investigation[ORM.INVESTIGATION_PRIORITY] < investigation_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(investigation_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Investigation:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_investigation SET status=?, outcome=?, priority=?, srt_updated=? WHERE id=?"
+ cur.execute(sql, (investigation_status,investigation_outcome,investigation_priority,srtool_today,investigation[ORM.INVESTIGATION_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from defects')
+ sql = '''INSERT INTO orm_investigationhistory (investigation_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (investigation[ORM.INVESTIGATION_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_investigation_status(investigation_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ srtool_today = datetime.today()
+
+ if 'all' == investigation_list:
+ investigations = cur.execute("SELECT * FROM orm_investigation").fetchall()
+ else:
+ investigation_paren_list = str(investigation_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list)
+ investigations = cur.execute("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list).fetchall()
+
+ i = 0
+ for investigation in investigations:
+ _update_investigation_status(cur,investigation,srtool_today,update_skip_history)
+ i += 1
+ if (0 == i % 100):
+ print("%5d: %-10s\r" % (i,investigation[ORM.INVESTIGATION_NAME]),end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ cur.close()
+ conn.commit()
+ conn.close()
+
+# This routine is intended for incremental cumulative status updates
+def update_cve_status_tree(cve_list,update_skip_history):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+ if 'all' == cve_list:
+ # global cumulative update
+ update_investigation_status('all', update_skip_history)
+ update_vulnerability_status('all', update_skip_history)
+ update_cve_status('all', update_skip_history)
+ return
+
+ # Perform a deep update on the CVEs, their vulnerabilities, and their investigations
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status_tree:count=%d" % (len(cves)))
+
+ i = 0
+ cve_list = []
+ for cve in cves:
+ cve_list.append(cve[ORM.CVE_NAME])
+ vulnerability_list = []
+ investigation_list = []
+
+ cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ vulnerability_list.append(vulnerability[ORM.VULNERABILITY_NAME])
+
+ vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability_id).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+ investigation_list.append(investigation[ORM.INVESTIGATION_NAME])
+
+ # Update the CVE's children status
+ update_investigation_status(','.join(investigation_list), update_skip_history)
+ update_vulnerability_status(','.join(vulnerability_list), update_skip_history)
+
+ # Children are updated, now update the CVEs
+ update_cve_status(','.join(cve_list), update_skip_history)
+ cur.close()
+ conn.close()
+
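Taken together, these routines can be driven per-record or in bulk; a hedged sketch of calling them directly (the CVE names are placeholders, and the second argument is the new update_skip_history flag):

    # Recompute two CVEs and everything beneath them, recording history entries
    update_cve_status_tree('CVE-2019-0001,CVE-2019-0002', False)
    # Bulk refresh of all investigations, vulnerabilities, and CVEs, skipping history
    update_cve_status_tree('all', True)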
+#################################
# Generate database schema offsets
#
#
@@ -428,12 +866,23 @@ def gen_schema_header():
print("ERROR(%d): %s" % (e.returncode, e.output))
return
+ # Fetch USER_SRTOOL_ID
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ USER_SRTOOL_NAME = 'SRTool'
+ user = cur.execute("SELECT * FROM users_srtuser where username = '%s'" % USER_SRTOOL_NAME).fetchone()
+ USER_SRTOOL_ID = user[0] # Hardcoded 'ORM.USERS_SRTUSER_ID'
+ conn.close()
+
with open(os.path.join(srtool_basepath,'bin/common/srt_schema.py'), 'w') as fd:
fd.write("# SRTool database table schema indexes\n")
fd.write("# Generated by: './bin/common/srtool_common.py --generate-schema-header'\n")
fd.write("# Should be run after any schema changes to sync commandline tools\n")
fd.write("\n")
fd.write("class ORM():\n")
+ fd.write(" USER_SRTOOL_NAME = '%s'\n" % USER_SRTOOL_NAME)
+ fd.write(" USER_SRTOOL_ID = %d\n" % USER_SRTOOL_ID)
+
for line in output.decode("utf-8").splitlines():
match = create_re.match(line)
if not match:
@@ -450,14 +899,18 @@ def gen_schema_header():
#print("%s_%s = %d" % (table.upper(),name.upper(),i))
fd.write(" %s_%s = %d\n" % (table.upper(),name.upper(),i))
+ #
+ # Common SRTool Status Mappings
+ #
+
fd.write("\n # Shared Constants\n")
fd.write(" %s_%s = %d\n" % ('PRIORITY','UNDEFINED',0))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MINOR' ,1))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,2))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,3))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,4))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','CRITICAL' ,4))
fd.write(" %s = '%s'\n" % ('PRIORITY_STR', \
- 'Undefined,Minor,Low,Medium,High' \
+ 'UNDEFINED,Low,Medium,High,Critical' \
))
fd.write(" %s_%s = %d\n" % ('STATUS','HISTORICAL' ,0))
@@ -466,8 +919,12 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE' ,3))
fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE' ,4))
fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE',5))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NEW_INACTIVE' ,6))
+ fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE_INACTIVE' ,7))
+ fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE_INACTIVE' ,8))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE_INACTIVE',9))
fd.write(" %s = '%s'\n" % ('STATUS_STR', \
- 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable' \
+ 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable,(New),(Investigate),(Vulnerable),(Not Vulnerable)' \
))
fd.write(" %s_%s = %d\n" % ('PUBLISH','UNPUBLISHED',0))
@@ -488,6 +945,10 @@ def gen_schema_header():
'Open,Closed,Fixed,Not_Fix' \
))
+ #
+ # External Defect Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DEFECT','UNRESOLVED' ,0))
fd.write(" %s_%s = %d\n" % ('DEFECT','RESOLVED' ,1))
fd.write(" %s_%s = %d\n" % ('DEFECT','FIXED' ,2))
@@ -500,12 +961,39 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('DEFECT','CANNOT_REPRODUCE' ,9))
fd.write(" %s_%s = %d\n" % ('DEFECT','DONE' ,10))
fd.write(" %s_%s = '%s'\n" % ('DEFECT','RESOLUTION_STR', \
- 'Unresolved,Resolved,Fixed,Will_Not_Fix,Withdrawn,Rejected,Duplicate,Not_Applicable,Replaced_By_Requirement,Cannot_Reproduce,Done' \
+ 'Unresolved,Resolved,Fixed,Will Not Fix,Withdrawn,Rejected,Duplicate,Not Applicable,Replaced By Requirement,Cannot Reproduce,Done' \
))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','UNDEFINED',0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','CRITICAL' ,4))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','PRIORITY_STR', \
+ 'UNDEFINED,P4,P3,P2,P1' \
+ ))
+
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_OPEN' ,0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_IN_PROGRESS' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_ON_HOLD' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CHECKED_IN' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_RESOLVED' ,4))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CLOSED' ,5))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','STATUS_STR', \
+ 'Open,In progress,On Hold,Checked In,Resolved,Closed' \
+ ))
+
+ #
+ # Package Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('PACKAGE','FOR' ,0))
fd.write(" %s_%s = %d\n" % ('PACKAGE','AGAINST' ,1))
+ #
+ # Data source Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DATASOURCE','MINUTELY' ,0))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','HOURLY' ,1))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','DAILY' ,2))
@@ -519,6 +1007,55 @@ def gen_schema_header():
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATE_FORMAT','%Y-%m-%d'))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATETIME_FORMAT','%Y-%m-%d %H:%M:%S'))
+ #
+ # Update class Mappings
+ #
+
+ fd.write("\n\n")
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','UPDATE_STR','UPDATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','CREATE_STR','CREATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_USER','User'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_TRIAGE','Triage'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_CVE','CVE'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_DEFECT','Defect'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NEW_NAME','New_Name(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIORITY','Priority(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','STATUS','Status(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V3','Severity_V3(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V2','Severity_V2(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DESCRIPTION','Description()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','LASTMODIFIEDDATE','LastModifiedDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','OUTCOME','Outcome(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','RELEASE','Release(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NOTE','User_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIVATE_NOTE','Private_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','TAG','Tag()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_STATE','Publish_State(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_DATE','Publish_Date(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ACKNOWLEDGE_DATE','AcknowledgeDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_CVE','Attach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_CVE','Detach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_VUL','Attach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_VUL','Detach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_INV','Attach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_INV','Detach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DEV','Attach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DEV','Detach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DOC','Attach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DOC','Detach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_USER_NOTIFY','Attach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_USER_NOTIFY','Detach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_ACCESS','Attach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_ACCESS','Detach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_PRODUCT','Attach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_PRODUCT','Detach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_NEW','Mark_New()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_UPDATED','Mark_Updated()'))
+
+ #
+ # Helper routine to map values to string names
+ #
+
fd.write("\n\n")
fd.write(" # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')\n")
fd.write(" @staticmethod\n")
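The *_STR values written above are positional lookup tables for the generated ORM.get_orm_string() helper. Assuming the helper simply indexes the comma-separated list (a sketch, not the generated code itself):

    def get_orm_string(value, lookup_str):
        return lookup_str.split(',')[int(value)]

    # get_orm_string(4, ORM.STATUS_STR)   -> 'Vulnerable'
    # get_orm_string(3, ORM.PRIORITY_STR) -> 'High' (after the remap above)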
@@ -536,63 +1073,15 @@ def gen_schema_header():
fd.write("\n")
#################################
-# fixups
-#
-
-# Recompute all of the CVE name_sort fields
-def fix_name_sort():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve')
- for i,cve in enumerate(cur):
- name_sort = get_name_sort(cve[ORM.CVE_NAME])
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET name_sort = ?
- WHERE id = ?'''
- cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
- conn.commit()
-
-# Reset empty CVE recommend fields to the proper integer zero
-def fix_cve_recommend():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
- i = 0
- for cve in cur:
- i += 1
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET recommend = ?
- WHERE id = ?'''
- cur_write.execute(sql, (0, cve[ORM.CVE_ID],))
- print("CVE RECOMMEND FIX COUNT=%d" % i)
- conn.commit()
-
-#################################
# main loop
#
def main(argv):
global verbose
+ global update_skip_history
global cmd_skip
global cmd_count
+ global cmd_test
# setup
parser = argparse.ArgumentParser(description='srtool_common.py: manage SRTool common source data')
@@ -600,15 +1089,24 @@ def main(argv):
parser.add_argument('--init-notify-categories', '-n', action='store_const', const='init_notify_categories', dest='command', help='Initialize notify categories')
parser.add_argument('--score-new-cves', '-s', dest='score_new_cves', help='Score CVEs for triage [NEW|CVE-1234]')
parser.add_argument('--generate-schema-header', '-g', action='store_const', const='gen_schema_header', dest='command', help='Generate database schema header')
+
+
+ parser.add_argument('--update-cve-status-tree', '-S', dest='update_cve_status_tree', help="Update CVEs and their children's cumulative status")
+ parser.add_argument('--update-investigation-status', '-I', dest='update_investigation_status', help='Update Investigation cumulative status')
+ parser.add_argument('--update-vulnerability-status', '-V', dest='update_vulnerability_status', help='Update Vulnerability cumulative status')
+ parser.add_argument('--update-cve-status', '-C', dest='update_cve_status', help='Update CVE cumulative status')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
- parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalulate the CVE name sort values')
- parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
args = parser.parse_args()
verbose = args.verbose
+ update_skip_history = args.update_skip_history
+ cmd_test = args.test
cmd_skip = 0
if None != args.skip:
cmd_skip = int(args.skip)
@@ -618,6 +1116,9 @@ def main(argv):
if get_override('SRTDBG_MINIMAL_DB'):
cmd_count = 40
+ if verbose:
+ print('srtool_common %s' % args)
+
if 'init_package_keywords' == args.command:
init_package_keywords(packageKeywordsFile)
elif 'init_notify_categories' == args.command:
@@ -626,12 +1127,16 @@ def main(argv):
score_new_cves(args.score_new_cves)
elif 'gen_schema_header' == args.command:
gen_schema_header()
- ### TO-DO: TEMPORARY WORKAROUND
- fix_cve_recommend()
- elif 'fix_name_sort' == args.command:
- fix_name_sort()
- elif 'fix_cve_recommend' == args.command:
- fix_cve_recommend()
+
+ elif args.update_cve_status_tree:
+ update_cve_status_tree(args.update_cve_status_tree, update_skip_history)
+ elif args.update_cve_status:
+ update_cve_status(args.update_cve_status, update_skip_history)
+ elif args.update_vulnerability_status:
+ update_vulnerability_status(args.update_vulnerability_status, update_skip_history)
+ elif args.update_investigation_status:
+ update_investigation_status(args.update_investigation_status, update_skip_history)
+
else:
print("Command not found")
diff --git a/bin/common/srtool_update.py b/bin/common/srtool_update.py
index 1ec6c0a3..92f4479d 100755
--- a/bin/common/srtool_update.py
+++ b/bin/common/srtool_update.py
@@ -27,6 +27,7 @@ import sqlite3
import json
import time
from datetime import datetime, timedelta
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -85,7 +86,7 @@ def get_tag_key(tag,key,default=''):
# ONDEMAND = 5 "{}" # only on demand
# ONSTARTUP = 6 "{}" # on every SRTool start up
-def run_updates(force_all,name_filter,is_trial):
+def run_updates(force_all,name_filter,update_skip_history,is_trial):
conn = sqlite3.connect(srtDbName)
cur = conn.cursor()
@@ -95,7 +96,7 @@ def run_updates(force_all,name_filter,is_trial):
if verbose:
print("SRTool Update: time_now = %s" % time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
status_str = "============================================================\n"
- status_str += "Update: Date=%s,Filter='%s',Force=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all)
+ status_str += "Update: Date=%s,Filter='%s',Force=%s,Skip_History=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all,update_skip_history)
#get sources that have update command
sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
@@ -198,6 +199,8 @@ def run_updates(force_all,name_filter,is_trial):
update_command = source[ORM.DATASOURCE_UPDATE]
if force_all:
update_command += " --force"
+ if update_skip_history:
+ update_command += " --update-skip-history"
if update_command.startswith('./'):
update_command = os.path.join(script_pathname, update_command)
os.system("echo 'Update:%s,%s' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
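The net effect is that the flags given to srtool_update.py propagate to each datasource's own update script; an illustrative decoration (the stored command below is a placeholder, not an actual datasource entry):

    # stored update command:  ./bin/example/srtool_example.py --update
    # with force_all and update_skip_history set, it is run as:
    #   ./bin/example/srtool_example.py --update --force --update-skip-history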
@@ -235,7 +238,7 @@ def list():
cur = conn.cursor()
cur_write = conn.cursor()
- format_str = "%16s %7s %14s %10s %28s %s"
+ format_str = "%16s %7s %14s %10s %28s '%s'"
print("SRTool Update List:")
print(format_str % ('Data','Source','Name','Frequency','Offset','Description'))
@@ -251,7 +254,7 @@ def list():
if verbose:
print('')
- run_updates(False,'all',True)
+ run_updates(False,'all',True,True)
#################################
# Start 'cron' job for updates
@@ -274,7 +277,7 @@ def cron_start():
extra_line = False
while True:
# Run the updates
- run_updates(False,'all',False)
+ run_updates(False,'all',False,False)
# Toggle an extra line in the log to make updates obvious
if extra_line:
extra_line = False
@@ -318,6 +321,7 @@ def main(argv):
parser.add_argument('--name-filter', '-n', dest='name_filter', help='Filter for datasource name')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
@@ -337,12 +341,13 @@ def main(argv):
elif 'run-updates' == args.command:
try:
print("BEGINNING UPDATING DATASOURCES... this MAY take a long time")
- run_updates(args.force,name_filter,args.is_trial)
+ run_updates(args.force,name_filter,args.update_skip_history,args.is_trial)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES:\t\t\t...\t\t\tSUCCESS\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
print("FINISHED UPDATING ALL DATASOURCES\n")
except Exception as e:
print("FAILED UPDATING ALL DATASOURCES (%s)" % e)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES\t\t\t...\t\t\tFAILED ... %s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), e))
+ traceback.print_exc(file=sys.stdout)
elif args.configure_ds_update:
try:
print("CHANGING UPDATE CONFIGURATION FOR %s" % args.configure_ds_update[0])
diff --git a/bin/common/srtool_utils.py b/bin/common/srtool_utils.py
index 8c13f3a1..ac65d42d 100755
--- a/bin/common/srtool_utils.py
+++ b/bin/common/srtool_utils.py
@@ -25,6 +25,9 @@ import os
import sys
import argparse
import sqlite3
+from datetime import datetime, date
+import time
+import re
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -35,6 +38,7 @@ from common.srt_schema import ORM
verbose = False
cmd_skip = 0
cmd_count = 0
+force = False
srtDbName = 'srt.sqlite'
@@ -141,9 +145,6 @@ def remove_app_sources(master_app):
# Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
def fix_new_reserved():
- global cmd_skip
- global cmd_count
-
conn = sqlite3.connect(srtDbName)
cur = conn.cursor()
cur_write = conn.cursor()
@@ -171,6 +172,7 @@ def fix_new_reserved():
reserved_pos = cve[ORM.CVE_DESCRIPTION].find('** RESERVED **')
if (0 <= reserved_pos) and (20 > reserved_pos):
print("STATUS_NEW_RESERVED:%s:%s:%s" % (cve[ORM.CVE_STATUS],cve[ORM.CVE_NAME],cve[ORM.CVE_DESCRIPTION][:40]))
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
sql = ''' UPDATE orm_cve
SET status = ?
WHERE id = ?'''
@@ -180,12 +182,881 @@ def fix_new_reserved():
conn.commit()
#################################
+# fix_new_tags
+#
+
+# Fix the None "cve.tags" fields
+def fix_new_tags():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ j = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if not cve[ORM.CVE_TAGS]:
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET tags = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, ('', cve[ORM.CVE_ID],))
+ j += 1
+ print("\nCVE COUNT=%5d,%5d" % (i,j))
+ conn.commit()
+
+#################################
+# fixup fix_name_sort
+#
+
+# Recompute all of the CVE name_sort fields
+def fix_name_sort():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve')
+ for i,cve in enumerate(cur):
+ name_sort = get_name_sort(cve[ORM.CVE_NAME])
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
+ if (0 == i % 200):
+ conn.commit()
+
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET name_sort = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
+ conn.commit()
+
+#################################
+# fixup fix_cve_recommend
+#
+
+# Reset empty CVE recommend fields to the proper integer zero
+def fix_cve_recommend():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
+ i = 0
+ fix_count = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+
+ #
+ # Fix mis-writes to lastModifiedDate and missing integer values for recommend
+ #
+
+ fix = False
+
+ lastModifiedDate = cve[ORM.CVE_LASTMODIFIEDDATE]
+ if '0' == lastModifiedDate:
+ lastModifiedDate = ''
+ fix = True
+
+ recommend = cve[ORM.CVE_RECOMMEND]
+ if not recommend:
+ recommend = 0
+ fix = True
+
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ if fix:
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?, lastModifiedDate = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (recommend, lastModifiedDate, cve[ORM.CVE_ID],))
+
+ fix_count += 1
+ if (199 == fix_count % 200):
+ conn.commit()
+
+ print("CVE RECOMMEND FIX COUNT=%d of %d" % (fix_count,i))
+ if fix_count:
+ conn.commit()
+ conn.close()
+
+#################################
+# fixup fix_srt_dates
+#
+
+# Reset older 'date' values as 'datetime' values
+
+def _fix_datetime(value,default):
+ if (not value) or (not value[0].isdigit()):
+ return(default)
+ elif ':' in value:
+ return(value)
+ else:
+ return(datetime.strptime(value, '%Y-%m-%d'))
+
+def _fix_date(value,default):
+ if (not value) or (not value[0].isdigit()):
+ return(False,default)
+ elif not ':' in value:
+ return(False,value)
+ else:
+ value = re.sub('\..*','',value)
+ dt = datetime.strptime(value,ORM.DATASOURCE_DATETIME_FORMAT)
+ return(True,dt.strftime(ORM.DATASOURCE_DATE_FORMAT))
+
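These two normalizers drive the date repair below: _fix_datetime() upgrades bare dates to datetime objects, while _fix_date() strips times (and fractional seconds) back down to dates. Illustrative values:

    # _fix_datetime('2019-07-05', None)          -> datetime(2019, 7, 5, 0, 0)
    # _fix_datetime('2019-07-05 09:30:00', None) -> '2019-07-05 09:30:00' (unchanged)
    # _fix_date('2019-07-05 09:30:00.123', None) -> (True, '2019-07-05')
    # _fix_date('2019-07-05', None)              -> (False, '2019-07-05')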
+
+def fix_srt_datetime(scope):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ if ('d' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ is_change_count = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ defect_srt_updated = _fix_datetime(defect[ORM.DEFECT_SRT_UPDATED],defect[ORM.DEFECT_DATE_UPDATED])
+ if defect_srt_updated == defect[ORM.DEFECT_SRT_UPDATED]:
+ continue
+
+ sql = ''' UPDATE orm_defect
+ SET srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (defect_srt_updated, defect[ORM.DEFECT_ID],))
+ is_change_count += 1
+ print("DEFECT DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ conn.commit()
+
+ # INVESTIGATION DATE FIX COUNT=1089363, real 12m20.041s = 1472 recs/sec
+ if ('i' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_investigation')
+ i = 0
+ is_change_count = 0
+ for investigation in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ srt_updated = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_UPDATED],None)
+ srt_created = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_CREATED],None)
+ if (not srt_updated) or (not srt_created):
+ print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,investigation[ORM.INVESTIGATION_ID],investigation[ORM.INVESTIGATION_SRT_UPDATED],investigation[ORM.INVESTIGATION_SRT_CREATED]))
+ exit(1)
+ if (srt_updated == investigation[ORM.INVESTIGATION_SRT_UPDATED]) and (srt_created == investigation[ORM.INVESTIGATION_SRT_CREATED]):
+ continue
+
+ sql = ''' UPDATE orm_investigation
+ SET srt_updated = ?, srt_created = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_updated, srt_created, investigation[ORM.INVESTIGATION_ID],))
+ is_change_count += 1
+ print("INVESTIGATION DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ conn.commit()
+
+ # VULNERABILITY DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+ if ('v' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_vulnerability')
+ i = 0
+ is_change_count = 0
+ for vulnerability in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,vulnerability[ORM.VULNERABILITY_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ srt_updated = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_UPDATED],None)
+ srt_created = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_CREATED],None)
+ if (not srt_updated) or (not srt_created):
+ print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,vulnerability[ORM.VULNERABILITY_ID],vulnerability[ORM.VULNERABILITY_SRT_UPDATED],vulnerability[ORM.VULNERABILITY_SRT_CREATED]))
+ exit(1)
+ if (srt_updated == vulnerability[ORM.VULNERABILITY_SRT_UPDATED]) and (srt_created == vulnerability[ORM.VULNERABILITY_SRT_CREATED]):
+ continue
+
+ sql = ''' UPDATE orm_vulnerability
+ SET srt_updated = ?, srt_created = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_updated, srt_created, vulnerability[ORM.VULNERABILITY_ID],))
+ is_change_count += 1
+ print("VULNERABILITY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ conn.commit()
+
+ # CVE DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+    # NOTE: only the ACK dates need fixing; they received bad alpha content from srtool_mitre
+ if ('c' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200) and is_change:
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+            is_change = False
+
+            # Preserve the existing acknowledge date unless it needs repair
+            acknowledge_date = cve[ORM.CVE_ACKNOWLEDGE_DATE]
+            if acknowledge_date:
+                acknowledge_date = _fix_datetime(acknowledge_date,'alpha')
+                # If the default 'alpha' comes back, the date had a bad format and must go away
+                if ('alpha' == acknowledge_date) or (acknowledge_date != cve[ORM.CVE_ACKNOWLEDGE_DATE]):
+                    acknowledge_date = None
+                    is_change = True
+
+            srt_updated = _fix_datetime(cve[ORM.CVE_SRT_UPDATED],None)
+            srt_created = _fix_datetime(cve[ORM.CVE_SRT_CREATED],None)
+            if (not srt_updated) or (not srt_created):
+                print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,cve[ORM.CVE_ID],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_SRT_CREATED]))
+                exit(1)
+            if (srt_updated != cve[ORM.CVE_SRT_UPDATED]) or (srt_created != cve[ORM.CVE_SRT_CREATED]):
+                is_change = True
+
+            # Anything to do?
+            if not is_change:
+                continue
+
+            sql = ''' UPDATE orm_cve
+                      SET srt_updated = ?, srt_created = ?, acknowledge_date = ?
+                      WHERE id = ?'''
+            cur_write.execute(sql, (srt_updated, srt_created, acknowledge_date, cve[ORM.CVE_ID],))
+            is_change_count += 1
+ print("CVE DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ conn.commit()
+
+ # Fix CVE History
+ if scope in ('ch','all','history'):
+ cur.execute('SELECT * FROM orm_cvehistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for cve_history in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(cve_history[ORM.CVEHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ is_change = True
+ sql = ''' UPDATE orm_cvehistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, cve_history[ORM.CVEHISTORY_ID],))
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if is_change:
+ conn.commit()
+ print("CVE HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+ # Fix Vulnerability History
+ if scope in ('vh','all','history'):
+ cur.execute('SELECT * FROM orm_vulnerabilityhistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for vulnerabilityhistory in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(vulnerabilityhistory[ORM.VULNERABILITYHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ is_change = True
+ sql = ''' UPDATE orm_vulnerabilityhistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, vulnerabilityhistory[ORM.VULNERABILITYHISTORY_ID],))
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if is_change:
+ conn.commit()
+ print("VULNERABILITY HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+ # Fix Investigation History
+ if scope in ('ih','all','history'):
+ cur.execute('SELECT * FROM orm_investigationhistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for investigation_history in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(investigation_history[ORM.INVESTIGATIONHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ is_change = True
+ sql = ''' UPDATE orm_investigationhistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, investigation_history[ORM.INVESTIGATIONHISTORY_ID],))
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if is_change:
+ conn.commit()
+ print("INVESTIGATION HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
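+# Typical invocations of the fixer above, using the command-line flags defined in
+# main() below ($SRTOOL_UTILS stands in for this script's path):
+#   $SRTOOL_UTILS --fix-srt-datetime all
+#   $SRTOOL_UTILS --fix-srt-datetime ch --skip 1000 --count 200
+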
+#################################
+# fixup fix_cve_srt_create
+#
+
+# Reset CVE srt_create to NIST release dates
+def fix_reset_nist_to_create(cve_prefix):
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ def date_nist2srt(nist_date,default,cve_name,i):
+ if not nist_date or (4 > len(nist_date)):
+ return default
+ try:
+ return(datetime.strptime(nist_date, '%Y-%m-%d'))
+ except Exception as e:
+ print("\n\ndate_nist2srt:%s,%s,%s,%s" % (cve_name,e,cve_name,i))
+ exit(1)
+ return default
+
+ cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"')
+
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ print('')
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ nist_released = date_nist2srt(cve[ORM.CVE_PUBLISHEDDATE],cve[ORM.CVE_SRT_CREATED],cve[ORM.CVE_NAME],i)
+ nist_modified = date_nist2srt(cve[ORM.CVE_LASTMODIFIEDDATE],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_NAME],i)
+
+ sql = ''' UPDATE orm_cve
+ SET srt_created = ?, srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (nist_released, nist_modified, cve[ORM.CVE_ID],))
+ print("CVE DATE FIX COUNT=%d" % i)
+ conn.commit()
+
+#################################
+# fixup fix_missing_create_dates
+#
+
+# Reset CVE creation dates that are None to 2019-01-01, keeping them out of the way of reports
+def fix_missing_create_dates():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ fix_date = datetime.strptime('Jan 1 2019', '%b %d %Y')
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ fix = False
+ if not cve[ORM.CVE_SRT_CREATED] or (0 > cve[ORM.CVE_SRT_CREATED].find(':')):
+ srt_created = fix_date
+ fix = True
+ else:
+ srt_created = cve[ORM.CVE_SRT_CREATED]
+ #srt_created = datetime.strptime(cve[ORM.CVE_SRT_CREATED],'%Y-%m-%d')
+ if not cve[ORM.CVE_SRT_UPDATED] or (0 > cve[ORM.CVE_SRT_UPDATED].find(':')):
+ srt_updated = fix_date
+ fix = True
+ else:
+ srt_updated = cve[ORM.CVE_SRT_UPDATED]
+ #srt_updated = datetime.strptime(cve[ORM.CVE_SRT_UPDATED],'%Y-%m-%d')
+
+ if fix:
+ sql = ''' UPDATE orm_cve
+ SET srt_created = ?, srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_created, srt_updated, cve[ORM.CVE_ID],))
+ fix_count += 1
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fixup fix_public_reserved
+#
+
+# Reset CVE 'New-Reserved' if now public from NIST
+def fix_public_reserved():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cve WHERE status = "%s"' % ORM.STATUS_NEW_RESERVED)
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s %d\r' % (i,cve[ORM.CVE_NAME],cve[ORM.CVE_STATUS]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if cve[ORM.CVE_CVSSV3_BASESCORE] or cve[ORM.CVE_CVSSV2_BASESCORE]:
+ sql = ''' UPDATE orm_cve
+ SET status = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (ORM.STATUS_NEW, cve[ORM.CVE_ID],))
+ fix_count += 1
+    print("CVE STATUS FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
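+# The same reset can be expressed as a single statement when per-row progress output
+# is not needed; a sketch only (not wired into main(), assumes this module's srtDbName
+# and ORM globals). The != "" tests approximate the Python truthiness check above:
+def fix_public_reserved_bulk():
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    sql = '''UPDATE orm_cve SET status = ?
+             WHERE status = ? AND (cvssV3_baseScore != "" OR cvssV2_baseScore != "")'''
+    cur.execute(sql, (ORM.STATUS_NEW, ORM.STATUS_NEW_RESERVED,))
+    print("CVE STATUS BULK FIX COUNT=%d" % cur.rowcount)
+    conn.commit()
+    conn.close()
+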
+#################################
+# fix_remove_bulk_cve_history
+#
+
+# Remove a specific, accidental set of bulk CVE history updates that were intended to be background changes
+def fix_remove_bulk_cve_history():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+    print("Removing bulk CVE history records dated 2019-03-27/28\n")
+
+ cur.execute('SELECT * FROM orm_cvehistory WHERE date LIKE "2019-03-2%"')
+
+ i = 0
+ for cvehistory in cur:
+ i += 1
+
+ # Progress indicator support
+ if 9 == i % 10:
+# print('%05d: %20s %s \r' % (i,cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ if not (cvehistory[ORM.CVEHISTORY_DATE] in ('2019-03-28','2019-03-27')):
+ continue
+ if not (cvehistory[ORM.CVEHISTORY_COMMENT].startswith("UPDATE(CVE):")):
+ continue
+
+ cur_cve.execute('SELECT * FROM orm_cve WHERE id = "%s"' % cvehistory[ORM.CVEHISTORY_CVE_ID])
+ cve = cur_cve.fetchone()
+ if not (cve[ORM.CVE_NAME].startswith("CVE-200")):
+ continue
+
+ if 19 == fix_count % 20:
+ print("%4d) CVE=%s,CH_Comment=%s,CH_Date=%s" % (fix_count,cve[ORM.CVE_NAME],cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]))
+
+ mydata = cur_del.execute("DELETE FROM orm_cvehistory WHERE id=?", (cvehistory[ORM.CVEHISTORY_ID],))
+ fix_count += 1
+
+    print("CVE HISTORY REMOVE COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
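+# A dry-run counter for the remover above: it applies the same date and comment
+# filters (but not the per-CVE name check) without deleting anything. Sketch only,
+# assuming the same orm_cvehistory layout:
+def count_bulk_cve_history():
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    cur.execute('''SELECT COUNT(*) FROM orm_cvehistory
+                   WHERE date IN ("2019-03-28","2019-03-27")
+                   AND comment LIKE "UPDATE(CVE):%"''')
+    print("Candidate bulk history records: %d" % cur.fetchone()[0])
+    conn.close()
+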
+#################################
+# fix_defects_to_products
+#
+
+# Verify that every defect record points at a known product
+def fix_defects_to_products():
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+ # Find all products
+ products = {}
+ cur.execute('SELECT * FROM orm_product')
+ for product in cur:
+ id = product[ORM.PRODUCT_ID]
+ name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
+ products[id] = name
+ print("[%2d] %s" % (id,name))
+
+ # Test product field for all defects
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 99 == i % 100:
+ print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ product_id = defect[ORM.DEFECT_PRODUCT_ID]
+ if not product_id in products:
+ print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+
+# print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# find_multiple_defects
+#
+
+def find_multiple_defects():
+
+ conn = sqlite3.connect(srtDbName)
+ cur_i2d = conn.cursor()
+ cur_inv = conn.cursor()
+ cur_def = conn.cursor()
+
+ cur_inv.execute('SELECT * FROM orm_investigation')
+ count = 0
+ for i,investigation in enumerate(cur_inv):
+ if 0 == i % 100:
+            print("%4d) I=%-30s\r" % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+
+ cur_i2d.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = "%s"' % investigation[ORM.INVESTIGATION_ID])
+ i2d_list = cur_i2d.fetchall()
+ if 1 < len(i2d_list):
+ count += 1
+ for k,i2d in enumerate(i2d_list):
+ cur_def.execute('SELECT * FROM orm_defect WHERE id = "%s"' % i2d[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
+ defect = cur_def.fetchone()
+ if defect[ORM.DEFECT_NAME].startswith("LIN10"):
+ if 0 == k:
+ print("[%02d] Multiple defects for investigation '%s':" % (count,investigation[ORM.INVESTIGATION_NAME]))
+ print(" [%02d] %s: %s (%s)" % (k+1,defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_SUMMARY],ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)))
+ conn.close()
+
+#################################
+# find_duplicate_names
+#
+
+def find_duplicate_names():
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+
+ cur.execute('SELECT * FROM orm_cve')
+ cve_dict = {}
+ for i,cve in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) C=%-30s\r" % (i,cve[ORM.CVE_NAME]), end='')
+
+ if not cve[ORM.CVE_NAME] in cve_dict:
+ cve_dict[cve[ORM.CVE_NAME]] = cve[ORM.CVE_ID]
+ else:
+ print("\nERROR:Multiple cve names '%s'" % cve[ORM.CVE_NAME])
+ print(" a) id=%d" % cve_dict[cve[ORM.CVE_NAME]])
+ print(" b) id=%d" % cve[ORM.CVE_ID])
+ cve_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_vulnerability')
+ vul_dict = {}
+ for i,vulnerability in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) V=%-30s\r" % (i,vulnerability[ORM.VULNERABILITY_NAME]), end='')
+
+ if not vulnerability[ORM.VULNERABILITY_NAME] in vul_dict:
+ vul_dict[vulnerability[ORM.VULNERABILITY_NAME]] = vulnerability[ORM.VULNERABILITY_ID]
+ else:
+ print("\nERROR:Multiple vulnerability names '%s'" % vulnerability[ORM.VULNERABILITY_NAME])
+ print(" a) id=%d" % vul_dict[vulnerability[ORM.VULNERABILITY_NAME]])
+ print(" b) id=%d" % vulnerability[ORM.VULNERABILITY_ID])
+ vul_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_investigation')
+ inv_dict = {}
+ for i,investigation in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) I=%-30s\r" % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+
+ if not investigation[ORM.INVESTIGATION_NAME] in inv_dict:
+ inv_dict[investigation[ORM.INVESTIGATION_NAME]] = investigation[ORM.INVESTIGATION_ID]
+ else:
+ print("\nERROR:Multiple investigation names '%s'" % investigation[ORM.INVESTIGATION_NAME])
+ print(" a) id=%d" % inv_dict[investigation[ORM.INVESTIGATION_NAME]])
+ print(" b) id=%d" % investigation[ORM.INVESTIGATION_ID])
+ inv_dict = {}
+ print('')
+
+ cur.execute('SELECT * FROM orm_defect')
+ dev_dict = {}
+ for i,defect in enumerate(cur):
+ if 0 == i % 100:
+ print("%4d) D=%-30s\r" % (i,defect[ORM.DEFECT_NAME]), end='')
+
+ if not defect[ORM.DEFECT_NAME] in dev_dict:
+ dev_dict[defect[ORM.DEFECT_NAME]] = defect[ORM.DEFECT_ID]
+ else:
+ print("\nERROR:Multiple defect names '%s'" % defect[ORM.DEFECT_NAME])
+ print(" a) id=%d" % dev_dict[defect[ORM.DEFECT_NAME]])
+ print(" b) id=%d" % defect[ORM.DEFECT_ID])
+ dev_dict = {}
+ print('')
+
+ conn.close()
+
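+# SQLite can surface the same duplicates in one pass per table; an equivalent
+# query-only sketch for the CVE table (the other three tables follow the same shape):
+def find_duplicate_cve_names_sql():
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    cur.execute('''SELECT name, COUNT(*), GROUP_CONCAT(id) FROM orm_cve
+                   GROUP BY name HAVING COUNT(*) > 1''')
+    for name, count, ids in cur:
+        print("ERROR:Multiple cve names '%s' (%d): ids=%s" % (name, count, ids))
+    conn.close()
+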
+#################################
+# find_bad_links
+#
+
+def find_bad_links():
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+ cur_del = conn.cursor()
+
+ #
+ print('\n=== CVE Source Check ===\n')
+ #
+
+ cur.execute('SELECT * FROM orm_cvesource')
+ is_change = False
+ for i,cs in enumerate(cur):
+ cveid = cs[ORM.CVESOURCE_CVE_ID]
+ srcid = cs[ORM.CVESOURCE_DATASOURCE_ID]
+ if 0 == i % 100:
+ print("%4d) CVE=%6d,SRC=%6d\r" % (cs[ORM.CVESOURCE_ID],cveid,srcid), end='')
+ error = False
+ if (1 > cveid): error = True
+ if (1 > srcid): error = True
+
+ if error:
+ print("ERROR: [%4d] CVE=%6d,SRC=%6d" % (cs[ORM.CVESOURCE_ID],cveid,srcid))
+ if force:
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ cur_del.execute(sql, (cs[ORM.CVESOURCE_ID],))
+ is_change = True
+
+ print('')
+ if is_change:
+ conn.commit()
+
+ #
+ print('\n=== Defect to Product Check ===\n')
+ #
+
+ # Find all products
+ products = {}
+ cur.execute('SELECT * FROM orm_product')
+ for product in cur:
+ id = product[ORM.PRODUCT_ID]
+ name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
+ products[id] = name
+ print("[%2d] %s" % (id,name))
+
+ # Test product field for all defects
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 99 == i % 100:
+ print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ product_id = defect[ORM.DEFECT_PRODUCT_ID]
+ if not product_id in products:
+ print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+
+ conn.close()
+
+
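+# A complementary link check: orm_cvesource rows whose CVE record no longer exists.
+# Sketch only; it relies on the same table layout as the checks above:
+def find_orphan_cvesource():
+    conn = sqlite3.connect(srtDbName)
+    cur = conn.cursor()
+    cur.execute('''SELECT cs.id, cs.cve_id, cs.datasource_id FROM orm_cvesource AS cs
+                   LEFT JOIN orm_cve AS c ON c.id = cs.cve_id
+                   WHERE c.id IS NULL''')
+    for row in cur:
+        print("ORPHAN: cvesource=%d, cve_id=%s, datasource_id=%s" % (row[0], row[1], row[2]))
+    conn.close()
+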
+#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
global cmd_count
+ global force
# setup
parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')
@@ -194,13 +1065,28 @@ def main(argv):
parser.add_argument('--settings', '-S', action='store_const', const='settings', dest='command', help='Show the SRT Settings')
parser.add_argument('--remove-app-sources', dest='remove_app_sources', help='Remove data sources for a previous app')
+    parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalculate the CVE name sort values')
+ parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
+ parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
+ parser.add_argument('--fix-new-tags', action='store_const', const='fix_new_tags', dest='command', help='Reset new cve.tags')
+    parser.add_argument('--fix-srt-datetime', dest='fix_srt_datetime', help='Fix SRT dates to datetimes [all|c|v|i|d|history|ch|vh|ih]')
+ parser.add_argument('--fix-reset-nist-to-create', dest='fix_reset_nist_to_create', help='Bulk reset CVE [prefix*] srt_create dates to NIST release dates')
+    parser.add_argument('--fix-missing-create-dates', action='store_const', const='fix_missing_create_dates', dest='command', help='Reset missing CVE srt_created/srt_updated dates to a default date')
+ parser.add_argument('--fix-public-reserved', action='store_const', const='fix_public_reserved', dest='command', help='Reset CVE NEW_RESERVED if now public')
+    parser.add_argument('--fix-remove-bulk-cve-history', action='store_const', const='fix_remove_bulk_cve_history', dest='command', help='Remove an accidental set of bulk CVE history updates')
+
+    parser.add_argument('--find-multiple-defects', action='store_const', const='find_multiple_defects', dest='command', help='Find investigations that have multiple defects attached')
+    parser.add_argument('--find-duplicate-names', action='store_const', const='find_duplicate_names', dest='command', help='Find duplicate CVE, vulnerability, investigation, and defect names')
+
+    parser.add_argument('--fix-defects-to-products', action='store_const', const='fix_defects_to_products', dest='command', help='Check that all defects map to known products')
+ parser.add_argument('--find-bad-links', action='store_const', const='find_bad_links', dest='command', help='Find bad links, e.g. "orm_cvesource" (with "-f" to fix)')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
- parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
-
args = parser.parse_args()
master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
@@ -210,6 +1096,7 @@ def main(argv):
cmd_skip = int(args.skip)
if None != args.count:
cmd_count = int(args.count)
+ force = args.force
if args.sources:
if args.sources.startswith('s'):
@@ -226,10 +1113,40 @@ def main(argv):
sources('reset')
elif 'settings' == args.command:
settings()
- elif 'fix_new_reserved' == args.command:
- fix_new_reserved()
+
elif args.remove_app_sources:
remove_app_sources(args.remove_app_sources)
+
+ elif 'fix_name_sort' == args.command:
+ fix_name_sort()
+ elif 'fix_cve_recommend' == args.command:
+ fix_cve_recommend()
+ elif 'fix_new_reserved' == args.command:
+ fix_new_reserved()
+ elif 'fix_new_tags' == args.command:
+ fix_new_tags()
+ elif args.fix_srt_datetime:
+ fix_srt_datetime(args.fix_srt_datetime)
+ elif args.fix_reset_nist_to_create:
+ fix_reset_nist_to_create(args.fix_reset_nist_to_create)
+ elif 'fix_missing_create_dates' == args.command:
+ fix_missing_create_dates()
+ elif 'fix_public_reserved' == args.command:
+ fix_public_reserved()
+ elif 'fix_remove_bulk_cve_history' == args.command:
+ fix_remove_bulk_cve_history()
+ elif 'fix_defects_to_products' == args.command:
+ fix_defects_to_products()
+
+
+ elif 'find_multiple_defects' == args.command:
+ find_multiple_defects()
+ elif 'find_duplicate_names' == args.command:
+ find_duplicate_names()
+ elif 'find_bad_links' == args.command:
+ find_bad_links()
+
+
else:
print("Command not found")
master_log.close()
diff --git a/bin/debian/srtool_debian.py b/bin/debian/srtool_debian.py
index a8d8b3d4..094deda6 100755
--- a/bin/debian/srtool_debian.py
+++ b/bin/debian/srtool_debian.py
@@ -203,7 +203,9 @@ def main(argv):
# parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/dev_tools/history.py b/bin/dev_tools/history.py
new file mode 100755
index 00000000..90798747
--- /dev/null
+++ b/bin/dev_tools/history.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+import argparse
+from datetime import datetime, date, timedelta
+import sqlite3
+import re
+import subprocess
+
+# load the srt.sqlite schema indexes
+if os.path.isdir('bin'):
+ dir_path = 'bin'
+else:
+ dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+
+# Setup:
+verbose = False
+is_trial = False
+
+#######################################################################
+# Helper Routines
+# stamp = ['d|W',directory,timestamp]
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(srtool_basepath, 'backups')):
+ prefix = 'W' if 10 < len(directory) else 'd'
+ directory = os.path.join(srtool_basepath, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = 'n'
+ directory = srtool_basepath
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+    # Sort by time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
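+# Each entry returned by backup_list() is a list of the form [prefix, directory,
+# timestamp], where prefix is 'd' (daily backup), 'W' (weekly backup) or 'n' (the
+# live database). An illustrative entry (path and time are examples only):
+#   ['n', '/opt/srt', '2019-03-28 11:15:42 | Thursday, March 28 2019']
+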
+def run_command(cmnd):
+ print("Command:%s" % cmnd)
+ if not is_trial:
+ p = subprocess.Popen(cmnd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ for line in p.stdout.readlines():
+ if 0 < line.find(b'\r'):
+ continue
+ print(line)
+ retval = p.wait()
+
+#######################################################################
+# init_timestamps
+#
+
+def init_timestamps():
+
+ backup_dir = os.path.join(srtool_basepath, 'backups')
+ for directory in os.listdir(backup_dir):
+ directory = os.path.join(backup_dir, directory)
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(directory,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("DIR=%s,%s" % (directory,mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')))
+
+
+#######################################################################
+# list_history
+#
+
+def list_history():
+ stamps = backup_list()
+ for stamp in stamps:
+ print("DIR=%s,%-14s,%s" % (stamp[0],os.path.basename(stamp[1]),stamp[2]))
+
+#######################################################################
+# trace
+#
+
+def trace(item):
+ stamps = backup_list()
+ for stamp in stamps:
+ srtDbName = os.path.join(stamp[1],'srt.sqlite')
+ #print("db=%s" % srtDbName)
+
+ stamp_date = re.sub(' .*','',stamp[2])
+        stamp_day = re.sub(r'.*\| ','',stamp[2])
+ stamp_day = re.sub(',.*','',stamp_day)
+ stamp_text = '%s,%-9s %8s' % (stamp[0],stamp_day,stamp_date)
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+ if item.startswith('CVE-'):
+ cur.execute('SELECT * FROM orm_cve WHERE name = "%s"' % item)
+ for cve in cur:
+ status = ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)
+ print("%s] %-16s, %s, %s %s , %s %s " % (stamp_text, cve[ORM.CVE_NAME], status, cve[ORM.CVE_CVSSV3_BASESCORE],cve[ORM.CVE_CVSSV3_BASESEVERITY],cve[ORM.CVE_CVSSV2_BASESCORE],cve[ORM.CVE_CVSSV2_SEVERITY]))
+
+ conn.close()
+
+#######################################################################
+# replay_nist
+#
+
+def replay_nist():
+ stamps = backup_list()
+
+ # Read base database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ index = input("Which backup? ")
+ if not index:
+ return
+ try:
+ index = int(index)
+ except:
+ print("Not a number '%s'" % index)
+ return
+    if (index>=1) and (index<=len(stamps)):
+ print("You selected base:%s " % stamps[index-1][2])
+ else:
+ print("Out of range '%d'" % index)
+ return
+
+ # Read replay database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ replay_index = input("Which backup? ")
+ if not replay_index:
+ return
+ try:
+ replay_index = int(replay_index)
+ except:
+ print("Not a number '%s'" % replay_index)
+ return
+    if (replay_index>=1) and (replay_index<=len(stamps)):
+ print("You selected replay:%s " % stamps[replay_index-1][2])
+ else:
+ print("Out of range '%d'" % replay_index)
+ return
+
+ # Stop the SRTool server
+ cmnd = './bin/srt_stop.sh'
+ run_command(cmnd)
+
+ # Create restore backup
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if not os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (srtDbName,restore_db)
+ run_command(cmnd)
+
+    # Copy in the selected base database
+ cmnd = 'cp %s/srt.sqlite .' % stamps[index-1][1]
+ run_command(cmnd)
+
+ # Replay the NIST data
+# cmnd = "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=%s/data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta --force --force-cache" % stamps[replay_index-1][1]
+ cmnd = "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=%s/data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta --force --force-cache" % stamps[replay_index-1][1]
+
+ run_command(cmnd)
+
+ # Restart the SRTool server
+ cmnd = './bin/srt_start.sh'
+ run_command(cmnd)
+
+#######################################################################
+# restore
+#
+
+def restore():
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (restore_db,srtDbName)
+ run_command(cmnd)
+ else:
+ print("No restore database found")
+
+#######################################################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global is_trial
+
+ parser = argparse.ArgumentParser(description='history.py: manage the history database')
+ parser.add_argument('--init-timestamps', '-I', action='store_const', const='init_timestamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--list-history', '-l', action='store_const', const='list_history', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--trace', '-t', dest='trace', help='Trace an item')
+
+ parser.add_argument('--replay-nist', '-r', action='store_const', const='replay_nist', dest='command', help='Replay NIST update')
+ parser.add_argument('--restore', '-R', action='store_const', const='restore', dest='command', help='Restore database')
+
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
+    parser.add_argument('--trial', '-T', action='store_true', dest='is_trial', help='Trial run: show the commands without executing them')
+
+ args = parser.parse_args()
+ verbose = args.verbose
+ is_trial = args.is_trial
+
+ if 'init_timestamps' == args.command:
+ init_timestamps()
+ elif 'list_history' == args.command:
+ list_history()
+ elif args.trace:
+ trace(args.trace)
+ elif 'replay_nist' == args.command:
+ replay_nist()
+ elif 'restore' == args.command:
+ restore()
+
+
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.abspath(sys.argv[0]))
+ main(sys.argv[1:])
diff --git a/bin/dev_tools/update_status.sh b/bin/dev_tools/update_status.sh
new file mode 100755
index 00000000..243626a4
--- /dev/null
+++ b/bin/dev_tools/update_status.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+
+#
+# Helper routine to see if any active update commands are executing
+# in addition to showing whether the background updater is running.
+#
+# Sample result:
+# $ ./update_status.sh
+# 18149 python3 /opt/srt/bin/common/srtool_update.py --cron-start
+# Update:2019-03-16 12:29:21,bin/common/srtool_common.py --score-new-cves NEW --count=100
+# Done:2019-03-16 12:29:49,bin/common/srtool_common.py --score-new-cves NEW --count=100
+#
+# An "Update" without a "Done" is a running task
+#
+
+# Test if the background updater is running
+if [ -f .srtupdate.pid ] ; then
+ pid=`cat .srtupdate.pid`
+ updater=`ps -e -o pid,cmd | grep $pid | grep -v grep | grep cron`
+else
+ echo "No updater pid file found"
+ updater=""
+fi
+if [ -z "$updater" ] ; then
+ echo "!!! WARNING: UPDATER IS NOT RUNNING !!!"
+ cat .srtupdate.task
+ exit 1
+else
+ echo "UPDATER:$updater"
+fi
+
+# Test if there is an open update in progress
+cat .srtupdate.task
+is_start=`grep "^Update" .srtupdate.task | grep -v "<cron_start>"`
+is_stop=`grep "^Done" .srtupdate.task`
+if [ -z "$is_stop" ] ; then
+ echo "!!! UPDATE JOB RUNNING !!!"
+ exit 1
+else
+ echo "UPDATE PAUSED BETWEEN JOBS."
+ exit 0
+fi
+
diff --git a/bin/mitre/datasource_2010.json b/bin/mitre/datasource_2010.json
new file mode 100755
index 00000000..547de7a8
--- /dev/null
+++ b/bin/mitre/datasource_2010.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2010",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2010",
+ "cve_filter" : "CVE-2010",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2010.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2011.json b/bin/mitre/datasource_2011.json
new file mode 100755
index 00000000..2138154a
--- /dev/null
+++ b/bin/mitre/datasource_2011.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2011",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2011",
+ "cve_filter" : "CVE-2011",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2011.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2012.json b/bin/mitre/datasource_2012.json
new file mode 100755
index 00000000..49f32562
--- /dev/null
+++ b/bin/mitre/datasource_2012.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2012",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2012",
+ "cve_filter" : "CVE-2012",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2012.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2013.json b/bin/mitre/datasource_2013.json
new file mode 100755
index 00000000..d18fe739
--- /dev/null
+++ b/bin/mitre/datasource_2013.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2013",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2013",
+ "cve_filter" : "CVE-2013",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2013.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2014.json b/bin/mitre/datasource_2014.json
new file mode 100755
index 00000000..fc469f99
--- /dev/null
+++ b/bin/mitre/datasource_2014.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2014",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2014",
+ "cve_filter" : "CVE-2014",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2014.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2015.json b/bin/mitre/datasource_2015.json
index 0ce89f12..e91f7bd0 100755
--- a/bin/mitre/datasource_2015.json
+++ b/bin/mitre/datasource_2015.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2015.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2016.json b/bin/mitre/datasource_2016.json
index 36ca814f..5fba94b6 100755
--- a/bin/mitre/datasource_2016.json
+++ b/bin/mitre/datasource_2016.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2016.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2017.json b/bin/mitre/datasource_2017.json
index 2b326bf4..9047fd5e 100755
--- a/bin/mitre/datasource_2017.json
+++ b/bin/mitre/datasource_2017.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2017.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2018.json b/bin/mitre/datasource_2018.json
index ebb6eff2..567c46bd 100755
--- a/bin/mitre/datasource_2018.json
+++ b/bin/mitre/datasource_2018.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2018.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2019.json b/bin/mitre/datasource_2019.json
index 7113aa95..f106f88f 100755
--- a/bin/mitre/datasource_2019.json
+++ b/bin/mitre/datasource_2019.json
@@ -7,8 +7,8 @@
"name" : "MITRE",
"description" : "MITRE 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2019.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/srtool_mitre.py b/bin/mitre/srtool_mitre.py
index 3c6af89d..3928e51e 100755
--- a/bin/mitre/srtool_mitre.py
+++ b/bin/mitre/srtool_mitre.py
@@ -113,15 +113,16 @@ def get_cve_default_status(is_init,publishedDate,description):
if is_init:
# Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
#print("INIT status: %s versus %s" % (init_new_date,publishedDate))
- if not publishedDate or (publishedDate > init_new_date):
- # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
- reserved_pos = description.find('** RESERVED **')
- if (0 <= reserved_pos) and (20 > reserved_pos):
- return ORM.STATUS_NEW_RESERVED
- else:
+# if not publishedDate or (publishedDate > init_new_date):
+# # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
+# reserved_pos = description.find('** RESERVED **')
+# if (0 <= reserved_pos) and (20 > reserved_pos):
+# return ORM.STATUS_NEW_RESERVED
+# else:
+ if True:
return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+# else:
+# return ORM.STATUS_HISTORICAL
else:
return ORM.STATUS_NEW
@@ -276,6 +277,7 @@ def append_cve_database(is_init,file_xml):
cur_write = conn.cursor()
cur_ds = conn.cursor()
datasource_id = 0
+ srtool_today = datetime.today()
i = 0
for child in root:
@@ -317,12 +319,19 @@ def append_cve_database(is_init,file_xml):
# Get the default CVE status
status = get_cve_default_status(is_init,summary['Published'],summary['Description'])
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(),''))
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, srt_created, packages)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+                cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', '', summary['Description'], summary['Published'], summary['Modified'], 0, '', '', '', '', '', datetime.now(), datetime.now(), ''))
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
cve_id = cur.lastrowid
print("MITRE:ADDED %20s\r" % cve_name)
+ # Also create CVE history entry
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from MITRE')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve_id,update_comment,srtool_today,ORM.USER_SRTOOL_NAME,) )
+
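+            # Keeping the long column list and the VALUES tuple above aligned by hand
+            # is fragile; a dict-driven variant (sketch only, not the current code)
+            # builds both from one ordered mapping so the counts cannot drift:
+            #   from collections import OrderedDict
+            #   fields = OrderedDict([('name', cve_name), ('name_sort', get_name_sort(cve_name)), ('status', status)])
+            #   sql = 'INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(fields), ','.join('?' * len(fields)))
+            #   cur.execute(sql, tuple(fields.values()))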
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
if not cur_ds.execute(sql, (cve_id,datasource_id)).fetchone():
@@ -405,13 +414,16 @@ def main(argv):
# setup
parser = argparse.ArgumentParser(description='srtool_mitre.py: manage Mitre CVE data')
- parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file')
+ parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file, add CVEs')
parser.add_argument('--update', '-u', action='store_const', const='update_mitre', dest='command', help='Update the Mitre source CVE file')
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
+ parser.add_argument('--download-only', action='store_const', const='download_mitre', dest='command', help='Download the Mitre source CVE file only')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
parser.add_argument('--dump', '-D', action='store_const', const='dump', dest='command', help='test dump data')
parser.add_argument('--dump2', '-2', action='store_const', const='dump2', dest='command', help='test dump data')
@@ -449,12 +461,15 @@ def main(argv):
print("ERROR: missing --url_file parameter")
exit(1)
+    # Currently there is no difference between the initialize and update actions
if 'init_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(True,args.cve_file)
elif 'update_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(False,args.cve_file)
+ elif 'download_mitre' == args.command:
+ init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
else:
print("Command not found")
diff --git a/bin/nist/datasource.json b/bin/nist/datasource.json
index 45210e40..de52a6b4 100644
--- a/bin/nist/datasource.json
+++ b/bin/nist/datasource.json
@@ -20,9 +20,10 @@
"source" : "nist",
"name" : "NIST",
"description" : "NIST Modified Data",
+ "attributes" : "PREVIEW-SOURCE",
"cve_filter" : "",
"init" : "",
- "update" : "bin/nist/srtool_nist.py -i --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-modified.json %command%",
"update_frequency" : "2",
"_comment_" : "Update at 7:00 am",
diff --git a/bin/nist/datasource_2002.json b/bin/nist/datasource_2002.json
new file mode 100755
index 00000000..f4e62d34
--- /dev/null
+++ b/bin/nist/datasource_2002.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2002",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2002",
+ "cve_filter" : "CVE-2002",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2002.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2003.json b/bin/nist/datasource_2003.json
new file mode 100755
index 00000000..8bcf620a
--- /dev/null
+++ b/bin/nist/datasource_2003.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2003",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2003",
+ "cve_filter" : "CVE-2003",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2003.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2004.json b/bin/nist/datasource_2004.json
new file mode 100755
index 00000000..3839e125
--- /dev/null
+++ b/bin/nist/datasource_2004.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2004",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2004",
+ "cve_filter" : "CVE-2004",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2004.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2005.json b/bin/nist/datasource_2005.json
new file mode 100755
index 00000000..a3acfaa7
--- /dev/null
+++ b/bin/nist/datasource_2005.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2005",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2005",
+ "cve_filter" : "CVE-2005",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2005.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2006.json b/bin/nist/datasource_2006.json
new file mode 100755
index 00000000..6f3c508e
--- /dev/null
+++ b/bin/nist/datasource_2006.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2006",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2006",
+ "cve_filter" : "CVE-2006",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2006.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2007.json b/bin/nist/datasource_2007.json
new file mode 100755
index 00000000..5ea00944
--- /dev/null
+++ b/bin/nist/datasource_2007.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2007",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2007",
+ "cve_filter" : "CVE-2007",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2007.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2008.json b/bin/nist/datasource_2008.json
new file mode 100755
index 00000000..891d3046
--- /dev/null
+++ b/bin/nist/datasource_2008.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2008",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2008",
+ "cve_filter" : "CVE-2008",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2008.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2009.json b/bin/nist/datasource_2009.json
new file mode 100755
index 00000000..2bebc343
--- /dev/null
+++ b/bin/nist/datasource_2009.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2009",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2009",
+ "cve_filter" : "CVE-2009",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2009.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2010.json b/bin/nist/datasource_2010.json
new file mode 100755
index 00000000..21030e45
--- /dev/null
+++ b/bin/nist/datasource_2010.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2010",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2010",
+ "cve_filter" : "CVE-2010",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2010.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2011.json b/bin/nist/datasource_2011.json
new file mode 100755
index 00000000..5b0bb052
--- /dev/null
+++ b/bin/nist/datasource_2011.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2011",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2011",
+ "cve_filter" : "CVE-2011",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2011.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2012.json b/bin/nist/datasource_2012.json
new file mode 100755
index 00000000..69d40bad
--- /dev/null
+++ b/bin/nist/datasource_2012.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2012",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2012",
+ "cve_filter" : "CVE-2012",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2012.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2013.json b/bin/nist/datasource_2013.json
new file mode 100755
index 00000000..2f2d313a
--- /dev/null
+++ b/bin/nist/datasource_2013.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2013",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2013",
+ "cve_filter" : "CVE-2013",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2013.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2014.json b/bin/nist/datasource_2014.json
new file mode 100755
index 00000000..619197c1
--- /dev/null
+++ b/bin/nist/datasource_2014.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2014",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2014",
+ "cve_filter" : "CVE-2014",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2014.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2015.json b/bin/nist/datasource_2015.json
index ccca2f3f..7600aac1 100755
--- a/bin/nist/datasource_2015.json
+++ b/bin/nist/datasource_2015.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2015.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2015.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2016.json b/bin/nist/datasource_2016.json
index 9c87ef92..55244a2b 100755
--- a/bin/nist/datasource_2016.json
+++ b/bin/nist/datasource_2016.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2016.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2016.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2017.json b/bin/nist/datasource_2017.json
index 40695ef5..2c68ed89 100755
--- a/bin/nist/datasource_2017.json
+++ b/bin/nist/datasource_2017.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2017.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2017.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2018.json b/bin/nist/datasource_2018.json
index cf87ca2a..03d09612 100755
--- a/bin/nist/datasource_2018.json
+++ b/bin/nist/datasource_2018.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2018.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2018.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2019.json b/bin/nist/datasource_2019.json
index f3315526..269f77bc 100755
--- a/bin/nist/datasource_2019.json
+++ b/bin/nist/datasource_2019.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2019.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2019.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2020.json b/bin/nist/datasource_2020.json
new file mode 100755
index 00000000..e4bb63dc
--- /dev/null
+++ b/bin/nist/datasource_2020.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2020",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2020",
+ "cve_filter" : "CVE-2020",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2020.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
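Note the split above: the pre-2010 sources are registered with --download-only and load their CVEs on demand through their "lookup" command, while 2010 through 2020 use --init_nist/--update_nist for full loads. A hypothetical driver that replays one record's "init" command might look like this (the run_datasource_init helper is illustrative; the real scheduling lives in the SRTool management layer, not in these files):

    import json, shlex, subprocess

    def run_datasource_init(path):
        # Read one bin/nist/datasource_*.json file and execute its "init" command
        with open(path) as f:
            for entry in json.load(f)["datasource"]:
                print("Initializing %s (%s)" % (entry["description"], entry["key"]))
                subprocess.check_call(shlex.split(entry["init"]))

    # e.g. run_datasource_init("bin/nist/datasource_2020.json")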
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index 37116140..c7a61dce 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -21,8 +21,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
### Usage Examples (run from top level directory)
-# Updating a specific NIST feed: ./bin/srtool.py -u "NIST JSON Data 2017"
-# Updating with the NIST incremental feed: ./bin/srtool.py -U
+# Updating a specific NIST feed: ./bin/nist/srtool_nist.py -u "NIST JSON Data 2017"
+# Updating with the NIST incremental feed: ./bin/nist/srtool_nist.py -U
import os
import sys
@@ -33,6 +33,7 @@ import json
from datetime import datetime, date, timedelta
import pytz
from urllib.request import urlopen, URLError
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
@@ -44,15 +45,30 @@ lookupTable = []
cveIndex = {}
db_change = False
+count_read = 0
+count_create = 0
+count_update = 0
+
+ACTION_INIT = 'Initialize'
+ACTION_UPDATE = 'Update'
+ACTION_INCREMENT = 'Increment'
+ACTION_DOWNLOAD = 'Download'
+ACTION_UPDATE_CVE = 'Update_Cve'
+
srtDbName = 'srt.sqlite'
srtErrorLog = 'srt_errors.txt'
verbose = False
-
-nist_cve_url_base = 'https://static.nvd.nist.gov/feeds/json/cve/1.0'
-nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.0'
+force_update = False
+force_cache = False
+update_skip_history = False
+cmd_skip = 0
+cmd_count = 0
+
+nist_cve_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
+nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
nist_cache_dir = 'data/cache/nist'
-#################################
+#######################################################################
# Helper methods
#
@@ -78,6 +94,17 @@ def srt_error_log(msg):
f1.write("|" + msg + "|\n" )
f1.close()
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = int(os.environ['SRTDBG_LVL']) if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
def get_name_sort(cve_name):
try:
a = cve_name.split('-')
@@ -86,10 +113,159 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
-# Newly discovered or updated CVEs default to NEW for triage
-# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+#######################################################################
+# CVE_ItemToSummary: Translate a CVE_Item JSON node to a dictionary
+
+def do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key):
+ cpe_list = ''
+ for cpe in cpe_or_node[key]:
+ cpe23Uri = cpe['cpe23Uri']
+ if 'cpeMatchString' in cpe:
+ cpeMatchString = cpe['cpeMatchString']
+ else:
+ cpeMatchString = ''
+ if 'versionEndIncluding' in cpe:
+ versionEndIncluding = cpe['versionEndIncluding']
+ else:
+ versionEndIncluding = ''
+ cpe_list += '%s,%s,%s,%s|' % (cpe['vulnerable'],cpe23Uri,cpeMatchString,versionEndIncluding)
+ return cpe_list
+
+def nist_scan_configuration_or(cpe_or_node, name, and_enum):
+ cpe_list = '[or]|'
+ found = 0
+ if 'cpe' in cpe_or_node:
+ if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe")
+ cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe')
+ found += 1
+ if 'cpe_match' in cpe_or_node:
+ if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")
+ cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe_match')
+ found += 1
+ cpe_list += '[/or]|'
+
+ if verbose and (not found):
+ print("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s" % cpe_or_node)
+ srt_error_log("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s" % cpe_or_node)
+ return cpe_list
+
+def fixscore(score):
+ if not score:
+ return ''
+ return '%02.2f' % float(score)
+
+def CVE_ItemToSummary(CVE_Item,header_only=False):
+ summary = {}
+
+ #
+ # Assure that all fields are at least defined as empty string
+ #
+
+ # Header info
+ summary['name'] = CVE_Item['cve']['CVE_data_meta']['ID']
+ summary['cve_data_type'] = CVE_Item['cve']['data_type']
+ summary['cve_data_format'] = CVE_Item['cve']['data_format']
+ summary['cve_data_version'] = CVE_Item['cve']['data_version']
+
+ summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
+ summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
+ summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
+ summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % summary['name']
+ summary['url_title'] = 'NIST Link'
+
+ # cvssV3
+ is_v3 = ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact'])
+ baseMetricV3 = CVE_Item['impact']['baseMetricV3'] if is_v3 else ''
+ summary['cvssV3_baseScore'] = baseMetricV3['cvssV3']['baseScore'] if is_v3 else ''
+ summary['cvssV3_baseSeverity'] = baseMetricV3['cvssV3']['baseSeverity'] if is_v3 else ''
+ summary['cvssV3_vectorString'] = baseMetricV3['cvssV3']['vectorString'] if is_v3 else ''
+ summary['cvssV3_exploitabilityScore'] = baseMetricV3['exploitabilityScore'] if is_v3 else ''
+ summary['cvssV3_impactScore'] = baseMetricV3['impactScore'] if is_v3 else ''
+ summary['cvssV3_attackVector'] = baseMetricV3['cvssV3']['attackVector'] if is_v3 else ''
+ summary['cvssV3_attackComplexity'] = baseMetricV3['cvssV3']['attackComplexity'] if is_v3 else ''
+ summary['cvssV3_privilegesRequired'] = baseMetricV3['cvssV3']['privilegesRequired'] if is_v3 else ''
+ summary['cvssV3_userInteraction'] = baseMetricV3['cvssV3']['userInteraction'] if is_v3 else ''
+ summary['cvssV3_scope'] = baseMetricV3['cvssV3']['scope'] if is_v3 else ''
+ summary['cvssV3_confidentialityImpact'] = baseMetricV3['cvssV3']['confidentialityImpact'] if is_v3 else ''
+ summary['cvssV3_integrityImpact'] = baseMetricV3['cvssV3']['integrityImpact'] if is_v3 else ''
+ summary['cvssV3_availabilityImpact'] = baseMetricV3['cvssV3']['availabilityImpact'] if is_v3 else ''
+
+ # cvssV2
+ is_v2 = ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact'])
+ baseMetricV2 = CVE_Item['impact']['baseMetricV2'] if is_v2 else ''
+ summary['cvssV2_baseScore'] = baseMetricV2['cvssV2']['baseScore'] if is_v2 else ''
+ summary['cvssV2_severity'] = baseMetricV2['severity'] if is_v2 else ''
+ summary['cvssV2_vectorString'] = baseMetricV2['cvssV2']['vectorString'] if is_v2 else ''
+ summary['cvssV2_exploitabilityScore'] = baseMetricV2['exploitabilityScore'] if is_v2 else ''
+ summary['cvssV2_impactScore'] = baseMetricV2['impactScore'] if is_v2 else ''
+ summary['cvssV2_accessVector'] = baseMetricV2['cvssV2']['accessVector'] if is_v2 else ''
+ summary['cvssV2_accessComplexity'] = baseMetricV2['cvssV2']['accessComplexity'] if is_v2 else ''
+ summary['cvssV2_authentication'] = baseMetricV2['cvssV2']['authentication'] if is_v2 else ''
+ summary['cvssV2_confidentialityImpact'] = baseMetricV2['cvssV2']['confidentialityImpact'] if is_v2 else ''
+ summary['cvssV2_integrityImpact'] = baseMetricV2['cvssV2']['integrityImpact'] if is_v2 else ''
+
+ # SRTool specific meta data
+ summary['priority'] = '0'
+ summary['status'] = '0'
+ summary['comments'] = ''
+ summary['comments_private'] = ''
+ summary['tags'] = ''
+ summary['public'] = '1' # Always true since NIST is public source
+ summary['recommend'] = '0'
+ summary['recommend_list'] = ''
+ summary['publish_state'] = ORM.PUBLISH_UNPUBLISHED
+ summary['publish_date'] = ''
+ summary['acknowledge_date'] = ''
+ summary['packages'] = ''
+
+ # Fix score to sortable string value
+ summary['cvssV3_baseScore'] = '%02.2f' % float(summary['cvssV3_baseScore']) if summary['cvssV3_baseScore'] else ''
+ summary['cvssV2_baseScore'] = '%02.2f' % float(summary['cvssV2_baseScore']) if summary['cvssV2_baseScore'] else ''
+
+ # The CVE table only needs the header, CVE details needs the rest
+ if header_only:
+ summary['cpe_list'] = ''
+ summary['ref_list'] = ''
+ return summary
+
+ configurations = CVE_Item['configurations']
+ is_first_and = True
+ summary['cpe_list'] = ''
+ for i, config in enumerate(configurations['nodes']):
+ summary['cpe_list'] += '[config]|'
+ summary['cpe_list'] += '[and]|'
+ if "AND" == config['operator']:
+ # create AND record
+ if not is_first_and:
+ summary['cpe_list'] += '[/and]|'
+ summary['cpe_list'] += '[and]|'
+ #is_first_and = False
+ if 'children' in config:
+ for j, cpe_or_node in enumerate(config['children']):
+ if "OR" == cpe_or_node['operator']:
+ summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, summary['name'], j)
+ else:
+ print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
+ elif "OR" == config['operator']:
+ summary['cpe_list'] += nist_scan_configuration_or(config, summary['name'], 0)
+ else:
+ print("ERROR CONFIGURE:OP?:%s" % config['operator'])
+ summary['cpe_list'] += '[/and]|'
+ summary['cpe_list'] += '[/config]|'
+
+ summary['ref_list'] = ''
+ for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
+ summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'])
+
+ return summary
+
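As a usage sketch, CVE_ItemToSummary() only needs the JSON fields it actually reads; with header_only=True the configurations and references are skipped entirely. The record below is fabricated for illustration:

    minimal_item = {
        'cve': {
            'CVE_data_meta': {'ID': 'CVE-2020-0001'},
            'data_type': 'CVE', 'data_format': 'MITRE', 'data_version': '4.0',
            'description': {'description_data': [{'value': 'Example description'}]},
            'references': {'reference_data': []},
        },
        'publishedDate': '2020-01-08T19:15Z',
        'lastModifiedDate': '2020-01-14T21:35Z',
    }
    summary = CVE_ItemToSummary(minimal_item, header_only=True)
    # summary['name'] == 'CVE-2020-0001', summary['publishedDate'] == '2020-01-08',
    # and all CVSS fields are empty strings because there is no 'impact' block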
+#######################################################################
+# get_cve_default_status: bootstrap initial CVE states
+# Newly discovered or updated CVEs default to NEW for triage
+# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+
init_new_date = None
-def get_cve_default_status(is_init,publishedDate):
+def get_cve_default_status(action,publishedDate):
global init_new_date
if None == init_new_date:
@@ -108,31 +284,387 @@ def get_cve_default_status(is_init,publishedDate):
#print("\nPreset new data = %s" % init_new_date.strftime("%Y-%m-%d"))
init_new_date = init_new_date.strftime("%Y-%m-%d")
- if is_init:
+ if ACTION_INIT == action:
# Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
#print("INIT status: %s > %s" % (publishedDate, init_new_date))
- if not publishedDate or (publishedDate > init_new_date):
+# if not publishedDate or (publishedDate > init_new_date):
+ if True:
return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+# else:
+# return ORM.STATUS_HISTORICAL
else:
return ORM.STATUS_NEW
+#######################################################################
+# cwe and cve2cwe
+#
+# Generates and executes appropriate SQLite query for a new CWE
+# returns CWE_ID
+
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cwe_query(conn, value):
+ CWE_ID = 0
+ CWE_VULNERABLE_COUNT = 6
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cwetable WHERE name=?'''
+ cwe = cur.execute(sql, (value,)).fetchone()
+ if cwe is None:
+ sql = '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)'''
+ cur.execute(sql, (value,))
+ cwe_id = cur.lastrowid
+ cur.close()
+ return cwe_id
+ else:
+ sql = ''' UPDATE orm_cwetable
+ SET vulnerable_count = ?
+ WHERE id = ?'''
+ cur.execute(sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
+ conn.commit()
+ cur.close()
+ return cwe[CWE_ID]
+
+# Generates and executes the appropriate SQLite query for a new CVE-to-CWE relation
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cve2cwe_query(conn, cve_id, cwe_id):
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?'''
+ cve2cwe = cur.execute(sql, (cve_id, cwe_id)).fetchone()
+ if cve2cwe is None:
+ sql = '''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)'''
+ cur.execute(sql, (cve_id, cwe_id))
+ conn.commit()
+ cur.close()
+
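A short usage sketch of the two helpers above; cve_id would be the primary key of an existing orm_cve row, and the values are illustrative:

    conn = sqlite3.connect(srtDbName)
    cwe_id = sql_cwe_query(conn, 'CWE-79')    # creates the CWE row or bumps its vulnerable_count
    sql_cve2cwe_query(conn, cve_id, cwe_id)   # adds the CVE-to-CWE relation if missing
    conn.commit()                             # make sure everything is flushed
    conn.close()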
+#######################################################################
+#
+# Generates and executes appropriate SQLite query for CVE depending on situation
+# new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
+# returns (CVE_ID, BOOL) tuple, True if insert or update executed
+#
+
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cve_query(action, conn, summary, log):
+ global count_create
+ global count_update
+
+ is_change = False
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cve WHERE name=?'''
+ cve_current = cur.execute(sql, (summary['name'],)).fetchone()
+ cve_id = -1
+ srtool_today = datetime.today()
+ if cve_current is None:
+ count_create += 1
+
+ # Get the default CVE status
+ summary['status'] = get_cve_default_status(action,summary['publish_date'])
+
+# # Offsets... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+# sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, srt_created, packages)
+# VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+# cur.execute(sql, (cve.name, get_name_sort(cve.name), cve.priority, status, cve.comments, cve.comments_private, cve.tags, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, 1, cve.publish_state, cve.publish_date, cve.acknowledge_date, cve.description, cve.publishedDate, cve.lastModifiedDate, cve.recommend, cve.recommend_list, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, srtool_today, srtool_today,''))
+# # Offsets... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+
+ sql_elements = [
+ 'name',
+ 'name_sort',
+ 'priority',
+ 'status',
+ 'comments',
+ 'comments_private',
+ 'tags',
+ 'cve_data_type',
+ 'cve_data_format',
+ 'cve_data_version',
+ 'public',
+ 'publish_state',
+ 'publish_date',
+ 'acknowledge_date',
+ 'description',
+ 'publishedDate',
+ 'lastModifiedDate',
+ 'recommend',
+ 'recommend_list',
+ 'cvssV3_baseScore',
+ 'cvssV3_baseSeverity',
+ 'cvssV2_baseScore',
+ 'cvssV2_severity',
+ 'packages',
+ 'srt_updated',
+ 'srt_created',
+ ]
+ sql_qmarks = []
+ for i in range(len(sql_elements)):
+ sql_qmarks.append('?')
+ sql_values = (
+ summary['name'],
+ get_name_sort(summary['name']),
+ summary['priority'],
+ summary['status'],
+ summary['comments'],
+ summary['comments_private'],
+ summary['tags'],
+ summary['cve_data_type'],
+ summary['cve_data_format'],
+ summary['cve_data_version'],
+ summary['public'],
+ summary['publish_state'],
+ summary['publish_date'],
+ summary['acknowledge_date'],
+ summary['description'],
+ summary['publishedDate'],
+ summary['lastModifiedDate'],
+ summary['recommend'],
+ summary['recommend_list'],
+ summary['cvssV3_baseScore'],
+ summary['cvssV3_baseSeverity'],
+ summary['cvssV2_baseScore'],
+ summary['cvssV2_severity'],
+ summary['packages'],
+ srtool_today,
+ srtool_today
+ )
+
+ #print('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
+ cur.execute('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
+
+ is_change = True
+ cve_id = cur.lastrowid
+ if log: log.write("\tINSERTED '%s'\n" % summary['name'])
+
+ # Also create CVE history entry
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from NIST')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ elif (cve_current[ORM.CVE_LASTMODIFIEDDATE] < summary['lastModifiedDate']) or force_update:
+ count_update += 1
+
+ cve_id = cve_current[ORM.CVE_ID]
+
+ # If CVE was 'reserved', promote to "new'
+ if cve_current[ORM.CVE_STATUS] in (ORM.STATUS_NEW_RESERVED,):
+ summary['status'] = ORM.STATUS_NEW
+ else:
+ summary['status'] = cve_current[ORM.CVE_STATUS]
+
+ # If CVE is "new', reset score date so that it will be rescanned
+ if summary['status'] == ORM.STATUS_NEW:
+ summary['score_date'] = None
+ else:
+ summary['score_date'] = cve_current[ORM.CVE_SCORE_DATE]
+
+ ### TO-DO
+ ### Capture CPE changes
+ ###
+
+ # Update the CVE record
+ srt_updated = srtool_today if not update_skip_history else cve_current[ORM.CVE_SRT_UPDATED]
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?,
+ recommend_list = ?,
+ cve_data_type = ?,
+ cve_data_format = ?,
+ cve_data_version = ?,
+ status = ?,
+ description = ?,
+ publishedDate = ?,
+ lastModifiedDate = ?,
+ cvssV3_baseScore = ?,
+ cvssV3_baseSeverity = ?,
+ cvssV2_baseScore = ?,
+ cvssV2_severity = ?,
+ score_date = ?,
+ srt_updated = ?
+ WHERE id = ?'''
+ sql_values = (
+ summary['recommend'],
+ summary['recommend_list'],
+ summary['cve_data_type'],
+ summary['cve_data_format'],
+ summary['cve_data_version'],
+ summary['status'],
+ summary['description'],
+ summary['publishedDate'],
+ summary['lastModifiedDate'],
+ summary['cvssV3_baseScore'],
+ summary['cvssV3_baseSeverity'],
+ summary['cvssV2_baseScore'],
+ summary['cvssV2_severity'],
+ summary['score_date'],
+ srt_updated,
+ cve_id)
+ cur.execute(sql, sql_values)
+ is_change = True
+
+ if log: log.write("\tUPDATED '%s'\n" % summary['name'])
+ #print('UPDATED: %s (%s)' % (sql,sql_values))
+
+ # Prepare the history comment
+ if not update_skip_history:
+ history_update = []
+ if (cve_current[ORM.CVE_CVSSV3_BASESCORE].strip() != summary['cvssV3_baseScore'].strip() ) or \
+ (cve_current[ORM.CVE_CVSSV3_BASESEVERITY].strip() != summary['cvssV3_baseSeverity'].strip()):
+ history_update.append(ORM.UPDATE_SEVERITY_V3 % (
+ "%s %s" % (cve_current[ORM.CVE_CVSSV3_BASESCORE],cve_current[ORM.CVE_CVSSV3_BASESEVERITY]),
+ "%s %s" % (summary['cvssV3_baseScore'],summary['cvssV3_baseSeverity'])))
+ if (cve_current[ORM.CVE_CVSSV2_BASESCORE].strip() != summary['cvssV2_baseScore'].strip()) or \
+ (cve_current[ORM.CVE_CVSSV2_SEVERITY].strip() != summary['cvssV2_severity'].strip() ):
+ history_update.append(ORM.UPDATE_SEVERITY_V2 % (
+ "%s %s" % (cve_current[ORM.CVE_CVSSV2_BASESCORE],cve_current[ORM.CVE_CVSSV2_SEVERITY]),
+ "%s %s" % (summary['cvssV2_baseScore'],summary['cvssV2_severity'])))
+ if cve_current[ORM.CVE_DESCRIPTION].strip() != summary['description'].strip():
+ history_update.append(ORM.UPDATE_DESCRIPTION)
+ if cve_current[ORM.CVE_LASTMODIFIEDDATE] != summary['lastModifiedDate']:
+ history_update.append(ORM.UPDATE_LASTMODIFIEDDATE % (cve_current[ORM.CVE_LASTMODIFIEDDATE],summary['lastModifiedDate']))
+ if history_update:
+ # Add update to history
+ update_comment = "%s%s" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_CVE,';'.join(history_update))
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ ### TO-DO
+ ### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
+ ###
+
+ else:
+ is_change = False
+ if log: log.write("\tSKIPPED '%s'\n" % summary['name'])
+ cur.close()
+ return (cve_id, is_change)
+
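A sketch of the expected call pattern; summary comes from CVE_ItemToSummary(), log may be None (as in update_cve_list() further below), and the INSERT/UPDATE only lands once the caller commits:

    conn = sqlite3.connect(srtDbName)
    summary = CVE_ItemToSummary(CVE_Item)     # CVE_Item parsed from a NIST feed
    cve_id, is_change = sql_cve_query(ACTION_UPDATE, conn, summary, None)
    if is_change:
        conn.commit()
    conn.close()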
+#######################################################################
+# nist_json: parses the JSON feed and updates the database as necessary; commits to the database on success
+#
+# Will EITHER create a new record in orm_cve if the CVE does not exist, OR overwrite
+# every field if the existing CVE is out-of-date, OR ignore the CVE
+# Requires json to be formatted with NIST Json schema:
+# https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema
+
+def nist_json(action, summary_json_url, datasource, datasource_file, log, date_new):
+ import gzip
+ global count_read
+
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
+
+ # If this is a volatile preview source:
+ # (a) Fetch the existing CveSource matches into a list
+ # (b) Remove found matches from that list
+ # (c) Delete remaining obsolete CveSource entries
+ preview_dict = {}
+ if "PREVIEW-SOURCE" in datasource[ORM.DATASOURCE_ATTRIBUTES]:
+ sql = '''SELECT * FROM orm_cvesource WHERE datasource_id=? '''
+ for d2c in cur.execute(sql, (datasource[ORM.DATASOURCE_ID],)):
+ preview_dict[d2c[ORM.CVESOURCE_CVE_ID]] = d2c[ORM.CVESOURCE_ID]
+
+ # If we have already cached a current version of the NIST file, read from it directly
+
+ # The value 'date_new' is in UTC, so convert the fetched file date
+ if (not force_cache) and ((not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True))):
+ # Fetch and/or refresh upstream CVE file
+ response = urlopen(summary_json_url)
+ dct = json.loads(gzip.decompress(response.read()).decode('utf-8')) #uncompress and decode json.gz
+
+ #save datasource feed to "data"
+ datasource_file_fd = open(datasource_file, 'w+')
+ datasource_file_fd.write(json.dumps(dct))
+ else:
+ # Use cached CVE file
+ with open(datasource_file) as json_data:
+ dct = json.load(json_data)
+
+ # Download the upstream CVE source file only
+ if ACTION_DOWNLOAD == action:
+ return
+
+ CVE_Items = dct['CVE_Items']
+ total = len(CVE_Items)
+
+ cache_path = os.path.join(srtool_basepath, nist_cache_dir)
+ #begin parsing each cve in the JSON data
+ for i, CVE_Item in enumerate(CVE_Items):
+ count_read += 1
-#################################
+ # Development support
+ if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
+ break
+
+ #print('.', end='', flush=True)
+ try:
+ # Translate a CVE_Item JSON node
+ summary = CVE_ItemToSummary(CVE_Item)
+
+ # Indicate progress
+ print('[%4d]%30s\r' % ((i * 100)/ total, summary['name']), end='', flush=True)
+
+ #if cve exists in cache, delete it
+ cve_path = os.path.join(cache_path, '%s.json' % summary['name'])
+ if (os.path.isfile(cve_path)):
+ os.remove(cve_path)
+
+ # Check if the cve object needs to be uploaded to the database (cases: new cve, modified cve, or no changes)
+ # if true, apply changes. Else ignore and continue
+ cve_id, is_change = sql_cve_query(action, conn, summary, log)
+
+ # Remove this found CVE from the preview check list, if present
+ preview_dict.pop(cve_id,None)
+
+ # If the CVE changed, check and update the associated records (CWEs, references, and CVE2CWE)
+ # sql_cwe_query and sql_cve2cwe_query require a valid CVE record primary key, so they must always run after sql_cve_query
+ if is_change:
+ problem_list = CVE_Item['cve']['problemtype']['problemtype_data']
+ for problem_Item in problem_list:
+ description_list = problem_Item['description']
+ for description_Item in description_list:
+ value = description_Item['value']
+ cwe_id = sql_cwe_query(conn, value)
+ sql_cve2cwe_query(conn, cve_id, cwe_id)
+
+ # Add this data source to the CVE
+ sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
+ exists = cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID])).fetchone()
+ if exists is None:
+ sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID]))
+
+ # Safety commit as we go
+ if 199 == (i % 200):
+ conn.commit()
+ print('')
+
+ except Exception as e:
+ print(traceback.format_exc())
+ print("UPDATE FAILED")
+ cur.close()
+ conn.close()
+ raise Exception("Failed to import CVEs %s: %s" % (datasource_file, e))
+ print()
+ log.write("total number of CVEs checked: %s\n" % total)
+
+ # Now delete any un-matched obsolete CveSource entries
+ for old_cve_id in preview_dict.keys():
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ cur.execute(sql, (preview_dict[old_cve_id],))
+
+ conn.commit()
+ cur.close()
+ conn.close()
+
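For orientation, the fetch-and-decompress path inside nist_json() reduces to the following standalone snippet (feed name taken from the 2020 datasource above):

    import gzip, json
    from urllib.request import urlopen

    url = '%s/%s' % (nist_cve_url_base, 'nvdcve-1.1-2020.json.gz')
    dct = json.loads(gzip.decompress(urlopen(url).read()).decode('utf-8'))
    print('%d CVE_Items in feed' % len(dct['CVE_Items']))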
+#######################################################################
# check for updates and apply if any
#
# Change orm_datasource schema to make LastModifiedDate a datetime object
# datetime and urllib imports may be in an inappropriate location (top of file currently)
+#
+# Gets CVE-Modified feed, determines if we are out of date, and applies updates if true
+# tracks history in update_log.txt
-#gets CVE-Modified feed, determines if we are out of date, and applies updates if true
-#tracks history in update_log.txt
-#incremental argument is boolean that idicates if bulk updating or incremental updating.
-def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, incremental, force_update):
+def update_nist(action,datasource_description, url_file, url_meta, cve_file):
nist_cve_url = '%s/%s' % (nist_cve_url_base,url_file)
nist_meta_url = '%s/%s' % (nist_meta_url_base,url_meta)
- nist_file = os.path.join(srtool_basepath,cve_file)
+ nist_file = os.path.join(srtool_basepath,cve_file) if not cve_file.startswith('/') else cve_file
#update log (1=Monday, 7= Sunday)
today = datetime.today()
@@ -170,14 +702,14 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
else:
date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
- log.write("BEGINNING NIST %s\n" % ('INITS' if is_init else 'UPDATES'))
+ log.write("BEGINNING NIST %s\n" % action)
#determine if we are out of date and apply updates if true
if (date_new > date_past) or force_update:
pre_update_time = datetime.now() #used for logging purposes only
- nist_json(is_init,nist_cve_url, ds[ORM.DATASOURCE_ID], nist_file, log, date_new, incremental)
- log.write("began %s: %s\n" % ( 'init' if is_init else 'updates', str(pre_update_time) ))
- log.write("finished %s: %s\n" % ( 'init' if is_init else 'updates', str(datetime.now()) ))
+ nist_json(action,nist_cve_url, ds, nist_file, log, date_new)
+ log.write("began %s: %s\n" % ( action, str(pre_update_time) ))
+ log.write("finished %s: %s\n" % ( action, str(datetime.now()) ))
log.write("=============================================================================\n")
log.write("\n")
@@ -186,11 +718,11 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
c.execute(sql, (str(date_new),))
conn.commit()
else:
- log.write("No %s needed\n" % ('init' if is_init else 'update'))
+ log.write("No %s needed\n" % action)
log.write("Checked: %s\n" % datetime.now())
log.write("=============================================================================\n")
log.write("\n")
- print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
+ print("NO %s NEEDED" % action)
# Reset datasource's lastModifiedDate as today
sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
@@ -208,7 +740,7 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
f.close()
except URLError as e:
- raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
+ raise Exception("Failed to open %s: %s" % (nist_meta_url, e))
log.close()
c.close()
conn.close()
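Relating this back to the datasource records, an --update_nist entry such as the 2020 one reduces to a call of roughly this shape (the flag-to-argument mapping is an assumption, since main() is outside this hunk):

    update_nist(ACTION_UPDATE,
                'NIST 2020',                  # --source, matched against the orm_datasource description
                'nvdcve-1.1-2020.json.gz',    # --url-file
                'nvdcve-1.1-2020.meta',       # --url-meta
                'data/nvdcve-1.1-2020.json')  # --file, cached under srtool_basepath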
@@ -223,269 +755,8 @@ def file_date(filename,utc=False):
file_datetime = file_datetime+(utc_now-now)
return file_datetime
-#parses JSON, creates CVE object, and updates database as necessary. Commits to database on success
-#will EITHER create new record in orm_cve if cve does not exist OR overwrite every field if existing cve out-of-date OR ignore cve
-#requires json to be formatted with NIST Json schema (https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema)
-def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, date_new, incremental):
- import traceback
- import gzip
-
- # If we have already cached a current version of the NIST file, read from it directly
-
- # The value 'date_new' is in UTC, so convert the fetched file date
- if (not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True)):
- # Fetch and/or refresh upstream CVE file
- response = urlopen(summary_json_url)
- dct = json.loads(gzip.decompress(response.read()).decode('utf-8')) #uncompress and decode json.gz
-
- #save datasource feed to "data"
- datasource_file_fd = open(datasource_file, 'w+')
- datasource_file_fd.write(json.dumps(dct))
- else:
- # Use cached CVE file
- with open(datasource_file) as json_data:
- dct = json.load(json_data)
-
- conn = sqlite3.connect(srtDbName)
- c = conn.cursor()
-
- CVE_Items = dct['CVE_Items']
- total = len(CVE_Items)
- v = Cve()
-
- cache_path = os.path.join(srtool_basepath, nist_cache_dir)
- #begin parsing each cve in the JSON data
- for i, CVE_Item in enumerate(CVE_Items):
- # Development support
- if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
- break
-
- references = CVE_Item['cve']['references']['reference_data']
- CVE_data_meta = CVE_Item['cve']['CVE_data_meta']['ID']
-
- #if cve exists in cache, delete it
- cve_path = os.path.join(cache_path, CVE_data_meta + ".json")
- if (os.path.isfile(cve_path)):
- os.remove(cve_path)
-
- #print('.', end='', flush=True)
- print('[%4d]%30s\r' % ((i * 100)/ total, CVE_data_meta), end='', flush=True)
- try:
- v.name = CVE_data_meta
-
- v.cve_data_type = CVE_Item['cve']['data_type']
- v.cve_data_format = CVE_Item['cve']['data_format']
- v.cve_data_version = CVE_Item['cve']['data_version']
-
- v.description = CVE_Item['cve']['description']['description_data'][0]['value']
- v.publishedDate = re.sub('T.*','',CVE_Item['publishedDate'])
- v.lastModifiedDate = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- v.public = True # Always true since NIST is public source
-
- # We do not know yet if this has been published to the SRTool management
- v.publish_state = ORM.PUBLISH_UNPUBLISHED
- v.publish_date = ''
-
- if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
- baseMetricV3 = CVE_Item['impact']['baseMetricV3']
- v.cvssV3_baseScore = baseMetricV3['cvssV3']['baseScore']
- v.cvssV3_baseSeverity = baseMetricV3['cvssV3']['baseSeverity']
- if ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact']):
- baseMetricV2 = CVE_Item['impact']['baseMetricV2']
- v.cvssV2_baseScore = baseMetricV2['cvssV2']['baseScore']
-
- #check if cve object `v` need to be uploaded to database (cases: new cve, modified cve, or no changes)
- #if true, apply changes. Else ignore and continue
- v_id, is_change = sql_cve_query(conn, v, is_init,log)
-
-
- #if incremental update and CVE changed, save json copy of the cve to cache
- if incremental and is_change:
- file = open(cve_path, 'w+')
- file.write(json.dumps(CVE_Item))
-
- #if CVE `v` updates, must check and update associated records (CWEs, references, and CVE2CWE)
- #sql_cwe_query, and sql_cve2cwe_query require valid CVE record primary key at some point during their execution, therefore must always be after call to sql_cve_query
- if is_change:
- problem_list = CVE_Item['cve']['problemtype']['problemtype_data']
- for problem_Item in problem_list:
- description_list = problem_Item['description']
- for description_Item in description_list:
- value = description_Item['value']
- cwe_id = sql_cwe_query(conn, value)
- sql_cve2cwe_query(conn, v_id, cwe_id)
-
- # Add this data source to the CVE
- sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- exists = c.execute(sql, (v_id,datasource_id)).fetchone()
- if exists is None:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- c.execute(sql, (v_id,datasource_id))
-
- except Exception as e:
- print(traceback.format_exc())
- print("UPDATE FAILED")
- c.close()
- conn.close()
- return
- print()
- log.write("total number of CVEs checked: %s\n" % total)
- conn.commit()
- c.close()
- conn.close()
-
-#################################
-# cve class
-#
-class Cve():
- # index - primary key
- id = -1
-
- name = ''
-
- priority = 0
- status = ORM.STATUS_HISTORICAL
-
- comments = ''
- comments_private = ''
-
- cve_data_type = ''
- cve_data_format = ''
- cve_data_version = ''
-
- public = False
- publish_state = ORM.PUBLISH_UNPUBLISHED
- publish_date = ''
-
- description = ''
- publishedDate = ''
- lastModifiedDate = ''
- problemtype = ''
-
- # cpe_list = ''
-
- cvssV3_baseScore = ''
- cvssV3_baseSeverity = ''
- # cvssV3_vectorString = ''
- # cvssV3_exploitabilityScore = ''
- # cvssV3_impactScore = ''
- # cvssV3_attackVector = ''
- # cvssV3_attackComplexity = ''
- # cvssV3_privilegesRequired = ''
- # cvssV3_userInteraction = ''
- # cvssV3_scope = ''
- # cvssV3_confidentialityImpact = ''
- # cvssV3_integrityImpact = ''
- # cvssV3_availabilityImpact = ''
-
- cvssV2_baseScore = ''
- cvssV2_severity = ''
- # cvssV2_vectorString = ''
- # cvssV2_exploitabilityScore = ''
- # cvssV2_impactScore = ''
- # cvssV2_accessVector = ''
- # cvssV2_accessComplexity = ''
- # cvssV2_authentication = ''
- # cvssV2_confidentialityImpact = ''
- # cvssV2_integrityImpact = ''
-
- recommend = 0
- recommend_list = ''
-
-#generates and executes appropriate SQLite query for CVE depending on situation
-#new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
-#returns (CVE_ID, BOOL) tuple, True if insert or update executed
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cve_query(conn, cve, is_init, log):
- is_change = False
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cve WHERE name=?'''
- exists = cur.execute(sql, (cve.name,)).fetchone()
- cve_id = -1
- if exists is None:
- # Get the default CVE status
- status = get_cve_default_status(is_init,cve.publishedDate)
-
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve.name, get_name_sort(cve.name), cve.priority, status, cve.comments, cve.comments_private, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, 1, cve.publish_state, cve.publish_date, cve.description, cve.publishedDate, cve.lastModifiedDate, cve.recommend, cve.recommend_list, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, datetime.now(),''))
- is_change = True
- cve_id = cur.lastrowid
- log.write("\tINSERTED '%s'\n" % cve.name)
-
- elif exists[ORM.CVE_LASTMODIFIEDDATE] < cve.lastModifiedDate:
- sql = ''' UPDATE orm_cve
- SET recommend = ?,
- recommend_list = ?,
- cve_data_type = ?,
- cve_data_format = ?,
- cve_data_version = ?,
- description = ?,
- lastModifiedDate = ?,
- cvssV3_baseScore = ?,
- cvssV3_baseSeverity = ?,
- cvssV2_baseScore = ?,
- cvssV2_severity = ?
- WHERE id = ?'''
- cur.execute(sql, (cve.recommend, cve.recommend_list, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, cve.description, cve.lastModifiedDate, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, exists[0]))
- is_change = True
- log.write("\tUPDATED '%s'\n" % cve.name)
- cve_id = exists[ORM.CVE_ID]
-
- ### TO-DO
- ### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
- ###
-
- else:
- is_change = False
- log.write("\tSKIPPED '%s'\n" % cve.name)
- cur.close()
- return (cve_id, is_change)
-
-
-#################################
-# cwe and cve2cwe
-#
-
-#generates and executes appropriate SQLite query for a new CWE
-#returns CWE_ID
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cwe_query(conn, value):
- CWE_ID = 0
- CWE_VULNERABLE_COUNT = 6
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cwetable WHERE name=?'''
- cwe = cur.execute(sql, (value,)).fetchone()
- if cwe is None:
- sql = '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)'''
- cur.execute(sql, (value,))
- cwe_id = cur.lastrowid
- cur.close()
- return cwe_id
- else:
- sql = ''' UPDATE orm_cwetable
- SET vulnerable_count = ?
- WHERE id = ?'''
- cur.execute(sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
- conn.commit()
- cur.close()
- return cwe[CWE_ID]
-
-#generates and executes appropriate SQLite query for new CVE to CWE relation
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cve2cwe_query(conn, cve_id, cwe_id):
- cur = conn.cursor()
- sql = '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?'''
- cve2cwe = cur.execute(sql, (cve_id, cwe_id)).fetchone()
- if cve2cwe is None:
- sql = '''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id, cwe_id))
- conn.commit()
- cur.close()
-
-
-#################################
-# main loop
+#######################################################################
+# fetch_cve: extract and return the meta data for a specific CVE
#
def fetch_cve(cve_name,cve_source_file):
@@ -503,8 +774,9 @@ def fetch_cve(cve_name,cve_source_file):
print("Description=ERROR reading CVE summary file '%s':%s" % (cve_cache_path,e))
return
elif cve_source_file:
+ nist_file = os.path.join(srtool_basepath,cve_source_file) if not cve_source_file.startswith('/') else cve_source_file
try:
- f = open(os.path.join(srtool_basepath, cve_source_file), 'r')
+ f = open(nist_file, 'r')
source_dct = json.load(f)
for item in source_dct["CVE_Items"]:
if not 'cve' in item:
@@ -534,135 +806,196 @@ def fetch_cve(cve_name,cve_source_file):
print("description=There is no CVE record for %s in the loaded NIST public CVE database." % cve_name)
return
- summary = {}
+ # Translate a CVE_Item JSON node
+ summary = CVE_ItemToSummary(CVE_Item)
- summary['name'] = cve_name
- summary['cve_data_type'] = CVE_Item['cve']['data_type']
- summary['cve_data_format'] = CVE_Item['cve']['data_format']
- summary['cve_data_version'] = CVE_Item['cve']['data_version']
+ # Return the results
+ for key in summary.keys():
+ print('%s=%s' % (key,summary[key]))
- summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
- summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
- summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % cve_name
- summary['url_title'] = 'NIST Link'
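fetch_cve() now builds its summary through CVE_ItemToSummary() and prints every field as a key=value line for the caller to parse. An illustrative call (file path matching the 2020 datasource's --file argument):

    fetch_cve('CVE-2020-0001', 'data/nvdcve-1.1-2020.json')
    # prints one line per field, e.g.
    #   name=CVE-2020-0001
    #   url=https://nvd.nist.gov/vuln/detail/CVE-2020-0001
    #   cvssV3_baseScore=...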
+#######################################################################
+# update_cve_list: Update CVE records for a list of CVEs
+#
+# This can be used for forcing the instantiation and/or update
+# for specific CVEs on demand, for example instantiating CVEs found in
+# the defect system that may be from older NIST years which are registered
+# as data sources that are on-demand only
+#
- if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
- baseMetricV3 = CVE_Item['impact']['baseMetricV3']
- summary['cvssV3_baseScore'] = baseMetricV3['cvssV3']['baseScore']
- summary['cvssV3_baseSeverity'] = baseMetricV3['cvssV3']['baseSeverity']
- summary['cvssV3_vectorString'] = baseMetricV3['cvssV3']['vectorString']
- summary['cvssV3_exploitabilityScore'] = baseMetricV3['exploitabilityScore']
- summary['cvssV3_impactScore'] = baseMetricV3['impactScore']
- summary['cvssV3_attackVector'] = baseMetricV3['cvssV3']['attackVector']
- summary['cvssV3_attackComplexity'] = baseMetricV3['cvssV3']['attackComplexity']
- summary['cvssV3_privilegesRequired'] = baseMetricV3['cvssV3']['privilegesRequired']
- summary['cvssV3_userInteraction'] = baseMetricV3['cvssV3']['userInteraction']
- summary['cvssV3_scope'] = baseMetricV3['cvssV3']['scope']
- summary['cvssV3_confidentialityImpact'] = baseMetricV3['cvssV3']['confidentialityImpact']
- summary['cvssV3_integrityImpact'] = baseMetricV3['cvssV3']['integrityImpact']
- summary['cvssV3_availabilityImpact'] = baseMetricV3['cvssV3']['availabilityImpact']
- if ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact']):
- baseMetricV2 = CVE_Item['impact']['baseMetricV2']
- summary['cvssV2_baseScore'] = baseMetricV2['cvssV2']['baseScore']
- summary['cvssV2_severity'] = baseMetricV2['severity']
- summary['cvssV2_vectorString'] = baseMetricV2['cvssV2']['vectorString']
- summary['cvssV2_exploitabilityScore'] = baseMetricV2['exploitabilityScore']
- summary['cvssV2_impactScore'] = baseMetricV2['exploitabilityScore']
- summary['cvssV2_accessVector'] = baseMetricV2['cvssV2']['accessVector']
- summary['cvssV2_accessComplexity'] = baseMetricV2['cvssV2']['accessComplexity']
- summary['cvssV2_authentication'] = baseMetricV2['cvssV2']['authentication']
- summary['cvssV2_confidentialityImpact'] = baseMetricV2['cvssV2']['confidentialityImpact']
- summary['cvssV2_integrityImpact'] = baseMetricV2['cvssV2']['integrityImpact']
+def update_cve_list(action,cve_list,conn=None):
- configurations = CVE_Item['configurations']
- is_first_and = True
- summary['cpe_list'] = ''
- for i, config in enumerate(configurations['nodes']):
- summary['cpe_list'] += '[config]|'
- summary['cpe_list'] += '[and]|'
- if "AND" == config['operator']:
- # create AND record
- if not is_first_and:
- summary['cpe_list'] += '[/and]|'
- summary['cpe_list'] += '[and]|'
- #is_first_and = False
- if 'children' in config:
- for j, cpe_or_node in enumerate(config['children']):
- if "OR" == cpe_or_node['operator']:
- summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, cve_name, j)
- else:
- print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
- elif "OR" == config['operator']:
- summary['cpe_list'] += nist_scan_configuration_or(config, cve_name, 0)
- else:
- print("ERROR CONFIGURE:OP?:%s" % config['operator'])
- summary['cpe_list'] += '[/and]|'
- summary['cpe_list'] += '[/config]|'
+ # Set up database connection
+ do_close = False
+ if not conn:
+ conn = sqlite3.connect(srtDbName)
+ do_close = True
+ cur = conn.cursor()
- summary['ref_list'] = ''
- for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
- summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'])
+ # Gather the CVE prefix to lookup commands
+ sql = "SELECT * FROM orm_datasource"
+ cur.execute(sql)
+ datasource_table = []
+ for datasource in cur:
+ if 'nist' != datasource[ORM.DATASOURCE_SOURCE]:
+ # Only consider NIST datasources
+ continue
+ datasource_table.append([datasource[ORM.DATASOURCE_CVE_FILTER], datasource[ORM.DATASOURCE_LOOKUP], datasource[ORM.DATASOURCE_ID]])
+
+ update = False
+ fd = None
+ source_dct = []
+ for datasource in datasource_table:
+
+ # Simple caching
+ if fd:
+ fd.close()
+ fd = None
+ source_dct = []
+ has_matches = False
+ # Find at least one CVE that is in this datasource
+ for cve_name in cve_list.split(','):
+ if (not datasource[0]) or cve_name.startswith(datasource[0]):
+ has_matches = True
+ if not has_matches:
+ continue
+ # Find the CVEs in this datasource
+
+ # bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2002.json %command%
+ cve_source_file = re.sub(r".*=", "", datasource[1])
+ cve_source_file = re.sub(r" .*", "", cve_source_file)
+ if verbose: print("NIST_SOURCE:%s %s" % (cve_source_file,cve_name))
+ try:
+ if not fd:
+ # Simple caching
+ fd = open(os.path.join(srtool_basepath, cve_source_file), 'r')
+ source_dct = json.load(fd)
+ for item in source_dct["CVE_Items"]:
+ if not 'cve' in item:
+ continue
+ if not 'CVE_data_meta' in item['cve']:
+ continue
+ if not 'ID' in item['cve']['CVE_data_meta']:
+ continue
+ for cve_name in cve_list.split(','):
+ if item['cve']['CVE_data_meta']['ID'] == cve_name:
+ if verbose: print(" NIST_TRANSLATE:%s %s" % (cve_source_file,cve_name))
+
+ # Translate the CVE content
+ summary = CVE_ItemToSummary(item,True)
+ # Commit the CVE content
+ cve_id, is_change = sql_cve_query(action, conn, summary, None)
+ if is_change:
+ update = True
+
+ # Add NIST datasource to CVE
+ sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=?'''
+ cve2ds = cur.execute(sql, (cve_id, datasource[2],)).fetchone()
+ if not cve2ds:
+ sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ cur.execute(sql, (cve_id,datasource[2],))
+ # Remember this match in case it gets preempted
+
+ if verbose: print(" NIST_QUERIED:%s %s" % (cve_source_file,cve_name))
- # Return the results
- for key in summary.keys():
- print('%s=%s' % (key,summary[key]))
+ except Exception as e:
+ print("Description=ERROR CVE list load '%s':%s" % (cve_source_file,e))
+ print(traceback.format_exc())
+ return
-def do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key):
- cpe_list = ''
- for cpe in cpe_or_node[key]:
- cpe23Uri = cpe['cpe23Uri']
- if 'cpeMatchString' in cpe:
- cpeMatchString = cpe['cpeMatchString']
- else:
- cpeMatchString = ''
- if 'versionEndIncluding' in cpe:
- versionEndIncluding = cpe['versionEndIncluding']
- else:
- versionEndIncluding = ''
- cpe_list += '%s,%s,%s,%s|' % (cpe['vulnerable'],cpe23Uri,cpeMatchString,versionEndIncluding)
- return cpe_list
+ if update:
+ conn.commit()
+ cur.close()
+ if do_close:
+ conn.close()
-def nist_scan_configuration_or(cpe_or_node, name, and_enum):
- cpe_list = '[or]|'
- found = 0
- if 'cpe' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe')
- found += 1
- if 'cpe_match' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe_match')
- found += 1
- cpe_list += '[/or]|'
+def update_existing_cves(action,cve_prefix):
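+ # Re-import all existing CVEs whose name starts with cve_prefix,
+ # forwarding them to update_cve_list() in batches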
+ # Set up database connection
+ conn = sqlite3.connect(srtDbName)
+ cur = conn.cursor()
- if verbose and (not found):
- print("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s" % cpe_or_node)
- return cpe_list
+ # Gather the existing CVEs that match the given prefix
+ sql = 'SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"'
+ cur.execute(sql)
+ cve_table = []
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ if verbose: print("FOUND:%s" % cve[ORM.CVE_NAME])
+ cve_table.append(cve[ORM.CVE_NAME])
+
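+ # Flush the accumulated names in batches of 20, sharing one connection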
+ if 19 == (i % 20):
+ print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
+ update_cve_list(action,','.join(cve_table),conn)
+ cve_table = []
+
+ if cve_table:
+ print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
+ update_cve_list(action,','.join(cve_table),conn)
+ cur.close()
+ conn.close()
-#################################
+
+#######################################################################
# main loop
#
def main(argv):
global verbose
+ global force_update
+ global force_cache
+ global update_skip_history
+ global cmd_skip
+ global cmd_count
+
parser = argparse.ArgumentParser(description='srtool_cve.py: manage the CVEs within SRTool database')
parser.add_argument('--init_nist', '-I', action='store_const', const='init_nist', dest='command', help='Initialize nvd.nist.gov/vuln/data-feeds for a specified datasource')
parser.add_argument('--update_nist', '-n', action='store_const', const='update_nist', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates on a specified datasource')
+ parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
+ parser.add_argument('--download-only', action='store_const', const='download_nist', dest='command', help='Download the NIST source CVE file(s), load CVEs on demand only')
+ parser.add_argument('--update-cve-list', '-l', dest='update_cve_list', help='Update the given comma-separated list of CVEs in the database')
+ parser.add_argument('--update-existing-cves', '-L', dest='update_existing_cves', help='Update the existing CVEs in the database that match the given CVE prefix')
+ parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--url-meta', dest='url_meta', help='CVE URL meta extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
- parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
- parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
- parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Force update')
+ parser.add_argument('--force-cache', action='store_true', dest='force_cache', help='Force use of cached CVE source files')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
args = parser.parse_args()
+
verbose = args.verbose
+ force_update = args.force_update
+ force_cache = args.force_cache
+ update_skip_history = args.update_skip_history
+ cmd_skip = 0
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ cmd_count = 0
+ if None != args.count:
+ cmd_count = int(args.count)
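+ # Example usage (illustrative CVE names):
+ # ./bin/nist/srtool_nist.py --update-cve-list=CVE-2019-0001,CVE-2019-0002
+ # ./bin/nist/srtool_nist.py --update-existing-cves=CVE-2019 --skip=100 --count=20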
#srt_error_log("DEBUG:srtool_nist:%s" % args)
+ # Update CVE list
+ if args.update_cve_list:
+ update_cve_list(ACTION_UPDATE_CVE,args.update_cve_list)
+ return
+ elif args.update_existing_cves:
+ update_existing_cves(ACTION_UPDATE_CVE,args.update_existing_cves)
+ return
+
# Required parameters to continue
if not args.cve_file:
print("ERROR: missing --cve_file parameter")
@@ -693,26 +1026,39 @@ def main(argv):
ret = 0
if ('init_nist' == args.command) or ('update_nist' == args.command):
- is_init = ('init_nist' == args.command)
+ if ('init_nist' == args.command):
+ action = ACTION_INIT
+ else:
+ action = ACTION_UPDATE
try:
- print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % ('INIT' if is_init else 'UPDATES'))
- update_nist(is_init, args.source, args.url_file, args.url_meta, args.cve_file, False, args.force_update)
- master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, "INIT'ED" if is_init else 'UPDATED'))
- print("DATABASE %s FINISHED\n" % ('INIT' if is_init else 'UPDATE'))
+ print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % action)
+ update_nist(action, args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:%s Done:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, action))
+ print("DATABASE %s FINISHED\n" % action)
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE %s FAILED ... %s" % ('INIT' if is_init else 'UPDATE',e))
+ print("DATABASE %s FAILED ... %s" % (action,e))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
ret = 1
elif 'update_nist_incremental' == args.command:
try:
- print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
- update_nist(False,args.source, args.url_file, args.url_meta, args.cve_file, True, args.force_update)
+ print ("BEGINNING NIST INCREMENTAL UPDATE PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_INCREMENT,args.source, args.url_file, args.url_meta, args.cve_file)
master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
print("DATABASE UPDATE FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
print("DATABASE INCREMENT FAILED ... %s" % e)
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
ret = 1
+ elif 'download_nist' == args.command:
+ print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_DOWNLOAD,args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
+ print("DATABASE UPDATE FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
else:
ret = 1
print("Command not found")
diff --git a/bin/redhat/srtool_redhat.py b/bin/redhat/srtool_redhat.py
index f570965c..9b629cbe 100755
--- a/bin/redhat/srtool_redhat.py
+++ b/bin/redhat/srtool_redhat.py
@@ -180,7 +180,9 @@ def main(argv):
parser = argparse.ArgumentParser(description='srtool_redhat.py: manage Red Hat CVE data')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/srt b/bin/srt
index ac358d43..4cff6fd9 100755
--- a/bin/srt
+++ b/bin/srt
@@ -23,7 +23,7 @@ Usage 1: bin/srt start|stop [webport=<address:port>]
Optional arguments:
[webport] Set the SRTool server port (default: localhost:8000)
[noautoupdate] Disable the auto update server
-Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|checksettings|collectstatic|...]
+Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|makemigrations|checksettings|collectstatic|...]
"
databaseCheck()
diff --git a/bin/yp/datasource.json b/bin/yp/datasource.json
index 97486956..abb2e663 100755
--- a/bin/yp/datasource.json
+++ b/bin/yp/datasource.json
@@ -37,6 +37,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact Yocto Project"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/yp/srtool_yp.py b/bin/yp/srtool_yp.py
index 338d4467..1438b59f 100755
--- a/bin/yp/srtool_yp.py
+++ b/bin/yp/srtool_yp.py
@@ -115,6 +115,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/yp/yocto-project-products.json b/bin/yp/yocto-project-products.json
index b9688747..ea154113 100755
--- a/bin/yp/yocto-project-products.json
+++ b/bin/yp/yocto-project-products.json
@@ -1,54 +1,47 @@
{
"Product_Items" : [
- {
- "order" : "1",
- "key" : "Warrior",
- "name" : "Yocto Project Linux",
- "version" : "2.7",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"warrior\"}",
- "product_tags" : "{\"key\":\"warrior\"}"
- },
+
+ {
+ "order" : "1",
+ "key" : "master",
+ "name" : "Yocto Project Linux",
+ "version" : "dev",
+ "profile" : "",
+ "cpe" : "cpe:2.3:o:yoctoproject:*:*:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"master\"}",
+ "product_tags" : "{\"key\":\"master\",\"public_status\":\"no\",\"mode\":\"develop\"}"
+ },
+
+
{
"order" : "2",
- "key" : "Thud",
+ "key" : "Zeus",
"name" : "Yocto Project Linux",
- "version" : "2.6",
+ "version" : "3.0",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:3.0:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"zeus\"}",
+ "product_tags" : "{\"key\":\"zeus\",\"mode\":\"support\"}"
},
{
"order" : "3",
- "key" : "Sumo",
+ "key" : "Warrior",
"name" : "Yocto Project Linux",
- "version" : "2.5",
+ "version" : "2.7",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.5:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"warrior\"}",
+ "product_tags" : "{\"key\":\"warrior\",\"mode\":\"support\"}"
},
{
"order" : "4",
- "key" : "Rocko",
- "name" : "Yocto Project Linux",
- "version" : "2.4",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.4:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"rocko\"}",
- "product_tags" : "{\"key\":\"rocko\"}"
- },
- {
- "order" : "5",
- "key" : "Pyro",
+ "key" : "Thud",
"name" : "Yocto Project Linux",
- "version" : "2.3",
+ "version" : "2.6",
"profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.3:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"pyro\"}",
- "product_tags" : "{\"key\":\"pyro\"}"
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"thud\"}",
+ "product_tags" : "{\"key\":\"thud\",\"mode\":\"support\"}"
}
]
}
diff --git a/data/notify-categories.json b/data/notify-categories.json
index dc658fea..0deb631b 100755
--- a/data/notify-categories.json
+++ b/data/notify-categories.json
@@ -13,6 +13,10 @@
},
{
+ "name" : "CVE_DUPLICATE_NOLINK"
+ },
+
+ {
"name" : "VULNERABILITY"
},
diff --git a/lib/orm/management/commands/lsupdates.py b/lib/orm/management/commands/lsupdates.py
index 2a89a811..ca67713a 100644
--- a/lib/orm/management/commands/lsupdates.py
+++ b/lib/orm/management/commands/lsupdates.py
@@ -327,9 +327,7 @@ class Command(BaseCommand):
for source in data_sources:
if source.loaded and not (source.update_frequency == DataSource.ONSTARTUP):
- logger.info("Skipping source data from %s",source.description)
print("Skipping datasource %s (already loaded)" % (source.description))
- _log("Skipping datasource %s (already loaded)" % (source.description))
continue
elif not source.init:
# No Init action?
@@ -338,7 +336,6 @@ class Command(BaseCommand):
else:
logger.info("Fetching datasource %s:%s" % (source.source,source.description))
print("Fetching datasource '%s:%s'" % (source.source,source.description))
- _log("Fetching datasource '%s:%s'" % (source.source,source.description))
# Development/testing shortcut
if ('cve' == source.data) and ('yes' == SrtSetting.objects.get(name='SRTDBG_SKIP_CVE_IMPORT').value):
diff --git a/lib/orm/migrations/0001_initial.py b/lib/orm/migrations/0001_initial.py
index 0914d2bc..69ff00a3 100644
--- a/lib/orm/migrations/0001_initial.py
+++ b/lib/orm/migrations/0001_initial.py
@@ -99,10 +99,11 @@ class Migration(migrations.Migration):
('cvssV2_baseScore',models.CharField(max_length=50, blank=True)),
('cvssV2_severity', models.CharField(max_length=50, blank=True)),
- ('packages', models.TextField(blank=True, null=True)),
+ ('packages', models.TextField(blank=True)),
('score_date', models.DateTimeField(null=True, blank=True)),
('srt_updated', models.DateTimeField(auto_now=True)),
+## ('srt_created', models.DateTimeField(auto_now_add=True)),
],
),
@@ -158,18 +159,18 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CveSource',
fields=[
- ('cve', models.ForeignKey(related_name='source2cve', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cve', models.ForeignKey(default=None, related_name='source2cve', to='orm.cve', null=True,on_delete=models.CASCADE,)),
('datasource', models.ForeignKey(default=None, to='orm.datasource',null=True,on_delete=models.CASCADE,)),
],
),
-
migrations.CreateModel(
name='CveToCwe',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(related_name='cve2cwe', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
- ('cwe', models.ForeignKey(default=None, to='orm.cwetable', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='cve2cwe', to='orm.cve', on_delete=models.CASCADE,)),
+ ('cwe', models.ForeignKey(to='orm.cwetable', on_delete=models.CASCADE,)),
],
),
@@ -177,10 +178,11 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Package',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mode', models.IntegerField(default=0)),
('name', models.CharField(max_length=50, blank=True)),
('realname', models.CharField(max_length=50, blank=True)),
- ('invalidname', models.TextField(blank=True, null=True)),
+ ('invalidname', models.TextField(blank=True)),
('weight', models.IntegerField(default=0)),
('cve_count', models.IntegerField(default=0)),
('vulnerability_count', models.IntegerField(default=0)),
@@ -192,8 +194,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PackageToCve',
fields=[
- ('package', models.ForeignKey(related_name='package2cve', default=None, to='orm.package', null=True,on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(related_name='cve2package', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('package', models.ForeignKey(related_name='package2cve', to='orm.package', on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='cve2package', to='orm.cve', on_delete=models.CASCADE,)),
('applicable', models.NullBooleanField(default=True, null=True)),
],
),
@@ -202,13 +205,13 @@ class Migration(migrations.Migration):
name='CveReference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(related_name='references', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='references', to='orm.cve', on_delete=models.CASCADE,)),
('hyperlink', models.CharField(max_length=100, null=True)),
('resource', models.CharField(max_length=100, null=True)),
('type', models.CharField(max_length=100, null=True)),
('source', models.CharField(max_length=100, null=True)),
('name', models.CharField(max_length=100, null=True)),
- ('datasource', models.ForeignKey(related_name='source_references', default=None, to='orm.datasource', null=True,on_delete=models.CASCADE,)),
+ ('datasource', models.ForeignKey(related_name='source_references', to='orm.datasource', default=None, null=True,on_delete=models.CASCADE,)),
],
),
@@ -216,7 +219,7 @@ class Migration(migrations.Migration):
name='CveHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(default=None, null=True, to='orm.cve', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -258,8 +261,8 @@ class Migration(migrations.Migration):
name='CveToVulnerablility',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(to='orm.cve', on_delete=models.CASCADE,)),
],
),
@@ -267,7 +270,7 @@ class Migration(migrations.Migration):
name='VulnerabilityComments',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -278,7 +281,7 @@ class Migration(migrations.Migration):
name='VulnerabilityHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -289,7 +292,7 @@ class Migration(migrations.Migration):
name='VulnerabilityUploads',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability',on_delete=models.CASCADE,)),
('description', models.TextField(blank=True)),
('path', models.TextField(blank=True)),
('size', models.IntegerField(default=0)),
@@ -310,7 +313,7 @@ class Migration(migrations.Migration):
('resolution', models.IntegerField(default=0)),
('publish', models.TextField(blank=True)),
('release_version', models.CharField(max_length=50)),
- ('product', models.ForeignKey(default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(to='orm.product', on_delete=models.CASCADE,)),
('date_created', models.CharField(max_length=50)),
('date_updated', models.CharField(max_length=50)),
('srt_updated', models.DateTimeField(auto_now=True)),
@@ -323,8 +326,8 @@ class Migration(migrations.Migration):
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
- ('vulnerability', models.ForeignKey(related_name='vulnerability_investigation',default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('product', models.ForeignKey(related_name='references', default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(related_name='vulnerability_investigation',to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(related_name='references', to='orm.product',on_delete=models.CASCADE,)),
('public', models.BooleanField(default=True)),
('comments', models.TextField(blank=True)),
@@ -340,9 +343,9 @@ class Migration(migrations.Migration):
name='InvestigationToDefect',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('defect', models.ForeignKey(default=None, to='orm.defect', null=True,on_delete=models.CASCADE,)),
- ('product', models.ForeignKey(default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('defect', models.ForeignKey(to='orm.defect', on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(to='orm.product', on_delete=models.CASCADE,)),
],
),
@@ -350,7 +353,7 @@ class Migration(migrations.Migration):
name='InvestigationComments',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -361,7 +364,7 @@ class Migration(migrations.Migration):
name='InvestigationHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -372,7 +375,7 @@ class Migration(migrations.Migration):
name='InvestigationUploads',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('description', models.TextField(blank=True)),
('path', models.TextField(blank=True)),
('size', models.IntegerField(default=0)),
@@ -394,38 +397,43 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VulnerabilityAccess',
fields=[
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='InvestigationAccess',
fields=[
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='VulnerabilityNotification',
fields=[
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='InvestigationNotification',
fields=[
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='CpeTable',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vulnerable', models.BooleanField(default='False')),
('cpeMatchString', models.TextField(blank=True)),
('cpe23Uri', models.TextField(blank=True)),
@@ -436,14 +444,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CpeToCve',
fields=[
- ('cpe', models.ForeignKey(default=None, to='orm.cpetable',on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(default=None, to='orm.cve',on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cpe', models.ForeignKey(to='orm.cpetable',on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(to='orm.cve',on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='CpeFilter',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key_prime', models.CharField(max_length=50)),
('key_sub', models.CharField(max_length=50)),
('status', models.IntegerField(default=CpeFilter.UNDECIDED)),
@@ -456,6 +466,7 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PublishPending',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cve', models.ForeignKey(default=None, to='orm.cve',blank=True,null=True,on_delete=models.CASCADE,)),
('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability',blank=True,null=True,on_delete=models.CASCADE,)),
('investigation', models.ForeignKey(default=None, to='orm.investigation',blank=True,null=True,on_delete=models.CASCADE,)),
@@ -468,19 +479,21 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Notify',
fields=[
- ('category', models.CharField(max_length=50, null=True)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('category', models.CharField(max_length=50)),
('description', models.TextField()),
('url', models.TextField()),
('priority', models.IntegerField(default=0)),
('author', models.TextField()),
- ('srt_updated', models.DateTimeField(auto_now_add=True)),
- ('srt_created', models.DateTimeField(auto_now=True)),
+## ('srt_updated', models.DateTimeField(auto_now=True)),
+## ('srt_created', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='NotifyAccess',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notify', models.ForeignKey(default=None, to='orm.notify',blank=True,null=True,on_delete=models.CASCADE,)),
('user', models.ForeignKey(default=None, to='users.srtuser',blank=True,null=True,on_delete=models.CASCADE,)),
],
@@ -489,7 +502,8 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NotifyCategories',
fields=[
- ('category', models.CharField(max_length=50, null=True)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('category', models.CharField(max_length=50)),
],
),
diff --git a/lib/orm/migrations/0003_modified.py b/lib/orm/migrations/0003_modified.py
new file mode 100755
index 00000000..e8752007
--- /dev/null
+++ b/lib/orm/migrations/0003_modified.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0002_updates'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='cve',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='cve',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='cve',
+ name='acknowledge_date',
+ field=models.DateTimeField(null=True),
+ ),
+
+ migrations.AddField(
+ model_name='investigation',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='investigation',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='investigation',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+
+ migrations.AddField(
+ model_name='vulnerability',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='vulnerability',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='vulnerability',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ ]
diff --git a/lib/orm/migrations/0004_defect_status.py b/lib/orm/migrations/0004_defect_status.py
new file mode 100755
index 00000000..4e5b2f8d
--- /dev/null
+++ b/lib/orm/migrations/0004_defect_status.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0003_modified'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='defect',
+ name='srt_status',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='srt_outcome',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='srt_priority',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='duplicate_of',
+ field=models.CharField(max_length=50, blank=True, default=''),
+ ),
+
+ ]
diff --git a/lib/orm/migrations/0005_publish_report.py b/lib/orm/migrations/0005_publish_report.py
new file mode 100755
index 00000000..6a0c34ee
--- /dev/null
+++ b/lib/orm/migrations/0005_publish_report.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0004_defect_status'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='PublishSet',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('cve', models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)),
+ ('state', models.IntegerField(default=0)),
+ ('reason', models.TextField(blank=True)),
+ ],
+ ),
+
+ migrations.CreateModel(
+ name='DefectHistory',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('defect', models.ForeignKey(to='orm.defect', on_delete=models.CASCADE,)),
+ ('comment', models.TextField(blank=True)),
+ ('date', models.DateField(null=True, blank=True)),
+ ('author', models.TextField(blank=True)),
+ ],
+ ),
+
+ ]
diff --git a/lib/orm/migrations/0006_reconcile.py b/lib/orm/migrations/0006_reconcile.py
new file mode 100755
index 00000000..e7ad54bf
--- /dev/null
+++ b/lib/orm/migrations/0006_reconcile.py
@@ -0,0 +1,410 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.5 on 2020-01-12 06:21
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0005_publish_report'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notify',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AddField(
+ model_name='notify',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='automatic',
+ field=models.BooleanField(default='False'),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='key_prime',
+ field=models.CharField(max_length=40),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='key_sub',
+ field=models.CharField(max_length=40),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Undecided'), (1, 'Include'), (2, 'Exclude'), (3, 'Manual')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='cpetocve',
+ name='cpe',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cpe2cve', to='orm.CpeTable'),
+ ),
+ migrations.AlterField(
+ model_name='cpetocve',
+ name='cve',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve2cpe', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='publish_state',
+ field=models.IntegerField(choices=[(0, 'Unpublished'), (1, 'Not to be Published'), (2, 'Published'), (3, 'Publish Request (New)'), (4, 'Publish Request (Update)'), (5, 'Publish Submitted')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='score_date',
+ field=models.DateField(blank=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='cvehistory',
+ name='cve',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_history', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvelocal',
+ name='cvssV3_attackVector',
+ field=models.CharField(blank=True, max_length=50),
+ ),
+ migrations.AlterField(
+ model_name='cvereference',
+ name='datasource',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='source_references', to='orm.DataSource'),
+ ),
+ migrations.AlterField(
+ model_name='cvesource',
+ name='cve',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_parent', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvesource',
+ name='datasource',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_datasource', to='orm.DataSource'),
+ ),
+ migrations.AlterField(
+ model_name='cvetocwe',
+ name='cwe',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cwe2cve', to='orm.CweTable'),
+ ),
+ migrations.AlterField(
+ model_name='cvetovulnerablility',
+ name='cve',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_to_vulnerability', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvetovulnerablility',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_to_cve', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='datasource',
+ name='update_frequency',
+ field=models.IntegerField(choices=[(0, 'Minute'), (1, 'Hourly'), (2, 'Daily'), (3, 'Weekly'), (4, 'Monthly'), (5, 'OnDemand'), (6, 'OnStartup')], default=2),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_defect', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='resolution',
+ field=models.IntegerField(choices=[(0, 'Unresolved'), (1, 'Resolved'), (2, 'Fixed'), (3, "Won't Fix"), (4, 'Withdrawn'), (5, 'Rejected'), (6, 'Duplicate'), (7, 'Not Applicable'), (8, 'Replaced By Requirement'), (9, 'Cannot Reproduce'), (10, 'Done')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=3),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'In progress'), (2, 'On Hold'), (3, 'Checked In'), (4, 'Resolved'), (5, 'Closed')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defecthistory',
+ name='defect',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_history', to='orm.Defect'),
+ ),
+ migrations.AlterField(
+ model_name='helptext',
+ name='area',
+ field=models.IntegerField(choices=[(0, 'variable')]),
+ ),
+ migrations.AlterField(
+ model_name='helptext',
+ name='text',
+ field=models.TextField(),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=3),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_investigation', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='tags',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='investigationaccess',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_users', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationaccess',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_user', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='investigationcomments',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_comments', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationhistory',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_history', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationnotification',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_notification', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationnotification',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_notify', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='defect',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_to_investigation', to='orm.Defect'),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_to_defect', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_to_product', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='investigationuploads',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_uploads', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='author',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='description',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='url',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notifyaccess',
+ name='notify',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='todo2user', to='orm.Notify'),
+ ),
+ migrations.AlterField(
+ model_name='notifyaccess',
+ name='user',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user2todo', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='package',
+ name='mode',
+ field=models.IntegerField(choices=[(0, 'For'), (1, 'Against')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='cve',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_cves', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='investigation',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_investigations', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='note',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='vulnerability',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_vulnerabilities', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='publishset',
+ name='state',
+ field=models.IntegerField(choices=[(0, 'Skip'), (1, 'New'), (2, 'Modified'), (3, 'New_User'), (4, 'Modified_User'), (5, 'PUBLISH_SET_ERROR')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='srtsetting',
+ name='helptext',
+ field=models.TextField(),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=0),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='public',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=3),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='tags',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityaccess',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_user', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityaccess',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_users', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitycomments',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_comments', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityhistory',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_history', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitynotification',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_notify', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitynotification',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_notification', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitytoinvestigation',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation2vulnerability', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitytoinvestigation',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability2investigation', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityuploads',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_uploads', to='orm.Vulnerability'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='cpefilter',
+ unique_together=set([('key_prime', 'key_sub')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='product',
+ unique_together=set([('name', 'version', 'profile')]),
+ ),
+ ]
diff --git a/lib/orm/models.py b/lib/orm/models.py
index 0f6cfb17..0dd73ba4 100644
--- a/lib/orm/models.py
+++ b/lib/orm/models.py
@@ -114,6 +114,266 @@ def GitURLField(**kwargs):
# Core Classes
+# Helper class for common mappings
+class SRTool():
+
+ # Global date format
+ DATE_FORMAT = '%Y-%m-%d'
+ DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
+
+ # SRTool Priority
+ UNDEFINED = 0
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 99
+ SRT_PRIORITY = (
+ (UNDEFINED, 'Undefined'),
+ (LOW, 'Low'),
+ (MEDIUM, 'Medium'),
+ (HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ )
+ @staticmethod
+ def priority_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_PRIORITY)):
+ return 'PRIORITY_ERROR'
+ return SRTool.SRT_PRIORITY[index][1]
+ @staticmethod
+ def priority_index(value):
+ for item in SRTool.SRT_PRIORITY:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PRIORITY_ERROR
+
+ # SRTool Severity (same integer values as priority)
+ SRT_SEVERITY = (
+ (UNDEFINED, 'UNDEFINED'),
+ (LOW, 'LOW'),
+ (MEDIUM, 'MEDIUM'),
+ (HIGH, 'HIGH'),
+ (CRITICAL, 'CRITICAL'),
+ )
+ @staticmethod
+ def severity_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_SEVERITY)):
+ return 'SEVERITY_ERROR'
+ return SRTool.SRT_SEVERITY[index][1]
+ @staticmethod
+ def severity_index(value):
+ for item in SRTool.SRT_SEVERITY:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PRIORITY_ERROR
+
+ # SRTool Status
+ HISTORICAL = 0
+ NEW = 1
+ NEW_RESERVED = 2
+ INVESTIGATE = 3
+ VULNERABLE = 4
+ NOT_VULNERABLE = 5
+ NEW_INACTIVE = 6
+ INVESTIGATE_INACTIVE = 7
+ VULNERABLE_INACTIVE = 8
+ NOT_VULNERABLE_INACTIVE = 9
+ STATUS_ERROR = 99
+ SRT_STATUS = (
+ (HISTORICAL, 'Historical'),
+ (NEW, 'New'),
+ (NEW_RESERVED, 'New-Reserved'),
+ (INVESTIGATE, 'Investigate'),
+ (VULNERABLE, 'Vulnerable'),
+ (NOT_VULNERABLE, 'Not Vulnerable'),
+ (NEW_INACTIVE, '(New)'),
+ (INVESTIGATE_INACTIVE, '(Investigate)'),
+ (VULNERABLE_INACTIVE, '(Vulnerable)'),
+ (NOT_VULNERABLE_INACTIVE, '(Not Vulnerable)'),
+ )
+ @staticmethod
+ def status_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_STATUS)):
+ return 'STATUS_ERROR'
+ return SRTool.SRT_STATUS[index][1]
+ @staticmethod
+ def status_index(value):
+ for item in SRTool.SRT_STATUS:
+ if value == item[1]:
+ return item[0]
+ return SRTool.STATUS_ERROR
+ @staticmethod
+ def status_to_inactive(value):
+ if SRTool.NEW == value:
+ return SRTool.NEW_INACTIVE
+ elif SRTool.INVESTIGATE == value:
+ return SRTool.INVESTIGATE_INACTIVE
+ elif SRTool.VULNERABLE == value:
+ return SRTool.VULNERABLE_INACTIVE
+ elif SRTool.NOT_VULNERABLE == value:
+ return SRTool.NOT_VULNERABLE_INACTIVE
+ else:
+ return value
+ @staticmethod
+ def status_to_active(value):
+ if SRTool.NEW_INACTIVE == value:
+ return SRTool.NEW
+ elif SRTool.INVESTIGATE_INACTIVE == value:
+ return SRTool.INVESTIGATE
+ elif SRTool.VULNERABLE_INACTIVE == value:
+ return SRTool.VULNERABLE
+ elif SRTool.NOT_VULNERABLE_INACTIVE == value:
+ return SRTool.NOT_VULNERABLE
+ else:
+ return value
+
+ OPEN = 0
+ CLOSED = 1
+ FIXED = 2
+ NOT_FIX = 3
+ OUTCOME_ERROR = 4
+ SRT_OUTCOME = (
+ (OPEN, 'Open'),
+ (CLOSED, 'Closed (Not Vulnerable)'),
+ (FIXED, 'Closed (Fixed)'),
+ (NOT_FIX, "Closed (Won't Fix)"),
+ )
+ @staticmethod
+ def outcome_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_OUTCOME)):
+ return "OUTCOME_ERROR"
+ return SRTool.SRT_OUTCOME[index][1]
+ @staticmethod
+ def outcome_index(value):
+ for item in SRTool.SRT_OUTCOME:
+ if value == item[1]:
+ return item[0]
+ return SRTool.OUTCOME_ERROR
+
+ # Publish state
+ PUBLISH_UNPUBLISHED = 0
+ PUBLISH_NOPUBLISH = 1
+ PUBLISH_PUBLISHED = 2
+ PUBLISH_REQUEST = 3
+ PUBLISH_UPDATE = 4
+ PUBLISH_SUBMITTED = 5
+ PUBLISH_ERROR = 99
+ SRT_PUBLISH_STATE = (
+ (PUBLISH_UNPUBLISHED, 'Unpublished'),
+ (PUBLISH_NOPUBLISH, 'Not to be Published'),
+ (PUBLISH_PUBLISHED, 'Published'),
+ (PUBLISH_REQUEST, 'Publish Request (New)'),
+ (PUBLISH_UPDATE, 'Publish Request (Update)'),
+ (PUBLISH_SUBMITTED, 'Publish Submitted'),
+ )
+ @staticmethod
+ def publish_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_PUBLISH_STATE)):
+ return 'PUBLISH_ERROR'
+ return SRTool.SRT_PUBLISH_STATE[index][1]
+ @staticmethod
+ def publish_index(value):
+ for item in SRTool.SRT_PUBLISH_STATE:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PUBLISH_ERROR
+
+ # Normalize displayed dates
+ @staticmethod
+ def date_ymd_text(value):
+ if isinstance(value,datetime):
+ return(value.strftime("%Y-%m-%d"))
+ return(value)
+
+ # Extract dictionary tag values
+ @staticmethod
+ def get_dict_tag(tag,dict_str,default=None):
+ dict = json.loads(dict_str)
+ if tag in dict:
+ return dict[tag]
+ return default
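+ # e.g. SRTool.priority_text(SRTool.HIGH) -> 'High'
+ # SRTool.get_dict_tag('key', '{"key":"zeus"}') -> 'zeus'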
+
+
+# Helper class to format and track updates
+# Enforce strict formatting and content to enable reporting, change filtering, pretty printing
+class Update():
+ # General history prefix format (type,source,semicolon-joined changes):
+ # UPDATE(User):Priority(%s,%s);Tag();Status(%s,%s) {helpful text}
+ # CREATE(Defect): {Created from defect ABCD-1234}
+ # Update report check strings: 'UPDATE(','Priority(','Status('
+
+ # General update label
+ UPDATE_STR = "UPDATE(%s):"
+ CREATE_STR = "CREATE(%s):"
+ UPDATE_PREFIX_STR = "UPDATE("
+ CREATE_PREFIX_STR = "CREATE("
+
+ # Update sources
+ SOURCE_USER = "User"
+ SOURCE_TRIAGE = "Triage"
+ SOURCE_CVE = "CVE"
+ SOURCE_DEFECT = "Defect"
+
+ # Update labels (no string overlaps allowed)
+ NEW_NAME = "New_Name(%s,%s)"
+ PRIORITY = "Priority(%s,%s)"
+ STATUS = "Status(%s,%s)"
+ SEVERITY_V3 = "Severity_V3(%s,%s)"
+ SEVERITY_V2 = "Severity_V2(%s,%s)"
+ OUTCOME = "Outcome(%s,%s)"
+ RELEASE = "Release(%s,%s)"
+ DESCRIPTION = "Description()"
+ LASTMODIFIEDDATE = "LastModifiedDate(%s,%s)"
+ NOTE = "User_Note()"
+ PRIVATE_NOTE = "Private_Note()"
+ TAG = "Tag()"
+ PUBLISH_STATE = "Publish_State(%s,%s)"
+ PUBLISH_DATE = "Publish_Date(%s,%s)"
+ AFFECTED_COMPONENT = "Affected_Component(%s,%s)"
+ ACKNOWLEDGE_DATE = "AcknowledgeDate(%s,%s)"
+ ATTACH_CVE = "Attach_CVE(%s)"
+ DETACH_CVE = "Detach_CVE(%s)"
+ ATTACH_VUL = "Attach_Vulnerability(%s)"
+ DETACH_VUL = "Detach_Vulnerability(%s)"
+ ATTACH_INV = "Attach_Investigration(%s)"
+ DETACH_INV = "Detach_Investigration(%s)"
+ ATTACH_DEV = "Attach_Defect(%s)"
+ DETACH_DEV = "Detach_Defect(%s)"
+ ATTACH_DOC = "Attach_Document(%s)"
+ DETACH_DOC = "Detach_Document(%s)"
+ ATTACH_USER_NOTIFY = "Attach_User_Notify(%s)"
+ DETACH_USER_NOTIFY = "Detach_User_Notify(%s)"
+ ATTACH_ACCESS = "Attach_Access(%s)"
+ DETACH_ACCESS = "Detach_Access(%s)"
+ ATTACH_PRODUCT = "Attach_Product(%s)"
+ DETACH_PRODUCT = "Detach_Product(%s)"
+ MARK_NEW = "Mark_New(%s)"
+ MARK_UPDATED = "Mark_Updated(%s)"
+ MARK_PREFIX = "Mark_"
+ MARK_NEW_PREFIX = "Mark_New"
+ MARK_UPDATED_PREFIX = "Mark_Updated"
+ MARK_UNMARK = "Mark_Unmark()"
+
+ # Update Report list
+ UPDATE_CHECK_LIST = (
+ PRIORITY,
+ STATUS,
+ SEVERITY_V3,
+ SEVERITY_V2,
+ RELEASE,
+ MARK_NEW,
+ MARK_UPDATED,
+ )
+
+ # Any matching string for the period indicates a reportable change
+ @staticmethod
+ def get_check_list():
+ check_list = []
+ for check in Update.UPDATE_CHECK_LIST:
+ simple_check = re.sub(r'\(.*', '(', check)
+ check_list.append(simple_check)
+ return(check_list)
+
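A small sketch (not part of the patch, assuming a working Django shell) of how the Update labels above are meant to compose into a history comment, and how get_check_list() decides whether a comment is reportable; the defect name reuses the ABCD-1234 placeholder from the class comment:

    from orm.models import Update

    comment = (Update.UPDATE_STR % Update.SOURCE_DEFECT) + ';'.join([
        Update.PRIORITY % ('UNDEFINED', 'Medium'),
        Update.STATUS % ('Historical', 'Investigate'),
    ]) + ' {Update from defect ABCD-1234}'
    # -> UPDATE(Defect):Priority(UNDEFINED,Medium);Status(Historical,Investigate) {Update from defect ABCD-1234}

    # Reportable if any check prefix (e.g. "Priority(") appears in the comment
    reportable = any(prefix in comment for prefix in Update.get_check_list())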
class SrtSetting(models.Model):
name = models.CharField(max_length=63)
helptext = models.TextField()
@@ -128,6 +388,11 @@ class SrtSetting(models.Model):
return(SrtSetting.objects.get(name=key).value)
except:
return(default)
+ @staticmethod
+ def set_setting(key,value):
+ obj,created = SrtSetting.objects.get_or_create(name=key)
+ obj.value = value
+ obj.save()
class HelpText(models.Model):
@@ -163,6 +428,10 @@ class DataSource(models.Model):
DATE_FORMAT = '%Y-%m-%d'
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
+ # Metadata
+ LOOKUP_MISSING = 'LOOKUP-MISSING'
+ PREVIEW_SOURCE = 'PREVIEW-SOURCE'
+
key = models.CharField(max_length=20)
data = models.CharField(max_length=20)
source = models.CharField(max_length=20)
@@ -194,20 +463,22 @@ class CweTable(models.Model):
class Cve(models.Model):
search_allowed_fields = ['name', 'description', 'publishedDate',
- 'lastModifiedDate', 'comments', 'comments_private']
+ 'lastModifiedDate', 'comments', 'comments_private', 'tags', 'packages']
# SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
# WR Status
@@ -256,6 +527,7 @@ class Cve(models.Model):
status = models.IntegerField(choices=STATUS, default=NEW)
comments = models.TextField(blank=True)
comments_private = models.TextField(blank=True)
+ tags = models.TextField(blank=True, default='', null=True)
cve_data_type = models.CharField(max_length=100, blank=True)
cve_data_format = models.CharField(max_length=50, blank=True)
@@ -264,6 +536,7 @@ class Cve(models.Model):
public = models.BooleanField(default=True)
publish_state = models.IntegerField(choices=PUBLISH_STATE, default=PUBLISH_UNPUBLISHED)
publish_date = models.CharField(max_length=50, blank=True)
+ acknowledge_date = models.DateTimeField(null=True)
description = models.TextField(blank=True)
publishedDate = models.CharField(max_length=50, blank=True)
@@ -281,25 +554,41 @@ class Cve(models.Model):
packages = models.TextField(blank=True)
score_date = models.DateField(null=True, blank=True)
- srt_updated = models.DateTimeField(auto_now=True)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
@property
def get_priority_text(self):
- return Cve.PRIORITY[int(self.priority)][1]
+ return SRTool.priority_text(self.priority)
+ @property
+ def get_status_text(self):
+ return SRTool.status_text(self.status)
@property
def get_publish_text(self):
return Cve.PUBLISH_STATE[int(self.publish_state)][1]
@property
- def get_status_text(self):
- return Cve.STATUS[int(self.status)][1]
- @property
def is_local(self):
try:
CveLocal.objects.get(name=self.name)
return True
except:
return False
-
+ @property
+ def get_publishset_state(self):
+ try:
+ obj = PublishSet.objects.get(cve=self)
+ return obj.state_text
+ except:
+ return PublishSet.PUBLISH_SET_STATE[PublishSet.PUBLISH_SET_NONE][1]
+ @property
+ def get_public_comments(self):
+ the_comments = self.comments.strip()
+ the_packages = self.packages.strip()
+ if not the_comments or not the_packages:
+ return '%s%s' % (the_comments,the_packages)
+ if the_comments == the_packages:
+ return the_comments
+ return '%s' % (the_comments)
class CveDetail():
# CPE item list
@@ -317,6 +606,7 @@ class CveDetail():
description = ''
publishedDate = ''
+ acknowledge_date = ''
lastModifiedDate = ''
url_title = ''
url = ''
@@ -431,11 +721,12 @@ class CveLocal(models.Model):
# Map of all sources for the given CVE
class CveSource(models.Model):
- cve = models.ForeignKey(Cve,related_name="cve_parent",on_delete=models.CASCADE,)
+ cve = models.ForeignKey(Cve,related_name="cve_parent",blank=True, null=True,on_delete=models.CASCADE,)
datasource = models.ForeignKey(DataSource,related_name="cve_datasource", blank=True, null=True,on_delete=models.CASCADE,)
class CveHistory(models.Model):
- cve = models.ForeignKey(Cve,related_name="cve_history",on_delete=models.CASCADE,)
+ search_allowed_fields = ['cve__name', 'comment', 'date', 'author']
+ cve = models.ForeignKey(Cve,related_name="cve_history",default=None, null=True, on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
author = models.TextField(blank=True)
@@ -485,12 +776,12 @@ class Package(models.Model):
@staticmethod
def update_computed_counts(package_name=None):
# A 'None' indicates all packages
- _log("update_computed_counts0:%s" % package_name)
+# _log("update_computed_counts0:%s" % package_name)
if package_name:
package_list = Package.objects.filter(name=package_name)
else:
package_list = Package.objects.all()
- _log("update_computed_counts:p:%s" % len(package_list))
+# _log("update_computed_counts:p:%s" % len(package_list))
for package in package_list:
try:
state = "p"
@@ -498,7 +789,7 @@ class Package(models.Model):
package.vulnerability_count = 0
package.investigation_count = 0
package.defect_count = 0
- _log("update_computed_counts2:c:%s" % len(package.package2cve.all()))
+# _log("update_computed_counts2:c:%s" % len(package.package2cve.all()))
for pc in package.package2cve.all():
cve = pc.cve
package.cve_count += 1
@@ -559,7 +850,7 @@ class CveToCwe(models.Model):
class CveReference(models.Model):
cve = models.ForeignKey(Cve,related_name="references",on_delete=models.CASCADE,)
- hyperlink = models.CharField(max_length=100)
+ hyperlink = models.CharField(max_length=100, null=True)
resource = models.CharField(max_length=100, null=True)
type = models.CharField(max_length=100, null=True)
source = models.CharField(max_length=100, null=True)
@@ -586,26 +877,20 @@ class Product(models.Model):
def long_name(self):
long_name = '%s %s %s' % (self.name,self.version,self.profile)
return long_name.strip()
- def get_defect_tag(self,tag):
- dict = json.loads(self.defect_tags)
- try:
- return dict[tag]
- except:
- _log("ERROR:get_defect_tag:%s[%s]" % (dict,tag))
- return ''
- def get_product_tag(self,tag):
- dict = json.loads(self.product_tags)
- try:
- return dict[tag]
- except:
- _log("ERROR:get_product_tags:%s[%s]" % (dict,tag))
- return ''
+ def get_defect_tag(self,tag,default=None):
+ return SRTool.get_dict_tag(tag,self.defect_tags,default)
+ def get_product_tag(self,tag,default=None):
+ return SRTool.get_dict_tag(tag,self.product_tags,default)
+ def get_defect_str(self):
+ return self.defect_tags.replace('"','')
+ def get_product_str(self):
+ return self.product_tags.replace('"','')
# VULNERABILITY
# Company-level Vulnerability Record
class Vulnerability(models.Model):
- search_allowed_fields = ['name', 'comments', 'comments_private']
+ search_allowed_fields = ['name', 'comments', 'comments_private', 'tags']
HISTORICAL = 0
NEW = 1
@@ -632,18 +917,21 @@ class Vulnerability(models.Model):
(FIXED, 'Closed (Fixed)'),
(NOT_FIX, "Closed (Won't Fix)"),
)
- # SRTool Severity, matched with Cve/Defect Priority with placeholder for 'minor'
+
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
name = models.CharField(max_length=50)
@@ -653,21 +941,26 @@ class Vulnerability(models.Model):
public = models.BooleanField(default=True)
comments = models.TextField(blank=True, default='')
comments_private = models.TextField(blank=True, default='')
+ tags = models.TextField(blank=True, default='')
status = models.IntegerField(choices=STATUS, default=INVESTIGATE)
outcome = models.IntegerField(choices=OUTCOME, default=OPEN)
priority = models.IntegerField(choices=PRIORITY, default=LOW)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
+
+ @property
+ def get_priority_text(self):
+ return SRTool.priority_text(self.priority)
@property
def get_status_text(self):
- return Vulnerability.STATUS[int(self.status)][1]
+ return SRTool.status_text(self.status)
@property
def get_outcome_text(self):
+ return SRTool.outcome_text(self.outcome)
- return Vulnerability.OUTCOME[int(self.outcome)][1]
@property
- def get_priority_text(self):
- return Vulnerability.PRIORITY[int(self.priority)][1]
- @property
def get_long_name(self):
if self.cve_primary_name:
return "%s (%s)" % (self.name,self.cve_primary_name)
@@ -698,6 +991,9 @@ class Vulnerability(models.Model):
print("Error in new_vulnerability_name")
raise
return "VUL-%05d" % index
+ @property
+ def investigation_list(self):
+ return VulnerabilityToInvestigation.objects.filter(vulnerability_id=self.id).order_by('investigation__product__order')
class VulnerabilityComments(models.Model):
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_comments",on_delete=models.CASCADE,)
@@ -706,6 +1002,7 @@ class VulnerabilityComments(models.Model):
author = models.TextField(blank=True)
class VulnerabilityHistory(models.Model):
+ search_allowed_fields = ['vulnerability__name', 'comment', 'date', 'author']
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_history",on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
@@ -733,58 +1030,62 @@ class Defect(models.Model):
#Issue Type,Key,Summary,Priority,Status,Resolution,Publish To OLS,Fix Version
#Bug,LIN10-2031,Security Advisory - libvorbis - CVE-2017-14633,P3,Closed,Fixed,Reviewed - Publish,10.17.41.3
- NONE = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
- Priority = (
- (NONE, 'None'),
- (MINOR, 'P4'),
- (LOW, 'P3'),
- (MEDIUM, 'P2'),
- (HIGH, 'P1'),
+ # Defect/SRTool Priority
+ DEFECT_UNDEFINED = 0
+ DEFECT_LOW = 1
+ DEFECT_MEDIUM = 2
+ DEFECT_HIGH = 3
+ DEFECT_CRITICAL = 4
+ DEFECT_PRIORITY_ERROR = 5
+ DEFECT_PRIORITY = (
+ (DEFECT_UNDEFINED, 'Undefined'),
+ (DEFECT_LOW, 'Low'),
+ (DEFECT_MEDIUM, 'Medium'),
+ (DEFECT_HIGH, 'High'),
+ (DEFECT_CRITICAL, 'Critical'),
+ (DEFECT_PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
- OPEN = 0
- IN_PROGRESS = 1
- ON_HOLD = 2
- CHECKED_IN = 3
- RESOLVED = 4
- CLOSED = 5
- Status = (
- (OPEN, 'Open'),
- (IN_PROGRESS, 'In progress'),
- (ON_HOLD, 'On Hold'),
- (CHECKED_IN, 'Checked In'),
- (RESOLVED, 'Resolved'),
- (CLOSED, 'Closed'),
+ DEFECT_STATUS_OPEN = 0
+ DEFECT_STATUS_IN_PROGRESS = 1
+ DEFECT_STATUS_ON_HOLD = 2
+ DEFECT_STATUS_CHECKED_IN = 3
+ DEFECT_STATUS_RESOLVED = 4
+ DEFECT_STATUS_CLOSED = 5
+ DEFECT_STATUS = (
+ (DEFECT_STATUS_OPEN, 'Open'),
+ (DEFECT_STATUS_IN_PROGRESS, 'In progress'),
+ (DEFECT_STATUS_ON_HOLD, 'On Hold'),
+ (DEFECT_STATUS_CHECKED_IN, 'Checked In'),
+ (DEFECT_STATUS_RESOLVED, 'Resolved'),
+ (DEFECT_STATUS_CLOSED, 'Closed'),
)
- UNRESOLVED = 0
- RESOLVED = 1
- FIXED = 2
- WILL_NOT_FIX = 3
- WITHDRAWN = 4
- REJECTED = 5
- DUPLICATE = 6
- NOT_APPLICABLE = 7
- REPLACED_BY_REQUIREMENT = 8
- CANNOT_REPRODUCE = 9
- DONE = 10
- Resolution = (
- (UNRESOLVED, 'Unresolved'),
- (RESOLVED, 'Resolved'),
- (FIXED, 'Fixed'),
- (WILL_NOT_FIX, 'Won\'t Fix'),
- (WITHDRAWN, 'Withdrawn'),
- (REJECTED, 'Rejected'),
- (DUPLICATE, 'Duplicate'),
- (NOT_APPLICABLE, 'Not Applicable'),
- (REPLACED_BY_REQUIREMENT, 'Replaced By Requirement'),
- (CANNOT_REPRODUCE, 'Cannot Reproduce'),
- (DONE, 'Done'),
+ DEFECT_UNRESOLVED = 0
+ DEFECT_RESOLVED = 1
+ DEFECT_FIXED = 2
+ DEFECT_WILL_NOT_FIX = 3
+ DEFECT_WITHDRAWN = 4
+ DEFECT_REJECTED = 5
+ DEFECT_DUPLICATE = 6
+ DEFECT_NOT_APPLICABLE = 7
+ DEFECT_REPLACED_BY_REQUIREMENT = 8
+ DEFECT_CANNOT_REPRODUCE = 9
+ DEFECT_DONE = 10
+ DEFECT_RESOLUTION = (
+ (DEFECT_UNRESOLVED, 'Unresolved'),
+ (DEFECT_RESOLVED, 'Resolved'),
+ (DEFECT_FIXED, 'Fixed'),
+ (DEFECT_WILL_NOT_FIX, 'Won\'t Fix'),
+ (DEFECT_WITHDRAWN, 'Withdrawn'),
+ (DEFECT_REJECTED, 'Rejected'),
+ (DEFECT_DUPLICATE, 'Duplicate'),
+ (DEFECT_NOT_APPLICABLE, 'Not Applicable'),
+ (DEFECT_REPLACED_BY_REQUIREMENT, 'Replaced By Requirement'),
+ (DEFECT_CANNOT_REPRODUCE, 'Cannot Reproduce'),
+ (DEFECT_DONE, 'Done'),
)
+
Components = (
'BSP',
'Kernel',
@@ -796,12 +1097,62 @@ class Defect(models.Model):
'Test',
)
+ HISTORICAL = 0
+ NEW = 1
+ NEW_RESERVED = 2
+ INVESTIGATE = 3
+ VULNERABLE = 4
+ NOT_VULNERABLE = 5
+ SRT_STATUS = (
+ (HISTORICAL, 'Historical'),
+ (NEW, 'New'),
+ (NEW_RESERVED, 'New-Reserved'),
+ (INVESTIGATE, 'Investigate'),
+ (VULNERABLE, 'Vulnerable'),
+ (NOT_VULNERABLE, 'Not Vulnerable'),
+ )
+
+ OPEN = 0
+ CLOSED = 1
+ FIXED = 2
+ NOT_FIX = 3
+ SRT_OUTCOME = (
+ (OPEN, 'Open'),
+ (CLOSED, 'Closed (Not Vulnerable)'),
+ (FIXED, 'Closed (Fixed)'),
+ (NOT_FIX, "Closed (Won't Fix)"),
+ )
+
+ # SRTool Priority
+ UNDEFINED = 0
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
+ SRT_PRIORITY = (
+ (UNDEFINED, 'Undefined'),
+ (LOW, 'Low'),
+ (MEDIUM, 'Medium'),
+ (HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
+ )
+
name = models.CharField(max_length=50)
summary = models.TextField(blank=True)
url = models.TextField(blank=True)
- priority = models.IntegerField(choices=Priority, default=MINOR)
- status = models.IntegerField(choices=Status, default=OPEN)
- resolution = models.IntegerField(choices=Resolution, default=UNRESOLVED)
+ duplicate_of = models.CharField(max_length=50, blank=True, default='')
+
+ # External defect specific values
+ priority = models.IntegerField(choices=DEFECT_PRIORITY, default=DEFECT_LOW)
+ status = models.IntegerField(choices=DEFECT_STATUS, default=DEFECT_STATUS_OPEN)
+ resolution = models.IntegerField(choices=DEFECT_RESOLUTION, default=DEFECT_UNRESOLVED)
+ # SRTool compatible values
+ srt_priority = models.IntegerField(choices=SRT_PRIORITY, default=LOW)
+ srt_status = models.IntegerField(choices=SRT_STATUS, default=INVESTIGATE)
+ srt_outcome = models.IntegerField(choices=SRT_OUTCOME, default=OPEN)
+
publish = models.TextField(blank=True)
release_version = models.CharField(max_length=50)
product = models.ForeignKey(Product,related_name="product_defect",on_delete=models.CASCADE,)
@@ -812,25 +1163,77 @@ class Defect(models.Model):
# Methods
@property
+ def get_defect_priority_text(self):
+ return Defect.DEFECT_PRIORITY[int(self.priority)][1]
+ @property
+ def get_defect_status_text(self):
+ return Defect.DEFECT_STATUS[int(self.status)][1]
+ @property
+ def get_defect_resolution_text(self):
+ return Defect.DEFECT_RESOLUTION[int(self.resolution)][1]
+ @property
def get_priority_text(self):
- return Defect.Priority[int(self.priority)][1]
+ return SRTool.priority_text(self.srt_priority)
@property
def get_status_text(self):
- return Defect.Status[int(self.status)][1]
+ return SRTool.status_text(self.srt_status)
+ @property
+ def get_outcome_text(self):
+ return SRTool.outcome_text(self.srt_outcome)
+ @property
+ def get_date_created_text(self):
+ return re.sub(r"T.*", "", self.date_created)
+ @property
+ def get_date_updated_text(self):
+ return re.sub(r"T.*", "", self.date_updated)
@property
- def get_resolution_text(self):
- return Defect.Resolution[int(self.resolution)][1]
def get_long_name(self):
if self.release_version:
return "%s (%s)" % (self.name,self.release_version)
return "%s" % (self.name)
+ @property
+ def get_cve_names(self):
+ cve_list = []
+ for di in InvestigationToDefect.objects.filter(defect = self):
+ for i2v in VulnerabilityToInvestigation.objects.filter(investigation = di.investigation):
+ for v2c in CveToVulnerablility.objects.filter(vulnerability = i2v.vulnerability):
+ cve_list.append(v2c.cve.name)
+ return ','.join(cve_list)
+ @property
+ def get_cve_ids(self):
+ cve_list = []
+ for di in InvestigationToDefect.objects.filter(defect = self):
+ for i2v in VulnerabilityToInvestigation.objects.filter(investigation = di.investigation):
+ for v2c in CveToVulnerablility.objects.filter(vulnerability = i2v.vulnerability):
+ cve_list.append(str(v2c.cve.id))
+ return ','.join(cve_list)
+ @property
+ def get_publishset_state(self):
+ pub_list = []
+ cve_list = self.get_cve_names
+ if not cve_list:
+ return PublishSet.PUBLISH_SET_STATE[PublishSet.PUBLISH_SET_NONE][1]
+ for cve_name in cve_list.split(','):
+ try:
+ cve = Cve.objects.get(name = cve_name)
+ pub_list.append(cve.get_publishset_state)
+ except Exception as e:
+ pass
+ return ','.join(pub_list)
+
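A brief, hypothetical usage sketch of the new linkage properties (the defect name and printed values are invented; in a real database they come from the Investigation/Vulnerability joins above):

    from orm.models import Defect

    defect = Defect.objects.get(name='DEFECT-XYZ')   # hypothetical defect name
    print(defect.get_cve_names)          # e.g. "CVE-2017-14633,CVE-2019-0001"
    print(defect.get_publishset_state)   # e.g. "New,Skip"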
+class DefectHistory(models.Model):
+ search_allowed_fields = ['defect__name', 'comment', 'date', 'author']
+ defect = models.ForeignKey(Defect,related_name="defect_history",on_delete=models.CASCADE,)
+ comment = models.TextField(blank=True)
+ date = models.DateField(null=True, blank=True)
+ author = models.TextField(blank=True)
# INVESTIGATION
# Product-level Vulnerability Investigation Record
class Investigation(models.Model):
- search_allowed_fields = ['name', 'comments', 'comments_private']
+ search_allowed_fields = ['name', 'comments', 'comments_private', 'tags']
HISTORICAL = 0
NEW = 1
@@ -858,18 +1261,22 @@ class Investigation(models.Model):
(NOT_FIX, "Closed (Won't Fix)"),
)
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
+
name = models.CharField(max_length=50)
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_investigation",on_delete=models.CASCADE,)
product = models.ForeignKey(Product,related_name="product_investigation",on_delete=models.CASCADE,)
@@ -877,21 +1284,25 @@ class Investigation(models.Model):
public = models.BooleanField(default=True)
comments = models.TextField(blank=True)
comments_private = models.TextField(blank=True)
+ tags = models.TextField(blank=True, default='')
status = models.IntegerField(choices=STATUS, default=OPEN)
outcome = models.IntegerField(choices=OUTCOME, default=INVESTIGATE)
priority = models.IntegerField(choices=PRIORITY, default=LOW)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
+
# Methods
@property
+ def get_priority_text(self):
+ return SRTool.priority_text(self.priority)
+ @property
def get_status_text(self):
- return Investigation.STATUS[int(self.status)][1]
+ return SRTool.status_text(self.status)
@property
def get_outcome_text(self):
- return Investigation.OUTCOME[int(self.outcome)][1]
- @property
- def get_priority_text(self):
- return Investigation.PRIORITY[int(self.priority)][1]
+ return SRTool.outcome_text(self.outcome)
@property
def get_long_name(self):
if self.vulnerability and self.vulnerability.cve_primary_name:
@@ -920,6 +1331,7 @@ class InvestigationComments(models.Model):
author = models.TextField(blank=True)
class InvestigationHistory(models.Model):
+ search_allowed_fields = ['investigation__name', 'comment', 'date', 'author']
investigation = models.ForeignKey(Investigation,related_name="investigation_history",on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
@@ -979,17 +1391,21 @@ def _log_args(msg, *args, **kwargs):
# Action items waiting
class Notify(models.Model):
search_allowed_fields = ['category','description','url']
+
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
category = models.CharField(max_length=50)
@@ -997,8 +1413,10 @@ class Notify(models.Model):
priority = models.IntegerField(default=0)
url = models.TextField(blank=True)
author = models.TextField(blank=True)
- srt_updated = models.DateTimeField(auto_now_add=True)
- srt_created = models.DateTimeField(auto_now=True)
+## srt_updated = models.DateTimeField(auto_now_add=True)
+## srt_created = models.DateTimeField(auto_now=True)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
@property
def get_priority_text(self):
@@ -1013,6 +1431,35 @@ class NotifyAccess(models.Model):
class NotifyCategories(models.Model):
category = models.CharField(max_length=50)
+class PublishSet(models.Model):
+ search_allowed_fields = ['cve__name','cve__description','cve__status','cve__publishedDate','cve__lastModifiedDate']
+
+ # Publish state
+ PUBLISH_SET_NONE = 0
+ PUBLISH_SET_NEW = 1
+ PUBLISH_SET_MODIFIED = 2
+ PUBLISH_SET_NEW_USER = 3
+ PUBLISH_SET_MODIFIED_USER = 4
+ PUBLISH_SET_ERROR = 5
+ PUBLISH_SET_STATE = (
+ (PUBLISH_SET_NONE, 'Skip'),
+ (PUBLISH_SET_NEW, 'New'),
+ (PUBLISH_SET_MODIFIED, 'Modified'),
+ (PUBLISH_SET_NEW_USER, 'New_User'),
+ (PUBLISH_SET_MODIFIED_USER, 'Modified_User'),
+ (PUBLISH_SET_ERROR, 'PUBLISH_SET_ERROR'),
+ )
+
+ cve = models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)
+ state = models.IntegerField(choices=PUBLISH_SET_STATE, default=PUBLISH_SET_NONE)
+ reason = models.TextField(blank=True)
+
+ @property
+ def state_text(self):
+ if (0 > self.state) or (self.state >= len(self.PUBLISH_SET_STATE)):
+ return self.PUBLISH_SET_STATE[self.PUBLISH_SET_ERROR][1]
+ return self.PUBLISH_SET_STATE[self.state][1]
+
#
# Database Cache Support
#
diff --git a/lib/srtgui/api.py b/lib/srtgui/api.py
index 16ff88b0..e84113a0 100644
--- a/lib/srtgui/api.py
+++ b/lib/srtgui/api.py
@@ -22,6 +22,10 @@ import os
import sys
import logging
import subprocess
+from datetime import datetime, date
+import traceback
+import re
+import json
from django.http import JsonResponse
@@ -70,6 +74,31 @@ def execute_process(*args):
return result.returncode,result.stdout,result.stderr
#
+# Update CVE datasource list: (a) fetch alt sources, (b) refresh preview sources
+#
+
+# #### TODO
+def update_cve_datasources(cve_object, source_filter=''):
+ from orm.models import DataSource, CveSource
+ # Attach all matching CVE sources
+ _log("Alternate1:%s" % (cve_object.name))
+ query_set = DataSource.objects.filter(data="cve")
+ if source_filter:
+ query_set = query_set.filter(source=source_filter)
+ for ds in query_set:
+ _log("Alternate2:%s" % (ds.key))
+ if ds.cve_filter and cve_object.name.startswith(ds.cve_filter):
+ cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=ds)
+ _log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
+
+ # Force update the CVE summary data from sources
+ result_returncode,result_stdout,result_stderr = execute_process(
+ './bin/nist/srtool_nist.py',
+ '--update-cve-list',
+ cve_object.name,
+ '--force'
+ )
+
+#
# Extract Upstream CVE record details
#
@@ -82,6 +111,7 @@ def readCveDetails_Upstream(cve, cve_datasource):
# Get the object
lookup_command = cve_datasource.lookup
+ lookup_attributes = ''
if not lookup_command:
v.description = "ERROR(%s):missing lookup command" % (cve_datasource.description)
return v
@@ -135,6 +165,16 @@ def readCveDetails_Upstream(cve, cve_datasource):
#_log("cpe_list:%s:%s:" % (cve.name,value))
elif name == 'ref_list':
v.ref_list = value
+ elif name == 'ATTRIBUTES':
+ # Returned metadata
+ lookup_attributes = value
+ _log("NOTE:readCveDetails_Upstream:%s:%s" % (v.name,v.cvssV2_severity))
+
+ # Check for metadata special cases
+ if cve_datasource.LOOKUP_MISSING in lookup_attributes:
+ pass
+
+
return v
#
@@ -338,3 +378,342 @@ def summaryCveDetails(cve,cve_sources):
cve_detail.ref_list = cve_main.ref_list
return cve_detail,cve_html
+
+#
+# Publish Support
+#
+
+
+# Accumulate the history status changes over the date range
+# CVE rec
+# cve[name][key][first,last]
+ # Severity_V3(8.0 HIGH,5.4 MEDIUM)
+ # Severity_V2(8.5 HIGH,4.3 MEDIUM)
+ # Priority(UNDEFINED,Medium)
+ # Status(Historical,Vulnerable)
+# CVE product/defect
+# cve[name][product][defect][key][first,last]
+ # Release(,8.0.0.30)
+ # Status(Historical,Vulnerable)
+
+
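A small illustration (not part of the patch) of how one change token from such a history comment is expected to decompose into field, old value, and new value, using the same regular expression that publishCalculate() compiles below:

    import re
    update_regex = re.compile(r"([^\(]*)\(([^,]*),([^\)]*)\)")

    m = update_regex.search("Severity_V3(8.0 HIGH,5.4 MEDIUM)")
    field, value_old, value_new = m.group(1), m.group(2), m.group(3)
    # field == 'Severity_V3', value_old == '8.0 HIGH', value_new == '5.4 MEDIUM'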
+# Calculate the publishable CVEs for a given period
+# Accumulate the CVE history status changes over the date range
+def publishCalculate(date_start,date_stop):
+ from orm.models import SrtSetting, PublishSet, Cve, CveHistory, DefectHistory, Update, SRTool, InvestigationToDefect, Product
+
+ # Precompile the filter for efficiency
+ update_regex = re.compile(r"([^\(]*)\(([^,]*),([^\)]*)\)")
+
+ # Accumulate the CVE history status changes
+ # Severity_V3(8.0 HIGH,5.4 MEDIUM)
+ # Severity_V2(8.5 HIGH,4.3 MEDIUM)
+ # Priority(UNDEFINED,Medium)
+ # Status(Historical,Vulnerable)
+ cve_updates = {}
+ # cve_updates[cve_id_str][key][first,last]
+ def cve_update(cve_id_str,change):
+ m = update_regex.search(change)
+ if m:
+ field = m.group(1)
+ value_old = m.group(2)
+ value_new = m.group(3)
+ else:
+ field = re.sub(r"\(.*", "", change)
+ value_old = ''
+ value_new = ''
+
+ if not field in ('Severity_V3','Severity_V2'):
+ return
+
+ # Fix-up
+ if ('Severity_V3' == field) or ('Severity_V2' == field):
+ score_old,severity_old = value_old.split(' ')
+ score_new,severity_new = value_new.split(' ')
+ if score_old.replace('0','') == score_new.replace('0',''):
+ return
+ if severity_old == severity_new:
+ return
+ value_old = severity_old
+ value_new = severity_new
+
+ if not cve_id_str in cve_updates:
+ cve_updates[cve_id_str] = {}
+ if not field in cve_updates[cve_id_str]:
+ # Preset the old value and accumulate the new value
+ cve_updates[cve_id_str][field] = [value_old,value_new]
+ else:
+ # Only accumulate the new value
+ cve_updates[cve_id_str][field] = [cve_updates[cve_id_str][field][0],value_new]
+
+ # Accumulate the CVE Defect history status changes
+ # Status(Historical,Vulnerable)
+ # Priority(UNDEFINED,Medium)
+ # Release(,8.0.0.30)
+ defect_updates = {}
+ # defect_updates[cve_id_str][product][defect][key][first,last]
+ def defect_update(cve_id_str,product_key,defect_name,change):
+ m = update_regex.search(change)
+ if m:
+ field = m.group(1)
+ value_old = m.group(2)
+ value_new = m.group(3)
+ else:
+ field = re.sub(r"\(.*", "", change)
+ value_old = ''
+ value_new = ''
+
+ if not cve_id_str in defect_updates:
+ defect_updates[cve_id_str] = {}
+ if not product_key in defect_updates[cve_id_str]:
+ defect_updates[cve_id_str][product_key] = {}
+ if not defect_name in defect_updates[cve_id_str][product_key]:
+ defect_updates[cve_id_str][product_key][defect_name] = {}
+ if not field in defect_updates[cve_id_str][product_key][defect_name]:
+ # Preset the old value and accumulate the new value
+ defect_updates[cve_id_str][product_key][defect_name][field] = [value_old,value_new]
+ else:
+ # Only accumulate the new value
+ defect_updates[cve_id_str][product_key][defect_name][field] = [defect_updates[cve_id_str][product_key][defect_name][field][0],value_new]
+
+ try:
+ PublishSet.objects.all().delete()
+
+ # Convert dates to CVE-type dates
+ date_start_text = date_start.strftime('%Y-%m-%d')
+ date_stop_text = date_stop.strftime('%Y-%m-%d')
+
+ # Find all candidate new CVEs
+ queryset = \
+ Cve.objects.filter(acknowledge_date__gte=date_start_text,acknowledge_date__lte=date_stop_text) | \
+ Cve.objects.filter(srt_created__gte=date_start,srt_created__lte=date_stop)
+ exclude_list = [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED]
+ queryset = queryset.exclude(status__in=exclude_list)
+
+ # Gather only CVE histories from currently supported products
+ # This assumes that the defect names have the format "<DEFECT_KEY>-*"
+ # Example entry: "CREATE(Defect): {Created from defect <DEFECT_KEY>-7058}"
+ # Gather the supported product keys
+ product_filter = []
+ product_query = Product.objects.filter()
+ for product in product_query:
+ if "support" == product.get_product_tag('mode').order_by('-order'):
+ product_filter.append(product.get_defect_tag('key'))
+ # Scan the CVE histories
+ new_cves = {}
+ create_filter = Update.CREATE_STR % Update.SOURCE_DEFECT
+ for cve in queryset:
+ try:
+ history_query = CveHistory.objects.filter(cve=cve,comment__startswith=create_filter)
+ if history_query:
+ supported = False
+ _keys = []
+ for history in history_query:
+ _keys.append(history.comment)
+ for key in product_filter:
+ # CREATE(Defect): {Created from defect <DEFECT_KEY>}
+ if 0 < history.comment.find(' %s-' % key):
+ supported = True
+ break
+ if not supported:
+ continue
+ except:
+ # No matches to test
+ pass
+
+ p = PublishSet(cve=cve, state=PublishSet.PUBLISH_SET_NEW, reason='LastModifiedDate(,%s)' % cve.lastModifiedDate)
+ p.save()
+ new_cves[str(cve.id)] = True
+
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+
+ # Find all candidate updated CVEs, made by user or imported from CVE integration tools
+ # UPDATE(CVE):Severity_V3(8.0 HIGH,5.4 MEDIUM);Severity_V2(8.5 HIGH,4.3 MEDIUM);LastModifiedDate(2017-08-12,2019-03-19)
+ for ch in CveHistory.objects.filter(date__gte=date_start_text,date__lte=date_stop_text,comment__startswith=Update.UPDATE_PREFIX_STR).order_by('date'):
+ # Already new
+ if str(ch.cve.id) in new_cves:
+ continue
+ # Ignore CVEs with a non-applicable status
+ if ch.cve.status in [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED]:
+ continue
+ change_str = re.sub(r"^.*:", "", ch.comment)
+ change_str = re.sub(r"{.*", "", change_str)
+ for change in change_str.split(';'):
+ cve_update(str(ch.cve.id),change)
+
+ # Find all candidate updated Defects, made by user or imported from defect integration tools
+ # UPDATE(Defect):Priority(UNDEFINED,Medium);Status(Historical,Investigate);Release(,8.0.0.30) {Update from defect LIN8-8669}
+ for dh in DefectHistory.objects.filter(date__gte=date_start_text,date__lte=date_stop_text,comment__startswith=Update.UPDATE_PREFIX_STR).order_by('date'):
+ # Get the product key
+ for i2d in InvestigationToDefect.objects.filter(defect_id=dh.defect.id):
+ # get first product key
+ product_key = i2d.product.key
+ break
+ else:
+ # no investigation for this orphaned defect
+ continue
+ change_str = re.sub(r"^.*:", "", dh.comment)
+ change_str = re.sub(r"{.*", "", change_str)
+ for change in change_str.split(';'):
+ cve_id_strs = dh.defect.get_cve_ids
+ for cve_id_str in cve_id_strs.split(','):
+ # Already new
+ if cve_id_str in new_cves:
+ continue
+ defect_update(cve_id_str,product_key,dh.defect.name,change)
+
+
+ # Merge manual Marks to table
+ queryset = CveHistory.objects.filter(
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ if cvehistory.comment.startswith(Update.MARK_NEW_PREFIX):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NEW_USER
+ publish_object.reason= "CC " + cvehistory.comment
+ publish_object.save()
+ elif cvehistory.comment.startswith(Update.MARK_UPDATED_PREFIX):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED_USER
+ publish_object.reason= "DD " + cvehistory.comment
+ publish_object.save()
+ elif cvehistory.comment.startswith(Update.MARK_UNMARK):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NONE
+ publish_object.reason= "EE " + cvehistory.comment
+ _log("PUBLISH_SET_NONE(%d):%s:%s" % (cvehistory.id,cvehistory.cve.name,cvehistory.comment))
+ publish_object.save()
+
+ #
+ # for all cves, merge data, create publish records
+ # cve_change_tree[cve_id_str][dict]
+ #
+
+ cve_change_tree = {}
+ # cve_updates[cve_id_str][key][first,last]
+ for cve_id_str in cve_updates:
+ if not cve_id_str in cve_change_tree:
+ cve_change_tree[cve_id_str] = {}
+ for key in cve_updates[cve_id_str]:
+ cve_change_tree[cve_id_str][key] = cve_updates[cve_id_str][key]
+
+ # defect_updates[cve_id_str][product][defect][key][first,last]
+ for cve_id_str in defect_updates:
+ if not cve_id_str in cve_change_tree:
+ cve_change_tree[cve_id_str] = {}
+ for product in defect_updates[cve_id_str]:
+ product_updates = []
+ for defect in defect_updates[cve_id_str][product]:
+ defect_changes = []
+ for key in defect_updates[cve_id_str][product][defect].keys():
+ defect_changes.append('%s(%s,%s)' % (key,defect_updates[cve_id_str][product][defect][key][0],defect_updates[cve_id_str][product][defect][key][1]))
+ product_updates.append('%s[%s]' % (defect,','.join(defect_changes)))
+ cve_change_tree[cve_id_str][product] = '|'.join(product_updates)
+
+ # Create publish records
+ for cve_id_str in cve_change_tree:
+ publish_object,created = PublishSet.objects.get_or_create(cve_id=int(cve_id_str))
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED
+ publish_object.reason = json.dumps(cve_change_tree[cve_id_str])
+ publish_object.save()
+
+ # Update last calculation date
+ SrtSetting.set_setting('publish_last_calc',datetime.today().strftime('%m/%d/%Y %H:%M'))
+ except Exception as e:
+ _log("ERROR:publishCalculate:%s,%s." % (e,traceback.print_stack()))
+
+
+# Reset: for each CVE History:
+# (a) Remove any MARK_NEW or MARK_UPDATED in the period
+#
+def publishReset(date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ #bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+
+ # Deleted manual Marks from table
+ queryset = CveHistory.objects.filter(
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ if cvehistory.comment.startswith(Update.MARK_PREFIX):
+ cvehistory.delete()
+
+# MarkNew: for each CVE:
+# (a) Remove any previous MARK_UPDATED in the period (there can be many periods)
+# (b) Remove any previous MARK_NEW (there can only be one)
+# (c) Insert MARK_NEW at period's middle date
+#
+def publishMarkNew(cve_list,reason_map,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+ mid_date = date_start + (date_stop - date_start)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ # Remove marks in period
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ # Remove all mark news
+ queryset = CveHistory.objects.filter(cve = cve,comment__startswith = Update.MARK_NEW_PREFIX)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_NEW % reason_map[cve_name], date=mid_date, author='SRTool')
+ cvehistory.save()
+
+# MarkModified: for each CVE:
+# (a) Remove any previous MARK_UPDATED in the period (there can be many periods)
+# (b) Insert MARK_UPDATED at period's middle date
+#
+def publishMarkModified(cve_list,reason_map,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+ mid_date = date_start + (date_stop - date_start)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ # Remove mark in period
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_UPDATED % reason_map[cve_name], date=mid_date, author='SRTool')
+ cvehistory.save()
+
+# MarkNone: for each CVE:
+# (a) Remove any MARK_NEW or MARK_UPDATED in the period
+#
+def publishMarkNone(cve_list,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ date_start_max = max(date_start,bootstrap_date)
+ mid_date = date_start_max + (date_stop - date_start_max)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_UNMARK, date=mid_date, author='SRTool')
+ cvehistory.save()
+
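To round out the publish helpers above, a hypothetical call sequence; the CVE names, reason text, and dates are invented, and the import path assumes lib/ is on sys.path as elsewhere in the tool:

    from datetime import datetime
    from srtgui.api import publishMarkNew, publishMarkNone

    date_start = datetime.strptime('2019-04-01', '%Y-%m-%d')
    date_stop  = datetime.strptime('2019-04-30', '%Y-%m-%d')

    # Insert a Mark_New history entry so the next publishCalculate() treats this CVE as user-marked new
    publishMarkNew('CVE-2019-0001', {'CVE-2019-0001': 'requested by reviewer'}, date_start, date_stop)

    # Explicitly exclude another CVE from the publish set for the same period
    publishMarkNone('CVE-2019-0002', date_start, date_stop)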
diff --git a/lib/srtgui/reports.py b/lib/srtgui/reports.py
index 572e45bf..e282a8d0 100644
--- a/lib/srtgui/reports.py
+++ b/lib/srtgui/reports.py
@@ -25,6 +25,8 @@ import csv
from orm.models import Cve, CveSource, Vulnerability, Investigation, Defect, Product
from orm.models import Package
+from orm.models import SRTool, SrtSetting
+from orm.models import PublishSet, DefectHistory
from srtgui.api import readCveDetails, summaryCveDetails
from django.db.models import Q
@@ -93,7 +95,14 @@ class ManagementReport(Report):
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
- <input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
return context
@@ -107,12 +116,15 @@ class ManagementReport(Report):
format = request_POST.get('format', '')
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/management_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/management_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
- tab = "\t"
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
else:
tab = " = "
@@ -125,11 +137,13 @@ class ManagementReport(Report):
file.write("%s%s%s\n" % ('cve_open',tab,Cve.objects.filter( Q(status=Cve.INVESTIGATE) & Q(status=Cve.VULNERABLE) ).count()))
file.write("%s%s%s\n" % ('vulnerability_total',tab,Vulnerability.objects.all().count()))
file.write("%s%s%s\n" % ('vulnerability_open',tab,Vulnerability.objects.filter(outcome=Vulnerability.OPEN).count()))
+ file.write("%s%s%s\n" % ('vulnerability_critical',tab,Vulnerability.objects.filter(priority=Vulnerability.CRITICAL).count()))
file.write("%s%s%s\n" % ('vulnerability_high',tab,Vulnerability.objects.filter(priority=Vulnerability.HIGH).count()))
file.write("%s%s%s\n" % ('vulnerability_medium',tab,Vulnerability.objects.filter(priority=Vulnerability.MEDIUM).count()))
- file.write("%s%s%s\n" % ('vulnerability_low',tab,Vulnerability.objects.filter(priority=Vulnerability.HIGH).count()))
+ file.write("%s%s%s\n" % ('vulnerability_low',tab,Vulnerability.objects.filter(priority=Vulnerability.LOW).count()))
file.write("%s%s%s\n" % ('investigation_total',tab,Investigation.objects.all().count()))
file.write("%s%s%s\n" % ('investigation_open',tab,Investigation.objects.filter(outcome=Investigation.OPEN).count()))
+ file.write("%s%s%s\n" % ('investigation_critical',tab,Investigation.objects.filter(priority=Investigation.CRITICAL).count()))
file.write("%s%s%s\n" % ('investigation_high',tab,Investigation.objects.filter(priority=Investigation.HIGH).count()))
file.write("%s%s%s\n" % ('investigation_medium',tab,Investigation.objects.filter(priority=Investigation.MEDIUM).count()))
file.write("%s%s%s\n" % ('investigation_low',tab,Investigation.objects.filter(priority=Investigation.LOW).count()))
@@ -254,7 +268,7 @@ class CveReport(Report):
cve = Cve.objects.get(id=record_list)
- report_name = '%s/cve_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -417,9 +431,9 @@ class VulnerabilityReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- v = Vulnerability.objects.get(id=record_list)
+ vulnerability = Vulnerability.objects.get(id=record_list)
- report_name = '%s/vulnerability_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/vulnerability_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -429,24 +443,26 @@ class VulnerabilityReport(Report):
if ('summary' == report_type) or ('audit' == report_type):
if 'txt' == format:
- file.write("Report : Vulnerability %s - Summary\n" % v.name)
+ file.write("Report : Vulnerability %s - Summary\n" % vulnerability.name)
file.write("\n")
- file.write("Vulnerability: %s\n" % v.name)
- file.write(" Status: %s\n" % v.get_status_text)
- file.write(" Outcome: %s\n" % v.get_outcome_text)
- file.write(" Priority: %s\n" % v.get_priority_text)
- file.write(" Comments: %s\n" % v.comments)
+ file.write("Vulnerability: %s\n" % vulnerability.name)
+ file.write(" Status: %s\n" % vulnerability.get_status_text)
+ file.write(" Outcome: %s\n" % vulnerability.get_outcome_text)
+ file.write(" Priority: %s\n" % vulnerability.get_priority_text)
+ file.write(" Comments: %s\n" % vulnerability.comments)
file.write("\n")
- file.write("Affected Products:\n")
+ file.write("Products:\n")
found_p = False
- for i,p in enumerate(v.get_affected_list):
- found_p = True
- file.write("%2d) Product: %s\n" % (i,p.product.long_name))
- found_i = False
- for investigation in Investigation.objects.filter(vulnerability=v,product=p.product):
+ for i,product in enumerate(Product.objects.all().order_by('order')):
+ product_header = False
+ for investigation in Investigation.objects.filter(vulnerability=vulnerability,product=product):
+ found_p = True
found_i = True
+ if not product_header:
+ file.write("%2d) Product: %s\n" % (i+1,investigation.product.long_name))
+ product_header = True
file.write(" Investigation: %s\n" % investigation.name)
file.write(" Status: %s\n" % investigation.get_status_text)
file.write(" Outcome: %s\n" % investigation.get_outcome_text)
@@ -457,24 +473,13 @@ class VulnerabilityReport(Report):
file.write(",")
file.write("%s (%s)" % (id.defect.name,id.defect.get_status_text))
file.write("\n")
- if not found_i:
- file.write(" No investigations found\n")
- if not found_p:
- file.write(" No affected products found\n")
-
- file.write("\n")
- file.write("Related Products:\n")
- found_p = False
- for i,p in enumerate(v.get_related_list):
- found_p = True
- file.write("%2d) Product: %s\n" % (i,p.product.long_name))
if not found_p:
- file.write(" No related products found\n")
+ file.write(" No products found\n")
file.write("\n")
file.write("Comments:\n")
found_c = False
- for i,vc in enumerate(v.vulnerability_comments.all()):
+ for i,vc in enumerate(vulnerability.vulnerability_comments.all()):
found_c = True
file.write(" %2d) %s (%s): %s\n" % (i,vc.date,vc.author,vc.comment))
if not found_c:
@@ -483,7 +488,7 @@ class VulnerabilityReport(Report):
if 'audit' == report_type:
file.write("\n")
file.write("Audit Trail:\n")
- for i,vh in enumerate(v.vulnerability_history.all()):
+ for i,vh in enumerate(vulnerability.vulnerability_history.all()):
file.write(" %2d) %s (%s): %s\n" % (i,vh.date,vh.author,vh.comment))
file.write("\n")
@@ -529,7 +534,7 @@ class InvestigationReport(Report):
investigation = Investigation.objects.get(id=record_list)
- report_name = '%s/investigation_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/investigation_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -593,7 +598,14 @@ class DefectReport(Report):
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
- <input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
return context
@@ -608,29 +620,36 @@ class DefectReport(Report):
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/defect_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/defect_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
- tab = "\t"
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
else:
tab = ","
if ('summary' == report_type):
if 'csv' == format:
- file.write("Name\tSummary\tPriority\tStatus\tResolution\tReleased Version\tURL\tInvestigations\tProduct\n")
+ file.write("Name\tSummary\tPriority\tStatus\tResolution\tSRT Priority\tSRT Status\tSRT Outcome\tReleased Version\tURL\tInvestigations\tProduct\n")
if 'txt' == format:
file.write("Report : Defects Table\n")
file.write("\n")
- file.write("Name,Summary,Priority,Status,Resolution,Released Version,URL,Investigations,Product\n")
+ file.write("Name,Summary,Priority,Status,Resolution,SRT Priority,SRT Status,SRT Outcome,Released Version,URL,Investigations,Product\n")
defect = Defect.objects.get(id=record_list)
file.write("%s%s" % (defect.name,tab))
file.write("%s%s" % (defect.summary,tab))
+
+ file.write("%s%s" % (defect.get_defect_priority_text,tab))
+ file.write("%s%s" % (defect.get_defect_status_text,tab))
+ file.write("%s%s" % (defect.get_defect_resolution_text,tab))
file.write("%s%s" % (defect.get_priority_text,tab))
file.write("%s%s" % (defect.get_status_text,tab))
- file.write("%s%s" % (defect.get_resolution_text,tab))
+ file.write("%s%s" % (defect.get_outcome_text,tab))
file.write("%s%s" % (defect.release_version,tab))
file.write("%s%s" % (defect.publish,tab))
file.write("%s%s" % (defect.url,tab))
@@ -832,7 +851,7 @@ class CvesReport(Report):
defect.name if defect else '<no_defect>',
defect.get_priority_text if defect else '',
defect.get_status_text if defect else '',
- defect.get_resolution_text if defect else '',
+ defect.get_defect_resolution_text if defect else '',
])
else:
writer.writerow([
@@ -848,7 +867,7 @@ class CvesReport(Report):
defect.name if defect else '<no_defect>',
defect.get_priority_text if defect else '',
defect.get_status_text if defect else '',
- defect.get_resolution_text if defect else '',
+ defect.get_defect_resolution_text if defect else '',
])
def exec_report(self, *args, **kwargs):
@@ -865,7 +884,7 @@ class CvesReport(Report):
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
- report_name = '%s/cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
delimiter = '\t'
else:
@@ -947,7 +966,7 @@ class SelectCvesReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/select_cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/select_cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -1105,7 +1124,7 @@ class VulnerabilitiesReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/vulnerabilities_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/vulnerabilities_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
delimiter = '\t'
else:
@@ -1153,11 +1172,18 @@ class InvestigationsReport(Report):
<input type="radio" name="columns" value="all" > All<br> \
'
context['report_format_list'] = '\
- <input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
context['report_custom_list'] = '\
- Product defect prefix filter = <input type="text" placeholder="e.g. LIN9" name="name_filter" size="40"> (method to filter by product)<br>\
+ Product defect prefix filter = <input type="text" placeholder="" name="name_filter" size="40"> (method to filter by product)<br>\
'
return context
@@ -1175,6 +1201,7 @@ class InvestigationsReport(Report):
'Comments Private',
'Vulnerability',
'Product',
+ 'Updated',
])
else:
writer.writerow([
@@ -1188,6 +1215,7 @@ class InvestigationsReport(Report):
'Comments Private',
'Vulnerability',
'Product',
+ 'Updated',
])
else:
investigation_defects = ''
@@ -1212,6 +1240,7 @@ class InvestigationsReport(Report):
investigation.comments_private,
investigation.vulnerability.get_long_name,
investigation.product.long_name,
+ investigation.srt_updated.strftime('%m-%d-%Y'),
])
else:
writer.writerow([
@@ -1225,6 +1254,7 @@ class InvestigationsReport(Report):
investigation.comments_private,
investigation.vulnerability.get_long_name,
investigation.product.long_name,
+ investigation.srt_updated.strftime('%m-%d-%Y'),
])
def exec_report(self, *args, **kwargs):
@@ -1240,12 +1270,16 @@ class InvestigationsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ report_name = '%s/investigations_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
- report_name = '%s/investigations_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
- delimiter = ','
+ delimiter = ","
with open(report_name, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=delimiter,
@@ -1293,11 +1327,19 @@ class DefectsReport(Report):
'
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
+
context['report_custom_list'] = '\
- Defect name filter = <input type="text" placeholder="e.g. LIN9" name="name_filter" size="40"> <br>\
- '
+ Defect name filter = <input type="text" placeholder="e.g. %s" name="name_filter" size="40"> <br>\
+ ' % SrtSetting.get_setting('SRTOOL_DEFECT_SAMPLENAME',"DEFECT-XYZ")
return context
def print_row(self,writer,is_header,is_full,defect):
@@ -1309,6 +1351,9 @@ class DefectsReport(Report):
'Priority',
'Status',
'Resolution',
+ 'SRT Priority',
+ 'SRT Status',
+ 'SRT Outcome',
'Release Version',
'Publish',
'Investigations',
@@ -1321,6 +1366,9 @@ class DefectsReport(Report):
'Priority',
'Status',
'Resolution',
+ 'SRT Priority',
+ 'SRT Status',
+ 'SRT Outcome',
'Release Version',
'Publish',
'URL',
@@ -1337,9 +1385,12 @@ class DefectsReport(Report):
writer.writerow([
defect.name,
defect.summary,
+ defect.get_defect_priority_text,
+ defect.get_defect_status_text,
+ defect.get_defect_resolution_text,
defect.get_priority_text,
defect.get_status_text,
- defect.get_resolution_text,
+ defect.get_outcome_text,
defect.release_version,
defect.publish,
defect_investigations,
@@ -1349,9 +1400,12 @@ class DefectsReport(Report):
writer.writerow([
defect.name,
defect.summary,
+ defect.get_defect_priority_text,
+ defect.get_defect_status_text,
+ defect.get_defect_resolution_text,
defect.get_priority_text,
defect.get_status_text,
- defect.get_resolution_text,
+ defect.get_outcome_text,
defect.release_version,
defect.publish,
defect.url,
@@ -1372,10 +1426,13 @@ class DefectsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/defects_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/defects_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
delimiter = ','
with open(report_name, 'w', newline='') as csvfile:
@@ -1437,7 +1494,7 @@ class ProductsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -1504,7 +1561,6 @@ class PublishCveReport(Report):
_log_args("REPORT_PUBLISHCVE_EXEC", *args, **kwargs)
super(PublishCveReport, self).exec_report(*args, **kwargs)
- _log("FOO1")
request_POST = self.request.POST
records = request_POST.get('records', '')
@@ -1513,11 +1569,9 @@ class PublishCveReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- _log("FOO2 (%s,%s,%s" % (record_list,format,report_type))
- report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
- _log("FOO3")
if 'csv' == format:
tab = "\t"
else:
@@ -1531,9 +1585,7 @@ class PublishCveReport(Report):
file.write("\n")
file.write("Name,Status,Type,Format,Version,Vulnerabilities,Description\n")
- _log("FOO4")
for id in record_list.split(','):
- _log("FOO5:%s" % id)
if not id:
continue
try:
@@ -1553,9 +1605,8 @@ class PublishCveReport(Report):
file.write("%s" % (cve.description))
file.write("\n")
except Exception as e:
- _log("FOOX:%s" % e)
+ _log("EXCEPTION:%s" % e)
- _log("FOO9:%s" % (report_name))
return report_name,os.path.basename(report_name)
class PublishPendingCveReport(Report):
@@ -1586,7 +1637,6 @@ class PublishPendingCveReport(Report):
_log_args("REPORT_PUBLISHPENDINGCVE_EXEC", *args, **kwargs)
super(PublishPendingCveReport, self).exec_report(*args, **kwargs)
- _log("FOO1")
request_POST = self.request.POST
records = request_POST.get('records', '')
@@ -1595,11 +1645,9 @@ class PublishPendingCveReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- _log("FOO2 (%s,%s,%s" % (record_list,format,report_type))
- report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
- _log("FOO3")
if 'csv' == format:
tab = "\t"
else:
@@ -1613,11 +1661,9 @@ class PublishPendingCveReport(Report):
file.write("\n")
file.write("Name,Status,Type,Format,Version,Vulnerabilities,Description\n")
- _log("FOO4")
for id in record_list.split(','):
if not id:
continue
- _log("FOO5:%s" % id)
try:
cve = Cve.objects.get(id=id)
file.write("%s%s" % (cve.name,tab))
@@ -1635,9 +1681,188 @@ class PublishPendingCveReport(Report):
file.write("%s" % (cve.description))
file.write("\n")
except Exception as e:
- _log("FOOX:%s" % e)
+ _log("EXCEPTION:%s" % e)
+
+ return report_name,os.path.basename(report_name)
+
+class PublishListReport(Report):
+ """Report for the Publish Cve Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_INIT(%s)" % parent_page, *args, **kwargs)
+ super(PublishListReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_CONTEXT", *args, **kwargs)
+ context = super(PublishListReport, self).get_context_data(*args, **kwargs)
+ context['report_type_list'] = '\
+ <option value="preview">Preview CVE Publish List</option> \
+ <option value="report">Publish Report </option> \
+ '
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_EXEC", *args, **kwargs)
+ super(PublishListReport, self).exec_report(*args, **kwargs)
+
+ request_POST = self.request.POST
+ format = request_POST.get('format', '')
+ report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ report_name = '%s/publish_list_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
+ else:
+ tab = ","
+
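+ # 'preview' dumps the current publish set as-is; 'report' adds one status column per product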
+ if ('preview' == report_type):
+ if 'csv' == format:
+ file.write("State\tCve_Name\tCve_Published\tCve_Modified\tCve_Status\tCve_Acknowledge\tReason\tCVE_Description\n".replace('\t',tab))
+ if 'txt' == format:
+ file.write("Report : CVEs Table\n")
+ file.write("\n")
+ file.write('%-7s,%-18s,%11s,%11s,%16s,%11s,%-35s,%s\n' % ('State','Cve_Name','Published','Modified','Cve_Status','Acknowledge','CVE_Description','Reason'))
+
+ for publishset in PublishSet.objects.all():
+ if 'csv' == format:
+ file.write("%s%s" % (publishset.state_text,tab))
+ file.write("%s%s" % (publishset.cve.name,tab))
+ file.write("%s%s" % (publishset.cve.publishedDate,tab))
+ file.write("%s%s" % (publishset.cve.lastModifiedDate,tab))
+ file.write("%s%s" % (publishset.cve.get_status_text,tab))
+ file.write("%s%s" % (publishset.cve.acknowledge_date,tab))
+ file.write("%s%s" % (publishset.reason,tab))
+ file.write("%s%s" % (publishset.cve.description,tab))
+ file.write("\n")
+ if 'txt' == format:
+ try:
+ acknowledge_date = publishset.cve.acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ if publishset.cve.description:
+ description = publishset.cve.description[:30] + '...'
+ else:
+ description = ''
+ file.write("%-7s," % publishset.state_text)
+ file.write("%-18s," % publishset.cve.name)
+ file.write("%11s," % publishset.cve.publishedDate)
+ file.write("%11s," % publishset.cve.lastModifiedDate)
+ file.write("%16s," % publishset.cve.get_status_text)
+ file.write("%11s," % acknowledge_date)
+ file.write("%-35s," % description)
+ file.write("%s," % publishset.reason)
+ file.write("\n")
+
+ if ('report' == report_type):
+ product_list = Product.objects.all()
+
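+ # Helper (per CVE): map each product key to a defect release version when one
+ # exists, else an explicit investigation status, else the CVE's overall status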
+ def get_product_status_matrix(product_list,cve):
+ # Preset the default product status labels
+ status_table = {}
+ product_top_order = 99
+ product_top_defect = []
+ for product in product_list:
+ status_table[product.key] = cve.get_status_text
+ # Set the specific status for the child investigations
+ for cv in cve.cve_to_vulnerability.all():
+ #status_text = cv.vulnerability.get_status_text
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+ product_key = investigation.product.key
+ release_version_list = []
+ for id in investigation.investigation_to_defect.all():
+ # Find defect(s) for highest ordered product
+ if product_top_order > investigation.product.order:
+ product_top_order = investigation.product.order
+ product_top_defect = []
+ if product_top_order == investigation.product.order:
+ product_top_defect.append(id.defect.name)
+ # Gather the status or release version
+ if id.defect.release_version:
+ release_version_list.append(id.defect.release_version)
+ release_version = '/'.join(release_version_list)
+ if release_version:
+ status_table[product_key] = release_version
+ elif investigation.status in (SRTool.NOT_VULNERABLE,SRTool.VULNERABLE):
+ status_table[product_key] = investigation.get_status_text
+ else:
+ status_table[product_key] = ''
+ return status_table
+
+ if 'csv' == format:
+ file.write("State\tCve_Name\tCve_Published\tCve_Modified\tCve_Status\tCve_Acknowledge\tReason\tCVE_Description".replace('\t',tab))
+ for product in product_list:
+ file.write("%s%s" % (tab,product.long_name))
+ file.write("\n")
+
+ if 'txt' == format:
+ file.write("Report : CVEs Table\n")
+ file.write("\n")
+ file.write('%-7s,%-18s,%11s,%11s,%16s,%11s,%-35s,' % ('State','Cve_Name','Published','Modified','Cve_Status','Acknowledge','CVE_Description'))
+ for product in product_list:
+ min_len = max(16,len(product.long_name)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ file.write(str_format % product.long_name)
+ file.write("\n")
+ for publishset in PublishSet.objects.all():
+ if 'csv' == format:
+ # Print common status
+ file.write("%s%s" % (publishset.state_text,tab))
+ file.write("%s%s" % (publishset.cve.name,tab))
+ file.write("%s%s" % (publishset.cve.publishedDate,tab))
+ file.write("%s%s" % (publishset.cve.lastModifiedDate,tab))
+ file.write("%s%s" % (publishset.cve.get_status_text,tab))
+ file.write("%s%s" % (publishset.cve.acknowledge_date,tab))
+ file.write("%s%s" % (publishset.reason,tab))
+ file.write("%s%s" % (publishset.cve.description,tab))
+ # Compute the product columns
+ status_table = get_product_status_matrix(product_list,publishset.cve)
+ # Print the product columns
+ for product in Product.objects.all():
+ file.write("%s%s" % (status_table[product.key],tab))
+ file.write("\n")
+ if 'txt' == format:
+ try:
+ acknowledge_date = publishset.cve.acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ if publishset.cve.description:
+ description = publishset.cve.description[:30] + '...'
+ else:
+ description = ''
+ # Print common status
+ file.write("%-7s," % publishset.state_text)
+ file.write("%-18s," % publishset.cve.name)
+ file.write("%11s," % publishset.cve.publishedDate)
+ file.write("%11s," % publishset.cve.lastModifiedDate)
+ file.write("%16s," % publishset.cve.get_status_text)
+ file.write("%11s," % acknowledge_date)
+ file.write("%-35s," % description)
+ # Compute the product columns
+ status_table = get_product_status_matrix(product_list,publishset.cve)
+ # Print the product columns
+ for product in Product.objects.all():
+ min_len = max(16,len(product.long_name)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ file.write(str_format % status_table[product.key])
+ file.write("\n")
- _log("FOO9:%s" % (report_name))
return report_name,os.path.basename(report_name)
class PackageFiltersReport(Report):
@@ -1659,7 +1884,7 @@ class PackageFiltersReport(Report):
'
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
- <input type="radio" name="format" value="csv" checked> CSV<br> \
+ <input type="radio" name="format" value="csv" checked> r<br> \
'
return context
@@ -1731,7 +1956,7 @@ class CpesSrtoolReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/cpes_srtool_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cpes_srtool_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
reportfile = open(report_name, 'w', newline='')
if 'csv' == format:
@@ -1776,6 +2001,111 @@ class CpesSrtoolReport(Report):
return report_name,os.path.basename(report_name)
+###############################################################################
+#
+# History reports
+#
+
+class HistoryDefectReport(Report):
+ """Report for the History Defect Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_INIT(%s)" % parent_page, *args, **kwargs)
+ super(HistoryDefectReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_CONTEXT", *args, **kwargs)
+ context = super(HistoryDefectReport, self).get_context_data(*args, **kwargs)
+
+ context['report_type_list'] = '\
+ <option value="history">Defect History</option> \
+ '
+
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+
+ context['report_recordrange_list'] = '\
+ <input type="radio" name="records" value="selected" checked> Selected<br> \
+ <input type="radio" name="records" value="all"> All<br> \
+ '
+
+ # Add a date range
+ date_start = datetime.strptime('2019-2-15', '%Y-%m-%d')
+ date_stop = datetime.strptime('2019-3-15', '%Y-%m-%d')
+ context['report_date_list'] = '\
+ Start: <input type="text" name="date_start" value="%s"><br> \
+ Stop: <input type="text" name="date_stop" value="%s"> \
+ ' % (date_start.strftime('%m/%d/%Y'),date_stop.strftime('%m/%d/%Y'))
+
+ # Done!
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_EXEC", *args, **kwargs)
+
+ request_POST = self.request.POST
+
+ records = request_POST.get('records', '')
+ format = request_POST.get('format', '')
+# title = request_POST.get('title', '')
+ report_type = request_POST.get('report_type', '')
+ record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request_POST.get('date_start', '')
+ date_start = datetime.strptime(request_POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request_POST.get('date_stop', '')
+ date_stop = datetime.strptime(request_POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+ return 'Error:stop date is before start date',''
+ except Exception as e:
+ return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+
+ report_name = '%s/defect_history_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ separator = ";"
+ if csv_separator == 'comma': separator = ","
+ if csv_separator == 'tab': separator = "\t"
+ writer = csv.writer(file, delimiter=separator,
+ quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ else:
+ separator = ","
+
+ if ('history' == report_type):
+ if 'csv' == format:
+ writer.writerow(['Index','Defect','Date','Author','Comment'])
+ if 'txt' == format:
+ file.write("Report : Defect History\n")
+ file.write("\n")
+ text_format='%02d) %-14s %-10s %-10s %s\n'
+ file.write(text_format % (0,'Defect','Date','Author','Comment'))
+
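+ # One row per defect-history entry that falls inside the requested date window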
+ for i,dh in enumerate(DefectHistory.objects.filter(date__gte=date_start,date__lte=date_stop).order_by('defect__name')):
+ if 'csv' == format:
+ writer.writerow([i+1,dh.defect.name,dh.date.strftime('%Y-%m-%d'),dh.author,dh.comment])
+ if 'txt' == format:
+ file.write(text_format % (i+1,dh.defect.name,dh.date.strftime('%Y-%m-%d'),dh.author,dh.comment))
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+
class DefaultReport(Report):
"""Report for the Default Page"""
@@ -1843,11 +2173,19 @@ class ReportManager():
elif 'update-published' == parent_page:
return PublishPendingCveReport(parent_page, *args, **kwargs)
+ elif 'publish' == parent_page:
+ return PublishListReport(parent_page, *args, **kwargs)
+ elif 'publish-list' == parent_page:
+ return PublishListReport(parent_page, *args, **kwargs)
+
elif 'package-filters' == parent_page:
return PackageFiltersReport(parent_page, *args, **kwargs)
elif 'cpes_srtool' == parent_page:
return CpesSrtoolReport(parent_page, *args, **kwargs)
+ elif 'history_defect' == parent_page:
+ return HistoryDefectReport(parent_page, *args, **kwargs)
+
else:
return DefaultReport(parent_page, *args, **kwargs)
diff --git a/lib/srtgui/tables.py b/lib/srtgui/tables.py
index 44d02a11..e80ee71e 100644
--- a/lib/srtgui/tables.py
+++ b/lib/srtgui/tables.py
@@ -19,12 +19,17 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import re
+import json
+
from srtgui.widgets import ToasterTable
from orm.models import Cve, Vulnerability, Investigation, CweTable, Product
from orm.models import Package
-from orm.models import CpeTable, CpeFilter, Defect, DataSource
+from orm.models import CpeTable, CpeFilter, Defect, DataSource, SrtSetting
from orm.models import PublishPending
from orm.models import Notify, NotifyCategories
+from orm.models import CveHistory, VulnerabilityHistory, InvestigationHistory, DefectHistory
+from orm.models import PublishSet
from users.models import UserSafe
from django.db.models import Q
@@ -32,8 +37,6 @@ from django.db.models import Q
from srtgui.tablefilter import TableFilter
from srtgui.tablefilter import TableFilterActionToggle
-import re
-
# quick development/debugging support
from srtgui.api import _log
@@ -159,24 +162,6 @@ class CvesTable(ToasterTable):
hidden=True,
)
- self.add_column(title="Data Type",
- field_name="cve_data_type",
- hideable=True,
- hidden=True,
- )
-
- self.add_column(title="Data Format",
- field_name="cve_data_format",
- hideable=True,
- hidden=True,
- )
-
- self.add_column(title="Data Version",
- field_name="cve_data_version",
- hideable=True,
- hidden=True,
- )
-
self.add_column(title="Description",
field_name="description",
hideable=False,
@@ -205,23 +190,23 @@ class CvesTable(ToasterTable):
static_data_template=priority_v2_template,
)
- self.add_column(title="Packages",
+ self.add_column(title="Affected Components",
field_name="packages",
- hideable=True,
- hidden=True,
+ hideable=False,
+ hidden=False,
)
self.add_column(title="Published",
help_text="Initial publish date of the CVE",
hideable=False,
- #orderable=True,
+ orderable=True,
field_name="publishedDate",
)
self.add_column(title="Modified",
help_text="Last modification date of the CVE",
hideable=True,
- #orderable=True,
+ orderable=True,
field_name="lastModifiedDate",
)
@@ -237,6 +222,12 @@ class CvesTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
self.add_column(title="Publish Request",
help_text="SRT Publish Request State",
hideable=True,
@@ -289,7 +280,7 @@ class CvesTable(ToasterTable):
orderable=True,
field_name="srt_updated",
static_data_name="srt_updated",
- static_data_template='{{data.srt_updated | date:"m/d/y H:i"}}'
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
)
source_count_template = '''
@@ -315,6 +306,9 @@ class SelectCveTable(ToasterTable):
context = super(SelectCveTable, self).get_context_data(**kwargs)
context['products'] = Product.objects.all()
context['components'] = Defect.Components
+ context['doesnotimpact_text'] = SrtSetting.get_setting('SRTOOL_DEFECT_DOESNOTIMPACT',"It doesn't impact the product")
+
return context
def apply_row_customization(self, row):
@@ -562,39 +556,76 @@ class DefectsTable(ToasterTable):
def setup_filters(self, *args, **kwargs):
- # Priority filter
- is_priority = TableFilter(name="is_priority",
+ # Defect Priority filter
+ is_defect_priority = TableFilter(name="is_defect_priority",
title="Filter defects by 'Priority'")
- for priority in range(len(Defect.Priority)):
- is_priority.add_action(TableFilterActionToggle(
- Defect.Priority[priority][1].lower().replace(' ','_'),
- Defect.Priority[priority][1],
- Q(priority=Defect.Priority[priority][0]))
+ for priority in range(len(Defect.DEFECT_PRIORITY)):
+ if Defect.DEFECT_PRIORITY_ERROR == Defect.DEFECT_PRIORITY[priority][0]:
+ continue
+ is_defect_priority.add_action(TableFilterActionToggle(
+ Defect.DEFECT_PRIORITY[priority][1].lower().replace(' ','_'),
+ Defect.DEFECT_PRIORITY[priority][1],
+ Q(priority=Defect.DEFECT_PRIORITY[priority][0]))
)
- self.add_filter(is_priority)
-
- # Status filter
- is_status = TableFilter(name="is_status",
- title="Filter defects by 'Status'")
- for status in range(len(Defect.Status)):
- is_status.add_action(TableFilterActionToggle(
- Defect.Status[status][1].lower().replace(' ','_'),
- Defect.Status[status][1],
- Q(status=Defect.Status[status][0]))
+ self.add_filter(is_defect_priority)
+
+ # Defect Status filter
+ is_defect_status = TableFilter(name="is_defect_status",
+ title="Filter defects by defect 'Status'")
+ for status in range(len(Defect.DEFECT_STATUS)):
+ is_defect_status.add_action(TableFilterActionToggle(
+ Defect.DEFECT_STATUS[status][1].lower().replace(' ','_'),
+ Defect.DEFECT_STATUS[status][1],
+ Q(status=Defect.DEFECT_STATUS[status][0]))
)
- self.add_filter(is_status)
+ self.add_filter(is_defect_status)
# Resolution filter
is_resolution = TableFilter(name="is_resolution",
title="Filter defects by 'Resolution'")
- for resolution in range(len(Defect.Resolution)):
+ for resolution in range(len(Defect.DEFECT_RESOLUTION)):
is_resolution.add_action(TableFilterActionToggle(
- Defect.Resolution[resolution][1].lower().replace(' ','_'),
- Defect.Resolution[resolution][1],
- Q(resolution=Defect.Resolution[resolution][0]))
+ Defect.DEFECT_RESOLUTION[resolution][1].lower().replace(' ','_'),
+ Defect.DEFECT_RESOLUTION[resolution][1],
+ Q(resolution=Defect.DEFECT_RESOLUTION[resolution][0]))
)
self.add_filter(is_resolution)
+ # SRT Priority filter
+ is_srt_priority = TableFilter(name="is_srt_priority",
+ title="Filter defects by 'Priority'")
+ for priority in range(len(Defect.SRT_PRIORITY)):
+ if Defect.PRIORITY_ERROR == Defect.SRT_PRIORITY[priority][0]:
+ continue
+ is_srt_priority.add_action(TableFilterActionToggle(
+ Defect.SRT_PRIORITY[priority][1].lower().replace(' ','_'),
+ Defect.SRT_PRIORITY[priority][1],
+ Q(priority=Defect.SRT_PRIORITY[priority][0]))
+ )
+ self.add_filter(is_srt_priority)
+
+ # SRTool Status filter
+ is_srt_status = TableFilter(name="is_srt_status",
+ title="Filter defects by 'Status'")
+ for status in range(len(Defect.SRT_STATUS)):
+ is_srt_status.add_action(TableFilterActionToggle(
+ Defect.SRT_STATUS[status][1].lower().replace(' ','_'),
+ Defect.SRT_STATUS[status][1],
+ Q(status=Defect.SRT_STATUS[status][0]))
+ )
+ self.add_filter(is_srt_status)
+
+ # SRTool Outcome filter
+ is_srt_outcome = TableFilter(name="is_srt_outcome",
+ title="Filter defects by 'Outcome'")
+ for status in range(len(Defect.SRT_OUTCOME)):
+ is_srt_outcome.add_action(TableFilterActionToggle(
+ Defect.SRT_OUTCOME[status][1].lower().replace(' ','_'),
+ Defect.SRT_OUTCOME[status][1],
+ Q(outcome=Defect.SRT_OUTCOME[status][0]))
+ )
+ self.add_filter(is_srt_outcome)
+
# Product filter
#(name="Wind River Linux",version="LTS-17")
is_product = TableFilter(name="is_product",
@@ -631,31 +662,58 @@ class DefectsTable(ToasterTable):
field_name="summary",
)
- self.add_column(title="Priority",
+ self.add_column(title="Defect Priority",
hideable=False,
- field_name="priority",
orderable=True,
- filter_name="is_priority",
- static_data_name="priority",
- static_data_template='{{data.get_priority_text}}',
+ filter_name="is_defect_priority",
+ static_data_name="defect_priority",
+ static_data_template='{{data.get_defect_priority_text}}',
)
- self.add_column(title="Status",
+ self.add_column(title="Defect Status",
hideable=False,
- field_name="status",
orderable=True,
- filter_name="is_status",
- static_data_name="status",
- static_data_template='{{data.get_status_text}}',
+ filter_name="is_defect_status",
+ static_data_name="defect_status",
+ static_data_template='{{data.get_defect_status_text}}',
)
- self.add_column(title="Resolution",
+ self.add_column(title="Defect Resolution",
hideable=False,
- field_name="resolution",
orderable=True,
filter_name="is_resolution",
- static_data_name="resolution",
- static_data_template='{{data.get_resolution_text}}',
+ static_data_name="defect_resolution",
+ static_data_template='{{data.get_defect_resolution_text}}',
+ )
+
+ self.add_column(title="SRT Priority",
+ hideable=False,
+ orderable=True,
+ filter_name="is_srt_priority",
+ static_data_name="srt_priority",
+ static_data_template='{{data.get_priority_text}}',
+ )
+
+ self.add_column(title="SRT Status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_srt_status",
+ static_data_name="srt_status",
+ static_data_template='{{data.get_status_text}}',
+ )
+
+ self.add_column(title="Duplicate Of",
+ hideable=True,
+ hidden=True,
+ field_name="duplicate_of",
+ )
+
+ self.add_column(title="Outcome",
+ hideable=True,
+ hidden=True,
+ filter_name="is_srt_outcome",
+ static_data_name="srt_outcome",
+ static_data_template='{{data.get_outcome_text}}',
)
self.add_column(title="Release Version",
@@ -712,6 +770,23 @@ class DefectsTable(ToasterTable):
static_data_template=product_link_template,
)
+ self.add_column(title="Defect Created",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="date_created",
+ static_data_name="date_created",
+ static_data_template='{{data.date_created}}'
+ )
+ self.add_column(title="Defect Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="date_updated",
+ static_data_name="date_updated",
+ static_data_template='{{data.date_updated}}'
+ )
+
self.add_column(title="SRT Update",
hideable=True,
hidden=True,
@@ -726,7 +801,7 @@ class CwesTable(ToasterTable):
def __init__(self, *args, **kwargs):
super(CwesTable, self).__init__(*args, **kwargs)
- self.default_orderby = "name_sort"
+ self.default_orderby = "name"
def get_context_data(self, **kwargs):
context = super(CwesTable, self).get_context_data(**kwargs)
@@ -1162,13 +1237,17 @@ class ProductsTable(ToasterTable):
)
self.add_column(title="Defect Tags",
- field_name="defect_tags",
- hideable=False,
+ hideable=True,
+ hidden=True,
+ static_data_name="defect_tags",
+ static_data_template='{{data.get_defect_str}}',
)
self.add_column(title="Product Tags",
- field_name="product_tags",
- hideable=False,
+ hideable=True,
+ hidden=True,
+ static_data_name="product_tags",
+ static_data_template='{{data.get_product_str}}',
)
@@ -1262,6 +1341,10 @@ class VulnerabilitiesTable(ToasterTable):
# Priority filter
is_priority = TableFilter(name="is_priority",
title="Filter Vulnerabilities by 'Priority'")
+ exec_is_undefined = TableFilterActionToggle(
+ "undefined",
+ "Undefined",
+ Q(priority=Vulnerability.UNDEFINED))
exec_is_low = TableFilterActionToggle(
"low",
"Low",
@@ -1274,9 +1357,15 @@ class VulnerabilitiesTable(ToasterTable):
"high",
"High",
Q(priority=Vulnerability.HIGH))
+ exec_is_critical = TableFilterActionToggle(
+ "critical",
+ "Critical",
+ Q(priority=Vulnerability.CRITICAL))
+ is_priority.add_action(exec_is_undefined)
is_priority.add_action(exec_is_low)
is_priority.add_action(exec_is_medium)
is_priority.add_action(exec_is_high)
+ is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
def setup_queryset(self, *args, **kwargs):
@@ -1356,6 +1445,12 @@ class VulnerabilitiesTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
investigate_link_template = '''
{% for investigation in data.vulnerability_investigation.all %}
{% if not forloop.first %} {% endif %}<a href="{% url 'investigation' investigation.name %}" target="_blank">{{investigation.name}}</a>
@@ -1397,6 +1492,16 @@ class VulnerabilitiesTable(ToasterTable):
hidden=False,
)
+ self.add_column(title="SRT Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="srt_updated",
+ static_data_name="srt_updated",
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
+ )
+
+
class InvestigationsTable(ToasterTable):
"""Table of All Investigations in SRTool"""
@@ -1458,23 +1563,34 @@ class InvestigationsTable(ToasterTable):
# Priority filter
is_priority = TableFilter(name="is_priority",
title="Filter Investigations by 'Priority'")
+ exec_is_undefined = TableFilterActionToggle(
+ "undefined",
+ "Undefined",
+ Q(priority=Vulnerability.UNDEFINED))
exec_is_low = TableFilterActionToggle(
"low",
"Low",
- Q(priority=Investigation.LOW))
+ Q(priority=Vulnerability.LOW))
exec_is_medium = TableFilterActionToggle(
"medium",
"Medium",
- Q(priority=Investigation.MEDIUM))
+ Q(priority=Vulnerability.MEDIUM))
exec_is_high = TableFilterActionToggle(
"high",
"High",
- Q(priority=Investigation.HIGH))
+ Q(priority=Vulnerability.HIGH))
+ exec_is_critical = TableFilterActionToggle(
+ "critical",
+ "Critical",
+ Q(priority=Vulnerability.CRITICAL))
+ is_priority.add_action(exec_is_undefined)
is_priority.add_action(exec_is_low)
is_priority.add_action(exec_is_medium)
is_priority.add_action(exec_is_high)
+ is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
+
# Product filter
is_product = TableFilter(name="is_product",
title="Filter Investigations by 'Product'")
@@ -1572,6 +1688,12 @@ class InvestigationsTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
self.add_column(title="Vulnerability",
hidden=False,
orderable=False,
@@ -1587,6 +1709,16 @@ class InvestigationsTable(ToasterTable):
static_data_template="<a href=\"{% url 'product' data.product.id %}\">{{data.product.long_name}}</a>",
)
+ self.add_column(title="SRT Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="srt_updated",
+ static_data_name="srt_updated",
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
+ )
+
+
class SourcesTable(ToasterTable):
"""Table of All Data Sources in SRTool"""
@@ -2193,3 +2325,538 @@ class PackageFilterDetailTable(ToasterTable):
static_data_name="defects",
static_data_template=defect_link_template,
)
+
+class HistoryCveTable(ToasterTable):
+ """Table of History Cves Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryCveTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryCveTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = CveHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Cve",
+ hideable=False,
+ orderable=True,
+ static_data_name="name",
+ static_data_template='''<a href="{% url 'cve' data.cve.name %}">{{data.cve.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryVulnerabilityTable(ToasterTable):
+ """Table of History Vulnerability Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryVulnerabilityTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryVulnerabilityTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = VulnerabilityHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Vulnerability",
+ hideable=False,
+ orderable=True,
+ static_data_name="vulnerability",
+ static_data_template='''<a href="{% url 'vulnerability' data.vulnerability.name %}">{{data.vulnerability.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryInvestigationTable(ToasterTable):
+ """Table of History Investigation Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryInvestigationTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryInvestigationTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = InvestigationHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Investigation",
+ hideable=False,
+ orderable=True,
+ static_data_name="investigation",
+ static_data_template='''<a href="{% url 'investigation' data.investigation.name %}">{{data.investigation.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryDefectTable(ToasterTable):
+ """Table of History Defect Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryDefectTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryDefectTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = DefectHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Defect",
+ hideable=False,
+ orderable=True,
+ static_data_name="defect",
+ static_data_template='''<a href="{% url 'defect_name' data.defect.name %}">{{data.defect.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+
+class PublishListTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishListTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "cve"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishListTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter CVE's by 'Status'")
+ for status in range(len(Cve.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Cve.STATUS[status][1].lower().replace(' ','_'),
+ Cve.STATUS[status][1],
+ Q(cve__status=Cve.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ # Is State filter
+ is_state = TableFilter(name="is_state",
+ title="Filter items by 'State'")
+ for state in range(len(PublishSet.PUBLISH_SET_STATE)):
+ if PublishSet.PUBLISH_SET_ERROR == PublishSet.PUBLISH_SET_STATE[state][0]:
+ continue
+ is_state.add_action(TableFilterActionToggle(
+ PublishSet.PUBLISH_SET_STATE[state][1].lower().replace(' ','_'),
+ PublishSet.PUBLISH_SET_STATE[state][1],
+ Q(state=PublishSet.PUBLISH_SET_STATE[state][0]))
+ )
+ self.add_filter(is_state)
+
+#Record.objects.filter( Q(parameter__icontains="wd2") | ~Q(parameter__icontains="wd") )
+
+ # V3 filter
+ is_v3 = TableFilter(name="is_v3",title="Filter items by 'V3'")
+ exec_v3 = TableFilterActionToggle(
+ "v3",
+ "Severity_V3 change",
+ Q(reason__icontains="Severity_V3"))
+ is_v3.add_action(exec_v3)
+ self.add_filter(is_v3)
+
+ # V2 filter
+ is_v2 = TableFilter(name="is_v2",title="Filter items by 'V2'")
+ exec_v2 = TableFilterActionToggle(
+ "v2",
+ "Severity_V2 change",
+ Q(reason__icontains="Severity_V2"))
+ is_v2.add_action(exec_v2)
+ self.add_filter(is_v2)
+
+ # Product filters
+ # Gather the supported products
+ product_query = Product.objects.filter()
+ product_filter = []
+ for product in product_query:
+ if "support" == product.get_product_tag('mode').order_by('-order'):
+ product_filter.append(product.get_defect_tag('key'))
+ for product_key in product_filter:
+ is_filter = TableFilter(name="is_%s" % product_key,title="Filter CVE's by '%s'" % product_key)
+ is_filter.add_action(TableFilterActionToggle(
+ product_key.lower(),
+ product_key,
+ Q(reason__icontains=product_key))
+ )
+ self.add_filter(is_filter)
+
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = PublishSet.objects.all()
+
+ def apply_row_customization(self, row):
+ data = super(PublishListTable, self).apply_row_customization(row)
+ # data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
+ def get_key(key,dict):
+ if key in dict:
+ return(dict[key])
+ return ''
+ # {'Severity_V2': '["", "MEDIUM"]', 'Severity_V3': '["", "MEDIUM"]'}
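+ # 'reason' is a JSON dict of the fields that changed; recognized keys are copied
+ # into the matching dynamic columns (cvssV2/cvssV3 and the per-product columns)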
+ for i in range(len(data['rows'])):
+ reason = data['rows'][i]['reason']
+ if not reason:
+ continue
+ try:
+ # CvssV3
+ reason_dict = json.loads(reason)
+ cvssV3 = get_key('Severity_V3',reason_dict)
+ if cvssV3:
+ data['rows'][i]['cvssV3'] = "%s,%s" % (cvssV3[0],cvssV3[1])
+ # CvssV2
+ cvssV2 = get_key('Severity_V2',reason_dict)
+ if cvssV2:
+ data['rows'][i]['cvssV2'] = "%s,%s" % (cvssV2[0],cvssV2[1])
+ # Products
+ for product_key in ('LIN5','CGP5','SCP5','OVP','LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018'):
+ product_col = get_key(product_key,reason_dict)
+ if product_col:
+ data['rows'][i][product_key] = "%s" % (product_col)
+ except Exception as e:
+ continue
+ return data
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.cve.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ filter_name="is_state",
+ static_data_name="state",
+ static_data_template='''{{data.state_text}}''',
+ )
+
+ self.add_column(title="CVE",
+ field_name="cve__name",
+ hideable=False,
+ orderable=True,
+ static_data_name="cve__name",
+ static_data_template='''<a href="{% url 'cve' data.cve.name %}">{{data.cve.name}}</a>''',
+ )
+
+ self.add_column(title="CVE Published",
+ field_name="cve__publishedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Modified",
+ field_name="cve__lastModifiedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Status",
+ field_name="cve_status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="cve_status",
+ static_data_template='''{{data.cve.get_status_text}}''',
+ )
+
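+ # The cvssV2/cvssV3 and per-product columns use empty templates; their cell
+ # values are filled in by apply_row_customization() above from the 'reason' JSON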
+ self.add_column(title="cvssV3",
+ hideable=True,
+ hidden=False,
+ filter_name="is_v3",
+ static_data_name="cvssV3",
+ static_data_template='',
+ )
+
+ self.add_column(title="cvssV2",
+ hideable=True,
+ hidden=False,
+ filter_name="is_v2",
+ static_data_name="cvssV2",
+ static_data_template='',
+ )
+
+ self.add_column(title="CVE Description",
+ field_name="cve__description",
+ hideable=False,
+ orderable=False,
+ )
+
+ # Product columns
+ for product_key in ('LIN5','CGP5','SCP5','OVP','LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018'):
+ self.add_column(title=product_key,
+# hideable=True,
+# hidden=True,
+ filter_name="is_%s" % product_key,
+ static_data_name=product_key,
+ static_data_template='',
+ )
+
+ self.add_column(title="CVE Acknowledge",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ static_data_name="cve_acknowledge",
+ static_data_template='''{{data.cve.acknowledge_date|date:'Y-m-d'}}''',
+ )
+
+ self.add_column(title="Public Comments",
+ hideable=False,
+ orderable=False,
+ static_data_name="public_comments",
+ static_data_template='''{{data.cve.get_public_comments}}''',
+ )
+
+ self.add_column(title="reason",
+ field_name="reason",
+ hideable=False,
+ orderable=False,
+ )
+
+
+class PublishCveTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishCveTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-status"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishCveTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter CVE's by 'Status'")
+ for status in range(len(Cve.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Cve.STATUS[status][1].lower().replace(' ','_'),
+ Cve.STATUS[status][1],
+ Q(status=Cve.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = Cve.objects.all()
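+ # Limit the publish list to CVEs that have been triaged (skip new/reserved and historical)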
+ exclude_list = [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED] # CVE.NEW
+ self.queryset = self.queryset.exclude(status__in=exclude_list)
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ static_data_name="state",
+ static_data_template='''{{data.get_publishset_state}}''',
+ )
+
+ self.add_column(title="Name",
+ field_name="name",
+ hideable=False,
+ orderable=True,
+ static_data_name="cve_name",
+ static_data_template='''<a href="{% url 'cve' data.name %}">{{data.name}}</a>''',
+ )
+
+ self.add_column(title="CVE Published",
+ field_name="publishedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Modified",
+ field_name="lastModifiedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Status",
+ field_name="status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="cve_status",
+ static_data_template='''{{data.get_status_text}}''',
+ )
+
+ self.add_column(title="CVE Description",
+ field_name="description",
+ hideable=False,
+ orderable=False,
+ )
+
+ self.add_column(title="CVE Acknowledge",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ static_data_name="cve_acknowledge",
+ static_data_template='''{{data.acknowledge_date|date:'Y-m-d'}}''',
+ )
+
+
+class PublishDefectTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishDefectTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date_updated"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishDefectTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter CVE's by 'Status'")
+ for status in range(len(Defect.DEFECT_STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Defect.DEFECT_STATUS[status][1].lower().replace(' ','_'),
+ Defect.DEFECT_STATUS[status][1],
+ Q(status=Defect.DEFECT_STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = Defect.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ static_data_name="state",
+ static_data_template='''{{data.get_publishset_state}}''',
+ )
+
+ self.add_column(title="Name",
+ hideable=False,
+ orderable=True,
+ static_data_name="name",
+ static_data_template='''<a href="{% url 'defect_name' data.name %}">{{data.name}}</a>''',
+ )
+
+ self.add_column(title="Created",
+ hideable=False,
+ orderable=True,
+ static_data_name="date_created",
+ static_data_template='''{{data.get_date_created_text}}''',
+ )
+
+ self.add_column(title="Modified",
+ hideable=False,
+ orderable=True,
+ static_data_name="date_updated",
+ static_data_template='''{{data.get_date_updated_text}}''',
+ )
+
+ self.add_column(title="Status",
+ field_name="status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="status",
+ static_data_template='''{{data.get_defect_status_text}}''',
+ )
+
+ self.add_column(title="Summary",
+ field_name="summary",
+ hideable=False,
+ orderable=False,
+ )
+
+ self.add_column(title="Release Version",
+ field_name="release_version",
+ orderable=True,
+ )
+
+ self.add_column(title="CVE List",
+ field_name="get_cve_names",
+ hideable=False,
+ orderable=False,
+ )
+
diff --git a/lib/srtgui/templates/base.html b/lib/srtgui/templates/base.html
index f8b43194..623dfdd8 100644
--- a/lib/srtgui/templates/base.html
+++ b/lib/srtgui/templates/base.html
@@ -226,9 +226,9 @@ window.onclick = function(event) {
<li id="navbar-export">
{% if request.resolver_match.url_name == 'landing' %}
- <a href="{% url 'report' request.resolver_match.url_name %}"><i class="glyphicon glyphicon-tasks"></i> Export</a>
+ <a href="{% url 'report' request.resolver_match.url_name %}" target="_blank"><i class="glyphicon glyphicon-tasks"></i> Export</a>
{% else %}
- <a id="report_link" href="{% url 'report' request.resolver_match.url_name %}"><i class="glyphicon glyphicon-tasks"></i> Export</a>
+ <a id="report_link" href="{% url 'report' request.resolver_match.url_name %}" target="_blank"><i class="glyphicon glyphicon-tasks"></i> Export</a>
{% endif %}
</li>
diff --git a/lib/srtgui/templates/basetable_top.html b/lib/srtgui/templates/basetable_top.html
index 5a9076d2..ce478c05 100644
--- a/lib/srtgui/templates/basetable_top.html
+++ b/lib/srtgui/templates/basetable_top.html
@@ -140,7 +140,7 @@
if ( !editColTimer ) {
//
- // we don't have a timer active so set one up
+ // we do not have a timer active so set one up
// and clear the action list
//
@@ -173,8 +173,9 @@
<form class="navbar-form navbar-left" id="searchform">
<div class="form-group">
<div class="btn-group">
- <input class="form-control" id="search" name="search" type="text" placeholder="Search {%if object_search_display %}{{object_search_display}}{%else%}{{objectname}}{%endif%}" value="{%if request.GET.search %}{{request.GET.search}}{% endif %}"/>
+ <input class="form-control" id="search" name="search" type="text" placeholder="Search! {%if object_search_display %}{{object_search_display}}{%else%}{{objectname}}{%endif%}" value="{%if request.GET.search %}{{request.GET.search}}{% endif %}"/>
{% if request.GET.search %}<a href="javascript:$('#search').val('');searchform.submit()" tabindex="-1"><span class="remove-search-btn-variables glyphicon glyphicon-remove-circle"></span></a>{%endif%}
+ <span class="glyphicon glyphicon-question-sign get-help" title="Default is an 'and' search; use 'OR' keyword to 'or' the terms"></span>
</div>
</div>
<input type="hidden" name="orderby" value="{{request.GET.orderby}}">
diff --git a/lib/srtgui/templates/cve.html b/lib/srtgui/templates/cve.html
index ecbcf39e..c3cfcac5 100644
--- a/lib/srtgui/templates/cve.html
+++ b/lib/srtgui/templates/cve.html
@@ -40,6 +40,7 @@
{% else %}
<span style="padding-left:30px;"><button id="select-cveedit" class="btn btn-default" type="button">Edit CVE Data ...</button></span>
{% endif %}
+ <span style="padding-left:30px;"><button id="submit-delete-cve" class="btn btn-default" type="button">Delete CVE</button></span>
{% endif %}
</span>
{% if not is_edit %}
@@ -106,8 +107,8 @@
</tr>
</thead>
- {% if cve_list_table.1.0.cve_history.all %}
- {% for c in cve_list_table.1.0.cve_history.all %}
+ {% if object.cve_history.all %}
+ {% for c in object.cve_history.all %}
<tr>
<td>{{ c.comment }}</td>
<td>{{ c.date }}</td>
@@ -123,6 +124,7 @@
</div>
<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
<script>
var selected_quickedit=false;
@@ -149,7 +151,9 @@
}
// reload the page with the updated tables
- if (('new_name' in data) && ("" != data.new_name)) {
+ if (('new_name' in data) && (0 == data.new_name.indexOf("url:"))) {
+ window.location.replace(data.new_name.replace("url:",""));
+ } else if (('new_name' in data) && ("" != data.new_name)) {
var new_url = "{% url 'cve' object.name %}".replace("{{object.name}}",data.new_name);
window.location.replace(new_url);
} else {
@@ -199,18 +203,24 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim();
var private_note=$('#text-private-note').val().trim();
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
var publish_state=$('#select-publish-state').val();
var publish_date=$('#select-publish-date').val();
+ var acknowledge_date=$('#text-acknowledge-date').val();
+ var affected_components=$('#text-affected-components').val();
postCommitAjaxRequest({
"action" : 'submit-quickedit',
"priority" : priority,
"status" : status,
"note" : note,
"private_note" : private_note,
+ "tags" : tags,
"publish_state" : publish_state,
"publish_date" : publish_date,
+ "acknowledge_date" : acknowledge_date,
+ "affected_components" : affected_components
});
});
@@ -292,6 +302,15 @@
});
});
+ $("#submit-delete-cve").click(function(){
+ var result = confirm("Are you sure you want to permanently delete '{{object.name}}' and all its related records?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-delete-cve'
+ });
+ }
+ });
+
/* Set the report link */
diff --git a/lib/srtgui/templates/cves-select-toastertable.html b/lib/srtgui/templates/cves-select-toastertable.html
index 38828359..d29a2b92 100644
--- a/lib/srtgui/templates/cves-select-toastertable.html
+++ b/lib/srtgui/templates/cves-select-toastertable.html
@@ -1,4 +1,7 @@
{% extends 'base.html' %}
+{% load projecttags %}
+{% load humanize %}
+
{% load static %}
@@ -29,7 +32,7 @@
/* Define the columns that floats next to each other */
.column1 {
float: left;
- width: 280px;
+ width: 380px;
padding: 10px;
}
.column2 {
@@ -48,7 +51,7 @@
{% endblock %}
-{% block title %} Select CVE's - SRTool {% endblock %}
+{% block title %} Select CVEs - SRTool {% endblock %}
{% block pagecontent %}
@@ -58,8 +61,8 @@
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
<li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li><a href="{% url 'triage_cves' %}">Triage CVE's</a></li><span class="divider">&rarr;</span>
- <li>Select CVE's</li>
+ <li><a href="{% url 'triage_cves' %}">Triage CVEs</a></li><span class="divider">&rarr;</span>
+ <li>Select CVEs</li>
</ul>
</div>
</div>
@@ -79,17 +82,18 @@
</div>
-->
- <div id="details-isvulnerable" style="display:none;">
+ <div id="details-isvulnerable-investigate" style="display:none;">
<p><p>
- <button class="execute" id="submit-isvulnerable"> Submit Vulnerable CVE's </button>
+ <button class="execute" id="submit-isvulnerable-investigate"> Submit Vulnerable CVEs </button>
<div class="row">
<div class="column1">
<p><b><label id="products_count">Optional: Add Vulnerable Products (0):</label></b>
- <div id="all-products" class="scrolling" style="width: 250px;">
- {% for product in products %}
+ <div id="all-products" class="scrolling" style="width: 350px;">
+ {% for product in products|dictsort:"order" %}
<div class="checkbox">
<label>
<input class="checkbox-products" name="{{product.pk}}" type="checkbox">{{product.long_name}}
+ &nbsp;({{product.defect_tags|get_strdict_value:"found_version"}})
</label>
<p>
</div>
@@ -100,10 +104,13 @@
<p><b><label id="priority">Set Priority:</label></b>
<div id="priority-list" class="scrolling" style="width: 120px;">
<div class="checkbox"> <label>
+ <input type="radio" name="priority" value="99" type="checkbox" checked="yes"> Auto <span class="glyphicon glyphicon-question-sign get-help" title="CVSSv3, else CVSSv2, else Medium"></span>
+ </label><p></div>
+ <div class="checkbox"> <label>
<input type="radio" name="priority" value="4" type="checkbox"> High
</label><p></div>
<div class="checkbox"> <label>
- <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> Medium
+ <input type="radio" name="priority" value="3" type="checkbox"> Medium
</label><p></div>
<div class="checkbox"> <label>
<input type="radio" name="priority" value="2" type="checkbox"> Low
@@ -127,11 +134,23 @@
</div>
<p><input id="create_defects" type="checkbox"> Create Defect(s) </input>
- Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. Defect summary: "Security Advisory [- REASON -] CVE-2020-1234")
&nbsp;&nbsp;<input id="create_notifications" type="checkbox" checked>&nbsp;Create Notifications</input>
<p><b><big>Reason: </big></b>
<input type="text" id="input-isvulnerable-reason" name="reason" size="40">&nbsp;&nbsp;<input id="markPublishIs" type="checkbox">&nbsp;Mark for Publish</input>&nbsp;&nbsp;<input id="markFor" type="checkbox"> Add Keywords to 'For' </input>
+ <p><b><big>Affected Components: </big></b>
+ <input type="text" id="input-isvulnerable-components" name="components" size="40"> (e.g. space-separated list of packages, recipes, sub-system list, applications, )
+
+ <div id="published-date-list">
+ <p><i>Acknowledge Date</i> =
+ <select name="Acknowledge_Date" id="select-acknowledge-date">
+ <option value="today" selected>Today</option>
+ <option value="publish">CVE's original release date</option>
+ <option value="update">CVE's last revised date</option>
+ <option value="no_change">No change</option>
+ </select>
+ </div>
<div id="group_vulnerability" style="border: 1px solid #dddddd; padding: 0 5px; width: 400px; margin-bottom: 10px; margin-top: 0px; " >
<div class="checkbox"> <label>
@@ -151,15 +170,15 @@
<div id="details-notvulnerable" style="display:none;">
<p><p>
- <button class="execute" id="submit-notvulnerable"> Submit Not-vulnerable CVE's </button>
- <input id="notship" type="checkbox"> "We do not ship ..." </input>
+ <button class="execute" id="submit-notvulnerable"> Submit Not-vulnerable CVEs </button>
+ <input id="notship" type="checkbox"> "{{doesnotimpact_text|safe}}" </input>
<p><b><big>Reason: </big></b>
<p><input type="text" id="input-notvulnerable-reason" name="reason" size="40">&nbsp;&nbsp;<input id="markPublishNot" type="checkbox">&nbsp;Mark for Publish</input>&nbsp;&nbsp;<input id="markAgainst" type="checkbox"> Add Keywords to 'Against' </input>
</div>
<div id="details-investigate" style="display:none;">
<p><p>
- <button class="execute" id="submit-investigate"> Submit Investigation CVE's </button>
+ <button class="execute" id="submit-investigate"> Submit Investigation CVEs </button>
</div>
<div id="details-other" style="display:none;">
@@ -251,13 +270,11 @@
}
document.getElementById("unselect-these").innerText = "Un-select "+cve_checked_count+" checked";
if (0 == cve_checked_count) {
- //$("#submit-isvulnerable").attr("disabled","disabled");
document.getElementById("submit-notvulnerable").disabled = true;
- document.getElementById("submit-isvulnerable").disabled = true;
+ document.getElementById("submit-isvulnerable-investigate").disabled = true;
} else {
- //$("#submit-isvulnerable").removeAttr("disabled");
document.getElementById("submit-notvulnerable").disabled = false;
- document.getElementById("submit-isvulnerable").disabled = false;
+ document.getElementById("submit-isvulnerable-investigate").disabled = false;
}
}
@@ -282,19 +299,20 @@
$("#select-these").removeAttr("disabled");
$("#unselect-these").removeAttr("disabled");
$("#select-notvulnerable").removeAttr("disabled");
- $("#select-investigate").removeAttr("disabled");
$("#select-other").removeAttr("disabled");
- $("#details-isvulnerable").slideUp();
+ $("#details-isvulnerable-investigate").slideUp();
} else {
selected_isvulnerable=true;
$("#select-these").attr("disabled","disabled");
$("#unselect-these").attr("disabled","disabled");
$("#select-notvulnerable").attr("disabled","disabled");
- $("#select-investigate").attr("disabled","disabled");
$("#select-other").attr("disabled","disabled");
- $("#input-isvulnerable-reason").val($("#search-input-selectcvetable").val())
+ /* preset reason/components from search */
+ /* $("#input-isvulnerable-reason").val($("#search-input-selectcvetable").val()) */
+ /* $("#input-isvulnerable-components").val($("#search-input-selectcvetable").val()) */
update_vulnerable_status();
- $("#details-isvulnerable").slideDown();
+ document.getElementById("submit-isvulnerable-investigate").innerText = " Submit Vulnerable CVEs ";
+ $("#details-isvulnerable-investigate").slideDown();
}
});
@@ -330,7 +348,7 @@
$("#select-isvulnerable").removeAttr("disabled");
$("#select-notvulnerable").removeAttr("disabled");
$("#select-other").removeAttr("disabled");
- $("#details-investigate").slideUp();
+ $("#details-isvulnerable-investigate").slideUp();
} else {
selected_investigate=true;
$("#select-these").attr("disabled","disabled");
@@ -339,7 +357,8 @@
$("#select-notvulnerable").attr("disabled","disabled");
$("#select-other").attr("disabled","disabled");
update_vulnerable_status();
- $("#details-investigate").slideDown();
+ document.getElementById("submit-isvulnerable-investigate").innerText = " Submit Investigation CVEs ";
+ $("#details-isvulnerable-investigate").slideDown();
}
});
@@ -367,19 +386,19 @@
$('#notship').click(function(){
- not_ship = "We do not ship '";
+ not_ship = "{{doesnotimpact_text|safe}}";
var reason = $("#input-notvulnerable-reason").val().trim();
reason = reason.replace(not_ship,'');
- if ("'" == reason.slice(-1)) {
- reason = reason.slice(0,-1)
- }
if ($('#notship').is(':checked')) {
- reason = not_ship + reason + "'";
+ reason = not_ship + reason;
}
$("#input-notvulnerable-reason").val(reason);
});
- $('#submit-isvulnerable').click(function(){
+ $('#submit-isvulnerable-investigate').click(function(){
+
+ /* Disable the button so that we do not get double clicks */
+ $("#submit-isvulnerable-investigate").attr("disabled","disabled");
var cve_list=[];
$('#selectcvetable input:checked').each(function(){
@@ -387,7 +406,7 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
var product_list=[];
@@ -409,9 +428,15 @@
$('#group_vulnerability input:checked').each(function(){
group_vulnerability = $(this).attr('value');
});
+ if (selected_isvulnerable) {
+ action = 'submit-isvulnerable';
+ } else {
+ action = 'submit-investigate';
+ }
postCommitAjaxRequest({
- "action" : 'submit-isvulnerable',
+ "action" : action,
"reason" : $("#input-isvulnerable-reason").val(),
+ "affected_components" : $("#input-isvulnerable-components").val(),
"defect_reason" : $("#input-defect-reason").val(),
"cves" : cve_list,
"products": product_list,
@@ -420,6 +445,7 @@
"pub" : $('#markPublishIs').is(':checked') ? "yes" : "no",
"for" : $('#markFor').is(':checked') ? "yes" : "no",
"mk_d" : $('#create_defects').is(':checked') ? "yes" : "no",
+ "acknowledge_date" : $('#select-acknowledge-date').val(),
"vul_group": group_vulnerability,
"vul_name": $("#vulnerability_name").val(),
"notify" : $('#create_notifications').is(':checked') ? "yes" : "no",
@@ -435,36 +461,20 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
+ reason = $("#input-notvulnerable-reason").val()
+ reason = reason.replace(/:$/, "");
postCommitAjaxRequest({
"action" : 'submit-notvulnerable',
- "reason" : $("#input-notvulnerable-reason").val(),
+ "reason" : reason,
"cves" : cve_list,
"pub" : $('#markPublishNot').is(':checked') ? "yes" : "no",
"against" : $('#markAgainst').is(':checked') ? "yes" : "no",
});
});
- $('#submit-investigate').click(function(){
- var cve_list=[];
- $('#selectcvetable input').each(function(){
- if ($(this).is(':checked')) {
- cve_list.push($(this).prop('name'));
- }
- });
- cve_list = cve_list.join(",");
- if ("" == cve_list) {
- alert("No CVE's were selected");
- return;
- }
- postCommitAjaxRequest({
- "action" : 'submit-investigate',
- "cves" : cve_list,
- });
- });
-
$('#submit-other').click(function(){
var cve_list=[];
$('#selectcvetable input').each(function(){
@@ -474,7 +484,7 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
var status=$('#select-status-state').val();
@@ -501,11 +511,11 @@
var titleElt = $("[data-role='page-title']");
tableElt.on("table-done", function (e, total, tableParams) {
- var title = "Triage CVE's";
+ var title = "Triage CVEs";
if (tableParams.search || tableParams.filter) {
if (total === 0) {
- title = "No CVE's found";
+ title = "No CVEs found";
}
else if (total > 0) {
title = total + " CVE" + (total > 1 ? "'s" : '') + " found";
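Note on the consolidated handler above: it disables the shared "#submit-isvulnerable-investigate" button as soon as it is clicked, but the early return taken when no CVEs are checked leaves the button disabled. A minimal sketch of one way to recover on that path, assuming the success path still reloads the page as the other handlers here do (the helper name is hypothetical):

    /* Hypothetical guard: if nothing is checked, warn, re-enable the shared
       submit button so the user can fix the selection, and tell the caller
       to return without posting. */
    function bailOutIfNoCves(cve_list) {
        if ("" == cve_list) {
            alert("No CVEs were selected");
            $("#submit-isvulnerable-investigate").removeAttr("disabled");
            return true;
        }
        return false;
    }

Inside the click handler this would replace the existing empty-list check with: if (bailOutIfNoCves(cve_list)) { return; }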
diff --git a/lib/srtgui/templates/defect.html b/lib/srtgui/templates/defect.html
index aa13b0dd..ed00fd90 100644
--- a/lib/srtgui/templates/defect.html
+++ b/lib/srtgui/templates/defect.html
@@ -38,14 +38,30 @@
<dt>URL:</dt>
<dd><a href="{{object.url}}" id="dataid_{{object.id}}" target="_blank">{{object.url}}</a></dd>
- <dt>Priority:</dt>
+ <dt>Defect Priority:</dt>
+ <dd>{{object.get_defect_priority_text}}</dd>
+
+ <dt>Defect Status:</dt>
+ <dd>{{object.get_defect_status_text}}</dd>
+
+ <dt>Defect Resolution:</dt>
+ <dd>{{object.get_defect_resolution_text}}</dd>
+
+ <dt>Duplicate Of:</dt>
+ <dd>
+ {% if object.duplicate_of %}
+ <a href="{% url 'defect_name' object.duplicate_of %}">{{object.duplicate_of}}</a>&nbsp;(<a href="{{SRTOOL_DEFECT_URLBASE}}/{{object.duplicate_of}}">{{SRTOOL_DEFECT_URLBASE}}/{{object.duplicate_of}}</a>)
+ {% endif %}
+ </dd>
+
+ <dt>SRTool Priority:</dt>
<dd>{{object.get_priority_text}}</dd>
- <dt>Status:</dt>
+ <dt>SRTool Status:</dt>
<dd>{{object.get_status_text}}</dd>
- <dt>Resolution:</dt>
- <dd>{{object.get_resolution_text}}</dd>
+ <dt>SRTool Outcome:</dt>
+ <dd>{{object.get_outcome_text}}</dd>
<dt>Publish:</dt>
<dd>{{object.publish}}</dd>
@@ -74,6 +90,36 @@
</div>
</div>
+<div class="row" style="padding-left: 25px;">
+ <h3>History</h3>
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Comment</th>
+ <th>Date</th>
+ <th>Author</th>
+ </tr>
+ </thead>
+ {% if object.defect_history.all %}
+ {% for c in object.defect_history.all %}
+ <tr>
+ <td>{{ c.comment }}</td>
+ <td>{{ c.date }}</td>
+ <td>{{ c.author }}</td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No history found</td>
+ </tr>
+ {% endif %}
+ </table>
+</div>
+
+<HR ALIGN="center" WIDTH="100%">
+Updated={{object.srt_updated}}
+
+
<!-- Javascript support -->
<script>
diff --git a/lib/srtgui/templates/detail_search_header.html b/lib/srtgui/templates/detail_search_header.html
index 7a986590..6c61996a 100644
--- a/lib/srtgui/templates/detail_search_header.html
+++ b/lib/srtgui/templates/detail_search_header.html
@@ -30,12 +30,13 @@ $(document).ready(function() {
<div class="form-group">
<div class="btn-group">
- <input id="search" class="form-control" type="text" placeholder="Search {{search_what}}" name="search" value="{% if request.GET.search %}{{request.GET.search}}{% endif %}">
+ <input id="search" class="form-control" type="text" placeholder="@Search {{search_what}}" name="search" value="{% if request.GET.search %}{{request.GET.search}}{% endif %}">
<input type="hidden" value="name:+" name="orderby">
<input type="hidden" value="l" name="page">
{% if request.GET.search %}
<span class="remove-search-btn-detail-search search-clear glyphicon glyphicon-remove-circle"></span>
{% endif %}
+ <span class="glyphicon glyphicon-question-sign get-help" title="Default is an 'and' search; use 'OR' keyword to 'or' the terms"></span>
</div>
</div>
<button type="submit" class="btn btn-default">Search</button>
diff --git a/lib/srtgui/templates/history-cve-toastertable.html b/lib/srtgui/templates/history-cve-toastertable.html
new file mode 100755
index 00000000..78319466
--- /dev/null
+++ b/lib/srtgui/templates/history-cve-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} CVE Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History CVE</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History CVE";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History CVE found";
+ }
+ else if (total > 0) {
+ title = total + " History CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
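The report-link loop above (repeated in the three history templates that follow) appends a comma after every ID, so the generated record_list query value ends with a trailing comma. A sketch of an equivalent loop that collects the IDs into an array and joins them instead, assuming the report view accepts the same comma-separated value:

    /* Collect the "dataid_" IDs first, then join, so record_list has no trailing comma. */
    var record_ids = [];
    $(".data > span").each(function(){
        var this_id = $(this).prop('id');
        if (this_id.startsWith("dataid_")) {
            record_ids.push(this_id.replace(/dataid_/, ""));
        }
    });
    $('#report_link').attr('href',
        "{% url 'report' request.resolver_match.url_name %}?record_list=" + record_ids.join(","));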
diff --git a/lib/srtgui/templates/history-defect-toastertable.html b/lib/srtgui/templates/history-defect-toastertable.html
new file mode 100755
index 00000000..63e9ea4d
--- /dev/null
+++ b/lib/srtgui/templates/history-defect-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Defect Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Defect</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Defect";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Defect found";
+ }
+ else if (total > 0) {
+ title = total + " History Defect" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/history-investigation-toastertable.html b/lib/srtgui/templates/history-investigation-toastertable.html
new file mode 100755
index 00000000..bde11ec7
--- /dev/null
+++ b/lib/srtgui/templates/history-investigation-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Investigation Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Investigation</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Investigation";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Investigation found";
+ }
+ else if (total > 0) {
+ title = total + " History Investigation" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/history-vulnerability-toastertable.html b/lib/srtgui/templates/history-vulnerability-toastertable.html
new file mode 100755
index 00000000..bc3b7881
--- /dev/null
+++ b/lib/srtgui/templates/history-vulnerability-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Vulnerability Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Vulnerability</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Vulnerability";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Vulnerability found";
+ }
+ else if (total > 0) {
+ title = total + " History Vulnerabilit" + (total > 1 ? 'ies' : 'y') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/investigation.html b/lib/srtgui/templates/investigation.html
index b662c5e1..f934d052 100644
--- a/lib/srtgui/templates/investigation.html
+++ b/lib/srtgui/templates/investigation.html
@@ -110,13 +110,16 @@
<p><b><label id="priority">Set Priority:</label></b>
<div id="priority-list" class="scrolling" style="width: 120px;">
<div class="checkbox"> <label>
- <input type="radio" name="priority" value="4" type="checkbox"> High
+ <input type="radio" name="priority" value="4" type="checkbox"> P1
</label><p></div>
<div class="checkbox"> <label>
- <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> Medium
+ <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> P2
</label><p></div>
<div class="checkbox"> <label>
- <input type="radio" name="priority" value="2" type="checkbox"> Low
+ <input type="radio" name="priority" value="2" type="checkbox"> P3
+ </label><p></div>
+ <div class="checkbox"> <label>
+ <input type="radio" name="priority" value="1" type="checkbox"> P4
</label><p></div>
</div>
</div>
@@ -135,8 +138,14 @@
</div>
</div>
</div>
- Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ Defect Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ <p><p>
+ <b><big>Affected Components: </big></b>
+    <input type="text" id="input-affected-components" name="components" size="40" value="{{affected_components}}"> (e.g. a space-separated list of packages, recipes, sub-systems, or applications)
<p><p>
+ Found Version: {{found_version}}
+ <p><p>
+
</div>
</div>
@@ -163,7 +172,7 @@
<td>{{ id.defect.summary }}</td>
<td>{{ id.defect.get_priority_text }}</td>
<td>{{ id.defect.get_status_text }}</td>
- <td>{{ id.defect.get_resolution_text }}</td>
+ <td>{{ id.defect.get_defect_resolution_text }}</td>
<td>{{ id.defect.release_version }}</td>
<td><a href="{{id.defect.url}}" id="dataid_{{id.defect.id}}" target="_blank">{{id.defect.url}}</a></td>
{% if request.user.is_creator %}
@@ -268,10 +277,10 @@
<td>{{ u.author }}</td>
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
- <form id="downloadbanner" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
<input type="hidden" id="action" name="action" value="download">
<input type="hidden" id="record_id" name="record_id" value={{u.id}}>
- <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{u.id}}"></span>
+ <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{forloop.counter}}"></span>
{% if request.user.is_creator %}
<span class="glyphicon glyphicon-trash trash-attachment" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
{% endif %}
@@ -442,6 +451,9 @@
</table>
</div>
+<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
+
<script>
var selected_newcomment=false;
var selected_addusernotify=false;
@@ -539,6 +551,7 @@
postCommitAjaxRequest({
"action" : 'submit-createdefect',
"defect_reason" : $("#input-defect-reason").val(),
+ "affected_components" : $("#input-affected-components").val(),
"components": component_list,
"priority": priority,
});
@@ -614,8 +627,8 @@
}
});
- $('.submit-downloadattachment').click(function() {
- $("#downloadbanner").submit();
+    $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
});
$('.trash-attachment').click(function() {
@@ -728,16 +741,18 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim()
var private_note=$('#text-private-note').val().trim()
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
var outcome=$('#select-outcome-state').val();
postCommitAjaxRequest({
- "action" : 'submit-quickedit',
- "note" : note,
- "private_note" : private_note,
- "status" : status,
- "outcome" : outcome,
- "priority" : priority,
+ "action" : 'submit-quickedit',
+ "priority" : priority,
+ "status" : status,
+ "note" : note,
+ "private_note" : private_note,
+ "tags" : tags,
+ "outcome" : outcome,
});
});
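The new Affected Components field above is described as a space-separated list. A small hedged sketch of a normalizer that could run before the value is posted with the submit-createdefect payload, so stray whitespace does not end up in the stored list (the helper is an assumption, not part of the template above):

    /* Hypothetical normalizer: collapse runs of whitespace and trim, so
       "  busybox   glibc " is posted as "busybox glibc". */
    function normalizeComponents(value) {
        return value.trim().split(/\s+/).filter(Boolean).join(" ");
    }

    /* Possible use when building the payload:
       "affected_components" : normalizeComponents($("#input-affected-components").val()), */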
diff --git a/lib/srtgui/templates/maintenance.html b/lib/srtgui/templates/maintenance.html
new file mode 100755
index 00000000..63c60f33
--- /dev/null
+++ b/lib/srtgui/templates/maintenance.html
@@ -0,0 +1,78 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load projecttags %}
+{% load humanize %}
+
+{% block title %} Maintenance tools {% endblock %}
+{% block pagecontent %}
+ <div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Maintenance</h1>
+ </div>
+ </div>
+ <div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_cve' %}">History CVE</a></td>
+ <td>Examine History for CVEs</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_vulnerability' %}">History Vulnerabilities</a></td>
+ <td>Examine History for Vulnerabilities</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_investigation' %}">History Investigations</a></td>
+ <td>Examine History for Investigations</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_defect' %}">History Defects</a></td>
+ <td>Examine History for Defects</td>
+ </tr>
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+
+ <dt>CVE History: Total Count =</dt>
+ <dd>
+ {{history_cve_total}}
+ </dd>
+ <dt>Vulnerability History: Total Count =</dt>
+ <dd>
+ {{history_vulnerability_total}}
+ </dd>
+ <dt>Investigation: Total Count =</dt>
+ <dd>
+ {{history_investigation_total}}
+ </dd>
+ <dt>Defect: Total Count =</dt>
+ <dd>
+ {{defect_investigation_total}}
+ </dd>
+
+ </dl>
+ </div>
+ </div>
+
+ </div>
+ </div>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/management.html b/lib/srtgui/templates/management.html
index fe40ecb3..9b1e6456 100644
--- a/lib/srtgui/templates/management.html
+++ b/lib/srtgui/templates/management.html
@@ -40,8 +40,8 @@
</tr>
<tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Request</a></td>
- <td>Process the items that are ready to be published from SRTool</td>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
+ <td>Process items to be published from the SRTool</td>
</tr>
{% if request.user.is_admin %}
@@ -54,6 +54,11 @@
<td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
<td>Manage source list, perform manual pulls</td>
</tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
+ <td>Maintenance utilities</td>
+ </tr>
{% endif %}
</table>
@@ -75,15 +80,15 @@
</dd>
<dt>Investigate =</dt>
<dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
</dd>
<dt>Vulnerable =</dt>
<dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
</dd>
<dt>Not Vulnerable =</dt>
<dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
</dd>
<dt>Vulnerabilities: Total Count =</dt>
<dd>
@@ -93,17 +98,17 @@
<dd>
<a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
</dd>
- <dt>High active =</dt>
+ <dt>Critical active =</dt>
<dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{vulnerability_critical}} </a>
</dd>
- <dt>Medium active =</dt>
+ <dt>High active =</dt>
<dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
</dd>
- <dt>Low active =</dt>
+ <dt>Medium active =</dt>
<dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:low&default_orderby=name&filter_value=on&" %}> {{vulnerability_low}} </a>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
</dd>
<dt>Investigations: Total Count =</dt>
@@ -114,17 +119,17 @@
<dd>
<a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
</dd>
- <dt>High active =</dt>
+ <dt>Critical active =</dt>
<dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{investigation_critical}} </a>
</dd>
- <dt>Medium active =</dt>
+ <dt>High active =</dt>
<dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
</dd>
- <dt>Low active =</dt>
+ <dt>Medium active =</dt>
<dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:low&default_orderby=name&filter_value=on&" %}> {{investigation_low}} </a>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
</dd>
<dt>Defects: Total Count =</dt>
@@ -133,19 +138,19 @@
</dd>
<dt>Open =</dt>
<dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
</dd>
<dt>InProgress =</dt>
<dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
</dd>
<dt>P1 active =</dt>
<dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
</dd>
<dt>P2 active =</dt>
<dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
</dd>
<dt>Packages: Affected=</dt>
diff --git a/lib/srtgui/templates/notifications-toastertable.html b/lib/srtgui/templates/notifications-toastertable.html
index dde76482..c9e572df 100755
--- a/lib/srtgui/templates/notifications-toastertable.html
+++ b/lib/srtgui/templates/notifications-toastertable.html
@@ -19,7 +19,7 @@
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
<li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li>Pending To-do Notifications</li>
+ <li>Pending Notifications</li>
</ul>
</div>
</div>
diff --git a/lib/srtgui/templates/publish-cve-toastertable.html b/lib/srtgui/templates/publish-cve-toastertable.html
new file mode 100755
index 00000000..c46128cc
--- /dev/null
+++ b/lib/srtgui/templates/publish-cve-toastertable.html
@@ -0,0 +1,162 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table via CVEs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table via CVEs</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table via CVEs";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+          title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "cves" : cve_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/publish-defect-toastertable.html b/lib/srtgui/templates/publish-defect-toastertable.html
new file mode 100755
index 00000000..c31e3b6a
--- /dev/null
+++ b/lib/srtgui/templates/publish-defect-toastertable.html
@@ -0,0 +1,168 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table via Defects {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table via Defects</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<div id="table-loading">
+<h3><font color="blue">[ Table Loading... ]</font></h3>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ $("#table-loading").slideDown();
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table via Defects";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+          title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVEs via Defect" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ $("#table-loading").slideUp();
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "defects" : defect_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "defects" : defect_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "defects" : defect_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/publish-list-toastertable.html b/lib/srtgui/templates/publish-list-toastertable.html
new file mode 100755
index 00000000..b5a88323
--- /dev/null
+++ b/lib/srtgui/templates/publish-list-toastertable.html
@@ -0,0 +1,162 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+          title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+            alert("No CVEs were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "cves" : cve_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
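The three publish tables above (publish-cve, publish-defect, publish-list) register near-identical mark-new, mark-modified, and unmark handlers that differ only in the table selector and the request key ("cves" versus "defects"). A hedged sketch of how the shared pattern could be factored into one helper, assuming the same xhr_publish actions and the postCommitAjaxRequest() wrapper defined in each template:

    /* Hypothetical shared helper: gather the checked row names from one table
       and post a single publish action. keyName is "cves" or "defects". */
    function postMarkAction(tableSelector, keyName, action) {
        var names = [];
        $(tableSelector + ' input:checked').each(function(){
            names.push($(this).prop('name'));
        });
        if (0 == names.length) {
            alert("No items were selected");
            return;
        }
        var reqdata = { "action" : action };
        reqdata[keyName] = names.join(",");
        postCommitAjaxRequest(reqdata);
    }

    /* Example wiring for the CVE table; the defect table would pass
       '#publishdefecttable' and "defects" instead. */
    $('#mark-new').click(function(){ postMarkAction('#publishcvetable', 'cves', 'mark-new'); });
    $('#mark-modified').click(function(){ postMarkAction('#publishcvetable', 'cves', 'mark-modified'); });
    $('#unmark').click(function(){ postMarkAction('#publishcvetable', 'cves', 'unmark'); });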
diff --git a/lib/srtgui/templates/publish.html b/lib/srtgui/templates/publish.html
index b1f3d83f..cf0f2294 100644
--- a/lib/srtgui/templates/publish.html
+++ b/lib/srtgui/templates/publish.html
@@ -13,32 +13,315 @@
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
<li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li>Publish (Proposals)</li>
+ <li>Publish Report Management</li>
</ul>
</div>
</div>
-<h2> Manage Publish Requests</h2>
+<h2>Publish Report Management</h2>
<ul>
- <li>The SRTool supports an external publishing tool, for example a business table or the vendor's public website</li>
- <li>These tools can be used to (a) submit CVEs to that tool, and (b) update the CVEs when they have been published</li>
+ <li>The SRTool supports exporting new and updated CVEs to external publishing tools</li>
</ul>
-<h2> Publishing Actions</h2>
+<hr>
+
+<h2>Publish Via Database Snapshots</h2>
+<h3> On Demand</h3>
<ul>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'select-publish' %}">Publish Request</a></td>
- <td>Process the items that are ready to be published from SRTool</td>
- </tr>
-
- <br>
- <br>
- <br>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'update-published' %}">Published Update</a></td>
- <td>Process the items that have been published</td>
- </tr>
+    <li>This extracts the changes from a 'base' database backup snapshot to a more recent 'top' snapshot</li>
+ <li>The 'start' and 'stop' dates can extract a subset of those changes. Normally they are set to the 'base' and 'top' dates</li>
</ul>
+<div style="padding-left:30px;">
+ <div>
+ <label> Start Snapshot: </label>
+ <select id="snap_date_base">
+ {% for snap in snapshot_list %}
+ <option value="{{snap.date}}" {% if snap_start_index == snap.index %}selected{% endif %}>
+ ({{snap.mode}}) {{snap.date}} {{snap.time}} | {{snap.day}}
+ </option>
+ {% endfor %}
+ </select>
+ </div>
+ <div>
+ <label> Stop Snapshot: </label>
+ <select id="snap_date_top">
+ {% for snap in snapshot_list %}
+ <option value="{{snap.date}}" {% if snap_stop_index == snap.index %}selected{% endif %}>
+ ({{snap.mode}}) {{snap.date}} {{snap.time}} | {{snap.day}}
+ </option>
+ {% endfor %}
+ </select>
+ </div>
+ <div>
+ Start Date: <input type="text" id="snap_date_start" value="{{snap_date_start}}">&nbsp;&nbsp;
+ Stop Date: <input type="text" id="snap_date_stop" value="{{snap_date_stop}}">&nbsp;&nbsp;
+ <I>(Format: yyyy-mm-dd)</I>
+ </div>
+<br>
+</div>
+
+<div>
+ <span style="padding-left:30px;"><button id="export-snapshot" class="btn btn-default" type="button">Generate</button></span>
+ <!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
+ <span id="export-snapshot-text">Generate the publish table on-demand (using snapshots)</span>
+ <span id="generating-report" hidden style="color:red"><I>... Generating the report - this will take a few minutes ...</I></span>
+</div>
+<br>
+
+<form method="POST"> {% csrf_token %}
+<h3>Automatic (Under Development)</h3>
+<div style="padding-left: 25px;">
+ <label> Frequency: </label>
+ <select id="snap_frequency">
+ {% for snap in snapshot_frequency_list %}
+ <option value="{{snap}}" {% if snap == snap_frequency_select %}selected{% endif %}>
+ {{snap}}
+ </option>
+ {% endfor %}
+ </select>
+ <span style="padding-left:30px;"><button id="export-snapshot" class="btn btn-default" type="button" disabled>Save</button></span>
+ <!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
+ Save the automatic publishing frequency
+</div>
+</form>
+
+<h3>Generated Reports</h3>
+<div style="padding-left: 25px;">
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Size</th>
+ <th>Date</th>
+ <th>Manage</th>
+ </tr>
+ </thead>
+ {% if generated_report_list %}
+ {% for report in generated_report_list %}
+ <tr>
+ <td>{{report.name}}</td>
+ <td>{{report.size}}</td>
+ <td>{{report.date}}</td>
+ <td>
+ <span id="attachment_entry_'+{{report.name}}+'" class="js-config-var-name"></span>
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download">
+ <input type="hidden" id="report_id" name="report_name" value={{report.name}}>
+ <span class="glyphicon glyphicon-download-alt submit-downloadreport" id="report_download_'+{{report.name}}+'" x-data="{{forloop.counter}}"></span>
+ {% if request.user.is_creator %}
+ <span class="glyphicon glyphicon-trash trash-report" id="report_trash_'+{{report.name}}+'" x-data="{{report.name}}"></span>
+ {% endif %}
+ </form>
+ </td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No report files found</td>
+ </tr>
+ {% endif %}
+ </table>
+ (last report = {{snap_last_calc}})
+</div>
+
+<hr>
+
+<form method="POST"> {% csrf_token %}
+<h2>Publish Via History Tables (Under development)</h2>
+<ul>
+ <li>These tools can be used to (a) gather the candidate CVEs, (b) review and edit the list if needed, (c) generate the report when ready</li>
+    <li>The user can explicitly include and exclude CVEs from the "New" list and the "Updated" list, in case the automatic calculations need adjustment</li>
+ <li>These mark-ups are inserted into the respective CVE's history at a mid-point date of the period, so they are both persistent and period-specific</li>
+    <li>The user can clear the mark-ups from the given period and start over, but this will not affect any other period</li>
+</ul>
+<h3> Publish Preparation</h3>
+<ul>
+ <div>
+ Start Date: <input type="text" name="date_start" value="{{date_start}}">&nbsp;&nbsp;
+ Stop Date: <input type="text" name="date_stop" value="{{date_stop}}">
+ </div>
+ <br>
+ <div>
+ Product filter:
+ <select name="product-filter" id="select-product-filter">
+            <option value="0">WR Linux Supported Products</option>
+ </select>
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="recalculate">Recalculate publish table</button>
+ Gather the items for this period to be published from SRTool, with user changes (last done {{last_calc}})
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="reset">Reset user edits, Recalculate</button>
+ Remove the user changes for this period, recalculate the table
+ </div>
+</ul>
+<h3> Publish Preview and Modifications</h3>
+<ul>
+ <div>
+ <button type="submit" name="action" value="view">View the publish table</button>
+ View the publish table, prune entries
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="add-cve">Add via CVEs</button>
+ Add recent CVEs to the table
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="add-defect">Add via defects</button>
+ Add CVEs of recent defects to the table
+ </div>
+ <br>
+</ul>
+<h3> Publish the Report</h3>
+<ul>
+ <div>
+ <button type="submit" name="action" value="export">Export</button>
+ Export the publish table (using history)
+ </div>
+ <br>
+</ul>
+</form>
+
+<script>
+ var selected_newcomment=false;
+
+ $(document).ready(function() {
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ document.getElementById("export-snapshot").disabled = false;
+ /* document.getElementById("download-snapshot").disabled = false;*/
+ document.getElementById("export-snapshot-text").innerText = "Generate the publish table on-demand (using snapshots)";
+ document.getElementById("generating-report").style.display = "block";
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ document.getElementById("export-snapshot").disabled = false;
+ document.getElementById("export-snapshot-text").innerText = "Generate the publish table on-demand (using snapshots)";
+ /* document.getElementById("download-snapshot").disabled = false; */
+ document.getElementById("generating-report").style.display = "block";
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $("#snap_date_base").change(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ if (snap_date_base > snap_date_top) {
+ $("#snap_date_base").val(snap_date_top);
+ $("#snap_date_top").val(snap_date_base);
+ $("#snap_date_start").val(snap_date_top);
+ $("#snap_date_stop").val(snap_date_base);
+ } else {
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ $("#snap_date_start").val(snap_date_base);
+ if (snap_date_stop < snap_date_base) {
+ $("#snap_date_stop").val(snap_date_top);
+ }
+ }
+ });
+
+ $("#snap_date_top").change(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ if (snap_date_base > snap_date_top) {
+ $("#snap_date_base").val(snap_date_top);
+ $("#snap_date_top").val(snap_date_base);
+ $("#snap_date_start").val(snap_date_top);
+ $("#snap_date_stop").val(snap_date_base);
+ } else {
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_top) {
+ $("#snap_date_start").val(snap_date_base);
+ }
+ $("#snap_date_stop").val(snap_date_top);
+ }
+ });
+
+ $('#export-snapshot').click(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_stop) {
+ alert("Error: the start date is after the stop date");
+ return;
+ }
+ if (snap_date_start < snap_date_base) {
+ alert("Error: the start date is before the snapshot base date");
+ return;
+ }
+ if (snap_date_stop > snap_date_top) {
+ alert("Error: the stop date is after the snapshot top date");
+ return;
+ }
+ var result = confirm("Generate the report? This will take several minutes.");
+ if (result){
+ document.getElementById("export-snapshot").disabled = true;
+ document.getElementById("export-snapshot-text").innerText = "... Generating the report - this will take a few minutes ...";
+
+ /* document.getElementById("download-snapshot").disabled = true; */
+ document.getElementById("generating-report").style.display = "none";
+ postCommitAjaxRequest({
+ "action" : 'export-snapshot',
+ "snap_date_base" : snap_date_base,
+ "snap_date_top" : snap_date_top,
+ "snap_date_start" : snap_date_start,
+ "snap_date_stop" : snap_date_stop
+ });
+ }
+ });
+
+
+ /* Manage report files */
+
+ $('.submit-downloadreport').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('.trash-report').click(function() {
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashreport',
+ "report_name" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+
+
+ });
+</script>
+
{% endblock %}
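The snapshot handlers above compare the yyyy-mm-dd values with plain "<" and ">", which works because zero-padded ISO-style dates sort the same lexicographically as chronologically. A minimal sketch of a format guard that could back up the "(Format: yyyy-mm-dd)" hint before those comparisons run; the guard is an assumption, not part of the template above:

    /* Returns true only for zero-padded ISO dates such as "2020-03-09".
       For two such strings, string comparison matches date order, which is
       what the snap_date_* handlers above rely on. */
    function isIsoDate(value) {
        return /^\d{4}-\d{2}-\d{2}$/.test(value.trim());
    }

    /* Possible check at the top of the '#export-snapshot' click handler,
       before the existing range checks and the postCommitAjaxRequest() call: */
    if (!isIsoDate($("#snap_date_start").val()) || !isIsoDate($("#snap_date_stop").val())) {
        alert("Error: the start and stop dates must use the yyyy-mm-dd format");
    }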
diff --git a/lib/srtgui/templates/report.html b/lib/srtgui/templates/report.html
index d4d27f76..4c2b2450 100644
--- a/lib/srtgui/templates/report.html
+++ b/lib/srtgui/templates/report.html
@@ -13,10 +13,16 @@
</div>
</div>
- <form method="POST">{% csrf_token %}
+ <form method="POST"> {% csrf_token %}
<input type="hidden" name="parent_page" value="{{parent_page}}">
<input type="hidden" name="record_list" value="{{record_list}}">
+ {% if error_message %}
+ <br>
+ <font size="3" color="red">{{error_message}}</font>
+ <br>
+ {% endif %}
+
{% if report_type_list %}
<hr>
Report Type:<br>
@@ -53,6 +59,12 @@
<hr>
{% endif %}
+ {% if report_date_list %}
+ Date Range (mm/dd/yyyy):<br>
+ {{report_date_list|safe}}
+ <hr>
+ {% endif %}
+
{% if report_custom_list %}
Page Specific Settings:<br>
{{report_custom_list|safe}}
@@ -68,6 +80,31 @@
</form>
-<br>
+ <br>
+ <!-- <input type="submit" id="submit-report-button" class="btn btn-primary btn-lg" value="Generate and Download Report"/> -->
+ <!-- <button type="button" id="submit-report-button" class="btn btn-primary btn-lg"> Generate and Download Report </button> -->
+ <!-- <button id="select-these" class="btn btn-default" type="button">Select these</button> -->
+
+ <!-- Javascript support -->
+ <script>
+
+ $(document).ready(function() {
+
+ /* Handle the post button */
+ $('#test-submit-report-button').click(function(){
+ document.getElementById("submit-report-button").innerText = " ... working ... ";
+ data = {
+ "action" : 'foo',
+ "pub" : $('#bar').is(':checked') ? "yes" : "no",
+ }
+
+ $.post("wr/report/management", data, function(data, status){
+ alert("Data: " + data + "\nStatus: " + status);
+ });
+ });
+
+ });
+ </script>
+
{% endblock %}
diff --git a/lib/srtgui/templates/srtool_metadata_include.html b/lib/srtgui/templates/srtool_metadata_include.html
index 7471f0f9..eb83c05f 100755
--- a/lib/srtgui/templates/srtool_metadata_include.html
+++ b/lib/srtgui/templates/srtool_metadata_include.html
@@ -9,15 +9,12 @@
<i>Status:</i> {{object.get_status_text}},&nbsp;&nbsp;
{% if default_category == "CVE" %}
{% if request.user.is_creator %}
- <i>Publish</i> = {{object.get_publish_text}}, <i>Publish Date</i> = {{object.publish_date}}
+ <i>Publish</i> = {{object.get_publish_text}}, <i>Publish Date</i> = {{object.publish_date}}, <i>Acknowledge Date</i> = {{object.acknowledge_date|date:'Y-m-d'}}, <i>Initial Release</i> = {{object.publishedDate}}, <i>Last Modified</i> = {{object.lastModifiedDate}}
<!--<a class="btn btn-default navbar-btn " id="login-button" href="">Publish Now</a> -->
{% else %}
<i>Publish = {{object.get_publish_text}}</i>
{% endif %}
</LI>
- <LI>
- <i>Packages:</i> {{object.packages}}
- </LI>
{% else %}
<i>Outcome:</i> {{object.get_outcome_text}}
<p>
@@ -26,9 +23,17 @@
<i>Public Notes:</i> {{object.comments}}
</LI>
{% if request.user.is_creator %}
+ <LI>
+ <i>Private Notes:</i> {{object.comments_private}}
+ </LI>
+ {% endif %}
<LI>
- <i>Private Notes:</i> {{object.comments_private}}
+ <i>Tags:</i> {{object.tags}}
</LI>
+ {% if default_category == "CVE" %}
+ <LI>
+ <i>Affected Components:</i> {{object.packages}}
+ </LI>
{% endif %}
</UL>
</fieldset>
@@ -42,10 +47,10 @@
<p><i>Priority</i> =
<select name="Priority" id="select-priority-state">
<option value="0" {% if 0 == object.priority %}selected{% endif %}>Undefined</option>
- <option value="1" {% if 1 == object.priority %}selected{% endif %}>Minor</option>
- <option value="2" {% if 2 == object.priority %}selected{% endif %}>Low</option>
- <option value="3" {% if 3 == object.priority %}selected{% endif %}>Medium</option>
- <option value="4" {% if 4 == object.priority %}selected{% endif %}>High</option>
+ <option value="1" {% if 1 == object.priority %}selected{% endif %}>Low</option>
+ <option value="2" {% if 2 == object.priority %}selected{% endif %}>Medium</option>
+ <option value="3" {% if 3 == object.priority %}selected{% endif %}>High</option>
+ <option value="4" {% if 4 == object.priority %}selected{% endif %}>Critical</option>
</select>
&nbsp;&nbsp;
<i>Status</i> =
@@ -56,6 +61,10 @@
<option value="3" {% if 3 == object.status %}selected{% endif %}>Investigate</option>
<option value="4" {% if 4 == object.status %}selected{% endif %}>Vulnerable</option>
<option value="5" {% if 5 == object.status %}selected{% endif %}>Not Vulnerable</option>
+ <option value="6" {% if 6 == object.status %}selected{% endif %}>(New)</option>
+ <option value="7" {% if 7 == object.status %}selected{% endif %}>(Investigate)</option>
+ <option value="8" {% if 8 == object.status %}selected{% endif %}>(Vulnerable)</option>
+ <option value="9" {% if 9 == object.status %}selected{% endif %}>(Not Vulnerable)</option>
</select>
<p>
{% if default_category == "CVE" %}
@@ -78,9 +87,14 @@
<option value="3" {% if 3 == object.outcome_state %}selected{% endif %}>Closed (Won't Fix)</option>
</select>
{% endif %}
- <p>Note: <input type="text" placeholder="Edit Note" id="text-note" size="80" value="{{object.comments}}"></p>
+ <p>Comments: <input type="text" placeholder="Edit comments" id="text-note" size="80" value="{{object.comments}}"></p>
{% if request.user.is_creator %}
- <p>Private Note: <input type="text" placeholder="Edit Private Note" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
+ <p>Private Comments: <input type="text" placeholder="Edit private comments" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
+ {% endif %}
+ <p>Tags: <input type="text" placeholder="Edit tags" id="text-tags" size="80" value="{{object.tags}}"></p>
+ {% if default_category == "CVE" %}
+ <p>Affected Components: <input type="text" placeholder="Edit affected components" id="text-affected-components" size="80" value="{{object.packages}}"></p>
+ <i>Acknowledge Date</i> = <input type="text" placeholder="Acknowledge Date" id="text-acknowledge-date" size="40" value="{{object.acknowledge_date|date:'Y-m-d'}}"> (YYYY-MM-DD, or empty string for None)<p>
{% endif %}
<p><p>
</fieldset>
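
The quick-edit dropdown above renumbers the priority scale so that values 1..4 read Low, Medium, High, Critical (the old scale topped out at High), which matches the SRTool.priority_text() calls in the views.py changes later in this patch. A minimal standalone sketch of that lookup, assuming the label set shown in the template; the dict literal and helper name are illustrative, not the model definition:

    PRIORITY_TEXT = {0: 'Undefined', 1: 'Low', 2: 'Medium', 3: 'High', 4: 'Critical'}

    def priority_text(priority):
        """Human-readable label for a stored priority value (unknown values fall back to Undefined)."""
        return PRIORITY_TEXT.get(priority, 'Undefined')

    print(priority_text(4))   # Critical
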
diff --git a/lib/srtgui/templates/toastertable.html b/lib/srtgui/templates/toastertable.html
index 6882b394..99eb01e2 100644
--- a/lib/srtgui/templates/toastertable.html
+++ b/lib/srtgui/templates/toastertable.html
@@ -69,6 +69,7 @@
</div>
</div>
<button class="btn btn-default" id="search-submit-{{table_name}}" >Search</button>
+ <span class="glyphicon glyphicon-question-sign get-help" title="Terms are ANDed by default. Use 'OR' to OR terms, and a leading '-' to exclude a term. Example: abc OR 'def ghi' AND -jkl"></span>
</form>
<form class="navbar-form navbar-right">
<div class="form-group">
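
The new help icon documents the table search syntax: terms are ANDed by default, 'OR' joins alternatives, a leading '-' excludes a term, and quotes group a phrase. A standalone sketch of one plausible way to evaluate that syntax against a text field; the function names and exact grouping rules are assumptions, since the real filtering happens server-side in the table code:

    import shlex

    def term_ok(text, term):
        """A '-term' must be absent; any other term must be present."""
        if term.startswith('-'):
            return term[1:].lower() not in text
        return term.lower() in text

    def matches(text, query):
        text = text.lower()
        groups, pending_or = [], False        # groups are ANDed; members of a group are ORed
        for tok in shlex.split(query):        # shlex honors the 'def ghi' quoting style
            if tok.upper() == 'AND':
                pending_or = False
            elif tok.upper() == 'OR':
                pending_or = True
            elif pending_or and groups:
                groups[-1].append(tok)
                pending_or = False
            else:
                groups.append([tok])
        return all(any(term_ok(text, t) for t in group) for group in groups)

    print(matches("abc def ghi", "abc OR 'def ghi' AND -jkl"))   # True
    print(matches("abc jkl", "abc AND -jkl"))                    # False
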
diff --git a/lib/srtgui/templates/vulnerability.html b/lib/srtgui/templates/vulnerability.html
index 35b2c0e6..9290a1ef 100644
--- a/lib/srtgui/templates/vulnerability.html
+++ b/lib/srtgui/templates/vulnerability.html
@@ -148,26 +148,41 @@
</tr>
</thead>
- {% if object.vulnerability_investigation.all %}
- {% for investigation in object.vulnerability_investigation.all %}
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Product Name</th>
+ <th>Investigation</th>
+ <th>Status</th>
+ <th>Outcome</th>
+ <th>Defect</th>
+ <th>Release Version</th>
+ {% if request.user.is_creator %}
+ <th>Manage</th>
+ {% endif %}
+ </tr>
+ </thead>
+
+ {% if object.investigation_list %}
+ {% for v2i in object.investigation_list %}
<tr>
- <td><a href="{% url 'product' investigation.product.id %}">{{ investigation.product.long_name }}<a></td>
- <td><a href="{% url 'investigation' investigation.id %}">{{ investigation.name }}<a></td>
- <td>{{ investigation.get_status_text }}</td>
- <td>{{ investigation.get_outcome_text }}</td>
+ <td><a href="{% url 'product' v2i.investigation.product.id %}">{{ v2i.investigation.product.long_name }}</a></td>
+ <td><a href="{% url 'investigation' v2i.investigation.id %}">{{ v2i.investigation.name }}</a></td>
+ <td>{{ v2i.investigation.get_status_text }}</td>
+ <td>{{ v2i.investigation.get_outcome_text }}</td>
<td>
- {% for ij in investigation.investigation_to_defect.all %}
+ {% for ij in v2i.investigation.investigation_to_defect.all %}
{% if not forloop.first %}| {% endif %}<a href="{% url 'defect' ij.defect.id %}">{{ij.defect.name}} </a>
{% endfor %}
</td>
<td>
- {% for ij in investigation.investigation_to_defect.all %}
+ {% for ij in v2i.investigation.investigation_to_defect.all %}
{% if not forloop.first %}| {% endif %}<a href="{% url 'defect' ij.defect.id %}">{{ij.defect.release_version}} </a>
{% endfor %}
</td>
{% if request.user.is_creator %}
<td>
- <span class="glyphicon glyphicon-trash trash-investigation" id="affected_trash_'+{{investigation.id}}+'" x-data="{{investigation.id}}"></span>
+ <span class="glyphicon glyphicon-trash trash-investigation" id="affected_trash_'+{{v2i.investigation.id}}+'" x-data="{{v2i.investigation.id}}"></span>
</td>
{% endif %}
</tr>
@@ -268,10 +283,10 @@
<td>{{ u.author }}</td>
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
- <form id="downloadbanner" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
<input type="hidden" id="action" name="action" value="download">
<input type="hidden" id="record_id" name="record_id" value={{u.id}}>
- <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{u.id}}"></span>
+ <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{forloop.counter}}"></span>
{% if request.user.is_creator %}
<span class="glyphicon glyphicon-trash trash-attachment" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
{% endif %}
@@ -440,6 +455,9 @@
</table>
</div>
+<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
+
<!-- Javascript support -->
<script>
var selected_addrelatedproduct=false;
@@ -583,7 +601,7 @@
});
$('.submit-downloadattachment').click(function() {
- $("#downloadbanner").submit();
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
});
$('.trash-attachment').click(function() {
@@ -694,6 +712,7 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim()
var private_note=$('#text-private-note').val().trim()
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
var outcome=$('#select-outcome-state').val();
@@ -701,6 +720,7 @@
"action" : 'submit-quickedit',
"note" : note,
"private_note" : private_note,
+ "tags" : tags,
"status" : status,
"outcome" : outcome,
"priority" : priority,
diff --git a/lib/srtgui/templatetags/projecttags.py b/lib/srtgui/templatetags/projecttags.py
index d7bc5319..0c5efc29 100644
--- a/lib/srtgui/templatetags/projecttags.py
+++ b/lib/srtgui/templatetags/projecttags.py
@@ -275,6 +275,24 @@ def get_dict_value(dictionary, key):
return ''
@register.filter
+def get_strdict_value(dictionary_str, key):
+ """ return the value of a dictionary key
+ where the dictionary is in string form
+ """
+ try:
+ dictionary = JsonLib.loads(dictionary_str)
+ return dictionary[key]
+ except (KeyError, IndexError):
+ return ''
+
+def get_tag_key(tag,key,default=None):
+ d = json.loads(tag)
+ if key in d:
+ return d[key]
+ return default
+
+
+@register.filter
def is_shaid(text):
""" return True if text length is 40 characters and all hex-digits
"""
diff --git a/lib/srtgui/urls.py b/lib/srtgui/urls.py
index 26c484d8..a4947c51 100644
--- a/lib/srtgui/urls.py
+++ b/lib/srtgui/urls.py
@@ -88,10 +88,19 @@ urlpatterns = [
tables.PackageFilterDetailTable.as_view(template_name="package-filter-detail-toastertable.html"),
name='package-filter-detail'),
+ url(r'^publish-list/$',
+ tables.PublishListTable.as_view(template_name="publish-list-toastertable.html"),
+ name='publish-list'),
+ url(r'^publish-cve/$',
+ tables.PublishCveTable.as_view(template_name="publish-cve-toastertable.html"),
+ name='publish-cve'),
+ url(r'^publish-defect/$',
+ tables.PublishDefectTable.as_view(template_name="publish-defect-toastertable.html"),
+ name='publish-defect'),
+
url(r'^select-publish/$',
tables.SelectPublishTable.as_view(template_name="publish-select-toastertable.html"),
name='select-publish'),
-
url(r'^update-published/$',
tables.UpdatePublishedTable.as_view(template_name="published-select-toastertable.html"),
name='update-published'),
@@ -120,6 +129,8 @@ urlpatterns = [
url(r'^xhr_packages/$', views.xhr_packages,
name='xhr_packages'),
+ url(r'^xhr_publish/$', views.xhr_publish,
+ name='xhr_publish'),
url(r'^manage/$', views.management, name='manage'),
url(r'^manage_cpes/$',
@@ -137,6 +148,19 @@ urlpatterns = [
name='sources'),
url(r'^users/$', views.users, name='users'),
+ url(r'^maintenance/$', views.maintenance, name='maintenance'),
+ url(r'^history_cve/$',
+ tables.HistoryCveTable.as_view(template_name="history-cve-toastertable.html"),
+ name='history_cve'),
+ url(r'^history_vulnerability/$',
+ tables.HistoryVulnerabilityTable.as_view(template_name="history-vulnerability-toastertable.html"),
+ name='history_vulnerability'),
+ url(r'^history_investigation/$',
+ tables.HistoryInvestigationTable.as_view(template_name="history-investigation-toastertable.html"),
+ name='history_investigation'),
+ url(r'^history_defect/$',
+ tables.HistoryDefectTable.as_view(template_name="history-defect-toastertable.html"),
+ name='history_defect'),
url(r'^guided_tour/$', views.guided_tour, name='guided_tour'),
diff --git a/lib/srtgui/views.py b/lib/srtgui/views.py
index d87dca38..79bf7b17 100644
--- a/lib/srtgui/views.py
+++ b/lib/srtgui/views.py
@@ -37,13 +37,15 @@ from orm.models import Investigation, InvestigationHistory, InvestigationToDefec
from orm.models import SrtSetting, Product
from orm.models import Package
from orm.models import DataSource
-from orm.models import Defect, PublishPending
+from orm.models import Defect, DefectHistory, PublishPending, PublishSet
from orm.models import Notify, NotifyAccess, NotifyCategories
+from orm.models import SRTool, Update
from users.models import SrtUser, UserSafe
from srtgui.reports import ReportManager
from srtgui.api import readCveDetails, writeCveDetails, summaryCveDetails, execute_process
+from srtgui.api import publishCalculate, publishReset, publishMarkNew, publishMarkModified, publishMarkNone
from django.urls import reverse, resolve
from django.core.paginator import EmptyPage, PageNotAnInteger
@@ -59,6 +61,10 @@ logger = logging.getLogger("srt")
# quick development/debugging support
from srtgui.api import _log
+#
+# ================= Helper Routines ============================================
+#
+
def get_name_sort(cve_name):
try:
a = cve_name.split('-')
@@ -67,6 +73,10 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
+#
+# ================= Page Helper Routines ============================================
+#
+
class MimeTypeFinder(object):
# setting this to False enables additional non-standard mimetypes
# to be included in the guess
@@ -450,10 +460,10 @@ def management(request):
return redirect(landing)
# Keep it simple now, later use Q sets
- defect_open = Defect.objects.filter(status=Defect.OPEN)
- defects_inprogress = Defect.objects.filter(status=Defect.IN_PROGRESS)
- defect_p1 = defect_open.filter(priority=Defect.HIGH).count() + defects_inprogress.filter(priority=Defect.HIGH).count()
- defect_p2 = defect_open.filter(priority=Defect.MEDIUM).count() + defects_inprogress.filter(priority=Defect.MEDIUM).count()
+ defect_open = Defect.objects.filter(status=Defect.DEFECT_STATUS_OPEN)
+ defects_inprogress = Defect.objects.filter(status=Defect.DEFECT_STATUS_IN_PROGRESS)
+ defect_p1 = defect_open.filter(priority=Defect.CRITICAL).count() + defects_inprogress.filter(priority=Defect.CRITICAL).count()
+ defect_p2 = defect_open.filter(priority=Defect.HIGH).count() + defects_inprogress.filter(priority=Defect.HIGH).count()
defect_open = defect_open.count()
defects_inprogress = defects_inprogress.count()
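
The "later use Q sets" note above suggests collapsing the duplicated open/in-progress counting into single queries. A sketch of what that might look like with Django Q objects, using the status and priority constants visible in this hunk; it is meant to run inside the SRTool Django app rather than standalone:

    from django.db.models import Q
    from orm.models import Defect

    open_or_inprogress = Q(status=Defect.DEFECT_STATUS_OPEN) | Q(status=Defect.DEFECT_STATUS_IN_PROGRESS)
    defect_p1 = Defect.objects.filter(open_or_inprogress, priority=Defect.CRITICAL).count()
    defect_p2 = Defect.objects.filter(open_or_inprogress, priority=Defect.HIGH).count()
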
@@ -467,15 +477,15 @@ def management(request):
'vulnerability_total' : Vulnerability.objects.all().count(),
'vulnerability_open' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).count(),
+ 'vulnerability_critical' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.CRITICAL).count(),
'vulnerability_high' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.HIGH).count(),
'vulnerability_medium' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.MEDIUM).count(),
- 'vulnerability_low' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.HIGH).count(),
'investigation_total' : Investigation.objects.all().count(),
'investigation_open' : Investigation.objects.filter(outcome=Investigation.OPEN).count(),
+ 'investigation_critical' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.CRITICAL).count(),
'investigation_high' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.HIGH).count(),
'investigation_medium' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.MEDIUM).count(),
- 'investigation_low' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.HIGH).count(),
'defect_total' : Defect.objects.all().count(),
'defect_open' : defect_open,
@@ -487,6 +497,20 @@ def management(request):
}
return render(request, 'management.html', context)
+def maintenance(request):
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect(landing)
+
+ context = {
+ 'history_cve_total' : CveHistory.objects.all().count(),
+ 'history_vulnerability_total' : VulnerabilityHistory.objects.all().count(),
+ 'history_investigation_total' : InvestigationHistory.objects.all().count(),
+ 'defect_investigation_total' : DefectHistory.objects.all().count(),
+ }
+ return render(request, 'maintenance.html', context)
+
+
def cve(request, cve_pk, active_tab="1"):
if request.method == "GET":
template = "cve.html"
@@ -520,23 +544,31 @@ def cve(request, cve_pk, active_tab="1"):
cve_index = ord('1')
is_edit = ('Edit' == active_tab)
- # Prepend summary page?
+ # Fetch source tabs list
cve_sources = CveSource.objects.filter(cve=cve_object.id).order_by('datasource__key')
- if True or (1 < len(cve_sources)):
- tab_states[chr(cve_index)] = ''
- cveDetails,cve_html = summaryCveDetails(cve_object,cve_sources)
- cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html])
- cve_index += 1
+ # Always prepend a summary page
+ tab_states[chr(cve_index)] = 'active'
+ cveDetails,cve_html = summaryCveDetails(cve_object,cve_sources)
+ cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html])
+ cve_index += 1
# Add the source/edit tabs
- for cs in cve_sources:
+ for i in range(len(cve_sources)):
+ if (i < (len(cve_sources)-1)) and (cve_sources[i].datasource.source == cve_sources[i+1].datasource.source):
+ # Ensure one source per vendor, where the highest key wins (e.g. NIST Modified)
+ continue
+ pass
+
+ cs = cve_sources[i]
if active_tab == cs.datasource.name:
active_tab = chr(cve_index)
if ('Edit' == active_tab) and ('Local' == cs.datasource.name):
- tab_states[chr(cve_index)] = 'active'
+ #tab_states[chr(cve_index)] = 'active'
+ tab_states[chr(cve_index)] = ''
cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],'Edit',{}])
else:
- tab_states[chr(cve_index)] = 'active' if (active_tab == chr(cve_index)) else ''
+ tab_states[chr(cve_index)] = ''
+ #tab_states[chr(cve_index)] = 'active' if (active_tab == chr(cve_index)) else ''
cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],cs.datasource.name,{}])
cve_index += 1
if 0 == len(cve_sources):
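
The loop above skips a CveSource row whenever the next row shares the same datasource.source, so only the highest-keyed entry per vendor (e.g. "NIST Modified") gets its own tab. A standalone sketch of that "last one per vendor wins" pass over an already key-sorted list; the tuple layout is illustrative:

    def last_per_source(sources):
        """sources: (source, key) tuples sorted by key; keep the last entry per source."""
        keep = {}
        for source, key in sources:
            keep[source] = key            # a later (higher) key overwrites an earlier one
        return list(keep.items())         # dict preserves first-appearance order per source

    rows = [('NIST', 'nist_2019'), ('NIST', 'nist_modified'), ('MITRE', 'mitre_2019')]
    print(last_per_source(rows))   # [('NIST', 'nist_modified'), ('MITRE', 'mitre_2019')]
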
@@ -552,11 +584,6 @@ def cve(request, cve_pk, active_tab="1"):
tab_states['1'] = 'active'
cve_list_table[0][1] = 'active'
-
- # cve_summary = copy.copy(cve_object)
- # cve_summary_detail = copy.copy(cve_object_detail)
- # cve_summary.source = 'Summary'
- #
context = {
'object' : cve_object,
'cve_list_table' : cve_list_table,
@@ -583,7 +610,7 @@ def cve(request, cve_pk, active_tab="1"):
# Is this not a save?
if not request.POST.get('cve-edit','').startswith('Save'):
- return redirect(cve, cve_object.id, "Local")
+ return redirect(cve, cve_object.id, "Summary")
# does this user have permission to see this record?
if (not cve_object.public) and (not UserSafe.is_admin(request.user)):
@@ -594,7 +621,7 @@ def cve(request, cve_pk, active_tab="1"):
writeCveDetails(cve_object.name,request)
# show the results
- return redirect(cve, cve_object.id, "Local")
+ return redirect(cve, cve_object.id, "Summary")
def cve_edit(request, cve_pk):
_log("CVE_EDIT1(%s):" % cve_pk)
@@ -677,12 +704,14 @@ def vulnerability(request, vulnerability_pk):
_log("EXPORT_POST:'fileupload' does not exist: %s" % e)
try:
- with open(path + "/" + file.name, 'xb+') as destination:
+ local_file_path = path + "/" + file.name
+ with open(local_file_path, 'xb+') as destination:
for line in file:
destination.write(line)
username = UserSafe.user_name(request.user)
- VulnerabilityUploads.objects.get_or_create(vulnerability_id=vulnerability_object.id, description=description, path=path + "/" + file.name, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ VulnerabilityUploads.objects.get_or_create(vulnerability_id=vulnerability_object.id, description=description, path=local_file_path, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ VulnerabilityHistory.objects.create(vulnerability_id=vulnerability_object.id, comment=Update.ATTACH_DOC % file.name, date=datetime.now().strftime(SRTool.DATE_FORMAT), author=username)
except Exception as e:
_log("EXPORT_POST:FILE ALREADY EXISTS: %s" % e)
return redirect(vulnerability,vulnerability_pk)
@@ -722,16 +751,32 @@ def investigation(request, investigation_pk):
except:
return redirect(landing)
+ ### TO-DO: replace with dynamic lookahead instead of static huge list
defects = Defect.objects.all()
+
+ # Calculate the default 'affected_components' list, if any
+ affected_components = ''
+ affected_components_list = []
+ vulnerability = investigation_object.vulnerability
+ vc_list = vulnerability.vulnerability_to_cve.all()
+ for vc in vc_list:
+ if vc.cve.packages:
+ affected_components_list.append(vc.cve.packages)
+ if affected_components_list:
+ affected_components = ' '.join(affected_components_list)
+
+ # Pass Investigation's defect list
investigation_to_defect = investigation_object.investigation_to_defect.all()
context = {
'object' : investigation_object,
'defects' : defects,
'investigation_to_defect' : investigation_to_defect,
+ 'affected_components' : affected_components,
'defect_example' : SrtSetting.objects.get(name='SRTOOL_DEFECT_SAMPLENAME').value,
'notify_categories' : NotifyCategories.objects.all(),
'users' : UserSafe.get_safe_userlist(True),
'components' : Defect.Components,
+ 'found_version' : investigation_object.product.get_defect_tag('found_version'),
}
return render(request, template, context)
elif request.method == "POST":
@@ -757,11 +802,13 @@ def investigation(request, investigation_pk):
_log("EXPORT_POST:'fileupload' does not exist: %s" % e)
try:
- with open(path + "/" + file.name, 'xb+') as destination:
+ local_file_path = path + "/" + file.name
+ with open(local_file_path, 'xb+') as destination:
for line in file:
destination.write(line)
username = UserSafe.user_name(request.user)
- InvestigationUploads.objects.get_or_create(investigation_id=investigation_object.id, description=description, path=path + "/" + file.name, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ InvestigationUploads.objects.get_or_create(investigation_id=investigation_object.id, description=description, path=local_file_path, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ InvestigationHistory.objects.create(investigation_id=investigation_object.id, comment=Update.ATTACH_DOC % file.name, date=datetime.now().strftime(SRTool.DATE_FORMAT), author=username)
except Exception as e:
_log("EXPORT_POST:FILE ALREADY EXISTS: %s" % e)
return redirect(investigation,investigation_pk)
@@ -797,6 +844,7 @@ def defect(request, defect_pk):
context = {
'object' : defect_object,
'users' : users,
+ 'SRTOOL_DEFECT_URLBASE' : SrtSetting.objects.get(name='SRTOOL_DEFECT_URLBASE').value
}
return render(request, template, context)
@@ -876,6 +924,7 @@ def users(request):
return render(request, template, context)
def report(request,page_name):
+ _log("REPORT!:%s" % (request))
if request.method == "GET":
context = ReportManager.get_context_data(page_name,request=request)
record_list = request.GET.get('record_list', '')
@@ -883,7 +932,7 @@ def report(request,page_name):
context['record_list'] = record_list
return render(request, 'report.html', context)
elif request.method == "POST":
- _log("EXPORT_POST!:%s|%s" % (request,request.FILES))
+ _log("EXPORT_POST!:%s" % (request))
parent_page = request.POST.get('parent_page', '')
file_name,response_file_name = ReportManager.exec_report(parent_page,request=request)
@@ -926,13 +975,153 @@ def create_vulnerability(request):
context = {}
return render(request, 'create_vulnerability.html', context)
+class Snap():
+ def __init__(self,snap_index=0,snap_mode='None',snap_dir='',snap_date='',snap_time='',snap_day=''):
+ self.index = '%02d' % snap_index
+ self.mode = snap_mode
+ self.dir = snap_dir
+ self.date = snap_date
+ self.time = snap_time
+ self.day = snap_day
+
+class ReportFile():
+ def __init__(self,name='',size=0,date=None):
+ self.name = name
+ self.size = size
+ self.date = date
+
def publish(request):
# does this user have permission to see this record?
if not UserSafe.is_creator(request.user):
return redirect(landing)
+ if request.method == "GET":
+
+ # Prepare available snapshots
+ snapshot_list = []
+ snap_start_index = 0
+ snap_stop_index = 0
+ snap_date_base = SrtSetting.get_setting('publish_snap_date_base','2019-06-08')
+ snap_date_top = SrtSetting.get_setting('publish_snap_date_top','2019-06-16')
+ snap_date_start = SrtSetting.get_setting('publish_snap_date_start','2019-06-08')
+ snap_date_stop = SrtSetting.get_setting('publish_snap_date_stop','2019-06-16')
+ snap_last_calc = SrtSetting.get_setting('publish_snap_last_calc','')
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if 'Now' != backup_mode:
+ snap = Snap(i,backup_mode,backup_dir,backup_date,backup_time,backup_day)
+ snapshot_list.append(snap)
+ if snap_date_base == snap.date:
+ snap_start_index = i
+ if snap_date_start < snap.date:
+ snap_date_start = snap.date
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ if snap_date_top == snap.date:
+ snap_stop_index = i
+ if snap_date_stop > snap.date:
+ snap_date_stop = snap.date
+ if not snap_stop_index:
+ snap_stop_index = i
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ # Report automation
+ snap_frequency_select = SrtSetting.get_setting('publish_snap_frequency','Off')
+ snapshot_frequency_list = [
+ 'Off',
+ 'Monthly',
+ 'Bi-monthly',
+ 'Weekly',
+ 'Daily',
+ ]
+ # List of available reports
+ generated_report_list = []
+ for entry in os.scandir('data/wr'):
+ if entry.name.startswith('cve-svns-srtool'):
+ generated_report_list.append(ReportFile(entry.name,entry.stat().st_size,datetime.fromtimestamp(entry.stat().st_mtime)))
+# generated_report_list.sort()
+ generated_report_list = sorted(generated_report_list,key=lambda x: x.name)
+
+ # Prepare History data
+ last_calc = SrtSetting.get_setting('publish_last_calc','06/08/2019')
+ date_start = SrtSetting.get_setting('publish_date_start','06/08/2019')
+ date_stop = SrtSetting.get_setting('publish_date_stop','06/21/2019')
+
+ context = {
+ 'date_start' : date_start,
+ 'date_stop' : date_stop,
+ 'last_calc' : last_calc,
+
+ 'snap_date_start' : snap_date_start,
+ 'snap_date_stop' : snap_date_stop,
+ 'snap_date_base' : snap_date_base,
+ 'snap_date_top' : snap_date_top,
+ 'snapshot_list' : snapshot_list,
+ 'snap_start_index' : '%02d' % snap_start_index,
+ 'snap_stop_index' : '%02d' % snap_stop_index,
+ 'snap_last_calc' : snap_last_calc,
+ 'generated_report_list' : generated_report_list,
+
+ 'snapshot_frequency_list' : snapshot_frequency_list,
+ 'snap_frequency_select' : snap_frequency_select,
+ }
+ return render(request, 'publish.html', context)
+ elif request.method == "POST":
+ action = request.POST['action']
+
+ if request.POST["action"] == "download":
+ report_name = request.POST['report_name']
+ file_path = 'data/wr/%s' % report_name
+ if file_path:
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = 'attachment; filename="{}"'.format(file_path)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(file_path)
+ _log("EXPORT_POST_Q{%s} %s || %s " % (response, response['Content-Disposition'], disposition))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request.POST.get('date_start', '')
+ date_start = datetime.strptime(request.POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request.POST.get('date_stop', '')
+ date_stop = datetime.strptime(request.POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+# return 'Error:stop date is before start date'
+ _log('Error:stop date is before start date')
+ pass
+ except Exception as e:
+# return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+ _log('Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e))
+ pass
+ SrtSetting.set_setting('publish_date_start',date_start.strftime('%m/%d/%Y'))
+ SrtSetting.set_setting('publish_date_stop',date_stop.strftime('%m/%d/%Y'))
+ if 'recalculate' == action:
+ # Calculate
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'view' == action:
+ # Go to publish list page
+ return redirect('publish-list')
+ if 'add-cve' == action:
+ # Go to publish list page
+ return redirect('publish-cve')
+ if 'add-defect' == action:
+ # Go to publish list page
+ return redirect('publish-defect')
+ if 'reset' == action:
+ publishReset(date_start,date_stop)
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'export' == action:
+ return redirect('/wr/report/publish')
+ return redirect('publish')
- context = {}
- return render(request, 'publish.html', context)
def manage_report(request):
# does this user have permission to see this record?
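
The POST branch of publish() above parses the mm/dd/yyyy report window but only logs on failure, which can leave date_start/date_stop unbound before they are written back to SrtSetting. A standalone sketch of a stricter version of that validation; the function name and return convention are illustrative:

    from datetime import datetime

    def parse_window(start_str, stop_str, fmt='%m/%d/%Y'):
        """Return (start, stop, error); error is an empty string when the window is valid."""
        try:
            start = datetime.strptime(start_str, fmt)
            stop = datetime.strptime(stop_str, fmt)
        except ValueError as e:
            return None, None, 'bad date format (must be mm/dd/yyyy): %s' % e
        if stop < start:
            return None, None, 'stop date is before start date'
        return start, stop, ''

    print(parse_window('06/08/2019', '06/21/2019')[2] == '')   # True
    print(parse_window('06/21/2019', '06/08/2019')[2])         # stop date is before start date
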
@@ -952,31 +1141,66 @@ def guided_tour(request):
def quicklink(request):
return redirect("/srtgui/select-publish")
-def _create_defect(investigation,defect_reason,components):
- _log("SRT_DEFECT=%s|%s|%s|" % (investigation.name,defect_reason,components))
+# Return defect_name,isCreated
+def _create_defect(investigation,reason,defect_reason,domain_components,affected_components,username):
+ _log("SRT_DEFECT=%s|%s|%s|%s|" % (investigation.name,defect_reason,domain_components,affected_components))
+
+ # Check to see if defect creation is allowed for this product
+ if 'no' == investigation.product.get_defect_tag('auto_create','yes'):
+ _log("SRT_DEFECT_SKIPPED:NO_auto_create:%s" % (investigation.product.defect_tags))
+ return '(%s skipped)' % investigation.product.key,False
+
+ # Check to see if a defect has already been created for this investigation
+ try:
+ for id in InvestigationToDefect.objects.filter(investigation=investigation):
+ # First defect wins
+ _log("SRT_DEFECT_EXISTING:%s" % (id.defect.name))
+ return id.defect.name, False
+ except:
+ pass
vulnerability = investigation.vulnerability
vc_list = vulnerability.vulnerability_to_cve.all()
- # gather name(s) and link(s) of parent CVE(s)
+ # Gather name(s) and link(s) of parent CVE(s)
cve_list = [vc.cve.name for vc in vc_list]
cves = ','.join(cve_list)
+
+ # Offer a default defect description
description = ['%s\n' % vc.cve.description for vc in vc_list]
+
### TODO: normal NIST link might not always work
- link_list = ['https://nvd.nist.gov/vuln/detail/%s' % vc.cve.name for vc in vc_list]
- links = ','.join(cve_list)
+ link_list = []
+ for vc in vc_list:
+ link_list.append('https://nvd.nist.gov/vuln/detail/%s' % vc.cve.name)
+
+ # Fix up the links to make them Jira friendly
+ # CREATE(Triage): {Link=https://nvd.nist.gov/vuln/detail/CVE-2019-8934 User=admin}
+# links = "%s {%sLink=%s User=%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,"Reason='%s' " % reason if reason else '',' '.join(link_list),username)
+ # CREATE(Triage):(User=admin) [CVE-2019-8934|https://nvd.nist.gov/vuln/detail/CVE-2019-8934]
+ links = "%s%s(User=%s)" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,"(Reason='%s')" % reason if reason else '',username)
+ for link in link_list:
+ links += ' [%s|%s]' % (os.path.basename(link),link)
# Assign the defect the same priority as the Investigation
priority = investigation.get_priority_text
+ _log("_create_defect:%s:%s:%s" % (investigation.name,priority,links))
- # Component string (e.g. 'kernel', 'userspace', ...)
- if not components:
- components = 'unknown'
- # Offer a defect summary
+ # Offer a default defect summary
+ if not defect_reason:
+ defect_reason = affected_components
if defect_reason:
summary = "Security Advisory - %s - %s" % (defect_reason,cves)
else:
summary = "Security Advisory %s" % (cves)
+
+ # Add the affected components
+ if affected_components:
+ affected_components = affected_components.replace(',',' ').replace(';',' ').replace('  ',' ')
+ components = "%s {COMPONENTS:%s}" % (domain_components,affected_components)
+ else:
+ components = domain_components
+
defect_tool = SrtSetting.objects.get(name='SRTOOL_DEFECT_TOOL').value
result_returncode,result_stdout,result_stderr = execute_process(
defect_tool, '--new',
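
The links string assembled above uses Jira's "[text|url]" markup so the defect description renders clickable CVE references. A standalone sketch of the same assembly; the literal "CREATE(Triage):" prefix mirrors the comment in the hunk and stands in for the Update.CREATE_STR/SOURCE_TRIAGE constants, whose exact values are not shown here:

    import os

    def jira_links(cve_urls, username, reason=''):
        """Build a Jira-friendly link line for a new defect description."""
        links = "CREATE(Triage):%s(User=%s)" % ("(Reason='%s')" % reason if reason else '', username)
        for url in cve_urls:
            links += ' [%s|%s]' % (os.path.basename(url), url)
        return links

    print(jira_links(['https://nvd.nist.gov/vuln/detail/CVE-2019-8934'], 'admin'))
    # CREATE(Triage):(User=admin) [CVE-2019-8934|https://nvd.nist.gov/vuln/detail/CVE-2019-8934]
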
@@ -1019,6 +1243,11 @@ def _create_defect(investigation,defect_reason,components):
d = Defect.objects.create(name=d_name)
d.summary = summary
d.priority = investigation.priority
+ d.status = Defect.DEFECT_STATUS_OPEN
+ d.resolution = Defect.DEFECT_UNRESOLVED
+ d.srt_priority = investigation.priority
+ d.srt_status = Defect.VULNERABLE
+ d.srt_outcome = Defect.OPEN
d.product = investigation.product
d.url = d_url
d.save()
@@ -1026,7 +1255,24 @@ def _create_defect(investigation,defect_reason,components):
# Create Investigation to Defect
id = InvestigationToDefect.objects.create(investigation=investigation,defect=d,product=investigation.product)
id.save()
- return d.name
+ return d.name,True
+
+def _auto_map_cve_priority(cve,force=True):
+ if not force and (SRTool.UNDEFINED != cve.priority):
+ return(cve.priority)
+ severity = cve.cvssV3_baseSeverity.strip()
+ if not severity:
+ severity = cve.cvssV2_severity.strip()
+ if not severity:
+ severity = 'MEDIUM'
+ if 'CRITICAL' == severity:
+ return(SRTool.CRITICAL)
+ elif 'HIGH' == severity:
+ return(SRTool.HIGH)
+ elif 'MEDIUM' == severity:
+ return(SRTool.MEDIUM)
+ else:
+ return(SRTool.LOW)
def xhr_triage_commit(request):
_log("xhr_triage_commit(%s)" % request.POST)
@@ -1035,7 +1281,8 @@ def xhr_triage_commit(request):
try:
username = UserSafe.user_name(request.user)
action = request.POST['action']
- today = datetime.today().strftime("%Y-%m-%d")
+ srtool_today_time = datetime.today()
+ srtool_today = datetime.today().strftime("%Y-%m-%d")
if 'submit-notvulnerable' == action:
reason = request.POST['reason']
cves = request.POST['cves']
@@ -1044,85 +1291,83 @@ def xhr_triage_commit(request):
created_list = ''
for cve_name in cves.split(','):
cve = Cve.objects.get(name=cve_name)
+ history_update = []
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(Cve.NOT_VULNERABLE)))
+ cve.priority = _auto_map_cve_priority(cve,False)
cve.status = Cve.NOT_VULNERABLE
if cve.comments:
cve.comments += ', ' + reason
else:
cve.comments = reason
+ cve.acknowledge_date = srtool_today_time
cve.save()
created_list += ' %s' % cve_name
# add audit comment
cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: marked not vulnerable, reason='%s'" % (reason)
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update),"Set by triage, reason='%s'" % reason)
cc.author = username
cc.save()
if created_list:
created_list = "NotVulnerable:" + created_list
- if 'submit-investigate' == action:
- cves = request.POST['cves']
- created_list = ''
- for cve_name in cves.split(','):
- cve = Cve.objects.get(name=cve_name)
- cve.status = Cve.INVESTIGATE
- cve.save()
- created_list += ' %s' % cve_name
- # add audit comment
- cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: marked investigate"
- cc.author = username
- cc.save()
- if created_list:
- created_list = "Investigate:" + created_list
-
if 'submit-other' == action:
cves = request.POST['cves']
status = int(request.POST['status'])
created_list = ''
for cve_name in cves.split(','):
cve = Cve.objects.get(name=cve_name)
+ history_update = []
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(status)))
+ cve.priority = _auto_map_cve_priority(cve,False)
cve.status = status
+ cve.acknowledge_date = srtool_today_time
cve.save()
created_list += ' %s' % cve_name
# add audit comment
cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: set status to %s" % cve.get_status_text
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update),"Set by triage")
cc.author = username
cc.save()
if created_list:
created_list = "Status=%s:%s" % (cve.get_status_text,created_list)
- if 'submit-isvulnerable' == action:
+ if action in ('submit-isvulnerable','submit-investigate'):
+ if 'submit-isvulnerable' == action:
+ notify_message = 'Triage:Vulnerable:'
+ new_status = SRTool.VULNERABLE
+ elif 'submit-investigate' == action:
+ notify_message = 'Triage:Investigate:'
+ new_status = SRTool.INVESTIGATE
reason = request.POST['reason'].strip()
defect_reason = request.POST['defect_reason'].strip()
cves = request.POST['cves']
products = request.POST['products']
components = request.POST['components']
- priority = request.POST['priority']
+ affected_components = request.POST['affected_components'].strip()
+ priority = int(request.POST['priority'])
make_defects = ('yes' == request.POST['mk_d'])
mark_publish = ('yes' == request.POST['pub'])
group_vulnerability = int(request.POST['vul_group'])
group_vulnerability_name = request.POST['vul_name'].strip()
notifications = ('yes' == request.POST['notify'])
+ acknowledge_date = request.POST['acknowledge_date']
add_for = request.POST['for']
_log("xhr_triage_commit:IS:%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|" % (reason,defect_reason,cves,products,components,make_defects,mark_publish,add_for,priority,group_vulnerability,group_vulnerability_name))
# Set up
- investigation_names = {}
created_list = ''
- notify_message = 'Triage:Vulnerable:'
+
# Map vulnerability grouping
- v = None
+ vulnerability = None
if 2 == group_vulnerability:
# Existing V all C
first_vulnerability = False
group_vulnerabilities = True
try:
- v = Vulnerability.objects.get(name=group_vulnerability_name)
- created_list += ' %s(found)' % v.name
- notify_message += ' Found:%s' % v.name
+ vulnerability = Vulnerability.objects.get(name=group_vulnerability_name)
+ created_list += ' %s(found)' % vulnerability.name
+ notify_message += ' Found:%s' % vulnerability.name
except Exception as e:
_log("xhr_triage_commit:No such Vulnerability name found (%s,%s)" % (group_vulnerability_name,e))
return HttpResponse(json.dumps({"error":"No such Vulnerability name found (%s)" % (group_vulnerability_name)}), content_type = "application/json")
@@ -1134,91 +1379,159 @@ def xhr_triage_commit(request):
# One V per C
first_vulnerability = True
group_vulnerabilities = False
+
# Process the CVE list
for cve_name in cves.split(','):
# update CVE
cve = Cve.objects.get(name=cve_name)
- cve.status = Cve.VULNERABLE
- cve.priority = priority
+ # Auto priority?
+ cve_priority = _auto_map_cve_priority(cve) if 99 == priority else priority
if cve.comments:
- cve.comments += ', ' + reason
+ cve_comments = '%s, %s' % (cve.comments,reason)
else:
- cve.comments = reason
+ cve_comments = reason
+ # Acknowledge date selection
+ try:
+ if ('publish' == acknowledge_date) and cve.publishedDate:
+ cve_acknowledge_date = datetime.strptime(cve.publishedDate, '%Y-%m-%d')
+ elif ('update' == acknowledge_date) and cve.lastModifiedDate:
+ cve_acknowledge_date = datetime.strptime(cve.lastModifiedDate, '%Y-%m-%d')
+ elif ('no_change' == acknowledge_date):
+ cve_acknowledge_date = cve.acknowledge_date
+ else:
+ cve_acknowledge_date = srtool_today_time
+ except:
+ cve_acknowledge_date = srtool_today_time
+ # Update history changes
+ history_update = []
+ if cve.status != new_status:
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(new_status)))
+ if cve.priority != cve_priority:
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(cve.priority),SRTool.priority_text(cve_priority)))
+ if cve.acknowledge_date != cve_acknowledge_date:
+ history_update.append(Update.ACKNOWLEDGE_DATE % (cve.acknowledge_date.strftime("%Y/%m/%d") if cve.acknowledge_date else '',cve_acknowledge_date.strftime("%Y/%m/%d")))
+ # Update record
+ cve.status = new_status
+ cve.priority = cve_priority
+ cve.comments = cve_comments
+ cve.acknowledge_date = cve_acknowledge_date
+ cve.packages = affected_components
cve.save()
notify_message += " %s" % cve_name
- # create vulnerability
+ # Add history comment
+ if history_update:
+ cc = CveHistory.objects.create(cve=cve)
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update), "Triage:reason='%s'" % reason)
+ cc.author = username
+ cc.save()
+
+ # Find or create vulnerability
if first_vulnerability or not group_vulnerabilities:
first_vulnerability = False
- v_name = Vulnerability.new_vulnerability_name()
- v = Vulnerability.objects.create(name=v_name)
- v.public = True
- v.status = Vulnerability.VULNERABLE
- v.priority = priority
- v.comments = reason
- v.save()
- notify_message += " %s" % v_name
- created_list += ' %s' % v.name
- _log("Create First Vulnerability:%s" % v.name)
- # add audit comment
- cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: created vulnerability '%s', reason='%s'" % (v.name,reason)
- cc.author = username
- cc.save()
+
+ # Check to see if a vulnerability has already been created for this CVE
+ vulnerability = None
+ try:
+ for cv in CveToVulnerablility.objects.filter(cve=cve):
+ # First vulnerability wins
+ vulnerability = cv.vulnerability
+ created_list += ' (%s)' % vulnerability.name
+ break
+ except:
+ pass
+
+ if not vulnerability:
+ v_name = Vulnerability.new_vulnerability_name()
+ vulnerability = Vulnerability.objects.create(name=v_name)
+ vulnerability.public = True
+ vulnerability.priority = cve_priority
+ vulnerability.status = new_status
+ vulnerability.outcome = Vulnerability.OPEN
+ vulnerability.comments = reason
+ vulnerability.save()
+ notify_message += " %s" % v_name
+ created_list += ' %s' % vulnerability.name
+ _log("Create First Vulnerability:%s" % vulnerability.name)
+
+ # add audit comment
+ vh = VulnerabilityHistory.objects.create(vulnerability=vulnerability)
+ vh.date = srtool_today
+ vh.comment = "%s {%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,'Created from triage')
+ vh.author = username
+ vh.save()
# map vulnerability to CVE
- cv = CveToVulnerablility.objects.create(vulnerability=v,cve=cve)
- cv.save()
- # add audit comment
- vc = VulnerabilityHistory.objects.create(vulnerability=v)
- vc.date = today
- vc.comment = "ACTION: created vulnerability for '%s', reason='%s'" % (cve.name,reason)
- vc.author = username
- vc.save()
+ cv,created = CveToVulnerablility.objects.get_or_create(vulnerability=vulnerability,cve=cve)
+ if created:
+ cv.save()
if products:
for product_id in products.split(','):
# fetch product
- p = Product.objects.get(pk=product_id)
+ product = Product.objects.get(pk=product_id)
# create (or group) investigation
- investigation_key = "%s-%s" % (v_name,product_id)
- i_name = ''
- if investigation_key in investigation_names:
- i_name = investigation_names[investigation_key]
- if not i_name or not group_vulnerabilities:
+
+ # Check to see if an investigation for this product has already been created for this vulnerability
+ investigation = None
+ try:
+ for vi in VulnerabilityToInvestigation.objects.filter(vulnerability=vulnerability,investigation__product=product):
+ # First Investigation for this product wins
+ investigation = vi.investigation
+ created_list += ' (%s)' % investigation.name
+ break
+ except:
+ pass
+
+ if not investigation:
i_name = Investigation.new_investigation_name()
- i = Investigation.objects.create(name=i_name)
- i.vulnerability = v
- i.product = p
- i.priority = priority
- i.save()
- notify_message += " %s" % i_name
- created_list += ' %s' % i.name
- investigation_names[investigation_key] = i_name
+ investigation = Investigation.objects.create(name=i_name)
+ investigation.vulnerability = vulnerability
+ investigation.product = product
+ investigation.priority = cve_priority
+ investigation.outcome = Investigation.OPEN
+ # Check to see if product is active
+ _log("BOO1:")
+ if 'no' == product.get_product_tag('active','yes'):
+ _log("BOO2:%s,%s" % (investigation.status,SRTool.status_to_inactive(new_status)))
+ investigation.status = SRTool.status_to_inactive(new_status)
+ else:
+ _log("BOO3:")
+ investigation.status = new_status
+ _log("BOO4:%s" % investigation.status )
+
+ investigation.save()
+
+ notify_message += " %s" % investigation.name
+ created_list += ' %s' % investigation.name
# map vulnerability to investigation/product
- vi = VulnerabilityToInvestigation.objects.create(vulnerability=v,investigation = i)
+ vi = VulnerabilityToInvestigation.objects.create(vulnerability=vulnerability,investigation=investigation)
vi.save()
- else:
- i = Investigation.objects.get(name=i_name)
- # add audit comment
- ic = InvestigationHistory.objects.create(investigation=i)
- ic.date = today
- ic.comment = "ACTION: created investigation for '%s', reason='%s'" % (cve.name,reason)
- ic.author = username
- ic.save()
+
+ # add audit comment
+ ih = InvestigationHistory.objects.create(investigation=investigation)
+ ih.date = srtool_today
+ ih.comment = "%s {%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,'Created from triage')
+ ih.author = username
+ ih.save()
+
# create defects
if make_defects:
- defect_name = _create_defect(i,defect_reason,components)
- notify_message += " %s" % defect_name
- created_list += ' %s' % defect_name
- _log("NEW_DEFECT:%s|%s|%s|" % (defect_name,components,priority))
+ defect_name,created = _create_defect(investigation,reason,defect_reason,components,affected_components,username)
+ if created:
+ notify_message += ' %s' % defect_name
+ created_list += ' %s' % defect_name
+ else:
+ notify_message += ' (%s)' % defect_name
+ created_list += ' (%s)' % defect_name
+ _log("NEW_DEFECT:%s|%s|%s|" % (defect_name,components,cve_priority))
# Finish up
if notifications:
# Create the notify record
_log("xhr_notifications3")
notify = Notify()
notify.category = 'TRIAGE'
- notify.priority = priority
+ notify.priority = cve_priority
notify.description = notify_message
notify.url = ''
notify.author = username
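
The triage handler above chooses the CVE acknowledge date from a form selection ('publish', 'update', 'no_change', or anything else meaning "today") and falls back to today on any parse problem. A standalone sketch of that selection; the %Y-%m-%d format follows the strptime calls in the hunk:

    from datetime import datetime

    def pick_acknowledge_date(choice, published, modified, current, today=None):
        """Mirror the triage acknowledge-date selection."""
        today = today or datetime.today()
        try:
            if choice == 'publish' and published:
                return datetime.strptime(published, '%Y-%m-%d')
            if choice == 'update' and modified:
                return datetime.strptime(modified, '%Y-%m-%d')
            if choice == 'no_change':
                return current
            return today
        except ValueError:
            return today

    print(pick_acknowledge_date('publish', '2019-02-18', '', None))   # 2019-02-18 00:00:00
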
@@ -1280,35 +1593,64 @@ def xhr_cve_commit(request):
try:
cve = Cve.objects.get(id=request.POST['cve_id'])
action = request.POST['action']
- history_comment = ''
+ history_update = []
new_name = ''
if 'submit-quickedit' == action:
- note = request.POST['note']
priority = int(request.POST['priority'])
status = int(request.POST['status'])
- private_note = request.POST['private_note']
- publish_state = request.POST['publish_state']
- publish_date = request.POST['publish_date']
- if (priority != cve.priority):
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
+ publish_state = int(request.POST['publish_state'])
+ publish_date = request.POST['publish_date'].strip()
+ acknowledge_date = request.POST['acknowledge_date'].strip()
+ affected_components = request.POST['affected_components'].strip()
+ # Convert simple date back to datetime
+ try:
+ if not acknowledge_date:
+ acknowledge_date = None
+ else:
+ acknowledge_date = datetime.strptime(acknowledge_date, '%Y-%m-%d')
+ except Exception as e:
+ acknowledge_date = cve.acknowledge_date
+ if (cve.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(cve.priority),SRTool.priority_text(priority)))
cve.priority = priority
- history_comment += "Priority, "
- if (status != cve.status):
+ if (cve.status != status):
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(status)))
cve.status = status
- history_comment += "Status, "
- if (note != cve.comments):
+ if (cve.comments != note):
+ history_update.append(Update.NOTE)
cve.comments = note
- history_comment += "Note, "
- if (private_note != cve.comments_private):
+ if (cve.comments_private != private_note):
+ history_update.append(Update.PRIVATE_NOTE)
cve.comments_private = private_note
- history_comment += "Private Note, "
- if (publish_state != cve.publish_state):
+ if (cve.tags != tags):
+ history_update.append(Update.TAG)
+ cve.tags = tags
+ if (cve.publish_state != publish_state):
+ history_update.append(Update.PUBLISH_STATE % (SRTool.publish_text(cve.publish_state),SRTool.publish_text(publish_state)))
cve.publish_state = publish_state
- history_comment += "Publish State, "
- if (publish_date != cve.publish_date):
+ if (cve.publish_date != publish_date):
+ history_update.append(Update.PUBLISH_DATE % (SRTool.date_ymd_text(cve.publish_date),SRTool.date_ymd_text(publish_date)))
cve.publish_date = publish_date
- history_comment += "Publish Date, "
+ if (cve.packages != affected_components):
+ history_update.append(Update.AFFECTED_COMPONENT % (cve.packages,affected_components))
+ cve.packages = affected_components
+ # Allow the acknowledge_date to be empty/None on either side
+ if (cve.acknowledge_date and not acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % (SRTool.date_ymd_text(cve.acknowledge_date),''))
+ cve.acknowledge_date = None
+ elif (not cve.acknowledge_date and acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % ('',SRTool.date_ymd_text(acknowledge_date)))
+ cve.acknowledge_date = acknowledge_date
+ elif (cve.acknowledge_date != acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % (SRTool.date_ymd_text(cve.acknowledge_date),SRTool.date_ymd_text(acknowledge_date)))
+ cve.acknowledge_date = acknowledge_date
+
cve.save()
if 'submit-notification' == action:
+ # Note: no history update
_submit_notification(request)
if 'submit-newname' == action:
old_name = request.POST['old_name']
@@ -1319,6 +1661,7 @@ def xhr_cve_commit(request):
return HttpResponse(json.dumps({"error":"name '%s' is already used\n" % new_name}), content_type = "application/json")
except:
_log("NewName3:%s -> %s" % (old_name,new_name))
+ history_update.append(Update.NEW_NAME % (old_name,new_name))
# Apply this unique name to CVE
cve.name = new_name
cve.name_sort = get_name_sort(new_name)
@@ -1337,6 +1680,7 @@ def xhr_cve_commit(request):
priority = cve.priority,
)
vulnerability.save()
+ history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
@@ -1348,9 +1692,16 @@ def xhr_cve_commit(request):
except Exception as e:
_log("xhr_triage_commit:No such Vulnerability name found (%s,%s)" % (vname,e))
return HttpResponse(json.dumps({"error":"No such Vulnerability name found (%s)" % (vname)}), content_type = "application/json")
+ history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
+ if 'submit-delete-cve' == action:
+ _log("SUBMIT-DELETE-CVE(%s)" % cve.name)
+ #history_update.append(Update.ATTACH_INV % (vname))
+ cve.delete()
+ _log("SUBMIT-DELETED-CVE(%s)!" % cve.name)
+ new_name = 'url:/srtgui/cves'
return_data = {
"error": "ok",
@@ -1358,10 +1709,9 @@ def xhr_cve_commit(request):
}
username = UserSafe.user_name(request.user)
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
- CveHistory.objects.create(cve_id=cve.id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ CveHistory.objects.create(cve_id=cve.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
_log("xhr_cve_commit:SUCCESS")
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
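
The quick-edit handlers in this patch all follow the same history pattern: compare each incoming field with the stored value, collect one formatted fragment per real change, and write a single history row only when something actually changed. A standalone sketch of that pattern over a plain dict; the fragment format stands in for the Update.* constants and is an assumption:

    def diff_fields(record, changes):
        """Apply 'changes' to 'record' and return one combined history comment (or '')."""
        fragments = []
        for field, new in changes.items():
            old = record.get(field)
            if old != new:
                fragments.append('%s(%s -> %s)' % (field.upper(), old, new))
                record[field] = new
        return 'UPDATE(User):' + ';'.join(fragments) if fragments else ''

    cve = {'priority': 'Medium', 'status': 'New'}
    print(diff_fields(cve, {'priority': 'High', 'status': 'New'}))
    # UPDATE(User):PRIORITY(Medium -> High)
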
@@ -1416,34 +1766,38 @@ def xhr_vulnerability_commit(request):
action = request.POST['action']
v_id = request.POST['vulnerability_id']
username = UserSafe.user_name(request.user)
- history_comment = ''
try:
+ history_update = []
if 'submit-quickedit' == action:
- note = request.POST['note']
- private_note = request.POST['private_note']
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
+ priority = int(request.POST['priority'])
+ status = int(request.POST['status'])
+ outcome = int(request.POST['outcome'])
v = Vulnerability.objects.get(id=v_id)
+ if (v.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(v.priority),SRTool.priority_text(priority)))
+ v.priority = priority
+ if (v.status != status):
+ history_update.append(Update.STATUS % (SRTool.status_text(v.status),SRTool.status_text(status)))
+ v.status = status
+ if (v.outcome != outcome):
+ history_update.append(Update.OUTCOME % (SRTool.status_text(v.outcome),SRTool.status_text(outcome)))
+ v.outcome = outcome
if (v.comments != note):
+ history_update.append(Update.NOTE)
v.comments = note
- history_comment += "Note, "
if (v.comments_private != private_note):
+ history_update.append(Update.PRIVATE_NOTE)
v.comments_private = private_note
- history_comment += "Private Note, "
- if (v.status != request.POST['status']):
- v.status = request.POST['status']
- history_comment += "Status, "
- if (v.outcome != request.POST['outcome']):
- v.outcome = request.POST['outcome']
- history_comment += "Outcome, "
- if (v.priority != request.POST['priority']):
- v.priority = request.POST['priority']
- history_comment += "Priority, "
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
+ if (tags != v.tags):
+ history_update.append(Update.TAG)
+ v.tags = tags
v.save()
if 'submit-addproduct' == action:
products = request.POST['products']
- product_names = ''
+ investigation_names = []
vulnerability_obj = Vulnerability.objects.get(id=v_id)
for product_id in products.split(','):
product_obj = Product.objects.get(pk=product_id)
@@ -1460,72 +1814,70 @@ def xhr_vulnerability_commit(request):
)
vul2inv = VulnerabilityToInvestigation.objects.create(vulnerability=vulnerability_obj,investigation=investigation_obj)
vul2inv.save()
- product_names += "%s " % product_obj.long_name
- product_names = product_names[:-2]
- history_comment = product_names + " added to affected products"
+ investigation_names.append(iname)
+ history_update.append(Update.ATTACH_INV % ','.join(investigation_names))
if 'submit-trashinvestigation' == action:
inv_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=inv_id)
vul2inv = VulnerabilityToInvestigation.objects.filter(investigation=investigation_obj)
vul2inv.delete()
- history_comment = investigation_obj.name + " investigation(s) removed"
+ history_update.append(Update.DETACH_INV % (investigation_obj.name))
investigation_obj.delete()
if 'submit-newcomment' == action:
comment = request.POST['comment']
VulnerabilityComments.objects.create(vulnerability_id=v_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
- history_comment = "New comment submitted"
+ #NOTE: No History for this
if 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = VulnerabilityComments.objects.get(id=record_id)
- history_comment = "Comment from " + comment.author + " deleted"
comment.delete()
+ #NOTE: No History for this
if 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = VulnerabilityUploads.objects.get(id=record_id)
- history_comment = "Upload '" + upload.description + "' from " + upload.author + " deleted"
try:
os.remove(upload.path)
except OSError:
pass
+ history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
if 'submit-addusernotify' == action:
users = request.POST['users']
- usernames = ''
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ', '
+ usernames.append(SrtUser.objects.get(pk=user_id).name)
VulnerabilityNotification.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " added to notifications"
+ history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
if 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = VulnerabilityNotification.objects.get(id=record_id)
removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
- history_comment = removed_user + " removed from notifications"
notification_record.delete()
+ history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
if 'submit-adduseraccess' == action:
users = request.POST['users']
- usernames = ''
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ', '
+ usernames.append(SrtUser.objects.get(pk=user_id).name)
VulnerabilityAccess.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " granted access"
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
if 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = VulnerabilityAccess.objects.get(id=record_id)
- removed_user = username
- history_comment = removed_user + "'s access removed"
access_record.delete()
+ history_update.append(Update.DETACH_ACCESS % username)
if 'submit-notification' == action:
_submit_notification(request)
+ #NOTE: No History for this
if 'submit-trashvulnerability' == action:
record_id = request.POST['record_id']
vulnerability_obj = Vulnerability.objects.get(pk=record_id)
- history_comment = "Vulnerability '%s' is deleted" % vulnerability_obj.name
+# history_update.append(Update.DETACH_VUL % vulnerability_obj.name)
vulnerability_obj.delete()
- if (history_comment != ''):
- VulnerabilityHistory.objects.create(vulnerability_id=v_id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ VulnerabilityHistory.objects.create(vulnerability_id=v_id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
return_data = {
"error": "ok",
}
@@ -1644,43 +1996,48 @@ def xhr_investigation_commit(request):
action = request.POST['action']
invst_id = request.POST['investigation_id']
username = UserSafe.user_name(request.user)
- history_comment = "Nothing happened."
try:
+ history_update = []
if 'submit-quickedit' == action:
- note = request.POST['note']
- private_note = request.POST['private_note']
+ priority = int(request.POST['priority'])
+ status = int(request.POST['status'])
+ outcome = int(request.POST['outcome'])
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
invst = Investigation.objects.get(id=invst_id)
+ if (invst.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(invst.priority),SRTool.priority_text(priority)))
+ invst.priority = priority
+ if (invst.status != status):
+ history_update.append(Update.STATUS % (SRTool.status_text(invst.status),SRTool.status_text(status)))
+ invst.status = status
+ if (invst.outcome != outcome):
+ history_update.append(Update.OUTCOME % (SRTool.status_text(invst.outcome),SRTool.status_text(outcome)))
+ invst.outcome = outcome
if (invst.comments != note):
invst.comments = note
- history_comment += "Note, "
+ history_update.append(Update.NOTE)
if (invst.comments_private != private_note):
invst.comments_private = private_note
- history_comment += "Private Note, "
- if (invst.status != request.POST['status']):
- invst.status = request.POST['status']
- history_comment += "Status, "
- if (invst.outcome != request.POST['outcome']):
- invst.outcome = request.POST['outcome']
- history_comment += "Outcome, "
- if (invst.priority != request.POST['priority']):
- invst.priority = request.POST['priority']
- history_comment += "Priority, "
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
+ history_update.append(Update.PRIVATE_NOTE)
+ if (invst.tags != tags):
+ invst.tags = tags
+ history_update.append(Update.TAG)
invst.save()
if 'submit-attachdefectlist' == action:
defects = request.POST['defects']
product_id = Investigation.objects.get(id=invst_id).product_id
- defect_names = ""
+ defect_names = []
for defect_id in defects.split(','):
- defect_names += Defect.objects.get(pk=defect_id).name + ", "
+ defect_names.append(Defect.objects.get(pk=defect_id).name)
InvestigationToDefect.objects.get_or_create(investigation_id=invst_id, defect_id=defect_id)
- defect_names = defect_names[:-2]
- history_comment = defect_names + " added to defects"
+ history_update.append(Update.ATTACH_DEV % ','.join(defect_names))
if 'submit-attachdefect' == action:
query = request.POST['query'].upper()
product_id = Investigation.objects.get(id=invst_id).product_id
+ # Courtesy removal of URL (or other) prefix
+ query = re.sub(r".*/", "", query)
#check if defect already in SRTool data
try:
defect = Defect.objects.get(name=query)
@@ -1697,79 +2054,86 @@ def xhr_investigation_commit(request):
defect = Defect.objects.get(name=query)
except subprocess.CalledProcessError as e:
_log("ERROR:submit-attachdefect:%d:STDOUT='%s':" % (e.returncode, e.output))
- return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+ error_message = "Could not find defect with the name '%s'\n\n(detail:%s)\n" % (query,str(e))
+ return HttpResponse(json.dumps({"error":error_message}), content_type = "application/json")
if defect:
InvestigationToDefect.objects.get_or_create(investigation_id=invst_id, defect_id=defect.id, product_id=product_id)
- history_comment = "Attached " + defect.name
+ # Enforce minimum status on open defects
+ if Defect.DEFECT_UNRESOLVED == defect.resolution:
+ invst = Investigation.objects.get(id=invst_id)
+ if defect.srt_status < invst.status:
+ defect.srt_status = invst.status
+ defect.save()
+ history_update.append(Update.ATTACH_DEV % defect.name)
if 'submit-createdefect' == action:
investigation = Investigation.objects.get(id=invst_id)
defect_reason = request.POST['defect_reason']
components = request.POST['components']
priority = request.POST['priority']
- defect_name = _create_defect(investigation,defect_reason,components)
- history_comment = "New defect '%s' created" % defect_name
+ affected_components = request.POST['affected_components'].strip()
+ defect_name,created = _create_defect(investigation,'',defect_reason,components,affected_components,username)
+ history_update.append(Update.ATTACH_DEV % defect_name)
if 'submit-detachdefect' == action:
defect_name = request.POST['defect']
product_id = Investigation.objects.get(id=invst_id).product_id
defect_id = Defect.objects.get(name=defect_name).id
InvestigationToDefect.objects.get(investigation_id=invst_id, defect_id=defect_id).delete()
- history_comment = defect_name + " detached from investigation"
+ history_update.append(Update.DETACH_DEV % defect_name)
if 'submit-newcomment' == action:
comment = request.POST['comment']
InvestigationComments.objects.create(investigation_id=invst_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
- history_comment = "New comment submitted"
+ #NOTE: No History for this
if 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = InvestigationComments.objects.get(id=record_id)
- history_comment = "Comment from " + comment.author + " deleted"
comment.delete()
+ #NOTE: No History for this
if 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = InvestigationUploads.objects.get(id=record_id)
- history_comment = "Upload '" + upload.description + "' from " + upload.author + " deleted"
try:
os.remove(upload.path)
except OSError:
pass
+ history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
if 'submit-addusernotify' == action:
users = request.POST['users']
- usernames = ""
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ", "
+ usernames.append(SrtUser.objects.get(pk=user_id).name)
InvestigationNotification.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " added to notifications"
+ history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
if 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = InvestigationNotification.objects.get(id=record_id)
removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
- history_comment = removed_user + " removed from notifications"
+ history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
notification_record.delete()
if 'submit-adduseraccess' == action:
users = request.POST['users']
- usernames = ""
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ", "
+ usernames.append(SrtUser.objects.get(pk=user_id).name)
InvestigationAccess.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
- history_comment = usernames + " granted access"
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
if 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = InvestigationAccess.objects.get(id=record_id)
- removed_user = username
- history_comment = removed_user + "'s access removed"
+ history_update.append(Update.DETACH_ACCESS % username)
access_record.delete()
if 'submit-notification' == action:
_submit_notification(request)
- history_comment = ''
+ #NOTE: No History for this
if 'submit-trashinvestigation' == action:
record_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=record_id)
- history_comment = "Investigation '%s' is deleted" % investigation_obj.name
+# history_update.append(Update.DETACH_INV % investigation_obj.name)
investigation_obj.delete()
- if history_comment:
- InvestigationHistory.objects.create(investigation_id=invst_id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ InvestigationHistory.objects.create(investigation_id=invst_id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
return_data = {
"error": "ok",
}
@@ -1779,6 +2143,146 @@ def xhr_investigation_commit(request):
_log("xhr_investigation_commit:no(%s)" % e)
return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+def xhr_publish(request):
+ _log("xhr_publish(%s)" % request.POST)
+
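+    # remove_mark: strip a prior "<mark>(...)" annotation from a PublishSet reason string, if present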
+ def remove_mark(mark,line):
+ pos1 = line.find(mark)
+ if -1 == pos1:
+ return line
+ pos2 = line.find(')',pos1)
+ if -1 == pos2:
+ return line.replace(mark,'')
+ line = line[0:pos1] + line[pos2+1:]
+ return line
+
+    if 'action' not in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ username = UserSafe.user_name(request.user)
+ action = request.POST['action']
+
+ if 'export-snapshot' == action:
+ snap_date_base = request.POST['snap_date_base']
+ snap_date_top = request.POST['snap_date_top']
+ snap_date_start = request.POST['snap_date_start']
+ snap_date_stop = request.POST['snap_date_stop']
+ _log("xhr_publish:export-snapshot:%s,%s,%s,%s" % (snap_date_base,snap_date_top,snap_date_start,snap_date_stop))
+
+ SrtSetting.set_setting('publish_snap_date_base',snap_date_base)
+ SrtSetting.set_setting('publish_snap_date_top',snap_date_top)
+ SrtSetting.set_setting('publish_snap_date_start',snap_date_start)
+ SrtSetting.set_setting('publish_snap_date_stop',snap_date_stop)
+
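+            # Map the requested snapshot dates onto the on-disk backup directories reported by srtool_backup.py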
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ base_dir = ''
+ top_dir = ''
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if (not base_dir) and (snap_date_base == backup_date):
+ base_dir = 'backups/%s' % backup_dir
+ if (not top_dir) and (snap_date_top == backup_date) and ('Now' != backup_mode):
+ top_dir = 'backups/%s' % backup_dir
+
+ _log('Publish:./bin/wr/srtool_publish.py --srt2update ' + base_dir)
+ report_returncode,report_stdout,report_error = execute_process('./bin/wr/srtool_publish.py','--srt2update',base_dir)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: base dir prep:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ _log('Publish:./bin/wr/srtool_publish.py --srt2update ' + top_dir)
+ report_returncode,report_stdout,report_error = execute_process('./bin/wr/srtool_publish.py','--srt2update',top_dir)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: top dir prep:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
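+            # Generate the publish report by comparing the base and top snapshots over the requested date window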
+ _log('Publish:./bin/wr/srtool_publish.py --validate-update-svns --previous '+base_dir+' --current '+top_dir+' --start '+snap_date_start+' --stop '+snap_date_stop)
+ report_returncode,report_stdout,report_error = execute_process('./bin/wr/srtool_publish.py',
+ '--validate-update-svns','--previous',base_dir,'--current',top_dir,
+ '--start',snap_date_start,'--stop',snap_date_stop)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: publish report:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ publish_snap_last_calc = 'Base:%s, Top:%s, Start:%s, Stop:%s, On:%s' % (
+ snap_date_base,snap_date_top,snap_date_start,snap_date_stop,
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ )
+ SrtSetting.set_setting('publish_snap_last_calc',publish_snap_last_calc)
+
+ _log('Publish:Done!')
+ elif 'submit-trashreport' == action:
+ report_name = request.POST['report_name']
+ os.remove('data/wr/%s' % report_name)
+ else:
+ srtool_today_time = datetime.today()
+ srtool_today = datetime.today().strftime("%Y-%m-%d")
+ reason_map = {}
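+            # Expand any selected defects into their attached CVEs; reason_map records which defect motivated each CVE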
+ if 'defects' in request.POST:
+ cve_table = []
+ for defect_name in request.POST['defects'].split(','):
+ try:
+ defect = Defect.objects.get(name = defect_name)
+ cve_names = defect.get_cve_names
+ for cve_name in cve_names.split(','):
+ cve_table.append(cve_name)
+ reason_map[cve_name] = defect_name
+ except Exception as e:
+ _log("ERROR:xhr_publish:defectlist:%s" % e)
+ cve_list = ','.join(cve_table)
+ else:
+ cve_list = request.POST['cves']
+ for cve_name in cve_list.split(','):
+ reason_map[cve_name] = ''
+ _log("xhr_publish_defect2cves3:%s:%d" % (cve_list,len(cve_list)))
+
+ date_start = datetime.strptime(SrtSetting.get_setting('publish_date_start','02/15/2019'), '%m/%d/%Y')
+ date_stop = datetime.strptime(SrtSetting.get_setting('publish_date_stop','03/15/2019'), '%m/%d/%Y')
+            # set date_stop to 23:59 for end of 'inclusive' day
+            date_stop = date_stop.replace(hour=23, minute=59)
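+            # Each mark action first clears any previous Mark_New/Mark_Updated annotation before recording the new state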
+ if 'mark-new' == action:
+ for cve_name in cve_list.split(','):
+ _log("xhr_publish_defect2cvesNEW:%s" % (cve_name))
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NEW_USER
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+ publish_object.reason += ' Mark_New(%s)' % reason_map[cve_name]
+                publish_object.reason = publish_object.reason.replace('  ',' ').strip()
+ publish_object.save()
+ publishMarkNew(cve_list,reason_map,date_start,date_stop)
+ if 'mark-modified' == action:
+ for cve_name in cve_list.split(','):
+ _log("xhr_publish_defect2cvesMOD:%s" % (cve_name))
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED_USER
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+ publish_object.reason += ' Mark_Updated(%s)' % reason_map[cve_name]
+                publish_object.reason = publish_object.reason.replace('  ',' ').strip()
+ publish_object.save()
+ publishMarkModified(cve_list,reason_map,date_start,date_stop)
+ if 'unmark' == action:
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NONE
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+                publish_object.reason = publish_object.reason.replace('  ',' ').strip()
+ publish_object.save()
+ publishMarkNone(cve_list,date_start,date_stop)
+
+ return_data = {
+ "error": "ok",
+ }
+
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ except Exception as e:
+ _log("xhr_publish:no(%s)(%s)" % (e,traceback.print_stack()))
+ return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+
def cve_alternates(request, cve_pk):
try:
@@ -1795,6 +2299,15 @@ def cve_alternates(request, cve_pk):
cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=ds)
_log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
+ # Force update the CVE summary data from sources
+ result_returncode,result_stdout,result_stderr = execute_process(
+ './bin/nist/srtool_nist.py',
+ '--update-cve-list',
+ cve_object.name,
+ '--force'
+ )
+ _log("CVE_ALT_REFRESH=%s|%s|%s" % (result_returncode,result_stdout,result_stderr))
+
return redirect(cve, cve_pk)
diff --git a/lib/srtmain/management/commands/update.py b/lib/srtmain/management/commands/update.py
index 8304e199..7da17acd 100755
--- a/lib/srtmain/management/commands/update.py
+++ b/lib/srtmain/management/commands/update.py
@@ -7,36 +7,45 @@ class Command(BaseCommand):
help = "Trigger a data source update"
def add_arguments(self, parser):
+ print("UPDATE:add_arguments")
        parser.add_argument('--cron-start', action='store_const', const='cron_start', dest='command', help='Start the SRTool background updater')
        parser.add_argument('--cron-stop', action='store_const', const='cron_stop', dest='command', help='Stop the SRTool background updater')
parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List data sources')
parser.add_argument('--run-updates', '-u', action='store_const', const='run-updates', dest='command', help='update scheduled data sources')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
- parser.add_argument('--name-filter', '-n', nargs='+', type=str, dest='name_filter', help='Filter for datasource name')
+        parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates for cumulative status')
parser.add_argument('--verbose', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
+ # NOTE: we have to do shenanigans with name_filter to support spaces
+ parser.add_argument('--name-filter', '-n', nargs='+', dest='name_filter', help='Filter for datasource name')
+
def handle(self, *args, **options):
- #print("UPDATE:%s|%s" % (str(args),str(options)))
+ print("UPDATE:%s|%s" % (str(args),str(options)))
command = ''
if 'cron_start' == options['command']: command = '--cron-start'
if 'cron_stop' == options['command']: command = '--cron-stop'
if 'list' == options['command']: command = '--list'
if 'run-updates' == options['command']: command = '--run-updates'
-
- # NOTE: we have to do shenanigans with name_filter to support spaces
- name_filter = '--name-filter "%s"' % ' '.join(options['name_filter']) if options['name_filter'] else ''
-
- force = '--force' if options['force'] else ''
- is_trial = '--trial' if options['is_trial'] else ''
- verbose = '--verbose' if options['verbose'] or (options['verbosity'] > 1) else ''
- context = '> /dev/null 2>&1 &' if 'cron_start' == options['command'] else ''
-
- update_command = "./bin/common/srtool_update.py %s %s %s %s %s %s" % (command,name_filter,force,is_trial,verbose,context)
- if verbose:
- print("RUN UPDATE SCRIPT: %s" % (update_command))
- os.chdir(os.environ['SRT_BASE_DIR'])
- os.system("%s" % (update_command))
+ if not command:
+ print("manage update: missing command '%s %s'" % (str(args),str(options)))
+ else:
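+            # Rebuild the srtool_update.py command line from the parsed options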
+ if options['verbose'] or (options['verbosity'] > 1):
+ command += ' --verbose'
+ verbose = True
+ else:
+ verbose = False
+ if options['force']: command += ' --force'
+ if options['update_skip_history']: command += ' --update-skip-history'
+ if options['is_trial']: command += ' --trial'
+ # NOTE: we have to do shenanigans with name_filter to support spaces
+ if options['name_filter']: command += ' --name-filter "%s"' % ' '.join(options['name_filter'])
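+            # --cron-start detaches the updater as a background process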
+ if 'cron_start' == options['command']: command += ' > /dev/null 2>&1 &'
+ update_command = "./bin/common/srtool_update.py %s" % (command)
+ if verbose:
+ print("RUN UPDATE SCRIPT: %s" % (update_command))
+ os.chdir(os.environ['SRT_BASE_DIR'])
+ os.system("%s" % (update_command))
diff --git a/lib/users/models.py b/lib/users/models.py
index e91f317b..b59f9fee 100755
--- a/lib/users/models.py
+++ b/lib/users/models.py
@@ -49,10 +49,25 @@ class SrtUser(AbstractUser):
def get_groups(self):
groups = [ group.name for group in self.groups.all() ]
if not groups:
+ # Dynamically assign a group if none attached, in particular
+ # for (super)users created on command line
if self.is_superuser:
- return 'Superuser'
+ command_line_fixup = False
+ if not self.role:
+ self.role = "Superuser"
+ command_line_fixup = True
+ if not self.last_name:
+ self.last_name = self.username
+ command_line_fixup = True
+ if command_line_fixup:
+ self.save()
+ group = Group.objects.get(name = 'Admin')
+ group.user_set.add(self)
+ return group.name
else:
- return ''
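+                # Non-superusers with no group are added to the 'Reader' group by default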
+ group = Group.objects.get(name = 'Reader')
+ group.user_set.add(self)
+ return group.name
return ",".join(groups)
@property
def get_group_perm(self):