aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDavid Reyna <David.Reyna@windriver.com>2019-01-13 22:27:35 -0800
committerDavid Reyna <David.Reyna@windriver.com>2019-01-13 22:27:35 -0800
commita10fe453f2634d15cca1495e84085e43c65fbd3b (patch)
tree2628e3c29cba028add7364f8e26e366d8e889ede
parent4ed4bf9a2559f1450081e1b79b22ff677235aa24 (diff)
downloadsrtool-a10fe453f2634d15cca1495e84085e43c65fbd3b.tar.gz
srtool-a10fe453f2634d15cca1495e84085e43c65fbd3b.tar.bz2
srtool-a10fe453f2634d15cca1495e84085e43c65fbd3b.zip
srtool: fix core update implementation
Fix the data source update mechanism: * Move the update functions to "bin/common/srtool_update.py" * Remove 'lastModifiedDate' from the data source JSON files (since every restart overwrites any updated values) * Change the 'update_time' field to a dictionary of offset values e.g. "{\"weekday\":\"6\",\"hour\":\"2\"}" = day of week, hour of day * Implement the update frequency calculations * Implement data source name filters for selected manual updates * Add a log status file [YOCTO #13131] Signed-off-by: David Reyna <David.Reyna@windriver.com>
-rwxr-xr-xbin/acme/datasource.json_sample6
-rwxr-xr-xbin/common/datasource.json32
-rwxr-xr-xbin/common/srtool_update.py265
-rwxr-xr-xbin/debian/datasource.json4
-rwxr-xr-xbin/mitre/datasource_2015.json4
-rwxr-xr-xbin/mitre/datasource_2016.json4
-rwxr-xr-xbin/mitre/datasource_2017.json4
-rwxr-xr-xbin/mitre/datasource_2018.json4
-rwxr-xr-xbin/mitre/datasource_2019.json4
-rwxr-xr-xbin/mitre/srtool_mitre.py2
-rw-r--r--bin/nist/datasource.json8
-rwxr-xr-xbin/nist/datasource_2015.json4
-rwxr-xr-xbin/nist/datasource_2016.json4
-rwxr-xr-xbin/nist/datasource_2017.json4
-rwxr-xr-xbin/nist/datasource_2018.json4
-rwxr-xr-xbin/nist/datasource_2019.json4
-rwxr-xr-xbin/nist/srtool_nist.py11
-rwxr-xr-xbin/redhat/datasource.json4
-rwxr-xr-xbin/yp/datasource.json6
-rw-r--r--lib/orm/management/commands/checksettings.py8
-rw-r--r--lib/orm/management/commands/lsupdates.py3
21 files changed, 325 insertions, 64 deletions
diff --git a/bin/acme/datasource.json_sample b/bin/acme/datasource.json_sample
index 62fea987..48e3838e 100755
--- a/bin/acme/datasource.json_sample
+++ b/bin/acme/datasource.json_sample
@@ -57,9 +57,8 @@
"init" : "bin/acme/srtool_acme.py --init-products --file bin/acme/acme-products.json",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "5",
- "update_time" : "02:00:00"
+ "update_time" : "{}"
},
{
"_comment_" : "All organizations should use the built-in parser for user tables (e.g. password hashing)",
@@ -72,9 +71,8 @@
"init" : "file:bin/acme/acme-users.json",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "5",
- "update_time" : "02:00:00"
+ "update_time" : "{}"
}
]
}
diff --git a/bin/common/datasource.json b/bin/common/datasource.json
index 2625abca..789c67cc 100755
--- a/bin/common/datasource.json
+++ b/bin/common/datasource.json
@@ -24,9 +24,8 @@
"init" : "bin/common/srtool_common.py --generate-schema-header",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "5",
- "update_time" : "02:00:00"
+ "update_time" : "{}"
},
{
"key" : "0001-common-keywords",
@@ -38,9 +37,9 @@
"init" : "bin/common/srtool_common.py --init-package-keywords",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
{
"key" : "0001-common-notify",
@@ -52,9 +51,9 @@
"init" : "bin/common/srtool_common.py --init-notify-categories",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
{
@@ -67,9 +66,8 @@
"init" : "",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
- "update_frequency" : "0",
- "update_time" : "02:00:00"
+ "update_frequency" : "5",
+ "update_time" : ""
},
{
"key" : "0910-common-weekly",
@@ -79,11 +77,11 @@
"description" : "Weekly archive database backup",
"cve_filter" : "",
"init" : "",
- "update" : "bin/common/srtool_utils.py --backup-db-json",
+ "update" : "bin/common/srtool_backup.py --backup-db-json",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
{
"key" : "0911-common-daily",
@@ -93,11 +91,11 @@
"description" : "Daily archive database backup",
"cve_filter" : "",
"init" : "",
- "update" : "bin/common/srtool_utils.py --backup-db-json-daily",
+ "update" : "bin/common/srtool_backup.py --backup-db-json-daily",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "2",
- "update_time" : "02:00:00"
+ "_comment_" : "Update at 7:00 am",
+ "update_time" : "{\"hour\":\"7\"}"
},
{
@@ -110,9 +108,9 @@
"init" : "bin/common/srtool_common.py --score-new-cves NEW",
"update" : "bin/common/srtool_common.py --score-new-cves NEW",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "0",
- "update_time" : "02:00:00"
+ "_comment_" : "Update every 10 minutes",
+ "update_time" : "{\"minutes\":\"10\"}"
}
],
diff --git a/bin/common/srtool_update.py b/bin/common/srtool_update.py
new file mode 100755
index 00000000..f73d6800
--- /dev/null
+++ b/bin/common/srtool_update.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import re
+import argparse
+import sqlite3
+import subprocess
+import json
+import urllib
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from srt_schema import ORM
+
+from datetime import datetime, timedelta, date
+from pprint import pprint
+from urllib.request import urlopen, URLError
+from urllib.parse import urlparse
+
+# setup
+is_verbose = False
+
+srtDbName = 'srt.sqlite'
+UPDATE_STATUS_LOG = 'update_status.log'
+
+#################################
+# Common routines
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
def get_tag_key(tag, key, default=''):
    """Return the value of 'key' from the JSON-encoded 'tag', else 'default'.

    'tag' is a JSON object string such as '{"weekday":"5","hour":"2"}'.
    A malformed tag is reported to stdout and treated as missing.
    """
    try:
        d = json.loads(tag)
    except (ValueError, TypeError):
        # ValueError covers json.JSONDecodeError; TypeError covers tag=None.
        # Narrow exceptions replace the original bare 'except:' which also
        # swallowed KeyboardInterrupt/SystemExit.
        print("ERROR TAG FORMAT:get_tag_key(%s,%s)" % (tag, key))
        return default
    if isinstance(d, dict) and key in d:
        return d[key]
    return default
+
+#################################
+# Update routines
+#
+# Example 'update_time' filters:
+# MINUTELY = 0 "{\"minutes\":\"10\"}" # every ten minutes
+# HOURLY = 1 "{\"minute\":\"10\"}" # at ten minutes past the hour
+# DAILY = 2 "{\"hour\":\"2\"}" # at 2 hours after midnight
+# WEEKLY = 3 "{\"weekday\":\"6\",\"hour\":\"2\"}" # day of week, hour
+# MONTHLY  = 4     "{\"day\":\"1\",\"hour\":\"2\"}"        # day of month
+# ONDEMAND = 5 "{}" # only on demand
+
def run_updates(force_all, name_filter, is_trial):
    """Scan all data sources and execute the update commands of those due.

    force_all:   when True, skip the schedule test and update everything
    name_filter: 'all', or a string matched against a datasource's
                 description/name/source/data fields for selective updates
    is_trial:    when True, report what would run without executing it

    Writes a summary of the scheduling decisions to UPDATE_STATUS_LOG.
    """
    conn = sqlite3.connect(srtDbName)
    cur = conn.cursor()
    cur_write = conn.cursor()

    time_now = datetime.now()
    print("time_now = %s" % time_now.strftime('%Y-%m-%d %H:%M:%S'))
    status_str = "====================\n"
    status_str += "Update: Date=%s,Filter='%s',Force=%s\n" % (time_now.strftime('%Y-%m-%d %H:%M:%S'), name_filter, force_all)

    sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
    for source in sources:
        # Only process datasources that have an update command
        if not source[ORM.DATASOURCE_UPDATE]:
            continue

        # Apply the name filter for selective manual updates
        if 'all' != name_filter:
            is_match = name_filter in (
                source[ORM.DATASOURCE_DESCRIPTION],
                source[ORM.DATASOURCE_NAME],
                source[ORM.DATASOURCE_SOURCE],
                source[ORM.DATASOURCE_DATA])
            if not is_match:
                status_str += " Skip '%s': name not a match\n" % source[ORM.DATASOURCE_DESCRIPTION]
                continue

        # Test the update schedule unless the update is forced
        if not force_all:
            update_frequency = source[ORM.DATASOURCE_UPDATE_FREQUENCY]
            if not source[ORM.DATASOURCE_LASTMODIFIEDDATE]:
                # No registered modified date (e.g. fresh init): force an update
                last_modified_date = time_now - timedelta(days=365)
            else:
                last_modified_date = datetime.strptime(source[ORM.DATASOURCE_LASTMODIFIEDDATE], '%Y-%m-%d %H:%M:%S')

            # Extract the optional scheduling presets from 'update_time'
            update_time = source[ORM.DATASOURCE_UPDATE_TIME]
            delta_minutes = get_tag_key(update_time, 'minutes', None)
            delta_minute = get_tag_key(update_time, 'minute', None)
            delta_hour = get_tag_key(update_time, 'hour', None)
            delta_weekday = get_tag_key(update_time, 'weekday', None)
            delta_day = get_tag_key(update_time, 'day', None)

            # Calculate the base interval to the next update
            if ORM.DATASOURCE_MINUTELY == update_frequency:
                if not delta_minutes:
                    print("ERROR:Missing minutes in '%s' for '%s'" % (source[ORM.DATASOURCE_DESCRIPTION], update_time))
                    delta_minutes = 10
                testdiff = timedelta(minutes=int(delta_minutes))
            elif ORM.DATASOURCE_HOURLY == update_frequency:
                testdiff = timedelta(hours=1)
            elif ORM.DATASOURCE_DAILY == update_frequency:
                testdiff = timedelta(days=1)
            elif ORM.DATASOURCE_WEEKLY == update_frequency:
                testdiff = timedelta(weeks=1)
            elif ORM.DATASOURCE_MONTHLY == update_frequency:
                # timedelta() has no 'months' argument (the original
                # 'timedelta(months=1)' raised TypeError); approximate with
                # 30 days -- the 'day' preset below pins the day of month
                testdiff = timedelta(days=30)
            elif ORM.DATASOURCE_ONDEMAND == update_frequency:
                continue
            else:
                # Unknown frequency: skip rather than use an unbound testdiff
                status_str += " Skip '%s': unknown update frequency (%s)\n" % (source[ORM.DATASOURCE_DESCRIPTION], update_frequency)
                continue
            testdate = last_modified_date + testdiff

            # Adjust for the update presets
            if None != delta_minute:
                # Force to selected minute of the hour
                testdate = datetime(testdate.year, testdate.month, testdate.day, testdate.hour, int(delta_minute), testdate.second)
            if None != delta_hour:
                # Force to selected hour of the day (the original code
                # tested 'delta_day' here, so the hour preset was skipped)
                testdate = datetime(testdate.year, testdate.month, testdate.day, int(delta_hour), testdate.minute, testdate.second)
            if None != delta_day:
                # Force to selected day of month
                testdate = datetime(testdate.year, testdate.month, int(delta_day), testdate.hour, testdate.minute, testdate.second)
            if None != delta_weekday:
                # Force to selected day of week
                testdate += timedelta(days=(int(delta_weekday) - testdate.weekday()))

            # Not yet due?
            if testdate > time_now:
                status_str += " Skip '%s': update time not reached (%s)\n" % (source[ORM.DATASOURCE_DESCRIPTION], testdate.strftime('%Y-%m-%d %H:%M:%S'))
                continue
            status_str += " UPDATE '%s': update time reached (%s)\n" % (source[ORM.DATASOURCE_DESCRIPTION], testdate.strftime('%Y-%m-%d %H:%M:%S'))

        # Execute the update
        if is_trial:
            print("TRIAL: Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
            status_str += " > TRIAL: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
        else:
            print("Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
            status_str += " > EXECUTE: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
            os.system(os.path.join(script_pathname, source[ORM.DATASOURCE_UPDATE]))

            # Reset this datasource's last_modified_date to restart its schedule
            sql = "UPDATE orm_datasource SET lastModifiedDate=? WHERE id=?"
            cur_write.execute(sql, (time_now.strftime('%Y-%m-%d %H:%M:%S'), source[ORM.DATASOURCE_ID],))
            conn.commit()
    # Close once after ALL sources are processed (the original closed the
    # connection inside the loop, breaking any subsequent iterations)
    conn.close()

    # Persist the status summary for operators
    fd = open(os.path.join(script_pathname, UPDATE_STATUS_LOG), 'w')
    fd.write(status_str)
    fd.close()
    # 'verbose' is set by main(); fall back safely when called as a library
    if globals().get('verbose', False):
        print(status_str)
+
# 'time' is the JSON offset-dictionary string, e.g. '{"weekday":"5","hour":"2"}'
def configure_ds_update(datasource_description, frequency, time):
    """Set the update frequency and schedule for the named data source."""
    conn = sqlite3.connect(srtDbName)
    try:
        conn.execute(
            "UPDATE orm_datasource SET update_frequency=?, update_time=? WHERE description=?",
            (frequency, time, datasource_description))
        conn.commit()
    finally:
        conn.close()
+
+
+#################################
+# main loop
+#
def main(argv):
    """Parse the command line and dispatch the requested update action."""
    global verbose

    # setup
    parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')

    # NOTE(review): cron-start/cron-stop are accepted but have no handler
    # below yet; they currently fall through to "Command not found"
    parser.add_argument('--cron-start', action='store_const', const='cron-start', dest='command', help='Start the SRTool background updater')
    parser.add_argument('--cron-stop', action='store_const', const='cron-stop', dest='command', help='Stop the SRTool background updater')

    parser.add_argument('--run-updates', '-u', action='store_const', const='run-updates', dest='command', help='update scheduled data sources')
    parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
    parser.add_argument('--name-filter', '-n', dest='name_filter', help='Filter for datasource name')

    parser.add_argument('--configure_ds_update', '-T', nargs=3, help='Set update frequency and time for specified datasource. Check bin/README.txt for more info')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
    parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')

    args = parser.parse_args()

    master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
    try:
        verbose = args.verbose
        name_filter = args.name_filter if args.name_filter else 'all'

        if 'run-updates' == args.command:
            # Real try/except restored: the original debug scaffolding
            # ('if True:' / 'if False:') left 'e' undefined in a dead branch
            try:
                print("BEGINNING UPDATING DATASOURCES... this MAY take a long time")
                run_updates(args.force, name_filter, args.is_trial)
                master_log.write("SRTOOL:%s:UPDATING DATASOURCES:\t\t\t...\t\t\tSUCCESS\n" % (date.today()))
                print("FINISHED UPDATING ALL DATASOURCES\n")
            except Exception as e:
                print("FAILED UPDATING ALL DATASOURCES (%s)" % e)
                master_log.write("SRTOOL:%s:UPDATING DATASOURCES\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
        elif args.configure_ds_update:
            try:
                print("CHANGING UPDATE CONFIGURATION FOR %s" % args.configure_ds_update[0])
                configure_ds_update(args.configure_ds_update[0], args.configure_ds_update[1], args.configure_ds_update[2])
                master_log.write("SRTOOL:%s:%s\t\t\t...\t\t\tCONFIGURED" % (date.today(), args.configure_ds_update[0]))
            except Exception as e:
                print("FAILED TO CONFIGURE UPDATE SETTINGS FOR %s" % args.configure_ds_update[0])
                master_log.write("SRTOOL:%s:%s\t\t\t...\t\t\tFAILED ... %s" % (date.today(), args.configure_ds_update[0], e))
        else:
            print("Command not found")
    finally:
        # Close the master log even when an unexpected error escapes
        master_log.close()
+
if __name__ == '__main__':
    # Resolve the SRTool install root (three levels above bin/common/) so
    # that relative datasource commands and log paths resolve correctly.
    # (A module-scope 'global' statement is a no-op and an unused
    # 'from os.path import abspath' were removed.)
    script_pathname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
    main(sys.argv[1:])
diff --git a/bin/debian/datasource.json b/bin/debian/datasource.json
index 0a78e155..eb8b5170 100755
--- a/bin/debian/datasource.json
+++ b/bin/debian/datasource.json
@@ -11,9 +11,9 @@
"init" : "bin/debian/srtool_debian.py -i --source='Debian CVE' --file=data/debian_cve_list.txt",
"update" : "bin/debian/srtool_debian.py -u --source='Debian CVE' --file=data/debian_cve_list.txt",
"lookup" : "bin/debian/srtool_debian.py --file=data/debian_cve_list.txt %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/datasource_2015.json b/bin/mitre/datasource_2015.json
index 9d015a4d..0ce89f12 100755
--- a/bin/mitre/datasource_2015.json
+++ b/bin/mitre/datasource_2015.json
@@ -10,9 +10,9 @@
"init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
"update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2015.xml %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/datasource_2016.json b/bin/mitre/datasource_2016.json
index 4daad16e..36ca814f 100755
--- a/bin/mitre/datasource_2016.json
+++ b/bin/mitre/datasource_2016.json
@@ -10,9 +10,9 @@
"init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
"update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2016.xml %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/datasource_2017.json b/bin/mitre/datasource_2017.json
index 1b1e4b32..2b326bf4 100755
--- a/bin/mitre/datasource_2017.json
+++ b/bin/mitre/datasource_2017.json
@@ -10,9 +10,9 @@
"init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
"update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2017.xml %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/datasource_2018.json b/bin/mitre/datasource_2018.json
index cc9c560b..ebb6eff2 100755
--- a/bin/mitre/datasource_2018.json
+++ b/bin/mitre/datasource_2018.json
@@ -10,9 +10,9 @@
"init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
"update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2018.xml %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/datasource_2019.json b/bin/mitre/datasource_2019.json
index 5f04ca16..7113aa95 100755
--- a/bin/mitre/datasource_2019.json
+++ b/bin/mitre/datasource_2019.json
@@ -10,9 +10,9 @@
"init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
"update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2019.xml %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/mitre/srtool_mitre.py b/bin/mitre/srtool_mitre.py
index 0464156a..021b417a 100755
--- a/bin/mitre/srtool_mitre.py
+++ b/bin/mitre/srtool_mitre.py
@@ -317,7 +317,7 @@ def append_cve_database(is_init,file_xml):
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', '', '', 1, ORM.PUBLISH_UNPUBLISHED, '', summary['Description'], summary['Published'], summary['Modified'],'', '', '', '', '', '', datetime.now(),''))
cve_id = cur.lastrowid
- print("MITRE:ADDED %20s" % cve_name)
+ print("MITRE:ADDED %20s\r" % cve_name)
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
diff --git a/bin/nist/datasource.json b/bin/nist/datasource.json
index c9e58121..8bdd5196 100644
--- a/bin/nist/datasource.json
+++ b/bin/nist/datasource.json
@@ -10,9 +10,9 @@
"init" : "file:data/nist-cwe-summary.html",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
{
"key" : "0011-nist-modified",
@@ -24,9 +24,9 @@
"init" : "",
"update" : "bin/nist/srtool_nist.py -i --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "2",
- "update_time" : "02:00:00"
+ "_comment_" : "Update at 7:00 am",
+ "update_time" : "{\"hour\":\"7\"}"
}
]
}
diff --git a/bin/nist/datasource_2015.json b/bin/nist/datasource_2015.json
index e26b0f75..ccca2f3f 100755
--- a/bin/nist/datasource_2015.json
+++ b/bin/nist/datasource_2015.json
@@ -10,9 +10,9 @@
"init" : "bin/nist/srtool_nist.py -I --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
"update" : "bin/nist/srtool_nist.py -n --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2015.json %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/nist/datasource_2016.json b/bin/nist/datasource_2016.json
index 026060b8..9c87ef92 100755
--- a/bin/nist/datasource_2016.json
+++ b/bin/nist/datasource_2016.json
@@ -10,9 +10,9 @@
"init" : "bin/nist/srtool_nist.py -I --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
"update" : "bin/nist/srtool_nist.py -n --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2016.json %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/nist/datasource_2017.json b/bin/nist/datasource_2017.json
index 64bd0171..40695ef5 100755
--- a/bin/nist/datasource_2017.json
+++ b/bin/nist/datasource_2017.json
@@ -10,9 +10,9 @@
"init" : "bin/nist/srtool_nist.py -I --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
"update" : "bin/nist/srtool_nist.py -n --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2017.json %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/nist/datasource_2018.json b/bin/nist/datasource_2018.json
index 216ef5a4..cf87ca2a 100755
--- a/bin/nist/datasource_2018.json
+++ b/bin/nist/datasource_2018.json
@@ -10,9 +10,9 @@
"init" : "bin/nist/srtool_nist.py -I --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
"update" : "bin/nist/srtool_nist.py -n --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2018.json %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/nist/datasource_2019.json b/bin/nist/datasource_2019.json
index ce9fc12d..f3315526 100755
--- a/bin/nist/datasource_2019.json
+++ b/bin/nist/datasource_2019.json
@@ -10,9 +10,9 @@
"init" : "bin/nist/srtool_nist.py -I --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
"update" : "bin/nist/srtool_nist.py -n --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2019.json %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index c05e65d0..807afbc1 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -170,7 +170,11 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
# trim the UTC offset to avoid time zone and day light savings glitches
content = content[:content.rfind('-')]
date_new = datetime.strptime(content, 'lastModifiedDate:%Y-%m-%dT%H:%M:%S')
- date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], '%Y-%m-%d %H:%M:%S')
+ if not ds[ORM.DATASOURCE_LASTMODIFIEDDATE]:
+ # Force update if no registed modified date for datasource (e.g. Init)
+ date_past = date_new-timedelta(days=1)
+ else:
+ date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], '%Y-%m-%d %H:%M:%S')
log.write("BEGINNING NIST %s\n" % ('INITS' if is_init else 'UPDATES'))
#determine if we are out of date and apply updates if true
@@ -194,8 +198,8 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
log.write("\n")
print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
- # Reset datasource's update_time as today
- sql = "UPDATE orm_datasource SET update_time = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
+ # Reset datasource's lastModifiedDate as today
+ sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
c.execute(sql, (datetime.today().strftime('%Y-%m-%d %H:%M:%S'),) )
conn.commit()
@@ -408,7 +412,6 @@ def sql_cve_query(conn, cve, is_init, log):
if exists is None:
# Get the default CVE status
status = get_cve_default_status(is_init,cve.publishedDate)
- print("BAR:%s=%s" % (cve.name,status))
sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
diff --git a/bin/redhat/datasource.json b/bin/redhat/datasource.json
index 9b62754e..d5066a06 100755
--- a/bin/redhat/datasource.json
+++ b/bin/redhat/datasource.json
@@ -11,9 +11,9 @@
"init" : "",
"update" : "",
"lookup" : "bin/redhat/srtool_redhat.py %command%",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "3",
- "update_time" : "02:00:00"
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
}
]
}
diff --git a/bin/yp/datasource.json b/bin/yp/datasource.json
index b9dd2c4c..97486956 100755
--- a/bin/yp/datasource.json
+++ b/bin/yp/datasource.json
@@ -60,9 +60,8 @@
"init" : "bin/yp/srtool_yp.py --init-products --file bin/yp/yocto-project-products.json",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "5",
- "update_time" : "02:00:00"
+ "update_time" : "{}"
},
{
"key" : "0101-yp-users",
@@ -74,9 +73,8 @@
"init" : "file:bin/yp/yocto-project-users.json",
"update" : "",
"lookup" : "",
- "lastModifiedDate" : "2018-03-01 01:01:01",
"update_frequency" : "5",
- "update_time" : "02:00:00"
+ "update_time" : "{}"
}
]
}
diff --git a/lib/orm/management/commands/checksettings.py b/lib/orm/management/commands/checksettings.py
index 7dbcf66f..c4b646d0 100644
--- a/lib/orm/management/commands/checksettings.py
+++ b/lib/orm/management/commands/checksettings.py
@@ -36,7 +36,7 @@ class Command(BaseCommand):
super(Command, self).__init__(*args, **kwargs)
self.guesspath = DN(DN(DN(DN(DN(DN(DN(__file__)))))))
- # NOTE: explicitly skip "_comment_" elements or items
+ # NOTE: explicitly skip "_comment*" elements or items
# to allow embedding comments in the JSON files
def _load_datasource(self,dir):
for ds in glob.glob(os.path.join(dir,'datasource*.json')):
@@ -55,7 +55,7 @@ class Command(BaseCommand):
#print(" LOAD_DATASOURCE:%s:%s" % (datasource['key'],datasource['description']))
ds,create = DataSource.objects.get_or_create(key=datasource['key'])
for key in datasource.keys():
- if "_comment_" == key:
+ if key.startswith("_comment"):
continue;
setattr(ds, key, datasource[key])
ds.save()
@@ -76,7 +76,7 @@ class Command(BaseCommand):
#print(" LOAD_GROUPS:%s" % (group['name']))
ds,create = Group.objects.get_or_create(name=group['name'])
for key in group.keys():
- if "_comment_" == key:
+ if key.startswith("_comment"):
continue;
setattr(ds, key, group[key])
ds.save()
@@ -92,7 +92,7 @@ class Command(BaseCommand):
#print(" LOAD_SRTUSER:%s" % (srtuser['name']))
ds,create = SrtUser.objects.get_or_create(username=srtuser['name'])
for key in srtuser.keys():
- if "_comment_" == key:
+ if key.startswith("_comment"):
continue;
setattr(ds, key, srtuser[key])
ds.save()
diff --git a/lib/orm/management/commands/lsupdates.py b/lib/orm/management/commands/lsupdates.py
index 82358620..7bae1720 100644
--- a/lib/orm/management/commands/lsupdates.py
+++ b/lib/orm/management/commands/lsupdates.py
@@ -136,8 +136,7 @@ class Command(BaseCommand):
updated_source=DataSource.objects.get(id=id)
updated_source.loaded = True
if update_modified:
- updated_source.update_time = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
- updated_source.lastModifiedDate = updated_source.update_time
+ updated_source.lastModifiedDate = datetime.today().strftime('%Y-%m-%d %H:%M:%S')
updated_source.save()
def nist_cwe(self, content):