Diffstat (limited to 'bin/nist/srtool_nist.py')
-rwxr-xr-x  bin/nist/srtool_nist.py | 54
1 file changed, 23 insertions(+), 31 deletions(-)
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index e69420c7..37116140 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -27,25 +27,19 @@
import os
import sys
import re
-import csv
-import xml.etree.ElementTree as ET
import argparse
import sqlite3
-import subprocess
import json
-import urllib
from datetime import datetime, date, timedelta
import pytz
-
from urllib.request import urlopen, URLError
-from urllib.parse import urlparse
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
-# setup
+# Setup:
lookupTable = []
cveIndex = {}
db_change = False
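
The first hunk drops imports the importer no longer uses (csv, xml.etree.ElementTree, subprocess, urllib, urllib.parse); urlopen/URLError, pytz and the datetime helpers stay because the update path still polls the NVD .meta files for a lastModifiedDate to compare against the stored datasource date. A minimal sketch of that remaining dependency, assuming the usual "lastModifiedDate:" line in the NVD .meta format (the helper name is illustrative, not taken from the file):

    from urllib.request import urlopen, URLError

    def read_meta_last_modified(nist_meta_url):
        """Return the lastModifiedDate string from an NVD .meta file, or None."""
        try:
            with urlopen(nist_meta_url) as f:
                for line in f.read().decode('utf-8').splitlines():
                    if line.startswith('lastModifiedDate:'):
                        # e.g. "lastModifiedDate:2019-05-31T03:00:05-04:00"
                        return line.split(':', 1)[1].strip()
        except URLError as e:
            raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
        return None
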
@@ -179,24 +173,24 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
log.write("BEGINNING NIST %s\n" % ('INITS' if is_init else 'UPDATES'))
#determine if we are out of date and apply updates if true
if (date_new > date_past) or force_update:
- pre_update_time = datetime.now() #used for logging purposes only
-
- nist_json(is_init,nist_cve_url, ds[ORM.DATASOURCE_ID], nist_file, log, date_new, incremental)
- log.write("began %s: %s\n" % ( 'init' if is_init else 'updates', str(pre_update_time) ))
- log.write("finished %s: %s\n" % ( 'init' if is_init else 'updates', str(datetime.now()) ))
- log.write("=============================================================================\n")
- log.write("\n")
-
- #update datasource's lastModifiedDate after successsfuly updating it
- sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
- c.execute(sql, (str(date_new),))
- conn.commit()
+ pre_update_time = datetime.now() #used for logging purposes only
+
+ nist_json(is_init,nist_cve_url, ds[ORM.DATASOURCE_ID], nist_file, log, date_new, incremental)
+ log.write("began %s: %s\n" % ( 'init' if is_init else 'updates', str(pre_update_time) ))
+ log.write("finished %s: %s\n" % ( 'init' if is_init else 'updates', str(datetime.now()) ))
+ log.write("=============================================================================\n")
+ log.write("\n")
+
+ #update datasource's lastModifiedDate after successsfuly updating it
+ sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
+ c.execute(sql, (str(date_new),))
+ conn.commit()
else:
- log.write("No %s needed\n" % ('init' if is_init else 'update'))
- log.write("Checked: %s\n" % datetime.now())
- log.write("=============================================================================\n")
- log.write("\n")
- print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
+ log.write("No %s needed\n" % ('init' if is_init else 'update'))
+ log.write("Checked: %s\n" % datetime.now())
+ log.write("=============================================================================\n")
+ log.write("\n")
+ print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
# Reset datasource's lastModifiedDate as today
sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
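
This hunk only re-indents the body of the out-of-date check in update_nist(); the control flow and log output are unchanged. One incidental detail visible here: the UPDATE statement binds the date with a ? placeholder while splicing the datasource id in with %-interpolation. A sketch of the fully parameterized form, using the same names as the hunk and assuming identical behaviour:

    sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id = ?"
    c.execute(sql, (str(date_new), ds[ORM.DATASOURCE_ID]))
    conn.commit()
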
@@ -215,7 +209,6 @@ def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, in
f.close()
except URLError as e:
raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
- continue
log.close()
c.close()
conn.close()
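
The deleted `continue` sat immediately after the `raise` in the except handler, so it could never execute. A tiny self-contained illustration of why it was dead code (function and argument names are illustrative):

    from urllib.request import urlopen, URLError

    def check_meta(nist_meta_url):
        try:
            f = urlopen(nist_meta_url)
            f.close()
        except URLError as e:
            raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
            # nothing placed here can run -- the raise has already left the block
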
@@ -290,7 +283,7 @@ def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, dat
v.public = True # Always true since NIST is public source
# We do not know yet if this has been published to the SRTool management
- v.publish = ORM.PUBLISH_UNPUBLISHED
+ v.publish_state = ORM.PUBLISH_UNPUBLISHED
v.publish_date = ''
if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
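
Here the CVE record's field changes from publish to publish_state, presumably so the assignment lands on the attribute name the schema actually uses. With a plain Python object the wrong name does not fail, it just creates an unused attribute, which is why this kind of bug is easy to miss; a sketch (class name and 'U' value are illustrative, not from the ORM):

    class CveRecord:
        __slots__ = ('public', 'publish_state', 'publish_date')

    v = CveRecord()
    v.publish_state = 'U'   # correct field, as in the patched line
    v.publish = 'U'         # AttributeError -- __slots__ turns the typo into a hard error
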
@@ -507,7 +500,7 @@ def fetch_cve(cve_name,cve_source_file):
f = open(cve_cache_path, 'r')
CVE_Item = json.load(f)
except Exception as e:
- print("Description=ERROR reading CVE summary file '%s':e" % (cve_cache_path,e))
+ print("Description=ERROR reading CVE summary file '%s':%s" % (cve_cache_path,e))
return
elif cve_source_file:
try:
@@ -531,11 +524,11 @@ def fetch_cve(cve_name,cve_source_file):
cve_cache_file.write(json.dumps(CVE_Item))
break
except Exception as e:
- print("Description=ERROR creating CVE cache file '%s':e" % (cve_source_file,e))
+ print("Description=ERROR creating CVE cache file '%s':%s" % (cve_source_file,e))
return
else:
# No data source for details
- return v
+ return
if not CVE_Item:
print("description=There is no CVE record for %s in the loaded NIST public CVE database." % cve_name)
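
Both fetch_cve() hunks fix the same %-formatting bug: the literal ended in ':e' instead of ':%s', so the exception was never printed and, worse, passing a two-element tuple to a template with a single %s raises TypeError inside the error path itself. The no-data-source branch also changes `return v` to a bare `return`, matching the other early exits in the function. A short reproduction of the formatting fix (the file name is made up):

    path, err = '/tmp/CVE-2019-0001.json', OSError('no such file')
    try:
        print("Description=ERROR reading CVE summary file '%s':e" % (path, err))
    except TypeError as exc:
        print(exc)   # not all arguments converted during string formatting
    # corrected form, as in the patch:
    print("Description=ERROR reading CVE summary file '%s':%s" % (path, err))
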
@@ -603,7 +596,7 @@ def fetch_cve(cve_name,cve_source_file):
elif "OR" == config['operator']:
summary['cpe_list'] += nist_scan_configuration_or(config, cve_name, 0)
else:
- print("ERROR CONFIGURE:OP?:%s" % config_rec['operator'])
+ print("ERROR CONFIGURE:OP?:%s" % config['operator'])
summary['cpe_list'] += '[/and]|'
summary['cpe_list'] += '[/config]|'
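
The error branch of the configuration walk referenced config_rec, a name that does not exist at that point, so an unexpected operator would have raised NameError instead of printing the diagnostic; the fix reports config['operator'], the record actually being examined. A small self-contained sketch of the dispatch shape (data and messages illustrative, NVD node layout assumed):

    configurations = [{'operator': 'AND'}, {'operator': 'OR'}, {'operator': 'XOR'}]
    for config in configurations:
        op = config.get('operator', '')
        if op == 'AND':
            print('walk AND children')
        elif op == 'OR':
            print('walk OR children')
        else:
            # pre-patch this read config_rec['operator'] and died with NameError
            print("ERROR CONFIGURE:OP?:%s" % config['operator'])
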
@@ -728,6 +721,5 @@ def main(argv):
exit(ret)
if __name__ == '__main__':
- global srtool_basepath
srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
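
The final hunk removes a `global srtool_basepath` declared at module scope, where it has no effect: the assignment in the `__main__` block already creates a module-level name, and functions that only read srtool_basepath see it without any declaration. `global` is only needed to rebind the name from inside a function. A brief illustration (paths made up):

    srtool_basepath = '/opt/srtool'              # module level: already global

    def cache_dir():
        return srtool_basepath + '/data/cache'   # reading needs no declaration

    def set_basepath(p):
        global srtool_basepath                   # required only when rebinding inside a function
        srtool_basepath = p
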