aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xbin/acme/datasource.json_sample5
-rwxr-xr-xbin/acme/srtool_acme.py7
-rwxr-xr-xbin/acme/srtool_defect.py6
-rwxr-xr-xbin/acme/srtool_env.sh1
-rwxr-xr-xbin/acme/srtool_jira_acme.py20
-rwxr-xr-xbin/common/datasource.json56
-rwxr-xr-xbin/common/srtool_backup.py216
-rwxr-xr-xbin/common/srtool_common.py964
-rwxr-xr-xbin/common/srtool_email.py2
-rw-r--r--bin/common/srtool_jira_template.py20
-rwxr-xr-xbin/common/srtool_job.py791
-rwxr-xr-xbin/common/srtool_progress.py75
-rwxr-xr-xbin/common/srtool_sanity_test.py31
-rwxr-xr-xbin/common/srtool_sql.py492
-rwxr-xr-xbin/common/srtool_test.py204
-rwxr-xr-xbin/common/srtool_update.py376
-rwxr-xr-xbin/common/srtool_utils.py2907
-rwxr-xr-xbin/cve_checker/datasource.json25
-rwxr-xr-xbin/cve_checker/patcher.json41
-rwxr-xr-xbin/cve_checker/srtool_cvechecker.py950
-rwxr-xr-xbin/cve_checker/srtool_cvechecker_util.py465
-rwxr-xr-xbin/cve_checker/srtool_env.sh4
-rwxr-xr-xbin/debian/srtool_debian.py2
-rwxr-xr-xbin/dev_tools/db_migration_config_sample.yml42
-rwxr-xr-xbin/dev_tools/db_migrations.py339
-rwxr-xr-xbin/dev_tools/dump_jason.py29
-rwxr-xr-xbin/dev_tools/dump_jason.sh29
-rwxr-xr-xbin/dev_tools/history.py254
-rwxr-xr-xbin/dev_tools/lssrt.sh3
-rwxr-xr-xbin/dev_tools/master_app.sh13
-rwxr-xr-xbin/dev_tools/migrate.sh5
-rwxr-xr-xbin/dev_tools/nohup_start.sh12
-rwxr-xr-xbin/dev_tools/prepare_environment.sh64
-rwxr-xr-xbin/dev_tools/quick_find.sh23
-rwxr-xr-xbin/dev_tools/restart.sh4
-rwxr-xr-xbin/dev_tools/srt_env.sh79
-rwxr-xr-xbin/dev_tools/start.sh5
-rwxr-xr-xbin/dev_tools/stop.sh3
-rwxr-xr-xbin/dev_tools/tail.sh12
-rwxr-xr-xbin/dev_tools/update_status.sh48
-rwxr-xr-xbin/mitre/datasource_2010.json19
-rwxr-xr-xbin/mitre/datasource_2011.json19
-rwxr-xr-xbin/mitre/datasource_2012.json19
-rwxr-xr-xbin/mitre/datasource_2013.json19
-rwxr-xr-xbin/mitre/datasource_2014.json19
-rwxr-xr-xbin/mitre/datasource_2015.json5
-rwxr-xr-xbin/mitre/datasource_2016.json5
-rwxr-xr-xbin/mitre/datasource_2017.json5
-rwxr-xr-xbin/mitre/datasource_2018.json5
-rwxr-xr-xbin/mitre/datasource_2019.json5
-rwxr-xr-xbin/mitre/datasource_2020.json19
-rwxr-xr-xbin/mitre/datasource_2021.json19
-rwxr-xr-xbin/mitre/datasource_2022.json19
-rwxr-xr-xbin/mitre/datasource_2023.json19
-rwxr-xr-xbin/mitre/srtool_mitre.py146
-rw-r--r--bin/nist/datasource.json9
-rwxr-xr-xbin/nist/datasource_2002.json19
-rwxr-xr-xbin/nist/datasource_2003.json19
-rwxr-xr-xbin/nist/datasource_2004.json19
-rwxr-xr-xbin/nist/datasource_2005.json19
-rwxr-xr-xbin/nist/datasource_2006.json19
-rwxr-xr-xbin/nist/datasource_2007.json19
-rwxr-xr-xbin/nist/datasource_2008.json19
-rwxr-xr-xbin/nist/datasource_2009.json19
-rwxr-xr-xbin/nist/datasource_2010.json18
-rwxr-xr-xbin/nist/datasource_2011.json18
-rwxr-xr-xbin/nist/datasource_2012.json18
-rwxr-xr-xbin/nist/datasource_2013.json18
-rwxr-xr-xbin/nist/datasource_2014.json18
-rwxr-xr-xbin/nist/datasource_2015.json6
-rwxr-xr-xbin/nist/datasource_2016.json6
-rwxr-xr-xbin/nist/datasource_2017.json6
-rwxr-xr-xbin/nist/datasource_2018.json6
-rwxr-xr-xbin/nist/datasource_2019.json6
-rwxr-xr-xbin/nist/datasource_2020.json18
-rwxr-xr-xbin/nist/datasource_2021.json18
-rwxr-xr-xbin/nist/datasource_2022.json18
-rwxr-xr-xbin/nist/datasource_2023.json18
-rwxr-xr-xbin/nist/srtool_nist.py1494
-rwxr-xr-xbin/redhat/srtool_redhat.py32
-rwxr-xr-xbin/srt97
-rw-r--r--bin/srt_dbconfig.yml41
-rwxr-xr-xbin/srtool-requirements.txt10
-rwxr-xr-xbin/ubuntu_trivy/datasource.json19
-rwxr-xr-xbin/ubuntu_trivy/license.txt4
-rwxr-xr-xbin/ubuntu_trivy/srtool_ubuntu_trivy.py295
-rwxr-xr-xbin/wr_trivy/datasource.json19
-rwxr-xr-xbin/wr_trivy/license.txt4
-rwxr-xr-xbin/wr_trivy/srtool_wr_trivy.py264
-rwxr-xr-xbin/yp/datasource.json5
-rwxr-xr-xbin/yp/srtool_cve_checker.py277
-rwxr-xr-xbin/yp/srtool_defect.py6
-rwxr-xr-xbin/yp/srtool_publish.py1052
-rwxr-xr-xbin/yp/srtool_yp.py7
-rwxr-xr-xbin/yp/yocto-project-products.json89
-rwxr-xr-xdata/notify-categories.json4
-rwxr-xr-xdata/recipe_names_from_layer_index.txt3844
-rwxr-xr-xlib/acme/reports.py2
-rwxr-xr-xlib/acme/tables.py2
-rwxr-xr-xlib/acme/templates/acme_hello.html2
-rwxr-xr-xlib/acme/templates/acme_product.html2
-rwxr-xr-xlib/acme/templates/base.html3
-rwxr-xr-xlib/acme/urls.py2
-rwxr-xr-xlib/cve_checker/__init__.py0
-rwxr-xr-xlib/cve_checker/admin.py3
-rwxr-xr-xlib/cve_checker/apps.py5
-rw-r--r--lib/cve_checker/migrations/0001_initial.py71
-rw-r--r--lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py19
-rw-r--r--lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py19
-rw-r--r--lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py28
-rw-r--r--lib/cve_checker/migrations/0005_ckuploadmanager.py27
-rw-r--r--lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py18
-rw-r--r--lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py23
-rwxr-xr-xlib/cve_checker/migrations/__init__.py0
-rwxr-xr-xlib/cve_checker/models.py165
-rwxr-xr-xlib/cve_checker/reports.py511
-rwxr-xr-xlib/cve_checker/tables.py695
-rwxr-xr-xlib/cve_checker/templates/ck-audit-toastertable.html223
-rwxr-xr-xlib/cve_checker/templates/ck-auditcve-toastertable.html431
-rwxr-xr-xlib/cve_checker/templates/ck-audits-toastertable.html425
-rwxr-xr-xlib/cve_checker/templates/ck-import_manager-toastertable.html266
-rwxr-xr-xlib/cve_checker/templates/ck-issue-toastertable.html347
-rwxr-xr-xlib/cve_checker/templates/ck-product-toastertable.html309
-rwxr-xr-xlib/cve_checker/tests.py3
-rwxr-xr-xlib/cve_checker/urls.py47
-rwxr-xr-xlib/cve_checker/views.py325
-rw-r--r--lib/orm/management/commands/checksettings.py14
-rw-r--r--lib/orm/management/commands/lsupdates.py26
-rw-r--r--lib/orm/migrations/0001_initial.py92
-rwxr-xr-xlib/orm/migrations/0003_modified.py61
-rwxr-xr-xlib/orm/migrations/0004_defect_status.py35
-rwxr-xr-xlib/orm/migrations/0005_publish_report.py34
-rwxr-xr-xlib/orm/migrations/0006_reconcile.py410
-rwxr-xr-xlib/orm/migrations/0007_components_errorlog.py39
-rw-r--r--lib/orm/migrations/0008_cveaccess.py24
-rw-r--r--lib/orm/migrations/0009_recipetable.py20
-rw-r--r--lib/orm/migrations/0010_job.py35
-rw-r--r--lib/orm/migrations/0011_extend_field_sizes.py33
-rwxr-xr-xlib/orm/migrations/0012_job_user.py21
-rwxr-xr-xlib/orm/migrations/0013_update_preinit.py18
-rw-r--r--lib/orm/migrations/0014_alter_packagetocve_applicable.py18
-rw-r--r--lib/orm/models.py1000
-rw-r--r--lib/srtgui/api.py521
-rw-r--r--lib/srtgui/reports.py1331
-rw-r--r--lib/srtgui/static/js/libtoaster.js133
-rwxr-xr-xlib/srtgui/static/js/mrjsection.js131
-rw-r--r--lib/srtgui/static/js/table.js107
-rwxr-xr-xlib/srtgui/static/js/typeahead_affected_components.js9
-rw-r--r--lib/srtgui/tables.py1294
-rw-r--r--lib/srtgui/templates/base.html25
-rw-r--r--lib/srtgui/templates/basetable_top.html7
-rw-r--r--lib/srtgui/templates/create_vulnerability.html2
-rwxr-xr-xlib/srtgui/templates/cve-edit-local.html2
-rwxr-xr-xlib/srtgui/templates/cve-nist-local.html13
-rwxr-xr-xlib/srtgui/templates/cve-nist.html16
-rw-r--r--lib/srtgui/templates/cve.html213
-rwxr-xr-xlib/srtgui/templates/cve.html_orig2
-rw-r--r--lib/srtgui/templates/cves-select-toastertable.html151
-rw-r--r--lib/srtgui/templates/cves-toastertable.html2
-rwxr-xr-xlib/srtgui/templates/date-time-test.html88
-rw-r--r--lib/srtgui/templates/defect.html59
-rw-r--r--lib/srtgui/templates/detail_search_header.html3
-rw-r--r--lib/srtgui/templates/detail_sorted_header.html2
-rwxr-xr-xlib/srtgui/templates/email_admin.html70
-rwxr-xr-xlib/srtgui/templates/email_success.html49
-rwxr-xr-xlib/srtgui/templates/errorlog-toastertable.html142
-rw-r--r--lib/srtgui/templates/export.html2
-rw-r--r--lib/srtgui/templates/filtersnippet.html2
-rw-r--r--lib/srtgui/templates/generic-toastertable-page.html2
-rw-r--r--lib/srtgui/templates/guided_tour.html2
-rwxr-xr-xlib/srtgui/templates/history-cve-toastertable.html73
-rwxr-xr-xlib/srtgui/templates/history-defect-toastertable.html73
-rwxr-xr-xlib/srtgui/templates/history-investigation-toastertable.html73
-rwxr-xr-xlib/srtgui/templates/history-vulnerability-toastertable.html73
-rw-r--r--lib/srtgui/templates/investigation.html87
-rwxr-xr-xlib/srtgui/templates/joblog.html39
-rw-r--r--lib/srtgui/templates/js-unit-tests.html2
-rw-r--r--lib/srtgui/templates/landing.html11
-rw-r--r--lib/srtgui/templates/landing_not_managed.html2
-rw-r--r--lib/srtgui/templates/login.html2
-rwxr-xr-xlib/srtgui/templates/maintenance.html216
-rwxr-xr-xlib/srtgui/templates/manage-jobs-toastertable.html126
-rw-r--r--lib/srtgui/templates/management.html329
-rwxr-xr-xlib/srtgui/templates/mrj_section.html194
-rwxr-xr-xlib/srtgui/templates/notifications-toastertable.html2
-rw-r--r--lib/srtgui/templates/product.html2
-rwxr-xr-xlib/srtgui/templates/publish-cve-toastertable.html162
-rwxr-xr-xlib/srtgui/templates/publish-defect-toastertable.html168
-rwxr-xr-xlib/srtgui/templates/publish-list-toastertable.html162
-rw-r--r--lib/srtgui/templates/publish.html80
-rw-r--r--lib/srtgui/templates/publish_diff_snapshot.html365
-rw-r--r--lib/srtgui/templates/report.html45
-rw-r--r--lib/srtgui/templates/snippets/gitrev_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/investigations_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/pkg_dependencies_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/pkg_revdependencies_popover.html2
-rw-r--r--lib/srtgui/templates/sources-toastertable.html81
-rw-r--r--lib/srtgui/templates/sources.html2
-rwxr-xr-xlib/srtgui/templates/srtool_metadata_include.html59
-rw-r--r--lib/srtgui/templates/tablesort.html2
-rw-r--r--lib/srtgui/templates/tbd.html2
-rw-r--r--lib/srtgui/templates/toastertable-simple.html2
-rw-r--r--lib/srtgui/templates/toastertable.html30
-rw-r--r--lib/srtgui/templates/triage_cves.html2
-rw-r--r--lib/srtgui/templates/unavailable_artifact.html2
-rw-r--r--lib/srtgui/templates/users.html203
-rw-r--r--lib/srtgui/templates/vulnerability.html150
-rwxr-xr-x[-rw-r--r--]lib/srtgui/templatetags/jobtags.py (renamed from lib/srtgui/templatetags/projecttags.py)29
-rwxr-xr-xlib/srtgui/templatetags/multi_tags.py22
-rw-r--r--lib/srtgui/templatetags/project_url_tag.py34
-rw-r--r--lib/srtgui/typeaheads.py26
-rw-r--r--lib/srtgui/urls.py81
-rw-r--r--lib/srtgui/views.py1979
-rw-r--r--lib/srtgui/widgets.py300
-rw-r--r--lib/srtmain/management/commands/checksocket.py4
-rwxr-xr-xlib/srtmain/management/commands/update.py41
-rw-r--r--lib/srtmain/settings.py48
-rw-r--r--lib/srtmain/urls.py32
-rw-r--r--lib/srtmain/wsgi.py17
-rw-r--r--lib/users/migrations/0002_last_name.py18
-rw-r--r--lib/users/migrations/0003_srtuser_timezone.py18
-rwxr-xr-xlib/users/migrations/0004_timezone_default.py18
-rw-r--r--lib/users/migrations/0005_alter_srtuser_first_name.py18
-rwxr-xr-xlib/users/models.py95
-rwxr-xr-xlib/users/templates/user_edit.html16
-rwxr-xr-xlib/users/urls.py4
-rwxr-xr-xlib/users/views.py71
-rwxr-xr-xlib/yp/reports.py381
-rwxr-xr-xlib/yp/templates/landing.html93
-rwxr-xr-xlib/yp/templates/management.html199
-rwxr-xr-xlib/yp/templates/yp_hello.html2
-rwxr-xr-xlib/yp/urls.py12
-rwxr-xr-xlib/yp/views.py82
233 files changed, 31287 insertions, 2297 deletions
diff --git a/bin/acme/datasource.json_sample b/bin/acme/datasource.json_sample
index dc1d0188..98ef1def 100755
--- a/bin/acme/datasource.json_sample
+++ b/bin/acme/datasource.json_sample
@@ -33,6 +33,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact ACME"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/acme/srtool_acme.py b/bin/acme/srtool_acme.py
index 1aa1b911..7f4aeda0 100755
--- a/bin/acme/srtool_acme.py
+++ b/bin/acme/srtool_acme.py
@@ -33,7 +33,7 @@
import os
import sys
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
# load the srt.sqlite schema indexes
@@ -85,7 +85,7 @@ def init_products(source_file):
with open(source_doc) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
Product_Items = dct['Product_Items']
@@ -103,7 +103,7 @@ def init_products(source_file):
product = cur.execute(sql).fetchone()
if product is None:
# NOTE: 'order' is a reserved SQL keyword, so we have to quote it
- sql = ''' INSERT into orm_product ("order", key, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
+ sql = ''' INSERT INTO orm_product (`order`, `key`, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
cur.execute(sql, (order, key, name, version, profile, cpe, defect_tags, product_tags))
else:
sql = ''' UPDATE orm_product
@@ -124,6 +124,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/acme/srtool_defect.py b/bin/acme/srtool_defect.py
index 0e189a3a..e5ac0a7a 100755
--- a/bin/acme/srtool_defect.py
+++ b/bin/acme/srtool_defect.py
@@ -26,7 +26,7 @@
import os
import sys
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
# load the srt.sqlite schema indexes
@@ -133,7 +133,7 @@ class Defect:
#
def new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -147,7 +147,7 @@ def new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "DEFECT-%s-%05d" % (product_prefix,index)
return defect_name
diff --git a/bin/acme/srtool_env.sh b/bin/acme/srtool_env.sh
index 4eac83b0..e72ccb5a 100755
--- a/bin/acme/srtool_env.sh
+++ b/bin/acme/srtool_env.sh
@@ -1,4 +1,5 @@
# Main application shell settings
export SRT_MAIN_APP="acme"
+export SRT_MAIN_URL="acme"
diff --git a/bin/acme/srtool_jira_acme.py b/bin/acme/srtool_jira_acme.py
index f11af1df..313a5f1e 100755
--- a/bin/acme/srtool_jira_acme.py
+++ b/bin/acme/srtool_jira_acme.py
@@ -53,7 +53,7 @@ import os
import sys
import re
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
from datetime import datetime, date
@@ -196,7 +196,7 @@ def do_update_jira():
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
today = datetime.today()
@@ -252,7 +252,7 @@ def do_update_jira():
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#############################################################################3
###
@@ -468,7 +468,7 @@ def update_project_issues(project, issues, conn, log):
except:
cve_name_sort = cve.name
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
c.execute(sql, (cve_name, cve_name_sort, d.priority, cve_status, '', '', '', '', '', 1, 0, '', 'Created from defect %s' % d.name, '', '', 0, '', '', '', '', '', '', datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
@@ -562,7 +562,7 @@ def jira_update_list(jira_list):
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
products = c.execute('''SELECT * FROM orm_product''').fetchall()
@@ -629,7 +629,7 @@ def jira_add_to_defect_db(jira_name):
#try connecting to jira
try:
jira = JIRA(JIRA_PRODUCTION_LINK, auth=(srt_user, srt_passwd))
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
except Exception as e:
print("xhr_investigation_commit:CONNECTION TO JIRA FAILED:(%s)\n" % e, file=sys.stderr)
@@ -679,7 +679,7 @@ def jira_add_to_defect_db(jira_name):
c.execute(sql, (d.name, d.summary, d.url, d.priority, d.status, d.resolution, str(d.publish), d.release_version, d.product_id, d.date_created, d.date_updated))
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
except Exception as e:
print("ERROR:could not find/import defect(%s)" % e, file=sys.stderr)
return 1
@@ -701,7 +701,7 @@ JIRA_IS_TEST = True
JIRA_IS_SIMULATE = True
def simulate_new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -715,7 +715,7 @@ def simulate_new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "%s-%05d" % (product_prefix,index)
return defect_name
@@ -746,7 +746,7 @@ def jira_new_defect(product_defect_tags,summary,cve_list,description,reason,prio
return 1
#srt_error_log("Jira connection made")
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# append the jira link to description
diff --git a/bin/common/datasource.json b/bin/common/datasource.json
index 81d5c289..34ca5404 100755
--- a/bin/common/datasource.json
+++ b/bin/common/datasource.json
@@ -55,6 +55,19 @@
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
+ {
+ "key" : "0001-common-recipes",
+ "data" : "common_recipes",
+ "source" : "common",
+ "name" : "Common Recipes",
+ "description" : "Common recipe names for typeahead",
+ "cve_filter" : "",
+ "init" : "bin/common/srtool_common.py --import-recipe-names",
+ "update" : "bin/common/srtool_common.py --import-recipe-names",
+ "lookup" : "",
+ "update_frequency" : "6",
+ "update_time" : "{}"
+ },
{
"key" : "0900-common-local",
@@ -97,6 +110,20 @@
"_comment_" : "Update daily at 7:00 am",
"update_time" : "{\"hour\":\"7\"}"
},
+ {
+ "key" : "0912-common-log-daily",
+ "data" : "backup_log_daily",
+ "source" : "common",
+ "name" : "Daily Logs Backup",
+ "description" : "Daily logs backup",
+ "cve_filter" : "",
+ "init" : "",
+ "update" : "bin/common/srtool_backup.py --backup-logs",
+ "lookup" : "",
+ "update_frequency" : "2",
+ "_comment_" : "Update daily at 11:00 pm",
+ "update_time" : "{\"hour\":\"23\"}"
+ },
{
"_comment_" : "Only score 100 at a time to prevent run-away database overloading",
@@ -105,14 +132,37 @@
"source" : "common",
"name" : "Score",
"description" : "Score CVEs",
+ "attributes" : "DISABLE ",
"cve_filter" : "",
- "init" : "bin/common/srtool_common.py --score-new-cves NEW --count=100",
- "update" : "bin/common/srtool_common.py --score-new-cves NEW --count=100",
+ "init" : "bin/common/srtool_common.py --score-new-cves NEW --count=100 --progress",
+ "update" : "bin/common/srtool_common.py --score-new-cves NEW --count=100 --progress",
"lookup" : "",
- "update_frequency" : "0",
+ "update_frequency" : "5",
"_comment_" : "Update every 10 minutes",
"update_time" : "{\"minutes\":\"10\"}"
+ },
+ {
+ "_comment1_" : "Test srtool_update.py execution",
+ "key" : "0930-updater-test",
+ "data" : "unit_test",
+ "source" : "common",
+ "name" : "updater_test",
+ "description" : "<Updater test>",
+ "_comment2_" : "By default, disable this datasource",
+ "attributes" : "DISABLE ",
+ "cve_filter" : "",
+ "init" : "",
+ "_comment3_" : "Use '!' prefix for built-ins to stop attempted CWD path insertion",
+ "update" : "!date > foo.txt",
+ "lookup" : "",
+ "_comment4_" : "Update every two minutes MINUTELY(=0)",
+ "update_frequency" : "5",
+ "update_time" : "{\"minutes\":\"2\"}"
}
+
+
+
+
],
"permissions" : [
diff --git a/bin/common/srtool_backup.py b/bin/common/srtool_backup.py
index b37e2d08..f065e2f2 100755
--- a/bin/common/srtool_backup.py
+++ b/bin/common/srtool_backup.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018-2019 Wind River Systems
+# Copyright (C) 2018-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -27,7 +27,6 @@
import os
import sys
import argparse
-import sqlite3
import json
from datetime import datetime, date
@@ -35,11 +34,17 @@ from datetime import datetime, date
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
+force_update = False
srtDbName = 'srt.sqlite'
+BACKUP_DIR = 'backups'
+BACKUP_PREFIX = 'backup_'
+BACKUP_LOG_DIR = 'update_logs'
+BACKUP_LOG_PREFIX = 'backup_log_'
#################################
# Common routines
@@ -57,20 +62,35 @@ def _log(msg):
f1.close()
#################################
+# Set backup database stamp file
+#
+
+def backup_stamp(backup_dir):
+ if not os.path.isdir(backup_dir):
+ print("ERROR: no such directory '%s'" % backup_dir)
+ exit(1)
+ statinfo = os.stat(os.path.join(backup_dir, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(backup_dir,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("* Set Timestamp:%s" % mod_timestamp.strftime('%Y-%m-%d|%H:%M:%S|%A, %B %d %Y'))
+
+#################################
# Backup the database and data files
#
def backup_db(is_daily):
today = datetime.today()
weeknum = today.strftime("%W")
- weekday = today.isoweekday()
+ weekday = today.strftime("%A") #today.isoweekday()
year = today.strftime("%Y")
# Where are we backing up to
if is_daily:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s" % (weekday))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_DIR,BACKUP_PREFIX,weekday))
else:
- backup_dir = os.path.join(script_pathname, "backups/backup_%s_%s" % (year,weeknum))
+ backup_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (BACKUP_DIR,BACKUP_PREFIX,year,weeknum))
# Make sure directory exists
try:
os.makedirs(backup_dir)
@@ -82,36 +102,199 @@ def backup_db(is_daily):
print("*** Backup dir='%s' ***" % backup_dir)
print("* Copy database")
- cmd = 'cp %s %s' % (os.path.join(script_pathname,srtDbName),os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s %s' % (os.path.join(script_pathname,srtDbName),backup_dir)
print(cmd)
os.system(cmd)
# Copy data but skip cache dir (no deep copy)
print("* Copy data files")
- cmd = 'cp %s/data/* %s/data' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -p %s/data/* %s/data' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
# Copy attachments
print("* Copy attachment files")
- cmd = 'cp -r %s/downloads %s' % (script_pathname,os.path.join(script_pathname,backup_dir))
+ cmd = 'cp -r -p %s/downloads %s' % (script_pathname,backup_dir)
print(cmd)
os.system(cmd)
+ # Set stamp file
+ backup_stamp(backup_dir)
+
+ # Additional remote backup directory copy
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
+ setting = SQL_EXECUTE(cur, sql, ('SRT_REMOTE_BACKUP_PATH',)).fetchone()
+ print("* Check for remote backup (%s) " % str(setting))
+ cur.close()
+ conn.close()
+ if setting and setting[ORM.SRTSETTING_VALUE].strip():
+ remote_backup_dir = setting[ORM.SRTSETTING_VALUE]
+ if is_daily:
+ remote_backup_dir = os.path.join(script_pathname, "%s/%s%s" % (remote_backup_dir,BACKUP_PREFIX,weekday))
+ else:
+ remote_backup_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (remote_backup_dir,BACKUP_PREFIX,year,weeknum))
+
+ print("* Safety copy to remote location '%s'" % remote_backup_dir)
+ try:
+ os.makedirs(remote_backup_dir)
+ except:
+ # If exists, clean it out
+ os.system("rm -rf %s/*" % (remote_backup_dir))
+
+ # Is path a failure?
+ if not os.path.isdir(remote_backup_dir):
+ print("ERROR: no such remote backup path '%s'" % remote_backup_dir)
+ return(-1)
+
+ # Copy the backup directory to the remote location
+ cmd = 'cp -r -p %s %s' % (backup_dir,remote_backup_dir)
+ print(cmd)
+ os.system(cmd)
+
+ os.system('bash -c "echo \"BACKUP:`date`:%s\" >> backup_db.log"' % cmd)
+
+
+
+#######################################################################
+# backup_logs:
+#
+# Back up the logs to a daily wheel, and reset the logs
+# to empty for the next day.
+# If end of week, concatenate the respective daily logs into a
+# week-of-year log
+#
+
+def backup_logs():
+ today = datetime.today()
+ date_str = today.strftime("%Y/%m/%d")
+ weeknum = today.strftime("%W") # 00, 01, ... , 53
+ weekday = today.strftime("%A") # Sunday, Monday, ... , Saturday
+ weekday_num = today.strftime("%w") # 0, 1, ..., 6
+ year = today.strftime("%Y")
+ log_name_list = ('srt_web.log','srt_update.log','srt_dbg.log','update_logs/master_log.txt')
+
+ # Perform the daily back up
+ backup_day_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,weekday))
+ # Make sure directory exists
+ try:
+ os.makedirs(backup_day_dir)
+ except:
+ # If exists, clean it out
+ os.system("rm -rf %s/*" % (backup_day_dir))
+ pass
+ os.makedirs(os.path.join(backup_day_dir,'data'))
+
+ print("*** Backup dir[%s]='%s' ***" % (weekday_num,backup_day_dir))
+ print("* Copy logs")
+ for logname in log_name_list:
+ logname_base = os.path.basename(logname)
+ # Backup log file
+ cmd = 'echo "=== Backup: %s ===" > %s' % (date_str,os.path.join(backup_day_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ cmd = 'cat %s | grep -v "/srtgui/mostrecentjobs?format=json" >> %s' % (os.path.join(script_pathname,logname),os.path.join(backup_day_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ # Reset log file to empty
+ cmd = 'echo "" > %s' % (os.path.join(script_pathname,logname))
+ print(cmd)
+ os.system(cmd)
+
+ # If week's end, perform the weekly backup
+ if (6 == weekday) or force_update:
+ backup_week_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,year,weeknum))
+ if not os.path.isdir(backup_week_dir):
+ os.makedirs(backup_week_dir)
+ day_names = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
+ concat = '>'
+ for weekday_num,weekday in enumerate(day_names):
+ backup_day_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,weekday))
+ if os.path.isdir(backup_day_dir):
+ # Concatinate the respective logs
+ for logname in log_name_list:
+ logname_base = os.path.basename(logname)
+ cmd = 'cat %s %s %s' % (os.path.join(backup_day_dir,logname_base),concat,os.path.join(backup_week_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ concat = '>>'
+
+#######################################################################
+# list
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(script_pathname, 'backups')):
+ prefix = '1Week' if not directory[len(BACKUP_PREFIX)].isalpha() else '2Day'
+ directory = os.path.join(script_pathname, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = '3Now'
+ directory = script_pathname
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+ # Sort my time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
+def list(db_list=False):
+ stamps = backup_list()
+ for stamp in stamps:
+ # Insert a separator between the date and the time
+ stamp[2] = stamp[2].replace(' ','|',1)
+ if db_list:
+ print("%s|%s|%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+ else:
+ snap_date,snap_time,snap_day = stamp[2].split('|')
+ print("%-4s,%-16s,%s" % (stamp[0][1:],os.path.basename(stamp[1]),stamp[2].replace(' | ','|')))
+
+#################################
+# Init stamps
+#
+
+def init_stamps():
+ stamps = backup_list()
+ for stamp in stamps:
+ stamp_prefix, stamp_directory, stamp_line = stamp
+ backup_stamp(stamp_directory)
+
#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
global cmd_count
+ global force_update
# setup
parser = argparse.ArgumentParser(description='srtool_backup.py: backup the SRTool database')
parser.add_argument('--backup-db', '-b', action='store_const', const='backup', dest='command', help='Backup the database, save to year_weeknum dir')
parser.add_argument('--backup-db-daily', '-d', action='store_const', const='backup-daily', dest='command', help='Backup the database, save to weekday dir')
+ parser.add_argument('--backup-logs', '-B', action='store_const', const='backup_logs', dest='command', help='Backup the logs, save to year_weeknum and weekday dir')
+
+ parser.add_argument('--init-stamps', '-I', action='store_const', const='init-stamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--init-dir-stamp', '-D', dest='init_dir_stamp', help='Initialize a specific backup directory timestamp')
+
+ parser.add_argument('--list-backups', '-l', action='store_const', const='list', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--list-backups-db', '-L', action='store_const', const='list-db', dest='command', help='Dump the backup directory timestamps')
+
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
@@ -121,6 +304,7 @@ def main(argv):
master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
verbose = args.verbose
+ force_update = args.force
cmd_skip = 0
if None != args.skip:
cmd_skip = int(args.skip)
@@ -136,6 +320,22 @@ def main(argv):
except Exception as e:
print ("DATABASE BACKUP FAILED ... %s" % e)
master_log.write("SRTOOL:%s:DATABASE BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
+
+ elif 'backup_logs' == args.command:
+ try:
+ backup_logs()
+ except Exception as e:
+ print ("Log BACKUP FAILED ... %s" % e)
+ master_log.write("SRTOOL:%s:LOGS BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
+ elif 'list' == args.command:
+ list()
+ elif 'list-db' == args.command:
+ list(True)
+ elif 'init-stamps' == args.command:
+ init_stamps()
+ elif args.init_dir_stamp:
+ backup_stamp(args.init_dir_stamp)
+
else:
print("Command not found")
master_log.close()
diff --git a/bin/common/srtool_common.py b/bin/common/srtool_common.py
index 13b5893d..b9765cfa 100755
--- a/bin/common/srtool_common.py
+++ b/bin/common/srtool_common.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -32,7 +32,6 @@ import re
import csv
import json
import argparse
-import sqlite3
import subprocess
from time import sleep
from datetime import datetime
@@ -40,8 +39,14 @@ from datetime import datetime
# Load the srt.sqlite schema index file
# Since it is generated from this script
# it may not exist on the first pass
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srtool_progress import *
+from common.srtool_sql import *
+is_orm = False
try:
- from srt_schema import ORM
+ from common.srt_schema import ORM
+ is_orm = True
except:
# Do a pass so that '--generate-schema-header' can fix it
print("Warning: srt_schema not yet created or bad format")
@@ -51,6 +56,9 @@ except:
verbose = False
cmd_skip = 0
cmd_count = 0
+cmd_test = False
+debug_sql = False
+is_progress = False
srtDbName = 'srt.sqlite'
packageKeywordsFile = 'data/package_keywords.csv'
@@ -85,6 +93,33 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
+def get_tag_key(tag,key,default=None):
+ d = json.loads(tag)
+ if key in d:
+ return d[key]
+ return default
+
+#################################
+# Global error log registration
+#
+
+def log_error(description,severity=99):
+ # Bootstrap protection if ORM not yet generated
+ if not is_orm:
+ return
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ if (severity < int(ORM.ERRORLOG_INFO)) or (severity > int(ORM.ERRORLOG_ERROR)):
+ severity = int(ORM.ERRORLOG_ERROR)
+ sql = ''' INSERT INTO orm_errorlog (severity,description,srt_created) VALUES (?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (severity,description,datetime.now()))
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
#################################
# Load the package keyword source into the database
#
@@ -101,8 +136,8 @@ def init_package_keywords(filename):
print("ERROR: DB NOT FOUND '%s'" % filename)
return
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
is_first_row = True
lookupTable = []
@@ -134,26 +169,28 @@ def init_package_keywords(filename):
# ARG!: we have to use an escaped "LIKE", because even simple 'WHERE' applies
# wild card on random '-' in the text
sql = '''SELECT 1 FROM orm_package WHERE name LIKE ? ESCAPE '-' '''
- package = cur.execute(sql, ( name, )).fetchone()
+ package = SQL_EXECUTE(cur, sql, ( name, )).fetchone()
PACKAGE_ID = 0
if package is None:
- sql = ''' INSERT into orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (mode,name,realname,invalidname,weight,0,0,0,0))
+ sql = ''' INSERT INTO orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (mode,name,realname,invalidname,weight,0,0,0,0))
else:
sql = ''' UPDATE orm_package
SET mode = ?, realname = ?, invalidname = ?, weight = ?
WHERE id = ?'''
- cur.execute(sql, (mode,realname,invalidname,weight,package[PACKAGE_ID]))
+ SQL_EXECUTE(cur, sql, (mode,realname,invalidname,weight,package[PACKAGE_ID]))
- if 0 == (i % 10):
- print("%04d:%30s\r" % (i,name), end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if 0 == (i % 10):
+ print("%04d:%30s" % (i,name), end='\r')
i += 1
print("%04d:%30s" % (i,name))
- conn.commit()
- cur.close()
- conn.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#################################
# Score new CVEs for the triage review
@@ -241,9 +278,9 @@ def attach_packages(cur, cve, recommend_list):
# Find or create a package record (WARNING: some package names have <'>)
pkg_name = pkg_name.replace('"',"'")
- sql = '''SELECT * FROM orm_package where name = "%s" AND mode = "%s";''' % (pkg_name,mode)
+ sql = f"""SELECT * FROM orm_package where name = '{pkg_name}' AND mode = '{mode}';"""
if verbose: print("PKG_TEST:%s" % sql)
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
package = cur.fetchone()
if package:
if verbose: print("FOUND PACKAGE ID for %s" % (pkg_name))
@@ -252,17 +289,17 @@ def attach_packages(cur, cve, recommend_list):
# Create Package
if verbose: print("INSERTING PACKAGE for %s,%s" % (cve[ORM.CVE_NAME],pkg_name))
sql = '''INSERT INTO orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (mode, pkg_name, pkg_name, '', 1 if FOR==mode else -1,0,0,0,0,))
- pkg_id = cur.lastrowid
+ SQL_EXECUTE(cur, sql, (mode, pkg_name, pkg_name, '', 1 if FOR==mode else -1,0,0,0,0,))
+ pkg_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
# Also create Package2CVE
sql = "SELECT * FROM orm_packagetocve where package_id = '%s' AND cve_id = '%s';" % (pkg_id,cve_id)
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
package2cve = cur.fetchone()
if not package2cve:
- AFFECTED = 0
- RELATED = 1
+ AFFECTED = False
+ RELATED = True
sql = '''INSERT INTO orm_packagetocve (package_id, cve_id, applicable) VALUES (?,?,?)'''
- cur.execute(sql, (pkg_id,cve_id,AFFECTED))
+ SQL_EXECUTE(cur, sql, (pkg_id,cve_id,AFFECTED))
# Add FOR packages to field in CVE
if FOR == mode:
@@ -277,14 +314,17 @@ def score_new_cves(cve_filter):
global recommends
global cmd_skip
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
- cur_ds = conn.cursor()
+ if debug_sql:
+ SQL_DEBUG(True,'SCR')
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
+ cur_ds = SQL_CURSOR(conn)
# Load the package filter table
sql = "SELECT * FROM orm_package"
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
for package in cur:
# Fixup notation not intended to be regex
name = package[ORM.PACKAGE_NAME].replace('++',r'\+\+')
@@ -293,17 +333,18 @@ def score_new_cves(cve_filter):
# Scan the open CVEs
if 'NEW' == cve_filter:
- sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s');" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
- cur.execute(sql)
+# sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s') AND score_date IS NULL;" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
+ sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NULL;" % (ORM.STATUS_NEW)
+ SQL_EXECUTE(cur, sql)
elif cve_filter.startswith('CVE-'):
- cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
+ SQL_EXECUTE(cur, 'SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
else:
print("ERROR: Unrecognized filter '%s'" % filter)
exit(1)
# Pre-gather the potential data sources
sql = "SELECT * FROM orm_datasource WHERE data = ?"
- cur_ds.execute(sql, ('cve',))
+ SQL_EXECUTE(cur_ds, sql, ('cve',))
ds_list = []
for ds in cur_ds:
if not "ALT-SOURCE" in ds[ORM.DATASOURCE_ATTRIBUTES]:
@@ -318,70 +359,94 @@ def score_new_cves(cve_filter):
record_count = 0
write_count = 0
ds_count = 0
+ is_change = False
time_now = datetime.now()
+
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+ progress_set_max(len(cur))
+
for i,cve in enumerate(cur):
cve_name = cve[ORM.CVE_NAME]
- if cve[ORM.CVE_SCORE_DATE]:
- #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
- # If there is any score_date, then nothing to do here
- continue
+ print("C=%s,I=%s" % (cve_name,i), file=sys.stderr)
+ sys.stdout.flush()
+ if 0 == (i % 10):
+ progress_set_current(i)
+ progress_show(cve_name)
+
+# if cve[ORM.CVE_SCORE_DATE]:
+# #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
+# # If there is any score_date, then nothing to do here
+# continue
+#
# Progress indicator support
- if 0 == i % 10:
- print('%04d: %20s\r' % (i,cve_name), end='')
- if (0 == i % 200) and not cmd_skip:
- conn.commit()
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if 0 == i % 10:
+ print('%04d: %20s' % (i,cve_name), end='\r')
+ if (0 == i % 200) and (not cmd_skip) and is_change:
print("%4d: COMMIT" % i)
- sleep(2)
+ sys.stdout.flush()
+ SQL_COMMIT(conn)
+ #sleep(0.5)
+ is_change = False
# Development/debug support
- if cmd_skip:
- if i < cmd_skip:
- continue
- else:
- cmd_skip = 0
- if cmd_count:
- if record_count < cmd_count:
- record_count += 1
- else:
- print("Count return: %s,%s,%s" % (i,record_count,cmd_count))
- break
+ if cmd_skip > i:
+ continue
+ if cmd_count < (i - cmd_skip):
+ print("Count return: %s,%s" % (i,cmd_count))
+ sys.stdout.flush()
+ break
if verbose: print("TEST CVE = %20s" % (cve[ORM.CVE_NAME]))
recommend,recommend_list = compute_recommends(cve)
cve_packages = ''
if recommend_list:
-
# Go ahead and create/attach packages to CVEs
cve_packages = attach_packages(cur_write, cve, recommend_list)
- #cve_packages = cve[ORM.CVE_PACKAGES]
+ else:
+ cve_packages = cve[ORM.CVE_PACKAGES]
- sql = ''' UPDATE orm_cve
- SET recommend = ?,
- recommend_list = ?,
- packages = ?,
- score_date = ?
- WHERE id = ?'''
- cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT), cve[ORM.CVE_ID]))
- write_count += 1
+ # Always set score_date since it has been evaluated
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?,
+ recommend_list = ?,
+ packages = ?,
+ score_date = ?
+ WHERE id = ?'''
+ SQL_EXECUTE(cur_write, sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATE_FORMAT), cve[ORM.CVE_ID]))
+ write_count += 1
+ is_change = True
- if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
+# if verbose: print(" %d:%s:%s" % (recommend,recommend_list,cve_packages))
# Attach all matching CVE sources
for ds_obj in ds_list:
if cve[ORM.CVE_NAME].startswith(ds_obj['filter']):
#print(" Alternate CVE source %s for %s " % (ds_obj['id'],cve[ORM.CVE_ID]))
sql = ''' SELECT * FROM orm_cvesource WHERE cve_id = ? AND datasource_id = ?'''
- if not cur_write.execute(sql, (cve[ORM.CVE_ID],ds_obj['id'],)).fetchone():
+ if not SQL_EXECUTE(cur_write, sql, (cve[ORM.CVE_ID],ds_obj['id'],)).fetchone():
### TO-DO: only add sources that have CVE matches
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id ) VALUES (?, ?)'''
- cur_write.execute(sql, (cve[ORM.CVE_ID],ds_obj['id']))
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id ) VALUES (?, ?)'''
+ SQL_EXECUTE(cur_write, sql, (cve[ORM.CVE_ID],ds_obj['id']))
ds_count += 1
- print("%30sADDED [%4d]: %20s <- %20s\r" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='')
+ if verbose: print("%30sADDED [%4d]: %20s <- %20s" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='\r')
- conn.commit()
- print("COMMIT")
+ if is_change:
+ SQL_COMMIT(conn)
+ print("COMMIT")
print("\nUpdated CVEs=%d, Added alternate sources=%d" % (write_count,ds_count))
+ sys.stdout.flush()
+ # End progress
+ progress_done('Done')
+ # Dump the SQL transaction data
+ if debug_sql:
+ SQL_DUMP()
#################################
# init_notify_categories
@@ -391,25 +456,483 @@ def init_notify_categories(filename):
with open(filename) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
Category_Items = dct['Notify_Categories']
for i,category in enumerate(Category_Items):
if verbose: print("%s" % category['name'])
category_name = str(category['name'])
- sql = '''SELECT * FROM orm_notifycategories where category = "%s";''' % (category_name)
- nc = cur.execute(sql).fetchone()
+ sql = f"""SELECT * FROM orm_notifycategories where category = '{category_name}';"""
+ nc = SQL_EXECUTE(cur, sql).fetchone()
if not nc:
sql = '''INSERT INTO orm_notifycategories (category) VALUES (?)'''
# REMINDER: we need the ',' else the 'category_name' will be seen as an array of chars
- cur.execute(sql, (category_name,))
+ SQL_EXECUTE(cur, sql, (category_name,))
else:
if verbose: print("FOUND_CATEGORY:%s" % category['name'])
pass
- conn.commit()
- cur.close()
- conn.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# Update cumulative Cve/Vulnerability/Investigation status
+#
+# * Scan the respective child Vulnerabilities/Investigations/Defects, and
+# sum them into cumulative status for parent
+# * Rules for Status:
+# If any child is VULNERABLE, then the parent is VULNERABLE
+# else if any child is INVESTIGATE, then the parent is INVESTIGATE
+# else if any child is NEW, then the parent is INVESTIGATE
+# else the parent is NOT_VULNERABLE
+# * Exceptions:
+# Children that are 'ORM.STATUS_HISTORICAL' or 'ORM.STATUS_NEW_RESERVED' have no vote
+# If there are no children, or no children with votes, then the status is left unchanged
+# * Rules for Priority:
+# If any child has a higher priority, that priority is used
+#
+
+def _update_cve_status(cur,cve,srtool_today,update_skip_history):
+ if verbose: print("Cve:%s:%s" % (cve[ORM.CVE_NAME],ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if cve[ORM.CVE_STATUS_LOCK]:
+ # return
+
+ # Get the CVE's Vulnerabilities
+ cve_priority = cve[ORM.CVE_PRIORITY]
+ cve_status = None
+ vote_count = 0
+ cve2vuls = SQL_EXECUTE(cur, "SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ # Compute Status
+ status = vulnerability[ORM.VULNERABILITY_STATUS]
+ if verbose: print(" %s,%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and cve_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ cve_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if cve_priority < vulnerability[ORM.VULNERABILITY_PRIORITY]:
+ cve_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == cve_status:
+ cve_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if cve[ORM.CVE_STATUS] != cve_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(cve_status,ORM.STATUS_STR)))
+ if cve[ORM.CVE_PRIORITY] < cve_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(cve[ORM.CVE_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(cve_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change CVE:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_cve SET status=?, priority=?, srt_updated=? WHERE id=?"
+ SQL_EXECUTE(cur, sql, (cve_status,cve_priority,srtool_today,cve[ORM.CVE_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from vulnerabilities')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (cve[ORM.CVE_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_cve_status(cve_list,update_skip_history):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ srtool_today = datetime.today()
+
+ if 'all' == cve_list:
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve").fetchall()
+ else:
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status:count=%d" % (len(cves)))
+
+ i = 0
+ for cve in cves:
+
+ # Leave "New" CVEs to Triage
+ if ORM.STATUS_NEW == cve[ORM.CVE_STATUS]:
+ continue
+
+ _update_cve_status(cur,cve,srtool_today,update_skip_history)
+ i += 1
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,cve[ORM.CVE_NAME]),end='\r')
+ if (0 == i % 200):
+ SQL_COMMIT(conn)
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+# Indexes into the product table cache
+PRODUCT_DICT_KEY = 0
+PRODUCT_DICT_TAG = 1
+
+def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history):
+ if verbose: print("Vulnerability:%s:%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if vulnerability[ORM.VULNERABILITY_STATUS_LOCK]:
+ # return
+
+ # Get the Vulnerability's Investigations
+ vulnerability_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
+ vulnerability_status = None
+ vote_count = 0
+ vul2invs = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability[ORM.VULNERABILITY_ID]).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+
+ # For now, only calculate the "Public Status", so skip non-supported products
+ product_mode = get_tag_key(product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_TAG],'mode')
+ if 'support' != product_mode:
+ if verbose: print(" SKIP:Product %s is mode=%s" % (product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_KEY],product_mode))
+ continue
+
+ # Compute Status
+ status = investigation[ORM.INVESTIGATION_STATUS]
+ if verbose: print(" %s,%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and vulnerability_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ vulnerability_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if vulnerability_priority < investigation[ORM.INVESTIGATION_PRIORITY]:
+ vulnerability_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == vulnerability_status:
+ vulnerability_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+
+ # Update status
+ history_update = []
+ if vulnerability[ORM.VULNERABILITY_STATUS] != vulnerability_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(vulnerability_status,ORM.STATUS_STR)))
+ if vulnerability[ORM.VULNERABILITY_PRIORITY] < vulnerability_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(vulnerability[ORM.VULNERABILITY_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(vulnerability_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Vulnerability:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_vulnerability SET status=?, priority=?, srt_updated=? WHERE id=?"
+ SQL_EXECUTE(cur, sql, (vulnerability_status,vulnerability_priority,srtool_today,vulnerability[ORM.VULNERABILITY_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from investigations')
+ sql = '''INSERT INTO orm_vulnerabilityhistory (vulnerability_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (vulnerability[ORM.VULNERABILITY_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_vulnerability_status(vulnerability_list,update_skip_history):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ srtool_today = datetime.today()
+
+ # Pre-gather and cache the product information
+ product_dict = {}
+ products = SQL_EXECUTE(cur, "SELECT * FROM orm_product").fetchall()
+ for product in products:
+ product_dict[ product[ORM.PRODUCT_ID] ] = [product[ORM.PRODUCT_KEY],product[ORM.PRODUCT_PRODUCT_TAGS]]
+
+ if 'all' == vulnerability_list:
+ vulnerabilities = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability").fetchall()
+ else:
+ vulnerability_paren_list = str(vulnerability_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list)
+ vulnerabilities = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list).fetchall()
+
+ i = 0
+ for vulnerability in vulnerabilities:
+ _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history)
+ i += 1
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,vulnerability[ORM.VULNERABILITY_NAME]),end='\r')
+ if (0 == i % 200):
+ SQL_COMMIT(conn)
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("%5d:" % (i))
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+
+def _update_investigation_status(cur,investigation,srtool_today,update_skip_history):
+ if verbose: print("Investigation:%s:%s" % (investigation[ORM.INVESTIGATION_NAME],ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR)))
+ # Is status locked?
+ # if investigation[ORM.INVESTIGATION_STATUS_LOCK]:
+ # return
+
+ # Get the Investigation's Defects
+ investigation_priority = investigation[ORM.INVESTIGATION_PRIORITY]
+ investigation_status = None
+ vote_count = 0
+ inv2defs = SQL_EXECUTE(cur, "SELECT * FROM orm_investigationtodefect where investigation_id = '%s'" % investigation[ORM.INVESTIGATION_ID]).fetchall()
+ for inv2def in inv2defs:
+ defect_id = inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
+ defect = SQL_EXECUTE(cur, "SELECT * FROM orm_defect where id = '%s'" % defect_id).fetchone()
+ # Compute Status
+ status = defect[ORM.DEFECT_SRT_STATUS]
+ if verbose: print(" %s,%s" % (defect[ORM.DEFECT_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
+ if ORM.STATUS_VULNERABLE == status:
+ if verbose: print(" %s => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_VULNERABLE
+ vote_count += 1
+ break
+ elif status in (ORM.STATUS_INVESTIGATE,ORM.STATUS_NEW) and investigation_status in (None,ORM.STATUS_INVESTIGATE):
+ if verbose: print(" %s => (%s),%s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR),ORM.get_orm_string(status,ORM.STATUS_STR),ORM.get_orm_string(ORM.STATUS_INVESTIGATE,ORM.STATUS_STR)))
+ investigation_status = ORM.STATUS_INVESTIGATE
+ vote_count += 1
+ elif ORM.STATUS_NOT_VULNERABLE == status:
+ # tentative not vulnerable
+ vote_count += 1
+ continue
+ else:
+ # Non-voting status: Active:Historical,New-Reserved Inactive:(New),(Investigate),(Vulnerable),Vulnerable)
+ continue
+ # Compute Priority
+ if investigation_priority < defect[ORM.DEFECT_SRT_PRIORITY]:
+ investigation_priority = defect[ORM.DEFECT_SRT_PRIORITY]
+
+ # If no votes, skip and leave existing status
+ if 0 == vote_count:
+ if verbose: print(" No votes:skip")
+ return
+ # if no votes away from 'not vulnerable', defer to 'not vulnerable'
+ if None == investigation_status:
+ investigation_status = ORM.STATUS_NOT_VULNERABLE
+ if verbose: print(" defer => %s" % (ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+
+ investigation_outcome = None
+ for inv2def in inv2defs:
+ outcome = defect[ORM.DEFECT_SRT_OUTCOME]
+ if (ORM.OUTCOME_OPEN == outcome) or (ORM.OUTCOME_OPEN == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_OPEN
+ continue
+ if (ORM.OUTCOME_FIXED == outcome) or (ORM.OUTCOME_FIXED == investigation_outcome):
+ investigation_outcome = ORM.OUTCOME_FIXED
+ continue
+ # ORM.OUTCOME_CLOSED
+ # ORM.OUTCOME_NOT_FIX
+ investigation_outcome = outcome
+
+ if not investigation_outcome:
+ investigation_outcome = investigation[ORM.INVESTIGATION_OUTCOME]
+
+
+ ### TO_DO: DOUBLE CHECK
+ if False:
+ ### WIND_RIVER_EXTENSION_BEGIN ###
+ # FIXUP: Status: overwrite if new is Fixed and old isn't "VULNERABLE"
+ update_fixup = ('Fixed' == jira_resolution) and (ORM.STATUS_VULNERABLE != cve[ORM.CVE_STATUS])
+ ### WIND_RIVER_EXTENSION_END ###
+
+
+
+ # Update status
+ history_update = []
+ if investigation[ORM.INVESTIGATION_STATUS] != investigation_status:
+ history_update.append(ORM.UPDATE_STATUS % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR),
+ ORM.get_orm_string(investigation_status,ORM.STATUS_STR)))
+ if investigation[ORM.INVESTIGATION_OUTCOME] != investigation_outcome:
+ history_update.append(ORM.UPDATE_OUTCOME % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_OUTCOME],ORM.OUTCOME_STR),
+ ORM.get_orm_string(investigation_outcome,ORM.OUTCOME_STR)))
+ if investigation[ORM.INVESTIGATION_PRIORITY] < investigation_priority:
+ history_update.append(ORM.UPDATE_PRIORITY % (
+ ORM.get_orm_string(investigation[ORM.INVESTIGATION_PRIORITY],ORM.PRIORITY_STR),
+ ORM.get_orm_string(investigation_priority,ORM.PRIORITY_STR)))
+ if history_update:
+ if verbose: print(" Change Investigation:%s" % ';'.join(history_update))
+ if not cmd_test:
+ sql = "UPDATE orm_investigation SET status=?, outcome=?, priority=?, srt_updated=? WHERE id=?"
+ SQL_EXECUTE(cur, sql, (investigation_status,investigation_outcome,investigation_priority,srtool_today,investigation[ORM.INVESTIGATION_ID],) )
+ if not update_skip_history:
+ # Add status update in history
+ update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from defects')
+ sql = '''INSERT INTO orm_investigationhistory (investigation_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (investigation[ORM.INVESTIGATION_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+
+ # Create notification
+ ### TO-DO
+ pass
+ else:
+ if verbose: print(" No status change needed!")
+
+def update_investigation_status(investigation_list,update_skip_history):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ srtool_today = datetime.today()
+
+ if 'all' == investigation_list:
+ investigations = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation").fetchall()
+ else:
+ investigation_paren_list = str(investigation_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list)
+ investigations = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list).fetchall()
+
+ i = 0
+ for investigation in investigations:
+ _update_investigation_status(cur,investigation,srtool_today,update_skip_history)
+ i += 1
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,investigation[ORM.INVESTIGATION_NAME]),end='\r')
+ if (0 == i % 200):
+ SQL_COMMIT(conn)
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+# This routine is intended for incremental cumulative status updates
+def update_cve_status_tree(cve_list,update_skip_history):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ if 'all' == cve_list:
+ # global cumulative update
+ update_investigation_status('all', update_skip_history)
+ update_vulnerability_status('all', update_skip_history)
+ update_cve_status('all', update_skip_history)
+ return
+
+ # Perform a deep update on the CVEs, their vulnerabilities, and their investigations
+ cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
+ if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+
+ if verbose: print("ACTION:update_cve_status_tree:count=%d" % (len(cves)))
+
+ i = 0
+ cve_list = []
+ for cve in cves:
+ cve_list.append(cve[ORM.CVE_NAME])
+ vulnerability_list = []
+ investigation_list = []
+
+ cve2vuls = SQL_EXECUTE(cur, "SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ for cve2vul in cve2vuls:
+ vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ vulnerability = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ vulnerability_list.append(vulnerability[ORM.VULNERABILITY_NAME])
+
+ vul2invs = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability_id).fetchall()
+ for vul2inv in vul2invs:
+ investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
+ investigation = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+ investigation_list.append(investigation[ORM.INVESTIGATION_NAME])
+
+ # Update the CVE's children status
+ update_investigation_status(','.join(investigation_list), update_skip_history)
+ update_vulnerability_status(','.join(vulnerability_list), update_skip_history)
+
+ # Children are updated, now update the CVEs
+ update_cve_status(','.join(cve_list), update_skip_history)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# import_recipe_names
+#
+# Import the common recipe names from text file
+# Use for CVE triage affected component typeahead
+#
+
+def import_recipe_names():
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ # Preclear the recipe table
+ sql = 'DELETE FROM orm_recipetable'
+ SQL_EXECUTE(cur, sql)
+
+ # Load the recipe list (derived from Layer Index)
+ with open('data/recipe_names_from_layer_index.txt') as f:
+ for line in f:
+ recipe = line[:-1]
+ sql = "SELECT * FROM orm_recipetable WHERE recipe_name = '%s'" % (recipe)
+ cvi = SQL_EXECUTE(cur, sql).fetchone()
+ if not cvi:
+ sql = '''INSERT INTO orm_recipetable (recipe_name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (recipe,))
+
+ SQL_COMMIT(conn)
#################################
# Generate database schema offsets
@@ -419,21 +942,39 @@ def init_notify_categories(filename):
# CREATE TABLE "orm_notifycategories" ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, "category" varchar(50) NULL);
# ...
-def gen_schema_header():
+
+def gen_schema_header(database_dir,schema_dir):
+
+ database_file = os.path.join(database_dir, 'srt.sqlite')
+ schema_file = os.path.join(schema_dir, 'srt_schema.py')
+
+ # Fetch USER_SRTOOL_ID
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ USER_SRTOOL_NAME = 'SRTool'
+ user = SQL_EXECUTE(cur, "SELECT * FROM users_srtuser where username = '%s'" % USER_SRTOOL_NAME).fetchone()
+ USER_SRTOOL_ID = user[0] # Hardcoded 'ORM.USERS_SRTUSER_ID'
+ SQL_CLOSE_CONN(conn)
+
create_re = re.compile(r"CREATE TABLE[A-Z ]* \"(\w+)\" \((.+)\);")
try:
- cmd = ('sqlite3', os.path.join(srtool_basepath, 'srt.sqlite'), '.schema')
+ cmd = ('sqlite3', database_file, '.schema')
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
print("ERROR(%d): %s" % (e.returncode, e.output))
return
- with open(os.path.join(srtool_basepath,'bin/common/srt_schema.py'), 'w') as fd:
+
+
+ with open(schema_file, 'w') as fd:
fd.write("# SRTool database table schema indexes\n")
fd.write("# Generated by: './bin/common/srtool_common.py --generate-schema-header'\n")
fd.write("# Should be run after any schema changes to sync commandline tools\n")
fd.write("\n")
fd.write("class ORM():\n")
+ fd.write(" USER_SRTOOL_NAME = '%s'\n" % USER_SRTOOL_NAME)
+ fd.write(" USER_SRTOOL_ID = %d\n" % USER_SRTOOL_ID)
+
for line in output.decode("utf-8").splitlines():
match = create_re.match(line)
if not match:
@@ -446,18 +987,31 @@ def gen_schema_header():
for i, col in enumerate(columns.split(',')):
col = col.strip()
name = col[1:]
- name = name[:name.index('"')]
+ #
+ try:
+ name = name[:name.index('"')]
+ except Exception as e:
+ print("ERROR:%s:%s:" % (e,col))
+ name = col[:col.index(' ')]
+
#print("%s_%s = %d" % (table.upper(),name.upper(),i))
fd.write(" %s_%s = %d\n" % (table.upper(),name.upper(),i))
+ schema_indices = gen_schema_indices()
+ fd.write(schema_indices)
+
+ #
+ # Common SRTool Status Mappings
+ #
+
fd.write("\n # Shared Constants\n")
fd.write(" %s_%s = %d\n" % ('PRIORITY','UNDEFINED',0))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MINOR' ,1))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,2))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,3))
- fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,4))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('PRIORITY','CRITICAL' ,4))
fd.write(" %s = '%s'\n" % ('PRIORITY_STR', \
- 'Undefined,Minor,Low,Medium,High' \
+ 'UNDEFINED,Low,Medium,High,Critical' \
))
fd.write(" %s_%s = %d\n" % ('STATUS','HISTORICAL' ,0))
@@ -466,8 +1020,12 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE' ,3))
fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE' ,4))
fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE',5))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NEW_INACTIVE' ,6))
+ fd.write(" %s_%s = %d\n" % ('STATUS','INVESTIGATE_INACTIVE' ,7))
+ fd.write(" %s_%s = %d\n" % ('STATUS','VULNERABLE_INACTIVE' ,8))
+ fd.write(" %s_%s = %d\n" % ('STATUS','NOT_VULNERABLE_INACTIVE',9))
fd.write(" %s = '%s'\n" % ('STATUS_STR', \
- 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable' \
+ 'Historical,New,New_Reserved,Investigate,Vulnerable,Not_Vulnerable,(New),(Investigate),(Vulnerable),(Not Vulnerable)' \
))
fd.write(" %s_%s = %d\n" % ('PUBLISH','UNPUBLISHED',0))
@@ -488,6 +1046,10 @@ def gen_schema_header():
'Open,Closed,Fixed,Not_Fix' \
))
+ #
+ # External Defect Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DEFECT','UNRESOLVED' ,0))
fd.write(" %s_%s = %d\n" % ('DEFECT','RESOLVED' ,1))
fd.write(" %s_%s = %d\n" % ('DEFECT','FIXED' ,2))
@@ -500,12 +1062,39 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('DEFECT','CANNOT_REPRODUCE' ,9))
fd.write(" %s_%s = %d\n" % ('DEFECT','DONE' ,10))
fd.write(" %s_%s = '%s'\n" % ('DEFECT','RESOLUTION_STR', \
- 'Unresolved,Resolved,Fixed,Will_Not_Fix,Withdrawn,Rejected,Duplicate,Not_Applicable,Replaced_By_Requirement,Cannot_Reproduce,Done' \
+ 'Unresolved,Resolved,Fixed,Will Not Fix,Withdrawn,Rejected,Duplicate,Not Applicable,Replaced By Requirement,Cannot Reproduce,Done' \
+ ))
+
+ fd.write(" %s_%s = %d\n" % ('DEFECT','UNDEFINED',0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','LOW' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','MEDIUM' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','HIGH' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','CRITICAL' ,4))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','PRIORITY_STR', \
+ 'UNDEFINED,P4,P3,P2,P1' \
+ ))
+
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_OPEN' ,0))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_IN_PROGRESS' ,1))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_ON_HOLD' ,2))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CHECKED_IN' ,3))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_RESOLVED' ,4))
+ fd.write(" %s_%s = %d\n" % ('DEFECT','STATUS_CLOSED' ,5))
+ fd.write(" %s_%s = '%s'\n" % ('DEFECT','STATUS_STR', \
+ 'Open,In progress,On Hold,Checked In,Resolved,Closed' \
))
+ #
+ # Package Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('PACKAGE','FOR' ,0))
fd.write(" %s_%s = %d\n" % ('PACKAGE','AGAINST' ,1))
+ #
+ # Data source Record Mappings
+ #
+
fd.write(" %s_%s = %d\n" % ('DATASOURCE','MINUTELY' ,0))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','HOURLY' ,1))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','DAILY' ,2))
@@ -513,12 +1102,84 @@ def gen_schema_header():
fd.write(" %s_%s = %d\n" % ('DATASOURCE','MONTHLY' ,4))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','ONDEMAND' ,5))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','ONSTARTUP' ,6))
+ fd.write(" %s_%s = %d\n" % ('DATASOURCE','PREINIT' ,7))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','FREQUENCY_STR', \
- 'Minute,Hourly,Daily,Weekly,Monthly,OnDemand,OnStartup' \
+ 'Minute,Hourly,Daily,Weekly,Monthly,OnDemand,OnStartup,PreInit' \
))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATE_FORMAT','%Y-%m-%d'))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATETIME_FORMAT','%Y-%m-%d %H:%M:%S'))
+ #
+ # Job Status Mappings
+ #
+
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','NOTSTARTED' ,0))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','INPROGRESS' ,1))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','SUCCESS' ,2))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','ERRORS' ,3))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','CANCELLED' ,4))
+ fd.write(" %s_%s = '%s'\n" % ('JOB_STATUS','STR', \
+ 'NotStarted,InProgress,Success,Errors,Cancelled' \
+ ))
+
+ #
+ # Update class Mappings
+ #
+
+ fd.write("\n\n")
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','UPDATE_STR','UPDATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','CREATE_STR','CREATE(%s):'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_USER','User'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_TRIAGE','Triage'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_CVE','CVE'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SOURCE_DEFECT','Defect'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NEW_NAME','New_Name(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIORITY','Priority(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','STATUS','Status(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V3','Severity_V3(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','SEVERITY_V2','Severity_V2(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DESCRIPTION','Description()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','LASTMODIFIEDDATE','LastModifiedDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','OUTCOME','Outcome(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','RELEASE','Release(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','NOTE','User_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PRIVATE_NOTE','Private_Note()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','TAG','Tag()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_STATE','Publish_State(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','PUBLISH_DATE','Publish_Date(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ACKNOWLEDGE_DATE','AcknowledgeDate(%s,%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_CVE','Attach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_CVE','Detach_CVE(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_VUL','Attach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_VUL','Detach_Vulnerability(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_INV','Attach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_INV','Detach_Investigration(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DEV','Attach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DEV','Detach_Defect(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_DOC','Attach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_DOC','Detach_Document(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_USER_NOTIFY','Attach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_USER_NOTIFY','Detach_User_Notify(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_ACCESS','Attach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_ACCESS','Detach_Access(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','ATTACH_PRODUCT','Attach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','DETACH_PRODUCT','Detach_Product(%s)'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_NEW','Mark_New()'))
+ fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_UPDATED','Mark_Updated()'))
+
+ #
+ # ErrorLog class Mappings
+ #
+
+ fd.write("\n\n")
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','INFO',0))
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','WARNING',1))
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','ERROR',2))
+
+ #
+ # Helper routine to map values to string names
+ #
+
fd.write("\n\n")
fd.write(" # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')\n")
fd.write(" @staticmethod\n")
@@ -536,63 +1197,17 @@ def gen_schema_header():
fd.write("\n")
#################################
-# fixups
-#
-
-# Recompute all of the CVE name_sort fields
-def fix_name_sort():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve')
- for i,cve in enumerate(cur):
- name_sort = get_name_sort(cve[ORM.CVE_NAME])
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET name_sort = ?
- WHERE id = ?'''
- cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
- conn.commit()
-
-# Reset empty CVE recommend fields to the proper integer zero
-def fix_cve_recommend():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
- cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
- i = 0
- for cve in cur:
- i += 1
-
- # Progress indicator support
- if 0 == i % 10:
- print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
- if (0 == i % 200):
- conn.commit()
-
- sql = ''' UPDATE orm_cve
- SET recommend = ?
- WHERE id = ?'''
- cur_write.execute(sql, (0, cve[ORM.CVE_ID],))
- print("CVE RECOMMEND FIX COUNT=%d" % i)
- conn.commit()
-
-#################################
# main loop
#
def main(argv):
global verbose
+ global update_skip_history
global cmd_skip
global cmd_count
+ global cmd_test
+ global debug_sql
+ global is_progress
# setup
parser = argparse.ArgumentParser(description='srtool_common.py: manage SRTool common source data')
@@ -600,15 +1215,27 @@ def main(argv):
parser.add_argument('--init-notify-categories', '-n', action='store_const', const='init_notify_categories', dest='command', help='Initialize notify categories')
parser.add_argument('--score-new-cves', '-s', dest='score_new_cves', help='Score CVEs for triage [NEW|CVE-1234]')
parser.add_argument('--generate-schema-header', '-g', action='store_const', const='gen_schema_header', dest='command', help='Generate database schema header')
+ parser.add_argument('--generate-schema-header-dir', dest='gen_schema_header_dir', help='Generate database schema header for a give database directory')
+ parser.add_argument('--import-recipe-names', action='store_const', const='import_recipe_names', dest='command', help='Import recipe names table into database')
+
+ parser.add_argument('--update-cve-status-tree', '-S', dest='update_cve_status_tree', help="Update CVEs and their children's cumulative status")
+ parser.add_argument('--update-investigation-status', '-I', dest='update_investigation_status', help='Update Investigation cumulative status')
+ parser.add_argument('--update-vulnerability-status', '-V', dest='update_vulnerability_status', help='Update Vulnerability cumulative status')
+ parser.add_argument('--update-cve-status', '-C', dest='update_cve_status', help='Update CVE cumulative status')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
- parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalulate the CVE name sort values')
- parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
verbose = args.verbose
+ update_skip_history = args.update_skip_history
+ cmd_test = args.test
cmd_skip = 0
if None != args.skip:
cmd_skip = int(args.skip)
@@ -617,6 +1244,12 @@ def main(argv):
cmd_count = int(args.count)
if get_override('SRTDBG_MINIMAL_DB'):
cmd_count = 40
+ debug_sql = args.debug_sql
+ is_progress = args.do_progress
+ progress_set_on(is_progress)
+
+ if verbose:
+ print('srtool_common %s' % args)
if 'init_package_keywords' == args.command:
init_package_keywords(packageKeywordsFile)
@@ -625,16 +1258,53 @@ def main(argv):
elif args.score_new_cves:
score_new_cves(args.score_new_cves)
elif 'gen_schema_header' == args.command:
- gen_schema_header()
- ### TO-DO: TEMPORARY WORKAROUND
- fix_cve_recommend()
- elif 'fix_name_sort' == args.command:
- fix_name_sort()
- elif 'fix_cve_recommend' == args.command:
- fix_cve_recommend()
+ gen_schema_header(srtool_basepath,os.path.join(srtool_basepath,'bin/common'))
+ elif args.gen_schema_header_dir:
+ gen_schema_header(args.gen_schema_header_dir,args.gen_schema_header_dir)
+ elif 'import_recipe_names' == args.command:
+ import_recipe_names()
+
+ elif args.update_cve_status_tree:
+ update_cve_status_tree(args.update_cve_status_tree, update_skip_history)
+ elif args.update_cve_status:
+ update_cve_status(args.update_cve_status, update_skip_history)
+ elif args.update_vulnerability_status:
+ update_vulnerability_status(args.update_vulnerability_status, update_skip_history)
+ elif args.update_investigation_status:
+ update_investigation_status(args.update_investigation_status, update_skip_history)
+
else:
print("Command not found")
+### generate schema indices
+def gen_schema_indices():
+ conn=SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ if srt_dbtype == "mysql":
+ sql = f"""SELECT * FROM information_schema.columns where table_name like '%%' and table_schema = '{srt_dbconfig['name']}' order by table_name,ordinal_position"""
+ elif srt_dbtype == "postgres":
+ sql = """SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position;"""
+ else:
+ sql = """SELECT m.name as table_name, p.name as column_name FROM sqlite_master AS m JOIN pragma_table_info(m.name) AS p where table_name != 'sqlite_sequence' ORDER BY m.name, p.cid"""
+ cur.execute(sql)
+
+ columns = cur.description
+ results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+ if srt_dbtype =="mysql":
+ results = [{'column_name': col['COLUMN_NAME'], 'table_name': col['TABLE_NAME']}for col in results]
+ columns = [SimpleNamespace(**col) for col in results]
+ current_table = None
+ count = 0
+ schema = ""
+ for col in columns:
+ if current_table != col.table_name:
+ count = 0
+ current_table = col.table_name
+ schema += f" {col.table_name.replace('orm_', '').upper()}_{col.column_name.upper()} = {count}\n"
+ count += 1
+ return schema
+
if __name__ == '__main__':
srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
diff --git a/bin/common/srtool_email.py b/bin/common/srtool_email.py
index c57fa9ab..54c65eab 100755
--- a/bin/common/srtool_email.py
+++ b/bin/common/srtool_email.py
@@ -132,7 +132,7 @@ def main(argv):
parser.add_argument('--subject', '-s', dest='subject', help='Subject for email address')
parser.add_argument('--server', dest='smtpserver', help='SMTP server address')
parser.add_argument('--user', dest='user', help='User name for Jira access')
- parser.add_argument('--passwd', dest='passwd', help='User password for Jira access')
+ parser.add_argument('--passwd', dest='passwd', help='User password for access')
parser.add_argument('--tls', '-t', action='store_true', dest='tls', help='Use TLS encryption')
parser.add_argument('--message', '-m', dest='message', help='Message to send')
parser.add_argument('--file', '-f', dest='file', help='File to send')
diff --git a/bin/common/srtool_jira_template.py b/bin/common/srtool_jira_template.py
index 82f2dc94..c059de17 100644
--- a/bin/common/srtool_jira_template.py
+++ b/bin/common/srtool_jira_template.py
@@ -41,7 +41,7 @@ import os
import sys
import re
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
from datetime import datetime, date
@@ -172,7 +172,7 @@ def do_update_jira():
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
today = datetime.today()
@@ -228,7 +228,7 @@ def do_update_jira():
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#############################################################################3
###
@@ -444,7 +444,7 @@ def update_project_issues(project, issues, conn, log):
except:
cve_name_sort = cve.name
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
c.execute(sql, (cve_name, cve_name_sort, d.priority, cve_status, '', '', '', '', '', 1, 0, '', 'Created from defect %s' % d.name, '', '', 0, '', '', '', '', '', '', datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
@@ -538,7 +538,7 @@ def jira_update_list(jira_list):
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
products = c.execute('''SELECT * FROM orm_product''').fetchall()
@@ -605,7 +605,7 @@ def jira_add_to_defect_db(jira_name):
#try connecting to jira
try:
jira = JIRA(JIRA_PRODUCTION_LINK, auth=(srt_user, srt_passwd))
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
except Exception as e:
print("xhr_investigation_commit:CONNECTION TO JIRA FAILED:(%s)\n" % e, file=sys.stderr)
@@ -655,7 +655,7 @@ def jira_add_to_defect_db(jira_name):
c.execute(sql, (d.name, d.summary, d.url, d.priority, d.status, d.resolution, str(d.publish), d.release_version, d.product_id, d.date_created, d.date_updated))
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
except Exception as e:
print("ERROR:could not find/import defect(%s)" % e, file=sys.stderr)
return 1
@@ -677,7 +677,7 @@ JIRA_IS_TEST = True
JIRA_IS_SIMULATE = True
def simulate_new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -691,7 +691,7 @@ def simulate_new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "%s-%05d" % (product_prefix,index)
return defect_name
@@ -722,7 +722,7 @@ def jira_new_defect(product_defect_tags,summary,cve_list,description,reason,prio
return 1
#srt_error_log("Jira connection made")
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# append the jira link to description
diff --git a/bin/common/srtool_job.py b/bin/common/srtool_job.py
new file mode 100755
index 00000000..4c9214b0
--- /dev/null
+++ b/bin/common/srtool_job.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# SRTool Implementation
+#
+# Copyright (C) 2020-2021 Wind River Systems
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# Usage:
+#
+# https://kevinmccarthy.org/2016/07/25/streaming-subprocess-stdin-and-stdout-with-asyncio-in-python/
+# "python How do I get real time output from my commands"
+
+import os
+import sys
+import re
+import argparse
+from datetime import datetime, date
+import subprocess
+import asyncio
+import time
+import traceback
+
+# load the srttool.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
+from common.srtool_common import log_error
+
+# Setup:
+job_errors = 0
+job_warnings = 0
+verbose = False
+debug_sql = False
+DBName = 'srt.sqlite'
+
+#################################
+# Helper methods
+#
+
+def debugMsg(msg):
+ if verbose:
+ print(msg)
+
+srtErrorLog = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srtool_dbg.log'
+def _log(msg):
+ f1=open(srtErrorLog, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Sub Process calls
+def execute_process(*args):
+ cmd_list = []
+ for arg in args:
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ cmd_list.append(a)
+ else:
+ cmd_list.append(arg)
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+#################################
+# SQL as dict support
+#
+# https://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
+#
+
+def connectDatabase():
+ return SQL_CONNECT()
+
+def dict_factory(cursor, row):
+ d = {}
+ for idx, col in enumerate(cursor.description):
+ d[col[0]] = row[idx]
+ return d
+
+#################################
+#
+#
+
+async def do_read_stream(stream, logfd, cur, conn, job_id, mode):
+ global job_errors
+ global job_warnings
+ while True:
+ line = await stream.readline()
+ if line:
+ line = line.decode("utf-8").strip()
+
+ # Is this an error line?
+ line_strip = line.strip()
+ line_strip_lower = line_strip.strip().lower()
+ if line_strip_lower.startswith('traceback') or \
+ line_strip_lower.startswith('syntaxerror') or \
+ (line_strip_lower.startswith('error') and (not '"ok"' in line)):
+ job_errors += 1
+ if verbose:
+ print("[CUR]:[ERROR DETECTED:%d]<%s>" % (job_errors,line))
+ _log("FOO:JOB:DO_READ_STREAM|%s|" % line_strip)
+ sys.stdout.flush()
+
+ # Is this an warning line?
+ line_strip = line.strip()
+ if line_strip_lower.startswith('warning'):
+ job_warnings += 1
+ if verbose:
+ print("[CUR]:[WARNING DEFECTED:%d]<%s>" % (job_warnings,line))
+ sys.stdout.flush()
+
+ # Is this a progress line?
+ if line.startswith('[PROGRESS'):
+ # Parse the progress line
+ #[PROGRESS:0,3,foobar1]
+ cnt = -1
+ max = -1
+ msg = line
+ now = datetime.now().strftime('%H:%M:%S')
+
+ m = re.search(r'\[PROGRESS:(\d+),(\d+),(.*)\]', line)
+ try:
+ if m:
+ cnt = int(m.group(1))
+ max = int(m.group(2))
+ msg = m.group(3)
+ except:
+ if verbose:
+ print("[CUR]:[PARSE_ERROR]<%s>" % line)
+ sys.stdout.flush()
+ continue
+
+ # Update database
+ if cur:
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max=?, message=?
+ WHERE id=?'''
+ if cnt > max:
+ cnt = max
+ ret = SQL_EXECUTE(cur, sql, (ORM.JOB_STATUS_INPROGRESS, cnt, max, msg[:49], job_id, ))
+ SQL_COMMIT(conn)
+ if verbose:
+ print("[CUR][%s]:Cnt=%s,Max=%s,Msg='%s',Now=%s" % (job_id,cnt,max,msg,now))
+ sys.stdout.flush()
+
+ # Is this an refresh line?
+ elif line_strip.startswith('[REFRESH'):
+ if cur:
+ sql = ''' UPDATE orm_job
+ SET refresh=?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, ('1', job_id, ))
+ SQL_COMMIT(conn)
+ if verbose:
+ print("[CUR]:[REFRESH_REQUESTED]")
+ sys.stdout.flush()
+ else:
+ # Update log file
+ if verbose and ('stderr' == mode):
+ line = "[STDERR]" + line
+ if logfd:
+ logfd.write(line + '\n')
+ logfd.flush()
+ else:
+ print(line)
+ sys.stdout.flush()
+ else:
+ break
+
+async def do_stream_subprocess(cmd, logfd, cur, conn, job_id):
+ process = await asyncio.create_subprocess_exec(*cmd,
+ stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
+
+ await asyncio.wait([
+ do_read_stream(process.stdout, logfd, cur, conn, job_id, 'stdout'),
+ do_read_stream(process.stderr, logfd, cur, conn, job_id, 'stderr')
+ ])
+ return await process.wait()
+
+
+def do_execute(cmd,job_name,job_desc,log_file,job_id,options='',parent_name=''):
+ conn = None
+ cur = None
+ logfd = None
+
+ # Log file
+ if log_file:
+ logfd=open(log_file, 'w')
+
+ if verbose: print("DO_EXECUTE(%s,%s,%s)" % (cmd, log_file,job_id),file=logfd)
+
+ # Database cursor
+ if job_id:
+ if verbose: print("...DO_EXECUTE(job_id=%s)" % (job_id),file=logfd)
+ conn = connectDatabase()
+ cur = conn.cursor()
+ # Prepare/validate job id, create new job record on request
+ job_id = prepare_job_record(job_id,job_name,job_desc,' '.join(cmd),log_file,options,parent_name)
+ # Initialize the generated values
+ status = ORM.JOB_STATUS_INPROGRESS
+ started_on = datetime.now()
+ completed_on = None
+ pid = os.getpid()
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max = ?, errors=?, started_on = ?, completed_on=?, message = ?, pid=?
+ WHERE id=?'''
+ ret = SQL_EXECUTE(cur, sql, (status, 0, 0, 0, started_on, completed_on, '', pid, job_id, ))
+ if verbose: print("...DO_EXECUTE(PRESET=%s)" % (ret),file=logfd)
+ SQL_COMMIT(conn)
+
+ loop = asyncio.get_event_loop()
+ rc = loop.run_until_complete(
+ do_stream_subprocess(
+ cmd,
+ logfd,
+ cur,
+ conn,
+ job_id,
+ ))
+
+ #
+ # Finish up
+ #
+
+ # Close handles
+ if cur:
+ # (Re)set the status in case the job died before final progress update
+ ### TODO set error if job returns an error code
+ if job_errors or job_warnings:
+ status = ORM.JOB_STATUS_ERRORS
+ else:
+ status = ORM.JOB_STATUS_SUCCESS
+ completed_on = datetime.now() #datetime.today()
+#
+ # Give the user a moment to savor the 100% display,
+ # and time enough for the javascript loop to catch it
+ time.sleep(2)
+#
+ sql = ''' UPDATE orm_job
+ SET status = ?, completed_on=?, errors = ?, warnings = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (status, completed_on, job_errors, job_warnings, job_id, ))
+ SQL_COMMIT(conn)
+
+ # Close handles
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ loop.close()
+ if logfd:
+ logfd.close()
+ return rc
+
+def execute_command(command,job_name,job_desc,log_file,job_id,options,parent_name):
+ rc = do_execute(
+ ["bash", "-c", command],job_name,job_desc,log_file,job_id,options,parent_name,
+ )
+
+#################################
+# Unit tests
+#
+
+def test_async(job_id,log_file,delay):
+ do_execute(
+ ["bash", "-c", "echo stdout && sleep %s && echo stderr 1>&2 && sleep %s && echo done" % (delay,delay)],
+ 'Test_Async','Test_Async',log_file,job_id
+ )
+
+def do_test_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,3,foobar2]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,3,foobar3]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ testfd=open('.test4.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:3,3,foobar4]\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt " % (delay,delay,delay)],
+ 'Test_Unit','Do_Test_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+ os.remove('.test4.txt')
+
+
+def do_test_error1_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("Error: bad data in structure\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt" % (delay)],
+ 'Do_Test_Error1_Unit','Do_Test_Error1_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+
+def do_test_error2_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write('Traceback (most recent call last):\n')
+ testfd.write('File "/home/test/1.txt", line 994, in <module>\n')
+ testfd.write('main(sys.argv[1:])\n')
+ testfd.write('File "/home/test/2.txt", line 970, in main\n')
+ testfd.write('read_db(work_dir)\n')
+ testfd.write('File "/home/test/3.txt", line 373, in read_db\n')
+ testfd.write('write_db()\n')
+ testfd.write("TypeError: write_db() missing 1 required positional argument: 'work_dir'\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt" % (delay)],
+ 'Do_Test_Error2_Unit','Do_Test_Error2_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+
+def do_test_hang(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,3,foobar2]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,3,NOTE:HANG_40_SEC]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt " % (delay,delay,40)],
+ 'Do_Test_Hang','Do_Test_Hang',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+
+def do_test_refresh(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,4,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,4,foobar2]\n")
+ testfd.write("[REFRESH:foobar3]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,4,foobar3]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ testfd=open('.test4.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:3,4,foobar4]\n")
+ testfd.close()
+ testfd=open('.test5.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:4,4,foobar4]\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt && sleep %s && cat .test5.txt " % (delay,delay,delay,delay)],
+ 'Do_Test_Refresh','Do_Test_Refresh',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+ os.remove('.test4.txt')
+ os.remove('.test5.txt')
+
+# Run a parent job that calls sub-jobs
+# Example: bin/common/srtool_job.py -c "./bin/common/srtool_job.py --test-parent-job" --job-id 9 --log logs/run_job_parent.log --verbose --name "ParentTest" --description "Run a parent job test"
+def do_test_parent_job(parent_job_id,parent_log_file):
+ child_job_id = 8
+ child_log_file = 'logs/run_job_child.log'
+ progress_set_on(True)
+ progress_set_max(3)
+ print("Test_Parent_Job:Start")
+ child_command = 'bin/common/srtool_job.py -c SELFTEST --job-id %s --log %s ' % (child_job_id,child_log_file)
+ print("CHILD:%s" % child_command)
+ progress_show("Child pass #1")
+ print("Test_Parent_Job:Child pass #1")
+ ret = os.system(child_command)
+ progress_show("Child pass #2")
+ print("Test_Parent_Job:Child pass #2")
+ ret = os.system(child_command)
+ progress_show("Child pass #3")
+ print("Test_Parent_Job:Child pass #3")
+ ret = os.system(child_command)
+ print("Test_Parent_Job:Done")
+ progress_done('Done')
+
+#################################
+# Job record creation control
+#
+# Prepare the Job record, create new one if needed/requested
+# Job_id of '0' means create a new record
+
+def prepare_job_record(job_id=0,name='AutoJob',description = 'AutoJob',command = '',log_file='logs/run_job_auto.log',options='',parent_name=''):
+ conn = connectDatabase()
+ cur = conn.cursor()
+
+ if verbose: print("DO_CREATE_JOB(%s,%s,%s,%s,%s)" % (job_id,name,description,command,log_file))
+
+ # Generated values
+ message = ''
+ status = ORM.JOB_STATUS_NOTSTARTED
+ started_on = None
+ completed_on = None
+ pid = 0
+ count = 0
+ max = 0
+ errors = 0
+ warnings = 0
+ refresh = 0
+
+ # Validate requested job number as a positive integer
+ try:
+ job_id_number = int(job_id)
+ if 0 > job_id_number:
+ job_id_number = 0
+ except:
+ job_id_number = 0
+
+ # Create / Update Job
+ if 0 != job_id_number:
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %d;" % job_id_number)
+ job = cur.fetchone()
+ # If that job_id is in progress, force a new job record
+ if job and (ORM.JOB_STATUS_INPROGRESS == job[ORM.JOB_STATUS]):
+ if verbose: print("FOUND JOB BUSY %s, force new job" % job_id_number)
+ job = None
+ job_id_number = 0
+ else:
+ # Autocreate new job record
+ job = None
+
+ if not job:
+ if verbose: print("CREATE JOB %s" % job_id_number)
+ # Create the new job
+ # Offset ... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+ sql = ''' INSERT INTO orm_job (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh))
+ new_job_id_number = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+
+ # If a specific job_id was requested (e.g. external tracking purposes),
+ # force that id (we know here that it is unique)
+ if 0 != job_id_number:
+ if verbose: print("SET JOB ID %d" % job_id_number)
+ sql = ''' UPDATE orm_job
+ SET id = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (job_id_number, new_job_id_number, ))
+ SQL_COMMIT(conn)
+ else:
+ job_id_number = new_job_id_number
+ else:
+ if verbose: print("FOUND JOB %d" % job_id_number)
+ sql = ''' UPDATE orm_job
+ SET name = ?, description = ?, command = ?, parent_name = ?, log_file = ?, status = ?, pid = ?, count = ?, max = ?, errors = ?, warnings = ?, message = ?, started_on = ?, completed_on = ?, options = ?, refresh = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh, job_id_number, ))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+ return job_id_number
+
+def progress_step(job_id,count):
+ conn = connectDatabase()
+ cur = conn.cursor()
+ # Use job #90 as default job
+ if 0 == job_id:
+ job_id = 90
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %s;" % job_id)
+ job=cur.fetchone()
+ # Create any missing job record on-the-fly
+ if not job:
+ job_id = prepare_job_record(job_id)
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %s;" % job_id)
+ job=cur.fetchone()
+
+ status = job[ORM.JOB_STATUS]
+# started_on = job[ORM.JOB_STARTED_ON]
+# completed_on = job[ORM.JOB_COMPLETED_ON]
+ if count == 0:
+ started_on = datetime.now()
+ completed_on = None
+ status = ORM.JOB_STATUS_INPROGRESS
+ elif count == 4:
+ completed_on = datetime.now()
+ status = ORM.JOB_STATUS_SUCCESS
+
+ max = 4
+ message = 'STEP%d' % count
+ command = 'COMMAND%d' % count
+
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max=?, message=?, command = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (status, count, max, message[:49], command, job_id, ))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# List the status of the PIDs in the Job table
+#
+
+def job_pid_status():
+ conn = connectDatabase()
+ conn.row_factory = dict_factory # sqlite3.Row
+ cur = conn.cursor()
+
+ # Job Status
+ SUCCESS = 2
+ # PID Status
+ RUNNING = 0
+ MISSING = 1
+
+ pid_table = {}
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job"):
+ if job['pid']:
+ pid_table[int(job['pid'])] = MISSING
+
+ # Fetch pid data
+ result_returncode,result_stdout,result_stderr = execute_process(['ps','-a','-x'])
+ if 0 != result_returncode:
+ result_stdout = str(result_stdout)
+ print("ERROR(%s):%s" % (result_returncode,result_stderr))
+ exit(1)
+ for line in result_stdout.splitlines():
+ try:
+ pid = int(line[:line.index(' ')])
+ except:
+ continue
+ #value = line[line.index('=')+1:]
+ if pid in pid_table:
+ pid_table[pid] = RUNNING
+
+ for pid in pid_table:
+ print('%s:%s' % (pid,pid_table[pid]))
+
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# External job monitor support
+#
+def dump():
+ conn = connectDatabase()
+ cur = conn.cursor()
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job ORDER BY id;"):
+ print("Job=%03d,Cnt=%04s,Max=%04s,Status=%-10s,Err=%s,Pid=%5s,Start=%s,Stop=%s,Re:%s,Name=%10s,Msg='%s'" % (
+ job[ORM.JOB_ID],job[ORM.JOB_COUNT],job[ORM.JOB_MAX],
+ ORM.get_orm_string(job[ORM.JOB_STATUS],ORM.JOB_STATUS_STR),job[ORM.JOB_ERRORS],job[ORM.JOB_PID],
+ job[ORM.JOB_STARTED_ON][5:19] if job[ORM.JOB_STARTED_ON] else ' ',
+ job[ORM.JOB_COMPLETED_ON][11:19] if job[ORM.JOB_COMPLETED_ON] else ' ',
+ job[ORM.JOB_REFRESH],job[ORM.JOB_NAME],job[ORM.JOB_MESSAGE],
+ ))
+ sys.stdout.flush()
+
+def monitor_job():
+ print("Monitor Job:: Now=%s" % (datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
+ conn = connectDatabase()
+ cur = conn.cursor()
+ job_prev = {}
+ i=0
+ # Common date_time string expansion
+ def datetime_text(dt):
+ # 2021-04-01 12:10:09
+ return dt.strftime('%Y-%m-%d %H:%M:%S')
+
+ while True:
+ i += 1
+ msg = []
+ progress = []
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job ORDER BY id;"):
+ job_id = job[ORM.JOB_ID]
+ is_change = False or (0 == (i % 40))
+ status = ' '
+ if job_id in job_prev:
+ # Marker if new job started
+ if (job_prev[job_id][ORM.JOB_STATUS] in (ORM.JOB_STATUS_SUCCESS,ORM.JOB_STATUS_ERRORS,ORM.JOB_STATUS_CANCELLED)) and \
+ (job_prev[job_id][ORM.JOB_STATUS] != job[ORM.JOB_STATUS]):
+ status = '*'
+ # Line if job status change
+                for field_idx in range(len(job)):
+                    if job_prev[job_id][field_idx] != job[field_idx]:
+ is_change = True
+ else:
+ is_change = True
+ job_prev[job_id] = job
+ if is_change:
+ msg.append("%s Job=%2s,Name=%10s,Cnt=%04s,Max=%04s,Status=%-10s,Err=%s,Pid=%5s,Start=%s,Stop=%s,Re:%s,Msg=%s| " % (
+ status,job_id,job[ORM.JOB_NAME],job[ORM.JOB_COUNT],job[ORM.JOB_MAX],
+ ORM.get_orm_string(job[ORM.JOB_STATUS],ORM.JOB_STATUS_STR),job[ORM.JOB_ERRORS],job[ORM.JOB_PID],
+ datetime_text(job[ORM.JOB_STARTED_ON])[11:19] if job[ORM.JOB_STARTED_ON] else ' ',
+ datetime_text(job[ORM.JOB_COMPLETED_ON])[11:19] if job[ORM.JOB_COMPLETED_ON] else ' ',
+ job[ORM.JOB_REFRESH],job[ORM.JOB_MESSAGE],
+ ))
+ if (ORM.JOB_STATUS_INPROGRESS == job[ORM.JOB_STATUS]):
+ progress.append("%d=%d%%" % (job_id,((job[ORM.JOB_COUNT] * 100)/job[ORM.JOB_MAX]) if job[ORM.JOB_MAX] else 0))
+
+ if msg:
+ print("=== %s (%s) ===" % (datetime.now().strftime('%Y-%m-%d %H:%M:%S'),','.join(progress)))
+        for line in msg:
+            print(line)
+ sys.stdout.flush()
+ time.sleep(0.1)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global DBName
+ global debug_sql
+
+ parser = argparse.ArgumentParser(description='xxx_job.py: Run command line jobs, update progress in GUI')
+
+ # Main options
+ parser.add_argument('--command', '-c', dest='command', help='Command to execute')
+ parser.add_argument('--name', '-n', dest='job_name', help='Job Name')
+    parser.add_argument('--description', '-d', dest='job_desc', help='Job Description')
+ parser.add_argument('--log', '-l', dest='log_file', help='Log file')
+ parser.add_argument('--options', '-o', dest='options', help='Job options')
+ parser.add_argument('--job-id', '-j', dest='job_id', help='Select a specific job record ID')
+ parser.add_argument('--parent-name', '-p', dest='parent_name', help='Parent record name, if any')
+ # UI helpers
+ parser.add_argument('--job-pid-status', action='store_const', const='job_pid_status', dest='command', help='Dump the PID status of the open jobs')
+
+ # Unit tests
+ parser.add_argument('--test-async', '-T', action='store_true', dest='do_test_async', help='Test Aync')
+ parser.add_argument('--test-unit', '-U', action='store_true', dest='do_test_unit', help='Unit Test')
+ parser.add_argument('--test-hang', '-H', action='store_true', dest='do_test_hang', help='Simulate app hang')
+ parser.add_argument('--test-refresh', '-R', action='store_true', dest='do_test_refresh', help='Insert a refresh request')
+    parser.add_argument('--test-parent-job', '-P', action='store_true', dest='do_test_parent_job', help='Run a parent job test')
+
+ # Step tests
+ parser.add_argument('-0', action='store_true', dest='progress_step_0', help='Manual progress step 0 (init)')
+ parser.add_argument('-1', action='store_true', dest='progress_step_1', help='Manual progress step 1 (1/4)')
+ parser.add_argument('-2', action='store_true', dest='progress_step_2', help='Manual progress step 2 (2/4)')
+ parser.add_argument('-3', action='store_true', dest='progress_step_3', help='Manual progress step 3 (3/4)')
+ parser.add_argument('-4', action='store_true', dest='progress_step_4', help='Manual progress step 4 (done)')
+
+ # Debugging support
+ parser.add_argument('--dump', '-D', action='store_true', dest='dump', help='Dump jobs in the database')
+ parser.add_argument('--monitor-job', '-M', action='store_true', dest='monitor_job', help='Monitor a job in the database')
+ parser.add_argument('--delay', '-L', dest='delay', help='Test delays')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+ parser.add_argument('--database', '-B', dest='database', help='Set the database path')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
+ parser.add_argument('--debug-sql-compare', dest='debug_sql_compare', help='Compare Debug SQL logs (e.g. "JOB,SCR")')
+
+ # Be flexible with arguments to support sub-parse trees
+ args, argv = parser.parse_known_args()
+
+ _log("Job:args:%s" % args)
+
+ try:
+ # Basic parameters
+ verbose = args.verbose
+ debug_sql = args.debug_sql
+ if args.log_file:
+ log_file = args.log_file
+ else:
+ log_file = ''
+ # Default is to create a new job record
+ if args.job_id:
+ job_id = args.job_id
+ else:
+ job_id = 0
+ if args.database:
+ DBName = args.database
+ if args.delay:
+ delay = args.delay
+ else:
+ delay = 1
+ job_name = args.job_name if args.job_name else ''
+ job_desc = args.job_desc if args.job_desc else ''
+ job_options = args.options if args.options else ''
+ job_parent_name = args.parent_name if args.parent_name else ''
+
+ # Enable SQL tracing
+ if debug_sql:
+ SQL_DEBUG(True,'JOB')
+
+ # Unit tests
+ ret = 0
+ if args.command == 'SELFTEST':
+ do_test_unit(job_id,log_file,'3')
+ elif args.command == 'SELFERROR1':
+ do_test_error1_unit(job_id,log_file,'3')
+ elif args.command == 'SELFERROR2':
+ do_test_error2_unit(job_id,log_file,'3')
+
+ elif args.do_test_async:
+ test_async(job_id,log_file,delay)
+ elif args.do_test_unit:
+ do_test_unit(job_id,log_file,delay)
+ elif args.do_test_hang:
+ do_test_hang(job_id,log_file,delay)
+ elif args.do_test_refresh:
+ do_test_refresh(job_id,log_file,delay)
+ elif args.do_test_parent_job:
+ do_test_parent_job(job_id,log_file)
+
+ elif args.monitor_job:
+ monitor_job()
+ elif args.dump:
+ dump()
+ elif args.debug_sql_compare:
+ SQL_DUMP_COMPARE(args.debug_sql_compare)
+
+ elif args.progress_step_0: progress_step(job_id,0);
+ elif args.progress_step_1: progress_step(job_id,1);
+ elif args.progress_step_2: progress_step(job_id,2);
+ elif args.progress_step_3: progress_step(job_id,3);
+ elif args.progress_step_4: progress_step(job_id,4);
+
+ # UI Helpers
+ elif 'job_pid_status' == args.command:
+ job_pid_status();
+
+ # Test the parameters
+ elif not args.command:
+ print("ERROR: Job: Missing command")
+ return(1)
+
+ # Execute the main command
+ else:
+ ret = execute_command(args.command,job_name,job_desc,log_file,job_id,job_options,job_parent_name)
+
+ # Dump the SQL transaction data
+ if debug_sql:
+ SQL_DUMP()
+
+ if 0 != ret:
+ exit(ret)
+
+ except Exception as e:
+ print("ERROR:%s" % e)
+ print("%s" % traceback.print_exc())
+ log_error("ERROR:SRTOOL_JOB:'%s'" % (e))
+ exit(1)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/bin/common/srtool_progress.py b/bin/common/srtool_progress.py
new file mode 100755
index 00000000..2d87811f
--- /dev/null
+++ b/bin/common/srtool_progress.py
@@ -0,0 +1,75 @@
+#################################
+# Progress helper methods
+#
+
+import sys
+
+progress_count = -1
+progress_max = 0
+progress_percent_prev = -1
+progress_enabled = False
+
+PROGRESS_STATUS_ENABLE = 0
+PROGRESS_STATUS_COUNT = 1
+PROGRESS_STATUS_MAX = 2
+
+# Debugging support
+progress_debug = False
+
+def progress_set_on(value = True):
+ global progress_enabled
+ progress_enabled = value
+ if progress_debug: print("PROGRESS_SET_ON=%s" % value)
+
+def progress_status():
+ return progress_enabled, progress_count, progress_max
+
+def progress_set_max(max):
+ global progress_max
+ progress_max = max
+ progress_show('Start',0)
+ if progress_debug: print("PROGRESS_SET_MAX=%s" % max)
+
+def progress_set_current(current):
+ global progress_count
+ progress_count = current
+ if progress_debug: print("PROGRESS_SET_CURRENT=%s" % current)
+
+def progress_get_current():
+    # Getter: return the current count (original body duplicated the setter)
+    return progress_count
+
+def progress_show(msg,add_cnt=1,force_newline=False):
+ global progress_count
+ global progress_percent_prev
+ if not progress_enabled:
+ return
+ progress_count += add_cnt
+ if progress_max:
+ progress_percent_new = (progress_count * 100) // progress_max
+ else:
+ progress_percent_new = 0
+ if progress_debug: print("PROGRESS_SHOW=%s (%s:%s)(%s:%s)" % (msg,progress_percent_prev,progress_percent_new,progress_count,progress_max))
+ if progress_percent_prev < progress_percent_new:
+ progress_percent_prev = progress_percent_new
+ else:
+ return
+ # Force a new line to unblock STDIO if application uses prints with <end="">
+ if force_newline:
+ print("\n")
+ print("[PROGRESS:%d,%d,%s]" % (progress_count,progress_max,msg))
+ sys.stdout.flush()
+
+def progress_done(msg):
+ if not progress_enabled:
+ return
+ print("[PROGRESS:%d,%d,%s]" % (progress_max,progress_max,msg))
+
+# Formally post an error message for the Job Control to catch
+def progress_error(msg):
+ if not progress_enabled:
+ return
+ print("ERROR:%s" % (msg))
+
+
+
diff --git a/bin/common/srtool_sanity_test.py b/bin/common/srtool_sanity_test.py
index 4bd116a7..4a0a91d0 100755
--- a/bin/common/srtool_sanity_test.py
+++ b/bin/common/srtool_sanity_test.py
@@ -33,18 +33,15 @@
import os
import sys
import argparse
-import sqlite3
import subprocess
from django import VERSION as DJANGO_VERSION
-# Load the srt.sqlite schema index file
-# Since it is generated from this script
-# it may not exist on the first pass
-try:
- from srt_schema import ORM
-except ImportError:
- pass
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
@@ -81,7 +78,7 @@ def get_override(key):
#
def get_host_statistics():
-
+ print("* Host statistics ...")
try:
cmd = ('uname', '-vm')
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
@@ -127,8 +124,9 @@ def get_host_statistics():
def get_database_statistics():
global table_counts
+ print("* Database statistics ... (use '-v' for details)")
# Get List of Tables:
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
tableListQuery = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY Name"
cur.execute(tableListQuery)
@@ -139,9 +137,9 @@ def get_database_statistics():
numberOfRows = cur.fetchone()[0]
table_counts[table] = numberOfRows
if verbose:
- print("%d\t%s" % (numberOfRows,table, ))
- cur.close()
- conn.close()
+ print("%11d\t%s" % (numberOfRows,table, ))
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#################################
# init test
@@ -172,13 +170,14 @@ def init_test():
get_host_statistics()
get_database_statistics()
+ print("* Table checks ...")
ret = 0
for table in init_table_list:
if (not table in table_counts) or (0 == table_counts[table]):
- print("ERROR: Table '%s' is empty" % table)
- ret = 1
+ print("Note: Table '%s' is empty" % table)
if not ret:
+ print("* Summary ...")
print("CVEs = %s" % table_counts['orm_cve'])
print("Users = %s" % table_counts['users_srtuser'])
print("Data sources= %s" % table_counts['orm_datasource'])
@@ -206,7 +205,7 @@ def main(argv):
ret = init_test()
exit(ret)
else:
- print("Command not found")
+ print("Run: './srtool_sanity_test.py -i'")
exit(1)
if __name__ == '__main__':
diff --git a/bin/common/srtool_sql.py b/bin/common/srtool_sql.py
new file mode 100755
index 00000000..673793d6
--- /dev/null
+++ b/bin/common/srtool_sql.py
@@ -0,0 +1,492 @@
+#################################
+# Python SQL helper methods
+#
+# Provide SQL extended support via wrappers
+# * Enable retry for errors, specifically database locks
+# * Capture start/stop second+millisecond timestamps
+# * Provide post-dump of time tracking and retry counts
+#
+# Solution source:
+# https://stackoverflow.com/questions/15143871/simplest-way-to-retry-sqlite-query-if-db-is-locked
+# Quote: "Python will retry regularly if the table is locked. It will not retry if the Database is locked."
+#
+
+import sys
+import time
+import subprocess
+from datetime import datetime, date
+from collections import OrderedDict
+import sqlite3
+import re
+import os
+import yaml
+from types import SimpleNamespace
+
+# Globals
+SQL_TRACE = False
+SQL_VERBOSE = False
+SQL_CONTEXT = "NN"
+SQL_TIMEOUT_MAX = 10
+SQL_TIMEOUT_TIME = 0.0001
+SQL_LOG_DIR = 'logs'
+
+# Load the database configuration
+SRT_BASE_DIR = os.getenv('SRT_BASE_DIR', '.')
+srt_dbconfig = None
+srt_dbtype = None
+with open(f"{SRT_BASE_DIR}/srt_dbconfig.yml", "r") as ymlfile:
+ SRT_DBCONFIG = yaml.safe_load(ymlfile)
+ SRT_DBSELECT = SRT_DBCONFIG['dbselect']
+ srt_dbconfig = SRT_DBCONFIG[SRT_DBSELECT]
+ srt_dbtype = srt_dbconfig['dbtype']
+if not srt_dbtype:
+    print(f"ERROR: Missing {SRT_BASE_DIR}/srt_dbconfig.yml")
+ exit(1)
+if ("mysql" == srt_dbtype) or ('1' == os.getenv('SRT_MYSQL', '0')):
+ import MySQLdb
+if ("postgres" == srt_dbtype) or ('1' == os.getenv('SRT_POSTGRES', '0')):
+ import psycopg2
+ from psycopg2.extras import RealDictCursor
+
+# quick development/debugging support
+def _log(msg):
+    DBG_LVL = int(os.environ['SRTDBG_LVL']) if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+#with open(f"{SRT_BASE_DIR}/db_migration_config.yml", "r") as migfile:
+# DB_MIG_CONFIG = yaml.safe_load(migfile)
+
+#################################
+# Debug Support
+#
+
+SQL_TRACE_log = []
+SQL_VERBOSE_log = []
+
+# Enable debug tracking, optional context
+def SQL_DEBUG(is_trace,context=None,is_verbose=False):
+ global SQL_TRACE
+ global SQL_VERBOSE
+ global SQL_CONTEXT
+ SQL_TRACE = is_trace
+ if context:
+ SQL_CONTEXT = context
+ if is_verbose:
+        SQL_VERBOSE = is_verbose
+ if SQL_TRACE:
+ print("SRTSQL_DEBUG:Trace=%s,Context=%s,Verbose=%s)" % (SQL_TRACE,context,is_verbose))
+ sys.stdout.flush()
+
+def _SQL_GET_MS():
+ if not SQL_TRACE: return 0
+ dt = datetime.now()
+ return (dt.minute * 100000000) + (dt.second * 1000000) + dt.microsecond
+
+def _SQL_TRACE_LOG_ADD(start,stop,loop):
+ global SQL_TRACE_log
+ if not SQL_TRACE: return
+ SQL_TRACE_log.append([SQL_CONTEXT,start,stop,loop])
+
+def SQL_DUMP():
+ if not SQL_TRACE: return
+ if not os.path.isdir(SQL_LOG_DIR):
+ os.makedirs(SQL_LOG_DIR)
+ log_file = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,SQL_CONTEXT)
+ with open(log_file, 'w') as fd:
+ print(" (Context) Start Stop (Retries)",file=fd)
+ print("===============================================",file=fd)
+ for context,start,stop,loop in SQL_TRACE_log:
+ print("sql_dump:(%3s) %d to %d (%d)" % (context[:3],start,stop,loop),file=fd)
+ print("SQL debug trace log:%s" % log_file)
+
+def SQL_DUMP_COMPARE(param,is_csv=False):
+ tag1,tag2 = param.split(',')
+ if not os.path.isdir(SQL_LOG_DIR):
+ os.makedirs(SQL_LOG_DIR)
+ log1 = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,tag1)
+ log2 = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,tag2)
+
+ log = []
+
+ def load_log(logfile):
+ p = re.compile(r'sql_dump:\((\w+)\) (\d+) to (\d+) \((\d+)\)')
+ with open(logfile, 'r') as fs:
+ for line in fs.readlines():
+ # sql_dump:(JOB) 39290879 to 39293849 (0)
+ m = p.match(line)
+ if not m:
+ continue
+ tag,start,stop,retry = m.groups()
+ log.append([tag,int(start),int(stop),retry])
+ # Load the logs
+ load_log(log1)
+ load_log(log2)
+ # Sort the log
+ def sortOnStart(e):
+ return e[1]
+ log.sort(key=sortOnStart)
+
+ # Display log table with diffs
+ if not is_csv:
+ print(" # |Tag|Start uSec|Stop uSec |Re|(diff prev )|(diff next )|(diff write)")
+ print("======|===|==========|==========|==|============|============|============")
+ else:
+ print("Index,Tag,Start,Stop,Retries,Diff_prev,Diff_next,Diff write")
+ logmax = len(log)
+ i = -1
+ for tag,start,stop,retry in log:
+ i += 1
+ if i == 0:
+ pre_diff = 0
+ else:
+ pre_diff = log[i][1] - log[i-1][2]
+ if i == (logmax - 1):
+ post_diff = 0
+ else:
+ post_diff = log[i+1][1] - log[i][2]
+ write_diff = log[i][2] - log[i][1]
+ if not is_csv:
+ print("[%4d]:%s,%010d,%010d,%s (^ %8d) (v %8d) (~ %8d)" % (i,tag,start,stop,retry,pre_diff,post_diff,write_diff))
+ else:
+ print("%d,%s,%010d,%010d,%s,%8d,%8d,%8d" % (i,tag,start,stop,retry,pre_diff,post_diff,write_diff))
+ if SQL_VERBOSE:
+ print('')
+ print('Executed SQL commands:')
+ for line in SQL_VERBOSE_log:
+ print(line)
+ print('')
+
+def SQL_FETCH_INDEXES(conn, dbconfig=None):
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ #Goal: Create a data structure that has:
+ # ordered list of tables
+ # table column names
+ # index(ordinal) of table
+ # columns: (table_name, column_name, ordinal_postion) -> Should be list[list]
+ # Formatting should not be done in the subroutine...do it in srtool_common.py
+ # (name, value) -> name is table_name,_column name and value is the index
+ # Should be returned as tuples rather than preformatted strings
+ # Returns should be consistent (also for error)
+
+ cur = conn.cursor()
+ if 'postgres' == dbtype:
+ sql = "SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position;"
+ print("cursor stat: {}".format(cur))
+ cur.execute(sql)
+ columns = cur.description
+ results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+ tables = {}
+ # TODO last line of for is hardcoded in postgres format
+ for i in results:
+ if i['table_name'] not in tables:
+ # {'table_name' : 'column_name', 'ordinal_position'}
+ tables[i['table_name']] = {'ordinal_position' : 'column_name'}
+ tables[i['table_name']][i['ordinal_position']-1] = i['column_name']
+ for val_d in tables:
+ tables[val_d].pop('ordinal_position')
+ ret_list = []
+ for table in tables:
+ table_items = tables[table].items()
+ sorted_tabl = sorted(table_items)
+ for offset,i in enumerate(sorted_tabl):
+ table = table.replace('orm_','')
+ ret_list.append(("{}_{}".format(table.upper(), i[1].upper()), offset))
+ return(ret_list)
+ elif 'sqlite' == dbtype:
+ database_file = dbconfig['path']
+ create_re = re.compile(r"CREATE TABLE[A-Z ]* \"(\w+)\" \((.+)\);")
+ try:
+ cmd = ('sqlite3', database_file, '.schema') # must be abstracted
+ output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ return([("ERROR","(%d) %s" % (e.returncode, e.output))])
+ ret_list = []
+ # print('RET LIST: {}'.format(ret_list))
+ # print('cmd OUTPUT: {}'.format(output))
+ # problem -> for loop is not executing (nothing returned from command 'sqlite3 srt-backup.sqlite .schema')
+ for line in output.decode("utf-8").splitlines():
+ print(line)
+ print('In for loop - retlist: {}'.format(ret_list))
+ match = create_re.match(line)
+ if not match:
+ print('ERROR: no match')
+ continue
+
+ table = match.group(1).upper()
+ table = table.replace('ORM_','')
+
+ columns = match.group(2)
+ for i, col in enumerate(columns.split(',')):
+ col = col.strip()
+ name = col[1:]
+ #
+ try:
+ name = name[:name.index('"')]
+ print('NOTE: passed try #2: {}'.format(name))
+ except Exception as e:
+ return([("ERROR","%s:%s:" % (e,col))])
+ name = col[:col.index(' ')]
+ ret_list.append(("%s_%s" % (table.upper(),name.upper()), i))
+ return(ret_list)
+ else:
+        return([("ERROR","No support for MySQL or MariaDB. Update coming..."),])
+
+#################################
+# SQL wrapper methods
+#
+
+def _SQL_ACTION(action,cur_conn,sql=None,params=None,dbconfig=None):
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ ret = None
+ timeout_count = 0
+ if SQL_VERBOSE:
+ SQL_VERBOSE_log.append("SQL_ACTION:%s:%s:%s:%s:" % (action,cur_conn,sql,params))
+ sleep_time = SQL_TIMEOUT_TIME
+ start = _SQL_GET_MS()
+ exception_occured = False
+ for x in range(0, SQL_TIMEOUT_MAX):
+ exception_occured = False
+ try:
+ if 'exec' == action:
+ # to account for difference between mysql/postgres and sqlite
+ if not dbtype == "sqlite":
+ sql = sql.replace("?", "%s")
+ if dbtype == "postgres": # for postgres case insenstive issue
+ sql = sql.replace('`', '"') # replace backticks with double quotes
+ if "INSERT INTO" in sql:
+ sql += " RETURNING *"
+ camel_case_columns = ["lastModifiedDate", "publishedDate", "cvssV3_baseScore", "cvssV3_baseSeverity", "cvssV2_baseScore", "cvssV2_severity"]
+ for col in camel_case_columns:
+ if col in sql and f'"{col}"' not in sql:
+ sql = sql.replace(f'{col}', f'"{col}"')
+ if params:
+ ret = cur_conn.execute(sql, params)
+ else:
+ ret = cur_conn.execute(sql)
+ elif 'commit' == action:
+ ret = cur_conn.commit()
+ except Exception as e:
+ exception_occured = True
+ print(f"Error occured while running\nsql: {sql}\nparams:{params}\naction:{action}")
+ print(e)
+ time.sleep(sleep_time)
+ timeout_count += 1
+ pass
+        else:
+            _SQL_TRACE_LOG_ADD(start,_SQL_GET_MS(),timeout_count)
+            break
+ else:
+ # Give up, dump what we had, and trigger a proper error
+        _SQL_TRACE_LOG_ADD(start,_SQL_GET_MS(),timeout_count)
+        SQL_DUMP()
+ if 'exec' == action:
+ ret = cur_conn.execute(sql,params)
+ elif 'commit' == action:
+ ret = cur_conn.commit()
+ if not dbtype == "sqlite":
+ ret = cur_conn
+ return ret
+
+def SQL_CONNECT(column_names=False,dbconfig=None):
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ if dbtype == "mysql":
+ conn = MySQLdb.connect(
+ passwd=dbconfig["passwd"],
+ db=dbconfig["name"],
+ host=dbconfig["host"],
+ user=dbconfig["user"],
+ port=dbconfig["port"]
+ )
+ return conn
+ elif dbtype == "postgres":
+ if column_names:
+ conn = psycopg2.connect(
+ password=dbconfig["passwd"],
+ database=dbconfig["name"],
+ host=dbconfig["host"],
+ user=dbconfig["user"],
+ port=dbconfig["port"],
+ cursor_factory=RealDictCursor,
+ )
+ else:
+ conn = psycopg2.connect(
+ password=dbconfig["passwd"],
+ database=dbconfig["name"],
+ host=dbconfig["host"],
+ user=dbconfig["user"],
+ port=dbconfig["port"],
+ )
+ return conn
+ else: # Sqlite
+ conn = sqlite3.connect(dbconfig["path"])
+ if column_names:
+ conn.row_factory = sqlite3.Row
+ return conn
+
+def SQL_CURSOR(conn,dbconfig=None):
+ return(conn.cursor())
+
+def SQL_EXECUTE(cur,sql,params=None,dbconfig=None):
+ return(_SQL_ACTION('exec',cur,sql,params,dbconfig))
+
+def SQL_COMMIT(conn,dbconfig=None):
+    return(_SQL_ACTION('commit',conn,dbconfig=dbconfig))
+
+def SQL_CLOSE_CUR(cur,dbconfig=None):
+ return(cur.close())
+
+def SQL_CLOSE_CONN(conn,dbconfig=None):
+ return(conn.close())
+
+def SQL_GET_LAST_ROW_INSERTED_ID(cur,dbconfig=None):
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ if dbtype == "postgres":
+ return(SQL_FETCH_ONE(cur).id)
+ else:
+ return cur.lastrowid
+
+def SQL_FETCH_ONE(cur,dbconfig=None):
+ columns = cur.description
+ result = {columns[index][0]:column for index, column in enumerate(cur.fetchone()) }
+ return SimpleNamespace(**result)
+
+def SQL_FETCH_ALL(cur,dbconfig=None):
+ columns = cur.description
+ results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+ return [SimpleNamespace(**result) for result in results]
+
+def GET_DB_TYPE(dbconfig=None):
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+ return dbtype
+
+def SQL_BATCH_WRITE(cur_conn, table, records, dbconfig=None, fields=None, override_values=None):
+ '''
+ Batch write wrapper function
+ - Records must contain tuples of the same length
+
+ :param cur_conn: SQL connection
+ :param table: target table name
+ :param records: list of tuples containing records to be inserted
+ :param dbconfig: dbconfig['dbtype'] contains DB type
+ :param fields: list of specified fields to insert into
+ :param override_values: list of specified values
+
+ :return: SQL DB connection's cursor
+ '''
+
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ # invalid parameters check
+ if cur_conn == None or table == None or records == None:
+ raise Exception("SQL Batch Write Failed: invalid parameters provided")
+ if not isinstance(records, list) and not isinstance(records, tuple):
+ raise Exception("SQL Batch Write Failed: records must be of type 'list' or 'tuple'")
+
+ # invalid number of fields supplied check
+ std_record_ct = len(records[0])
+ for record in records:
+ if len(record) != std_record_ct:
+ raise BaseException("SQL Batch Write Failed: incorrect number of fields supplied")
+
+ # bulk insert
+ if fields != None:
+ _fields = "(" + ','.join([str(field) for field in fields]) + ")"
+ else:
+ _fields = ''
+
+ if dbtype == 'sqlite':
+ if override_values is None:
+ _ov = f"({','.join(['?'] * len(records[0]))})"
+ elif isinstance(override_values, list):
+ _ov = "(" + ','.join([str(ov) for ov in override_values]) + ")"
+ elif isinstance(override_values, str):
+ _ov = override_values
+ cur_conn.executemany(f"INSERT INTO {table}{_fields} VALUES{_ov};", records)
+ elif dbtype == 'postgres':
+ if override_values is None:
+ _ov = f"({','.join(['%s'] * len(records[0]))})"
+ elif isinstance(override_values, list):
+ _ov = "(" + ','.join([str(ov) for ov in override_values]) + ")"
+ elif isinstance(override_values, str):
+ _ov = override_values
+ psycopg2.extras.execute_batch(cur_conn, f"INSERT INTO {table}{_fields} VALUES{_ov};", records)
+
+ # conn.commit()
+ return cur_conn
+
+
+def SQL_BATCH_UPDATE(cur_conn, table, values_list, set_field, where_field, dbconfig=None):
+ '''
+ Batch update wrapper function (not tested)
+ - Records must contain tuples of the same length
+
+ :param cur_conn: SQL connection
+ :param table: target table name
+ :param values_list: parameter values provided to the SQL query
+ :param set_field: list containing the 'SET' parameterized fields in the SQL query
+ :param where_field: list containing the 'WHERE' parameterized fields in the SQL query
+ :param dbconfig: dbconfig['dbtype'] contains DB type
+
+ :return: SQL DB connection's cursor
+ '''
+
+ # Define the database type connection
+ if not dbconfig:
+ dbconfig = srt_dbconfig
+ dbtype = dbconfig['dbtype']
+
+ # invalid parameters check
+ if (cur_conn == None) or (table == None) or (set_field == None) or (where_field == None) or (values_list == None):
+ raise Exception("SQL Batch Update Failed: invalid parameters provided")
+
+ # invalid number of fields supplied check
+ if (len(set_field) + len(where_field)) != len(values_list[0]):
+ raise Exception(f"SQL Batch Update Failed: number of fields and values supplied mismatches ({len(set_field)},{len(where_field)},{len(values_list)})")
+
+ if dbtype == 'sqlite':
+ # generate the SQL command for sqlite
+ update_comm = f"UPDATE {table}"
+ set_comm = " SET " + ", ".join([f"{s_field} = ?" for s_field in set_field])
+ where_comm = " WHERE " + ", ".join([f"{w_field} = ?" for w_field in where_field])
+ sql = update_comm + set_comm + where_comm + ";"
+ cur_conn.executemany(sql, values_list)
+
+ elif dbtype == 'postgres':
+ # generate the SQL command for postgresql
+ update_comm = f"UPDATE {table}"
+ set_comm = " SET " + ", ".join([f"{s_field} = %s" for s_field in set_field])
+ where_comm = " WHERE " + ", ".join([f"{w_field} = %s" for w_field in where_field])
+ sql = update_comm + set_comm + where_comm + ";"
+ psycopg2.extras.execute_batch(cur_conn, sql, values_list)
+
+ # conn.commit()
+ return cur_conn
+
diff --git a/bin/common/srtool_test.py b/bin/common/srtool_test.py
new file mode 100755
index 00000000..b3af8033
--- /dev/null
+++ b/bin/common/srtool_test.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the common SRTool data source files
+#
+
+import os
+import sys
+import re
+import csv
+import json
+import argparse
+from common.srtool_sql import *
+import subprocess
+from time import sleep
+from datetime import datetime
+
+# Load the srt.sqlite schema index file
+# Since it is generated from this script
+# it may not exist on the first pass
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+try:
+ from common.srtool_progress import *
+ from common.srt_schema import ORM
+except:
+ # Do a pass so that '--generate-schema-header' can fix it
+ print("Warning: srt_schema not yet created or bad format")
+ pass
+
+# Setup:
+verbose = False
+cmd_skip = 0
+cmd_count = 0
+cmd_test = False
+
+srtDbName = 'srt.sqlite'
+packageKeywordsFile = 'data/package_keywords.csv'
+notifyCategoriesFile = 'data/notify-categories.json'
+
+#################################
+# Helper methods
+#
+
# Registry of command-line / environment overrides
overrides = {}

def set_override(key, value=None):
    """Record an override for *key*.

    If *value* is supplied it is stored verbatim.  Otherwise the environment
    variable of the same name is consulted: a value starting with '1' maps to
    'yes', anything else to 'no', and a missing variable records ''.
    Non-empty overrides are echoed to stdout.
    """
    if value is not None:
        stored = value
    elif key in os.environ:
        stored = 'yes' if os.environ[key].startswith('1') else 'no'
    else:
        stored = ''
    overrides[key] = stored
    if stored:
        print("OVERRIDE: %s = %s" % (key, stored))

def get_override(key):
    """Return the recorded override for *key*, or '' when none was set."""
    return overrides.get(key, '')
+
def get_name_sort(cve_name):
    """Return a sortable form of a CVE name, zero-padding the number.

    'CVE-2021-1234' becomes 'CVE-2021-0001234'.  Names that do not split
    into a 'XXX-YYYY-NNNN' pattern are returned unchanged.
    """
    try:
        prefix, year, number = cve_name.split('-')[0:3]
        return '%s-%s-%07d' % (prefix, year, int(number))
    except Exception:
        return cve_name
+
def get_tag_key(tag, key, default=None):
    """Decode the JSON string *tag* and return its *key* entry.

    Falls back to *default* when the key is absent.
    """
    decoded = json.loads(tag)
    return decoded[key] if key in decoded else default
+
+###############################################################
+#
+#
+
def reset_new():
    """Clear 'score_date' on open NEW CVEs so scoring can be re-run.

    Walks orm_cve rows whose status is NEW and whose score_date is set,
    nulling the score_date.  Commits every 200 records, and honors the
    global cmd_skip/cmd_count debug limits (cmd_count defaults to a cap
    of 201 so an unbounded reset cannot run away).
    """
    # NOTE: removed unused 'global recommends' and the unused 'cur_ds' cursor
    global cmd_skip
    global cmd_count

    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur_write = conn.cursor()
    is_change = False
    write_count = 0

    # Cap this
    if cmd_count == 0:
        cmd_count = 201
    progress_set_max(cmd_count)

    # Scan the open CVEs (status value comes from an ORM constant, not user input)
    sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NOT NULL;" % (ORM.STATUS_NEW)
    cur.execute(sql)
    for i, cve in enumerate(cur):
        cve_name = cve[ORM.CVE_NAME]
        progress_show(cve_name)

        # Progress indicator support
        if 0 == i % 10:
            print('%04d: %20s\r' % (i, cve_name), end='')
        if (0 == i % 200) and (not cmd_skip) and is_change:
            conn.commit()
            print("%4d: COMMIT" % i)
            sleep(2)
            is_change = False
        # Development/debug support: skip the first cmd_skip records
        if cmd_skip:
            if i < cmd_skip:
                continue
            else:
                cmd_skip = 0
        # Development/debug support: stop after cmd_count records
        if cmd_count:
            if (i - cmd_skip) > cmd_count:
                print("Count return: %s,%s" % (i, cmd_count))
                break

        sql = ''' UPDATE orm_cve
                  SET score_date = ?
                  WHERE id = ?'''
        cur_write.execute(sql, (None, cve[ORM.CVE_ID]))
        write_count += 1
        is_change = True

    if is_change:
        conn.commit()
        print("COMMIT")
    print("\nUpdated CVEs=%d" % (write_count))
+
+#################################
+# main loop
+#
+
def main(argv):
    """Parse command-line options and dispatch the requested test command.

    :param argv: argument list (normally sys.argv[1:])
    """
    global verbose
    global cmd_skip
    global cmd_count
    global cmd_test

    # setup
    # BUGFIX: description and verbose banner said 'srtool_common' (copy-paste
    # from srtool_common.py); also fixed the 'Rese'->'Reset' help typo and
    # removed the unused 'global update_skip_history'
    parser = argparse.ArgumentParser(description='srtool_test.py: manage SRTool test data')
    parser.add_argument('--reset-new', action='store_const', const='reset_new', dest='command', help='Reset new CVEs for score test')

    parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
    parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
    # BUGFIX: parse the passed-in argv instead of silently re-reading sys.argv
    args = parser.parse_args(argv)

    verbose = args.verbose
    cmd_test = args.test
    cmd_skip = 0
    if None != args.skip:
        cmd_skip = int(args.skip)
    cmd_count = 0
    if None != args.count:
        cmd_count = int(args.count)
    progress_set_on(args.do_progress)

    if verbose:
        print('srtool_test %s' % args)

    if 'reset_new' == args.command:
        reset_new()

    else:
        print("Command not found")
+
if __name__ == '__main__':
    # The SRTool base directory is three levels above this script
    srtool_basepath = os.path.abspath(sys.argv[0])
    for _ in range(3):
        srtool_basepath = os.path.dirname(srtool_basepath)
    main(sys.argv[1:])
+
+
+
diff --git a/bin/common/srtool_update.py b/bin/common/srtool_update.py
index 1ec6c0a3..3227534b 100755
--- a/bin/common/srtool_update.py
+++ b/bin/common/srtool_update.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,16 +23,17 @@
import os
import sys
import argparse
-import sqlite3
import json
import time
from datetime import datetime, timedelta
+import pytz
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
-
+from common.srtool_sql import *
# Setup:
verbose = False
master_log = ''
@@ -85,24 +86,107 @@ def get_tag_key(tag,key,default=''):
# ONDEMAND = 5 "{}" # only on demand
# ONSTARTUP = 6 "{}" # on every SRTool start up
-def run_updates(force_all,name_filter,is_trial):
-
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
-
def next_refresh_date(update_frequency, update_time_keys, last_updated_date, display=False):
    """Compute the next scheduled refresh datetime for a data source.

    :param update_frequency: one of the ORM.DATASOURCE_* frequency constants
    :param update_time_keys: JSON string of schedule keys
        (minutes/minute/hour/weekday/day, all optional)
    :param last_updated_date: datetime of the previous update
    :param display: when True, print the computed schedule for debugging
    :return: datetime of the next scheduled refresh
    """
    # Schedule keys, with the historical default values
    delta_minutes = int(get_tag_key(update_time_keys, 'minutes', '10'))
    delta_months = int(get_tag_key(update_time_keys, 'months', '1'))
    minute_of_day = int(get_tag_key(update_time_keys, 'minute', '10'))
    hour_of_day = int(get_tag_key(update_time_keys, 'hour', '12'))
    weekday_of_week = int(get_tag_key(update_time_keys, 'weekday', '4'))
    day_of_week = int(get_tag_key(update_time_keys, 'day', '4'))

    if ORM.DATASOURCE_MINUTELY == update_frequency:
        # Simple offset from the previous update
        next_date = last_updated_date + timedelta(minutes=delta_minutes)
    elif ORM.DATASOURCE_HOURLY == update_frequency:
        next_date = last_updated_date.replace(minute=minute_of_day)
        if next_date < last_updated_date:
            next_date += timedelta(hours=1)
    elif ORM.DATASOURCE_DAILY == update_frequency:
        next_date = last_updated_date.replace(hour=hour_of_day, minute=minute_of_day)
        if next_date < last_updated_date:
            next_date += timedelta(days=1)
    elif ORM.DATASOURCE_WEEKLY == update_frequency:
        next_date = last_updated_date.replace(hour=hour_of_day, minute=minute_of_day)
        weekday = next_date.weekday()
        if weekday >= weekday_of_week:
            # NOTE(review): a same-weekday match is always pushed a full week,
            # even if the scheduled time of day has not passed yet -- confirm
            next_date += timedelta(days=(7 + weekday_of_week - weekday))
        else:
            next_date += timedelta(days=(weekday_of_week - weekday))
    elif ORM.DATASOURCE_MONTHLY == update_frequency:
        next_date = last_updated_date.replace(day=day_of_week, hour=hour_of_day, minute=minute_of_day)
        if next_date < last_updated_date:
            # Approximate month step (31 days)
            next_date += timedelta(days=31)
    else:
        print("ERROR:unknown update '%s'" % update_frequency)
        exit(1)

    if display:
        # ORM.DATASOURCE_DATETIME_FORMAT
        print("%s <= %s,%s,%s" % (next_date.strftime("%c"), last_updated_date.strftime("%c"), update_frequency, update_time_keys))
    return(next_date)
+
def update_unit_test():
    """Exercise next_refresh_date() across all frequency modes, printing each result."""
    # datetime(year, month, day, hour=0, minute=0, tzinfo=None)
    date_now = datetime.now(pytz.utc)
    print("Unit test the update differentials and modes")

    cases = [
        (ORM.DATASOURCE_MINUTELY, "{\"minutes\":\"10\"}", date_now),
        (ORM.DATASOURCE_MINUTELY, "{\"minutes\":\"10\"}", date_now.replace(minute=59)),

        (ORM.DATASOURCE_HOURLY, "{\"minute\":\"10\"}", date_now.replace(minute=11)),
        (ORM.DATASOURCE_HOURLY, "{\"minutes\":\"10\"}", date_now.replace(minute=9)),

        (ORM.DATASOURCE_DAILY, "{\"hour\":\"2\"}", date_now.replace(hour=1)),
        (ORM.DATASOURCE_DAILY, "{\"hour\":\"2\"}", date_now.replace(hour=3)),

        # May need to adjust these relative to today's test day of week
        (ORM.DATASOURCE_WEEKLY, "{\"weekday\":\"5\",\"hour\":\"2\"}", date_now.replace(day=3, hour=1)),
        (ORM.DATASOURCE_WEEKLY, "{\"weekday\":\"5\",\"hour\":\"2\"}", date_now.replace(day=4, hour=1)),
        (ORM.DATASOURCE_WEEKLY, "{\"weekday\":\"5\",\"hour\":\"2\"}", date_now.replace(day=6, hour=3)),

        (ORM.DATASOURCE_MONTHLY, "{\"day\":\"5\",\"hour\":\"2\"}", date_now.replace(day=4, hour=1)),
        (ORM.DATASOURCE_MONTHLY, "{\"day\":\"25\",\"hour\":\"2\"}", date_now.replace(day=24, hour=1)),
        (ORM.DATASOURCE_MONTHLY, "{\"day\":\"25\",\"hour\":\"2\"}", date_now.replace(day=26, hour=1)),
    ]
    for frequency, time_keys, last_update in cases:
        next_refresh_date(frequency, time_keys, last_update, True)
+
def time_delta_to_dhms(time_to_go):
    """Split a timedelta into a [days, hours, minutes, seconds] list."""
    minutes_total, seconds = divmod(time_to_go.seconds, 60)
    hours, minutes = divmod(minutes_total, 60)
    return [time_to_go.days, hours, minutes, seconds]
+
+def run_updates(force_all,name_filter,update_skip_history,is_trial):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
+
+ # get local timezone
+# local_tz = datetime.now().astimezone().tzinfo
+# time_now = datetime.now(local_tz) #datetime.now(pytz.utc)
time_now = datetime.now() #datetime.now(pytz.utc)
- if verbose:
- print("SRTool Update: time_now = %s" % time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
status_str = "============================================================\n"
- status_str += "Update: Date=%s,Filter='%s',Force=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all)
+ status_str += "Update: Date=%s,Filter='%s',Force=%s,Skip_History=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all,update_skip_history)
#get sources that have update command
- sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
+ sources = SQL_EXECUTE(cur, '''SELECT * FROM orm_datasource''').fetchall()
for source in sources:
# Only process datasoures with update command
if not source[ORM.DATASOURCE_UPDATE]:
continue
+ elif 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
+ # Data source disabled
+ continue
# Test filter
if 'all' != name_filter:
@@ -120,94 +204,76 @@ def run_updates(force_all,name_filter,is_trial):
# testdate = datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *,
# testdiff = timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0)
- #print("Update datasource:'%s'" % source[ORM.DATASOURCE_DESCRIPTION])
-
# Get the datasource values
update_frequency = source[ORM.DATASOURCE_UPDATE_FREQUENCY]
+ if update_frequency in (ORM.DATASOURCE_ONDEMAND,ORM.DATASOURCE_ONSTARTUP,ORM.DATASOURCE_PREINIT):
+ continue
if not source[ORM.DATASOURCE_LASTUPDATEDDATE]:
- # Force update if no registed updated date for datasource (i.e. at Init phase)
- last_updated_date = time_now - timedelta(days=365)
+ if ORM.DATASOURCE_MINUTELY == update_frequency:
+ # Force MINUTELY to the current time)
+ last_updated_date = time_now
+ else:
+ # Force update if no registed updated date for datasource (i.e. at Init phase)
+ last_updated_date = time_now - timedelta(days=365)
+ sql = "UPDATE orm_datasource SET lastUpdatedDate=? WHERE id=?"
+ ret = SQL_EXECUTE(cur, sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
+ SQL_COMMIT(conn)
else:
last_updated_date = datetime.strptime(source[ORM.DATASOURCE_LASTUPDATEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
- # Get the update presets
- update_time = source[ORM.DATASOURCE_UPDATE_TIME]
- delta_minutes = get_tag_key(update_time,'minutes',None)
- delta_minute = get_tag_key(update_time,'minute',None)
- delta_hour = get_tag_key(update_time,'hour',None)
- delta_weekday = get_tag_key(update_time,'weekday',None)
- delta_day = get_tag_key(update_time,'day',None)
- # Calulate the next update datetime
- if ORM.DATASOURCE_MINUTELY == update_frequency:
- if not delta_minutes:
- print("ERROR:Missing minutes in '%s' for '%s'" % (source[ORM.DATASOURCE_DESCRIPTION],update_time))
- delta_minutes = 10
- testdiff = timedelta(minutes=int(delta_minutes))
- elif ORM.DATASOURCE_HOURLY == update_frequency:
- testdiff = timedelta(hours=1)
- elif ORM.DATASOURCE_DAILY == update_frequency:
- testdiff = timedelta(days=1)
- elif ORM.DATASOURCE_WEEKLY == update_frequency:
- testdiff = timedelta(weeks=1)
- elif ORM.DATASOURCE_MONTHLY == update_frequency:
- testdiff = timedelta(months=1)
- elif ORM.DATASOURCE_ONDEMAND == update_frequency:
- continue
- elif ORM.DATASOURCE_ONSTARTUP == update_frequency:
- continue
- testdate = last_updated_date + testdiff
-
- # Adjust for update presets
- if None != delta_minute:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, testdate.day, testdate.hour, int(delta_minute), testdate.second)
- if None != delta_day:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, testdate.day, int(delta_hour), testdate.minute, testdate.second)
- if None != delta_day:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, int(delta_day), testdate.hour, testdate.minute, testdate.second)
- if None != delta_weekday:
- # Force to selected day of week
- testdiff = timedelta( days=(int(delta_weekday) - testdate.weekday()) )
- testdate += testdiff
+
+ # Get the calculated next update datetime
+ update_time = source[ORM.DATASOURCE_UPDATE_TIME]
+ testdate = next_refresh_date(update_frequency,update_time,last_updated_date)
# Not yet?
+ frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
if testdate > time_now:
time_to_go = testdate - time_now
- frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
- status_str += " Skip (next=%s in days=%2s minutes=%4s,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),time_to_go.days,time_to_go.seconds//60,frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
+ dhms = time_delta_to_dhms(time_to_go)
+ if ORM.DATASOURCE_MINUTELY == update_frequency:
+ status_str += " Pend (next<%s in days=%2d hours=%2d mins=%02d:%02d,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),dhms[0],dhms[1],dhms[2],dhms[3],frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
+ else:
+ status_str += " Pend (next<%s in days=%2d hours=%2d minutes=%2d,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),dhms[0],dhms[1],dhms[2],frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
continue
else:
- status_str += " UPDATE '%s': Time reached (%s)\n" % (source[ORM.DATASOURCE_DESCRIPTION],testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
+ status_str += " GO (GO >%s' (%s) ,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT), last_updated_date , frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
# Execute the update
if is_trial:
print("TRIAL: Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
status_str += " > TRIAL: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
else:
- # First update the datasource's last_updated_date so avoid dual triggers
+ # First update the datasource's last_updated_date to avoid dual triggers
# (e.g. a manual test run on top of an automatic run)
sql = "UPDATE orm_datasource SET lastUpdatedDate=? WHERE id=?"
- cur_write.execute(sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
- conn.commit()
+ ret = SQL_EXECUTE(cur, sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
+ SQL_COMMIT(conn)
- print("Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
+ print("Update required\t...\texecuting '%s' (%s)" % (source[ORM.DATASOURCE_UPDATE],time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
status_str += " > EXECUTE: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
- master_write("SRTOOL_UPDATE:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
+ master_write("SRTOOL_UPDATE_STRT:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
update_command = source[ORM.DATASOURCE_UPDATE]
if force_all:
update_command += " --force"
- if update_command.startswith('./'):
+ if update_skip_history:
+ update_command += " --update-skip-history"
+ if update_command.startswith('!'):
+ update_command = update_command[1:]
+ elif not update_command.startswith('/'):
update_command = os.path.join(script_pathname, update_command)
os.system("echo 'Update:%s,%s' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
- os.system(update_command)
+
+ #
+ # bin/common/srtool_job.py -c "<cmnd>" -j 1 -l update_logs/run_job.log
+ os.system("bin/common/srtool_job.py --name %s --command \"%s\" --job-id 1 --log update_logs/run_job.log" % (source[ORM.DATASOURCE_NAME],update_command))
+ #
os.system("echo 'Done:%s,%s' >> %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
master_write("SRTOOL_UPDATE_DONE:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
# Take a breath, let any commits settle
time.sleep(10)
- conn.close()
+ SQL_CLOSE_CONN(conn)
# Status summary
with open(os.path.join(script_pathname,UPDATE_STATUS_LOG), 'w') as status_file:
@@ -215,31 +281,73 @@ def run_updates(force_all,name_filter,is_trial):
if verbose:
print(status_str)
def fetch_updates_dhm():
    """Print the time remaining until each data source's next update.

    One line per source: "<id>,DD|HH:MM:SS" for a pending update,
    "<id>,Next!" when the update is already due, or a parenthesized state
    for sources that do not auto-update.
    """
    conn = SQL_CONNECT()
    cur = SQL_CURSOR(conn)
    cur_write = SQL_CURSOR(conn)
    time_now = datetime.now() #datetime.now(pytz.utc)

    # Get sources
    for source in SQL_EXECUTE(cur, '''SELECT * FROM orm_datasource ORDER BY id ASC''').fetchall():
        update_time = source[ORM.DATASOURCE_UPDATE_TIME]
        update_frequency = source[ORM.DATASOURCE_UPDATE_FREQUENCY]
        frequency_str = ORM.get_orm_string(update_frequency, ORM.DATASOURCE_FREQUENCY_STR)

        # Non-update states have no countdown
        if update_frequency in (ORM.DATASOURCE_ONDEMAND, ORM.DATASOURCE_ONSTARTUP, ORM.DATASOURCE_PREINIT):
            print("%s,(%s)" % (source[ORM.DATASOURCE_ID], frequency_str))
            continue
        if not source[ORM.DATASOURCE_UPDATE]:
            print("%s,(NoUpdate)" % source[ORM.DATASOURCE_ID])
            continue
#        if 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
#            print("%s,(Disabled)" % source[ORM.DATASOURCE_ID])
#            continue

        # Get the datasource values
        if not source[ORM.DATASOURCE_LASTUPDATEDDATE]:
            last_updated_date = time_now
        else:
            last_updated_date = datetime.strptime(source[ORM.DATASOURCE_LASTUPDATEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)

        # Get the calculated next update datetime
        next_date = next_refresh_date(update_frequency, update_time, last_updated_date)
        if next_date <= time_now:
            print("%s,Next!" % source[ORM.DATASOURCE_ID])
        else:
            days, hours, minutes, seconds = time_delta_to_dhms(next_date - time_now)
            if ORM.DATASOURCE_MINUTELY == update_frequency:
                print("%s,%02d|%02d:%02d:%02d" % (source[ORM.DATASOURCE_ID], days, hours, minutes, seconds))
            else:
                print("%s,%02d|%02d:%02d:00" % (source[ORM.DATASOURCE_ID], days, hours, minutes))
    SQL_CLOSE_CONN(conn)
+
+####################################################################
+###
+
#time must be in '%H:%M:%S' format
def configure_ds_update(datasource_description, frequency, time):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
sql = "UPDATE orm_datasource SET update_frequency=?, update_time=? WHERE description=?"
cur.execute(sql, (frequency, time, datasource_description))
conn.commit()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# List update data sources
#
def list():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
- format_str = "%16s %7s %14s %10s %28s %s"
+ format_str = "%16s %9s %14s %10s %28s '%s'"
print("SRTool Update List:")
- print(format_str % ('Data','Source','Name','Frequency','Offset','Description'))
- print("================ ======= ============== ========== ============================ ===========================================")
+ print(format_str % ('Data',' Source','Name','Frequency','Offset','Description'))
+ print("================ ========= ============== ========== ============================ ===========================================")
#get sources that have update command
sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
for source in sources:
@@ -247,20 +355,57 @@ def list():
if not source[ORM.DATASOURCE_UPDATE]:
continue
frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
+ if 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
+ frequency_str = 'DISABLED'
print(format_str % (source[ORM.DATASOURCE_DATA],source[ORM.DATASOURCE_SOURCE],source[ORM.DATASOURCE_NAME],frequency_str,source[ORM.DATASOURCE_UPDATE_TIME],source[ORM.DATASOURCE_DESCRIPTION]))
if verbose:
print('')
- run_updates(False,'all',True)
+ run_updates(False,'all',True,True)
+
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CUR(cur_write)
+ SQL_CLOSE_CONN(conn)
#################################
# Start 'cron' job for updates
#
-def cron_start():
def check_updates_enabled(follow_pid_file):
    """Decide whether scheduled updates may run right now.

    Returns False when *follow_pid_file* names a PID file that is missing or
    whose process is no longer running, or when the 'SRT_DISABLE_UPDATES'
    setting in the database is 'yes'.  Returns True otherwise.
    """
    if verbose: print(f"CHECK_UPDATES_ENABLED...")
    # Honor the optional "follow" PID file: updates only run while that process lives
    if follow_pid_file:
        if not os.path.isfile(follow_pid_file):
            if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:NOT_EXIST:{follow_pid_file}")
            return False
        with open(follow_pid_file) as pid_fd:
            followed_pid = pid_fd.readlines()[0].strip()
        if os.system(f"ps -p {followed_pid} > /dev/null 2>&1"):
            if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:NOT_RUNNING:{followed_pid}")
            return False
        if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:RUNNING:{followed_pid}")

    # Check the master-disable setting in the database
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)
    sql = 'SELECT * FROM orm_srtsetting WHERE "name" = ?'
    enable_update_setting = SQL_EXECUTE(cur, sql, ('SRT_DISABLE_UPDATES',)).fetchone()
    enabled = (not enable_update_setting) or ('yes' != enable_update_setting['value'])
    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
    if verbose: print(f"CHECK_UPDATES_ENABLED:SRT_DISABLE_UPDATES:{'GO' if enabled else 'SKIP'}:")
    return enabled
+
+def cron_start(follow_pid_file):
pid = os.getpid()
master_write("SRTOOL_UPDATE:%s:Starting -v update cron job, pid=%s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),pid))
- os.system("echo 'Start:%s,<cron_start>' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+ os.system("echo 'Start:%s,<cron_start>!' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+
+#
+ print("echo 'Start:%s,<cron_start>!' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
# Preserve this app's pid
srt_update_pid_file = os.path.join(script_pathname,SRT_UPDATE_PID_FILE)
@@ -273,16 +418,25 @@ def cron_start():
# Loop until app is killed
extra_line = False
while True:
- # Run the updates
- run_updates(False,'all',False)
- # Toggle an extra line in the log to make updates obvious
- if extra_line:
- extra_line = False
- os.system("echo '' >> %s" % os.path.join(script_pathname,UPDATE_STATUS_LOG))
- else:
- extra_line = True
- # Default to 5 minute loop
- time.sleep(5 * 60)
+ try:
+ if check_updates_enabled(follow_pid_file):
+ # Run the updates
+ run_updates(False,'all',False,False)
+ # Toggle an extra line in the log to make updates obvious
+ if extra_line:
+ extra_line = False
+ os.system("echo '' >> %s" % os.path.join(script_pathname,UPDATE_STATUS_LOG))
+ else:
+ extra_line = True
+ # Default to 5 minute loop
+
+ os.system("echo 'Sleep:%s,update in 5 minutes (%s)' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),check_updates_enabled(follow_pid_file),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+
+ run_updates(False,'all',True,True)
+
+ time.sleep(5 * 60)
+ except Exception as e:
+ master_write("SRTOOL_UPDATE:ERROR:%s:%s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),e))
def cron_stop():
# Fetch the stored update app's pid
@@ -308,18 +462,29 @@ def main(argv):
global master_log
# setup
- parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')
+ parser = argparse.ArgumentParser(description='srtool_update.py: manage the SRTool backgtoup tasks')
+ # Commands
parser.add_argument('--cron-start', action='store_const', const='cron_start', dest='command', help='Start the SRTool background updater')
parser.add_argument('--cron-stop', action='store_const', const='cron_stop', dest='command', help='Stop the SRTool background updater')
+ parser.add_argument('--follow-pid-file', dest='follow_pid_file', help='Only update when PID in this file is running')
+ # Status
parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List data sources')
parser.add_argument('--run-updates', '-u', action='store_const', const='run-updates', dest='command', help='Update scheduled data sources')
parser.add_argument('--name-filter', '-n', dest='name_filter', help='Filter for datasource name')
+ parser.add_argument('--status', '-s', action='store_const', const='status', dest='command', help='Current status of the run queue')
+ parser.add_argument('--fetch-updates-dhm', action='store_const', const='fetch_updates_dhm', dest='command', help='Fetch next updates for all sources')
+ parser.add_argument('--check-updates-enabled', action='store_const', const='check_updates_enabled', dest='command', help='Unit test the update offsets')
- parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
- parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
- parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
+ # Test
+ parser.add_argument('--update-unit-test', '-U', action='store_const', const='update_unit_test', dest='command', help='Unit test the update offsets')
+
+ # Debugging support
+ parser.add_argument('--force', '-f', action='store_true', dest='force', help='Flag: Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Flag: Skip history updates')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Flag: debug verbose output')
+ parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Flag: Debugging: trial run')
parser.add_argument('--configure_ds_update', '-T', nargs=3, help='Set update frequency and time for specified datasource. Check bin/README.txt for more info')
@@ -337,12 +502,18 @@ def main(argv):
elif 'run-updates' == args.command:
try:
print("BEGINNING UPDATING DATASOURCES... this MAY take a long time")
- run_updates(args.force,name_filter,args.is_trial)
+ run_updates(args.force,name_filter,args.update_skip_history,args.is_trial)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES:\t\t\t...\t\t\tSUCCESS\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
print("FINISHED UPDATING ALL DATASOURCES\n")
except Exception as e:
print("FAILED UPDATING ALL DATASOURCES (%s)" % e)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES\t\t\t...\t\t\tFAILED ... %s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), e))
+ traceback.print_exc(file=sys.stdout)
+ elif 'fetch_updates_dhm' == args.command:
+ fetch_updates_dhm()
+ elif 'check_updates_enabled' == args.command:
+ verbose = True
+ check_updates_enabled(args.follow_pid_file)
elif args.configure_ds_update:
try:
print("CHANGING UPDATE CONFIGURATION FOR %s" % args.configure_ds_update[0])
@@ -351,12 +522,21 @@ def main(argv):
except Exception as e:
print("FAILED TO CONFIGURE UPDATE SETTINGS FOR %s" % args.configure_ds_update[0])
master_log.write("SRTOOL:%s:%s\t\t\t...\t\t\tFAILED ... %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), args.configure_ds_update[0], e))
+ elif 'status' == args.command:
+ verbose = True
+ run_updates(False,'all',True,True)
+
elif 'cron_start' == args.command:
- cron_start()
+ cron_start(args.follow_pid_file)
elif 'cron_stop' == args.command:
cron_stop()
+
+ elif 'update_unit_test' == args.command:
+ verbose = True
+ update_unit_test()
+
else:
- print("Command not found")
+ print("srtool_update: Command not found")
master_log.close()
if __name__ == '__main__':
diff --git a/bin/common/srtool_utils.py b/bin/common/srtool_utils.py
index 8c13f3a1..cd0305a1 100755
--- a/bin/common/srtool_utils.py
+++ b/bin/common/srtool_utils.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018-2019 Wind River Systems
+# Copyright (C) 2018-2020 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -24,19 +24,29 @@
import os
import sys
import argparse
-import sqlite3
+from datetime import datetime, date
+import time
+import re
+import subprocess
+import json
+import xml.etree.ElementTree as ET
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
-from common.srt_schema import ORM
+from common.srtool_progress import *
+#from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
cmd_skip = 0
cmd_count = 0
+force = False
+debug_sql = False
srtDbName = 'srt.sqlite'
+srtSchemaName = 'srt_schema.py'
#################################
# Common routines
@@ -53,6 +63,85 @@ def _log(msg):
f1.write("|" + msg + "|\n" )
f1.close()
+# Sub Process calls
+def execute_process(*args):
+ cmd_list = []
+ for arg in args:
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ cmd_list.append(a)
+ else:
+ cmd_list.append(arg)
+
+ # Python < 3.5 compatible
+ if sys.version_info < (3,5):
+ process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ try:
+ stdout, stderr = process.communicate(input)
+ except:
+ process.kill()
+ process.wait()
+ raise
+ retcode = process.poll()
+ return retcode, stdout, stderr
+ else:
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return result.returncode,result.stdout,result.stderr
+
+#################################
+# ORM mapping for the given database file
+#
+
+# ORM mapping for the given database file
+class ORM_Class(object):
+ # Members will be added dynamically
+
+ # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')
+ @staticmethod
+ def get_orm_string(value,string_set):
+ if None == value: return('None')
+ string_list = string_set.split(',')
+ string_count = len(string_list)
+ value = int(value)
+ if (value < 0) or (value >= string_count):
+ print("ERROR: value '%d' out of range of '%s'" % (value,string_set))
+ return '<error>'
+ return string_list[value]
+
+# Instantiate the ORM class object
+ORM = ORM_Class()
+
+# Attach the specific database schema attributes and values
+def import_orm_schema(databaseDir):
+ global ORM
+
+ # Local or generate database schema file
+ if not databaseDir or ('.' == databaseDir):
+ # Normal database and schema locations
+ schema = os.path.join(databaseDir,'bin/common',srtSchemaName)
+ else:
+ # Remote database location (e.g. backups)
+ schema = os.path.join(databaseDir,srtSchemaName)
+ # Generate the schema file if not found
+ if not os.path.isfile(schema):
+ ret = os.system("%s --generate-schema-header-dir %s" % (os.path.join(srtool_basepath,'bin/common/srtool_common.py'),databaseDir))
+
+ with open(schema) as fp:
+ for line in fp:
+ try:
+ name = line[:line.index('=')].strip()
+ value = line[line.index('=')+1:].strip()
+ if '"' == value[0]:
+ value = value[1:-1]
+ elif "'" == value[0]:
+ value = value[1:-1]
+ else:
+ value = int(value)
+ except:
+ continue
+ setattr(ORM, name, value)
+
#################################
# reset sources
#
@@ -64,10 +153,10 @@ def commit_to_source(conn, source_data):
WHERE id = ?'''
cur = conn.cursor()
print("UPDATE_SCORE:%s" % str(source_data))
- cur.execute(sql, source_data)
+ SQL_EXECUTE(cur, sql, source_data)
def sources(cmnd):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
print('Sources(%s)' % cmnd)
@@ -101,7 +190,7 @@ def sources(cmnd):
def settings():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# Scan the SRTool Settings
@@ -116,7 +205,7 @@ def settings():
#
def remove_app_sources(master_app):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -133,7 +222,7 @@ def remove_app_sources(master_app):
if is_change:
conn.commit()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# fix_new_reserved
@@ -141,10 +230,7 @@ def remove_app_sources(master_app):
# Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
def fix_new_reserved():
- global cmd_skip
- global cmd_count
-
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -171,6 +257,7 @@ def fix_new_reserved():
reserved_pos = cve[ORM.CVE_DESCRIPTION].find('** RESERVED **')
if (0 <= reserved_pos) and (20 > reserved_pos):
print("STATUS_NEW_RESERVED:%s:%s:%s" % (cve[ORM.CVE_STATUS],cve[ORM.CVE_NAME],cve[ORM.CVE_DESCRIPTION][:40]))
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
sql = ''' UPDATE orm_cve
SET status = ?
WHERE id = ?'''
@@ -180,36 +267,2756 @@ def fix_new_reserved():
conn.commit()
#################################
+# fix_new_tags
+#
+
+# Fix the None "cve.tags" fields
+def fix_new_tags():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ j = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if not cve[ORM.CVE_TAGS]:
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET tags = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, ('', cve[ORM.CVE_ID],))
+ j += 1
+ print("\nCVE COUNT=%5d,%5d" % (i,j))
+ conn.commit()
+
+#################################
+# fixup fix_name_sort
+#
+
+# Recompute all of the CVE name_sort fields
+def fix_name_sort():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve')
+ for i,cve in enumerate(cur):
+ name_sort = get_name_sort(cve[ORM.CVE_NAME])
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s to %20s\r' % (i,cve[ORM.CVE_NAME],name_sort), end='')
+ if (0 == i % 200):
+ conn.commit()
+
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ sql = ''' UPDATE orm_cve
+ SET name_sort = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (name_sort, cve[ORM.CVE_ID],))
+ conn.commit()
+
+#################################
+# fixup fix_cve_recommend
+#
+
+# Reset empty CVE recommend fields to the proper integer zero
+def fix_cve_recommend():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ cur.execute('SELECT * FROM orm_cve WHERE recommend = ""')
+ i = 0
+ fix_count = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+
+ #
+        # Fix mis-write to lastModifiedDate, missing integer for recommend
+ #
+
+ fix = False
+
+ lastModifiedDate = cve[ORM.CVE_LASTMODIFIEDDATE]
+ if '0' == lastModifiedDate:
+ lastModifiedDate = ''
+ fix = True
+
+ recommend = cve[ORM.CVE_RECOMMEND]
+ if not recommend:
+ recommend = 0
+ fix = True
+
+ # NOTE: we do not touch 'cve.srt_updated' for this background change
+ if fix:
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?, lastModifiedDate = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (recommend, lastModifiedDate, cve[ORM.CVE_ID],))
+
+ fix_count += 1
+ if (199 == fix_count % 200):
+ conn.commit()
+
+ print("CVE RECOMMEND FIX COUNT=%d of %d" % (fix_count,i))
+ if fix_count:
+ conn.commit()
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# fixup fix_srt_dates
+#
+
+# Reset older 'date' values as 'datetime' values
+
+def _fix_datetime(value,default):
+ if (not value) or (not value[0].isdigit()):
+ return(default)
+ elif ':' in value:
+ return(value)
+ else:
+ return(datetime.strptime(value, '%Y-%m-%d'))
+
+def _fix_date(value,default):
+ if (not value) or (not value[0].isdigit()):
+ return(False,default)
+ elif not ':' in value:
+ return(False,value)
+ else:
+ value = re.sub('\..*','',value)
+ dt = datetime.strptime(value,ORM.DATASOURCE_DATETIME_FORMAT)
+ return(True,dt.strftime(ORM.DATASOURCE_DATE_FORMAT))
+
+
+def fix_srt_datetime(scope):
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ if ('d' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ is_change_count = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ if (0 == i % 200):
+ if force: conn.commit()
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ defect_srt_updated = _fix_datetime(defect[ORM.DEFECT_SRT_UPDATED],defect[ORM.DEFECT_DATE_UPDATED])
+ if defect_srt_updated == defect[ORM.DEFECT_SRT_UPDATED]:
+ continue
+
+ if force:
+ sql = ''' UPDATE orm_defect
+ SET srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (defect_srt_updated, defect[ORM.DEFECT_ID],))
+ is_change_count += 1
+ print("DEFECT DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ if force: conn.commit()
+
+ # INVESTIGATION DATE FIX COUNT=1089363, real 12m20.041s = 1472 recs/sec
+ if ('i' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_investigation')
+ i = 0
+ is_change_count = 0
+ for investigation in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,investigation[ORM.INVESTIGATION_NAME]), end='')
+ if (0 == i % 200):
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ srt_updated = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_UPDATED],None)
+ srt_created = _fix_datetime(investigation[ORM.INVESTIGATION_SRT_CREATED],None)
+ if (not srt_updated) or (not srt_created):
+ print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,investigation[ORM.INVESTIGATION_ID],investigation[ORM.INVESTIGATION_SRT_UPDATED],investigation[ORM.INVESTIGATION_SRT_CREATED]))
+ exit(1)
+ if (srt_updated == investigation[ORM.INVESTIGATION_SRT_UPDATED]) and (srt_created == investigation[ORM.INVESTIGATION_SRT_CREATED]):
+ continue
+
+ if force:
+ sql = ''' UPDATE orm_investigation
+ SET srt_updated = ?, srt_created = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_updated, srt_created, investigation[ORM.INVESTIGATION_ID],))
+ is_change_count += 1
+ print("INVESTIGATION DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ if force: conn.commit()
+
+ # VULNERABILITY DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+ if ('v' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_vulnerability')
+ i = 0
+ is_change_count = 0
+ for vulnerability in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,vulnerability[ORM.VULNERABILITY_NAME]), end='')
+ if (0 == i % 200):
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ srt_updated = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_UPDATED],None)
+ srt_created = _fix_datetime(vulnerability[ORM.VULNERABILITY_SRT_CREATED],None)
+ if (not srt_updated) or (not srt_created):
+ print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,vulnerability[ORM.VULNERABILITY_ID],vulnerability[ORM.VULNERABILITY_SRT_UPDATED],vulnerability[ORM.VULNERABILITY_SRT_CREATED]))
+ exit(1)
+ if (srt_updated == vulnerability[ORM.VULNERABILITY_SRT_UPDATED]) and (srt_created == vulnerability[ORM.VULNERABILITY_SRT_CREATED]):
+ continue
+
+ if force:
+ sql = ''' UPDATE orm_vulnerability
+ SET srt_updated = ?, srt_created = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_updated, srt_created, vulnerability[ORM.VULNERABILITY_ID],))
+ is_change_count += 1
+ print("VULNERABILITY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ if force: conn.commit()
+
+ # CVE DATE FIX COUNT=86585, real 1m2.969s = 1374 recs/sec
+    # NOTE: only ACK dates need fixing, received bad alpha content from srtool_mitre
+ if ('c' == scope) or ('all' == scope):
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200) and is_change:
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ is_change = False
+
+ if cve[ORM.CVE_ACKNOWLEDGE_DATE]:
+ acknowledge_date = _fix_datetime(cve[ORM.CVE_ACKNOWLEDGE_DATE],'alpha')
+ # If the default 'alpha' happens, then date had bad format and must go away
+ if ('alpha' == acknowledge_date) or (acknowledge_date != cve[ORM.CVE_ACKNOWLEDGE_DATE]):
+ acknowledge_date = None
+ is_change = True
+
+ srt_updated = _fix_datetime(cve[ORM.CVE_SRT_UPDATED],None)
+ srt_created = _fix_datetime(cve[ORM.CVE_SRT_CREATED],None)
+ if (not srt_updated) or (not srt_created):
+ print("ERROR[%d]: bad date field at '%s', U=%s,C=%s" % (i,cve[ORM.CVE_ID],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_SRT_CREATED]))
+ exit(1)
+ if (srt_updated != cve[ORM.CVE_SRT_UPDATED]) or (srt_created != cve[ORM.CVE_SRT_CREATED]):
+ is_change = True
+
+ # Anything to do?
+ if not is_change:
+ continue
+
+ if force:
+ sql = ''' UPDATE orm_cve
+ SET srt_updated = ?, srt_created = ?, acknowledge_date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_updated, srt_created, acknowledge_date, cve[ORM.CVE_ID],))
+ is_change = True
+ is_change_count += 1
+ print("CVE DATE FIX COUNT=%d/%d" % (is_change_count,i))
+ if force and is_change: conn.commit()
+
+ # Fix CVE History
+ if scope in ('ch','all','history'):
+ cur.execute('SELECT * FROM orm_cvehistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for cve_history in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(cve_history[ORM.CVEHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ if force:
+ sql = ''' UPDATE orm_cvehistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, cve_history[ORM.CVEHISTORY_ID],))
+ is_change = True
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if force and is_change: conn.commit()
+ print("CVE HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+ # Fix Vulnerability History
+ if scope in ('vh','all','history'):
+ cur.execute('SELECT * FROM orm_vulnerabilityhistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for vulnerabilityhistory in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(vulnerabilityhistory[ORM.VULNERABILITYHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ if force:
+ is_change = True
+ sql = ''' UPDATE orm_vulnerabilityhistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, vulnerabilityhistory[ORM.VULNERABILITYHISTORY_ID],))
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if force and is_change: conn.commit()
+ print("VULNERABILITY HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+ # Fix Investigation History
+ if scope in ('ih','all','history'):
+ cur.execute('SELECT * FROM orm_investigationhistory')
+ i = 0
+ # Sparse updates
+ is_change = False
+ is_change_count = 0
+ for investigation_history in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: \r' % (i), end='')
+ if (0 == i % 200) and is_change:
+ if force: conn.commit()
+ time.sleep(0.1) # give time for Sqlite to sync
+ is_change = False
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if is_change_count > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ updated,history_date = _fix_date(investigation_history[ORM.INVESTIGATIONHISTORY_DATE],'')
+ if not updated:
+ continue
+
+ if force:
+ is_change = True
+ sql = ''' UPDATE orm_investigationhistory
+ SET date = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (history_date, investigation_history[ORM.INVESTIGATIONHISTORY_ID],))
+ is_change_count += 1
+
+ # Commit all remaining changes
+ if force and is_change: conn.commit()
+ print("INVESTIGATION HISTORY DATE FIX COUNT=%d/%d" % (is_change_count,i))
+
+#################################
+# fixup fix_cve_srt_create
+#
+
+# Reset CVE srt_create to NIST release dates
+def fix_reset_nist_to_create(cve_prefix):
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ def date_nist2srt(nist_date,default,cve_name,i):
+ if not nist_date or (4 > len(nist_date)):
+ return default
+ try:
+ return(datetime.strptime(nist_date, '%Y-%m-%d'))
+ except Exception as e:
+ print("\n\ndate_nist2srt:%s,%s,%s,%s" % (cve_name,e,cve_name,i))
+ exit(1)
+ return default
+
+ cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"')
+
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ print('')
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ nist_released = date_nist2srt(cve[ORM.CVE_PUBLISHEDDATE],cve[ORM.CVE_SRT_CREATED],cve[ORM.CVE_NAME],i)
+ nist_modified = date_nist2srt(cve[ORM.CVE_LASTMODIFIEDDATE],cve[ORM.CVE_SRT_UPDATED],cve[ORM.CVE_NAME],i)
+
+ sql = ''' UPDATE orm_cve
+ SET srt_created = ?, srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (nist_released, nist_modified, cve[ORM.CVE_ID],))
+ print("CVE DATE FIX COUNT=%d" % i)
+ conn.commit()
+
+#################################
+# fixup fix_missing_create_dates
+#
+
+# Reset CVE None creation dates to 2019-01-01, out of the way of reports
+def fix_missing_create_dates():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ fix_date = datetime.strptime('Jan 1 2019', '%b %d %Y')
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cve')
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ fix = False
+ if not cve[ORM.CVE_SRT_CREATED] or (0 > cve[ORM.CVE_SRT_CREATED].find(':')):
+ srt_created = fix_date
+ fix = True
+ else:
+ srt_created = cve[ORM.CVE_SRT_CREATED]
+ #srt_created = datetime.strptime(cve[ORM.CVE_SRT_CREATED],'%Y-%m-%d')
+ if not cve[ORM.CVE_SRT_UPDATED] or (0 > cve[ORM.CVE_SRT_UPDATED].find(':')):
+ srt_updated = fix_date
+ fix = True
+ else:
+ srt_updated = cve[ORM.CVE_SRT_UPDATED]
+ #srt_updated = datetime.strptime(cve[ORM.CVE_SRT_UPDATED],'%Y-%m-%d')
+
+ if fix:
+ sql = ''' UPDATE orm_cve
+ SET srt_created = ?, srt_updated = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (srt_created, srt_updated, cve[ORM.CVE_ID],))
+ fix_count += 1
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fixup fix_public_reserved
+#
+
+# Reset CVE 'New-Reserved' if now public from NIST
+def fix_public_reserved():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cve WHERE status = "%s"' % ORM.STATUS_NEW_RESERVED)
+ i = 0
+ for cve in cur:
+ i += 1
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%05d: %20s %d\r' % (i,cve[ORM.CVE_NAME],cve[ORM.CVE_STATUS]), end='')
+ if (0 == i % 200):
+ conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if cve[ORM.CVE_CVSSV3_BASESCORE] or cve[ORM.CVE_CVSSV2_BASESCORE]:
+ sql = ''' UPDATE orm_cve
+ SET status = ?
+ WHERE id = ?'''
+ cur_write.execute(sql, (ORM.STATUS_NEW, cve[ORM.CVE_ID],))
+ fix_count += 1
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fix_remove_bulk_cve_history
+#
+
+# Remove a specific/accidental set of bulk CVE history updates intended to be background
+def fix_remove_bulk_cve_history():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+ cur.execute('SELECT * FROM orm_cvehistory WHERE date LIKE "2019-03-2%"')
+
+ i = 0
+ for cvehistory in cur:
+ i += 1
+
+ # Progress indicator support
+ if 9 == i % 10:
+# print('%05d: %20s %s \r' % (i,cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ if not (cvehistory[ORM.CVEHISTORY_DATE] in ('2019-03-28','2019-03-27')):
+ continue
+ if not (cvehistory[ORM.CVEHISTORY_COMMENT].startswith("UPDATE(CVE):")):
+ continue
+
+ cur_cve.execute('SELECT * FROM orm_cve WHERE id = "%s"' % cvehistory[ORM.CVEHISTORY_CVE_ID])
+ cve = cur_cve.fetchone()
+ if not (cve[ORM.CVE_NAME].startswith("CVE-200")):
+ continue
+
+ if 19 == fix_count % 20:
+ print("%4d) CVE=%s,CH_Comment=%s,CH_Date=%s" % (fix_count,cve[ORM.CVE_NAME],cvehistory[ORM.CVEHISTORY_COMMENT],cvehistory[ORM.CVEHISTORY_DATE]))
+
+ mydata = cur_del.execute("DELETE FROM orm_cvehistory WHERE id=?", (cvehistory[ORM.CVEHISTORY_ID],))
+ fix_count += 1
+
+ print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# report_defects_to_products
+#
+# Report all defects without a product link
+#
+
+def report_defects_to_products():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ fix_count = 0
+
+ # Find all products
+ products = {}
+ cur.execute('SELECT * FROM orm_product')
+ for product in cur:
+ id = product[ORM.PRODUCT_ID]
+ name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
+ products[id] = name
+ print("[%2d] %s" % (id,name))
+
+ # Test product field for all defects
+ cur.execute('SELECT * FROM orm_defect')
+ i = 0
+ for defect in cur:
+ i += 1
+
+ # Progress indicator support
+ if 99 == i % 100:
+ print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ #print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ product_id = defect[ORM.DEFECT_PRODUCT_ID]
+ if not product_id in products:
+ print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+
+# print("CVE DATE FIX COUNT=%d of %d" % (fix_count,i))
+ conn.commit()
+
+#################################
+# fix_bad_mitre_init
+#
+
+#
+# Fix MITRE reserved CVEs that were mistakenly set at "New" instead of
+# "New-Reserved" due to column ordering issue in the MITRE "Init" routine.
+#
+def fix_bad_mitre_init():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_ds = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ new_count = 0
+ mitre_count = 0
+ cve_name = ''
+
+ nist_source_list = []
+ # Find NIST data sources
+ cur.execute('SELECT * FROM orm_datasource WHERE source = "nist"')
+ for i,ds in enumerate(cur):
+ nist_source_list.append(ds[ORM.DATASOURCE_ID])
+ print('NIST DataSource List=[%s]' % nist_source_list)
+
+ mitre_source_list = []
+ # Find MITRE data sources
+ cur.execute('SELECT * FROM orm_datasource WHERE source = "mitre"')
+ for i,ds in enumerate(cur):
+ mitre_source_list.append(ds[ORM.DATASOURCE_ID])
+ print('MITRE DataSource List=[%s]' % mitre_source_list)
+
+ # Find all bad MITRE reserved CVEs
+ cur.execute('SELECT * FROM orm_cve WHERE description = ""')
+ for i,cve in enumerate(cur):
+ new_count += 1
+
+ cur_ds.execute('SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
+ is_mitre = False
+ is_nist = False
+ for cvesource in cur_ds:
+ if cvesource[ORM.CVESOURCE_DATASOURCE_ID] in mitre_source_list:
+ is_mitre = True
+ if cvesource[ORM.CVESOURCE_DATASOURCE_ID] in nist_source_list:
+ is_nist = True
+
+ if is_mitre and not is_nist:
+ mitre_count += 1
+ cve_name = cve[ORM.CVE_NAME]
+
+ if force:
+ sql = ''' UPDATE orm_cve
+ SET status = ?
+ WHERE id = ?'''
+ cur_cve.execute(sql, (ORM.STATUS_NEW_RESERVED,cve[ORM.CVE_ID],))
+ conn.commit()
+
+ # Progress indicator support
+ if 19 == i % 20:
+ print('%05d: %-20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+ pass
+ if (0 == i % 200):
+# conn.commit()
+ print('')
+ pass
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
+ print("\nCVE NEW_COUNT=%d, mitre=%d, name=%s, database=%s" % (new_count,mitre_count,cve_name,srtDbName))
+# conn.commit()
+
+
+#
+# Fix MITRE CVEs that are missing a description in the top level CVE
+# records due to column ordering issue in the MITRE "Init" routine.
+#
+def fix_bad_mitre_descr(datasource_list):
+ conn = SQL_CONNECT()
+ cur_ds = conn.cursor()
+ cur_cs = conn.cursor()
+ cur_cve = conn.cursor()
+ cur_del = conn.cursor()
+
+ cve_count = 0
+ fix_count = 0
+ nist_ds_list = {}
+ mitre_ds_list = {}
+ modified_cve_list = []
+
+ DATA_MAP_DESCRIPTION = 0
+
+ def description_summary(description):
+ desc_sum = 0
+ for ch in description:
+ desc_sum += ord(ch)
+ if 37 < len(description):
+ description = "%-37s..." % description[:37]
+ return("%-40s [sum=%d]" % (description,desc_sum))
+
+ #
+ # Gather the MITRE and NIST data source lists
+ #
+
+ cur_ds.execute('SELECT * FROM orm_datasource WHERE source = "mitre" ORDER BY key DESC;')
+ for i,ds in enumerate(cur_ds):
+ mitre_ds_list[ds[ORM.DATASOURCE_ID]] = ds[ORM.DATASOURCE_DESCRIPTION]
+ cur_ds.execute('SELECT * FROM orm_datasource WHERE source = "nist" ORDER BY key DESC;')
+ for i,ds in enumerate(cur_ds):
+ nist_ds_list[ds[ORM.DATASOURCE_ID]] = ds[ORM.DATASOURCE_DESCRIPTION]
+
+ #
+ # Iterate over the MITRE data sources
+ #
+
+ cur_ds.execute('SELECT * FROM orm_datasource WHERE source = "mitre" ORDER BY key DESC;')
+ for i,ds in enumerate(cur_ds):
+ # Development/debug support
+ if cmd_count and ((cve_count - cmd_skip) > cmd_count):
+ break
+
+ if "ALL" == datasource_list:
+ pass
+ elif not ds[ORM.DATASOURCE_DESCRIPTION] in datasource_list.split(','):
+ continue
+ print("MITRE Source:%s" % ds[ORM.DATASOURCE_DESCRIPTION])
+
+ # Scan the NIST datasource file and extract required values into a map
+ # bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml
+ cve_source_file = ''
+ for param in ds[ORM.DATASOURCE_LOOKUP].split(' '):
+ if param.startswith('--file='):
+ cve_source_file = param.replace('--file=','')
+ print(" File:%s" % cve_source_file)
+ break
+ mitre_data_map = {}
+ mitre_file = os.path.join(srtool_basepath,cve_source_file)
+
+ #
+ # Gather the Descriptions of all the CVEs in this MITRE data sources
+ #
+ # <Vulnerability Ordinal="158066" xmlns="http://www.icasi.org/CVRF/schema/vuln/1.1">
+ # <Notes>
+ # <Note Ordinal="1" Type="Description">In getProcessRecordLocked ...</Note>
+ # </Notes>
+ # <CVE>CVE-2020-0001</CVE>
+
+ # Find the CVE and extract the description
+ if not os.path.isfile(mitre_file):
+ print("ERROR: no such file '%s'" % mitre_file)
+ exit(1)
+ f = open(mitre_file, 'r')
+ tree = ET.parse(mitre_file)
+ root = tree.getroot()
+ for child in root:
+ cve_name = ''
+ description = ''
+ if 'Vulnerability' in child.tag:
+ for child_v in child:
+ if 'CVE' in child_v.tag:
+ cve_name = child_v.text
+ if 'Notes' in child_v.tag:
+ for child_n in child_v:
+ if 'Note' in child_n.tag:
+ if 'Description' == child_n.attrib['Type']:
+ description = child_n.text.replace('\n','\\r')
+
+# # Debugging support
+# if cve_name != "CVE-2016-0887": #"CVE-2020-7470","CVE-2019-15031"
+# continue
+
+ if cve_name:
+ mitre_data_map[cve_name] = [description]
+
+
+ #
+ # Update the 'description' for all found CVE records in this datasource
+ #
+
+ for cve_name in mitre_data_map:
+ description = mitre_data_map[cve_name][DATA_MAP_DESCRIPTION]
+ cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve_name)
+ cve = cur_cve.fetchone()
+ if not cve:
+ print("WARNING: MISSING CVE in orm : %s" % cve_name)
+ continue
+
+
+ #
+ # Repair the data source mappings
+ # * Add missing MITRE links
+ # * Replace old MITRE links with found links
+ #
+
+ found_mapping = False
+ is_nist = False
+ cur_cs.execute('SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
+ for cve2ds in cur_cs:
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in nist_ds_list:
+ is_nist = True
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in mitre_ds_list:
+ # Do we have an obsolete MITRE mapping?
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] != ds[ORM.DATASOURCE_ID]:
+ # Delete old mapping
+ print("Delete old mapping %s,%s" % (cve_name,mitre_ds_list[cve2ds[ORM.CVESOURCE_DATASOURCE_ID]]))
+ if force:
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ cur_del.execute(sql, (cve2ds[ORM.CVESOURCE_ID],))
+ else:
+ # We are good to go
+ found_mapping = True
+
+            # Disable this feature for now, since we do not want to add unneeded passive MITRE records
+ if False:
+ # Add if missing or deleted as obsolete
+ if not found_mapping:
+ print("Insert new mapping %s,%s" % (cve_name,mitre_ds_list[ds[ORM.DATASOURCE_ID]]))
+ if force:
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))
+
+ # If this CVE has any NIST data sources, then skip description checking (e.g. NIST Modified preempt)
+ if is_nist:
+ continue
+
+            # Test if description needs repair
+ if cve[ORM.CVE_DESCRIPTION] != description:
+ fix_count += 1
+ print("CHANGE: %s DESCRIPTION('%s' to '%s')" % (
+ cve_name, description_summary(cve[ORM.CVE_DESCRIPTION]), description_summary(description)))
+ if force:
+ sql = ''' UPDATE orm_cve
+ SET description = ?
+ WHERE id = ?'''
+ cur_cve.execute(sql, (description,cve[ORM.CVE_ID],))
+# print('%05d: %-20s = %-20s' % (j,cve_name,nist_data_map[cve_name]))
+
+
+ #
+ # Repair the data source mappings
+ # * Add missing MITRE links
+ # * Replace old MITRE links with found links
+ #
+
+ found_mapping = False
+ cur_cs.execute('SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
+ for j,cve2ds in enumerate(cur_cs):
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in mitre_ds_list:
+ # Do we have an obsolete MITRE mapping?
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] != ds[ORM.DATASOURCE_ID]:
+ # Delete old mapping
+ print("Delete old mapping %s,%s" % (cve_name,mitre_ds_list[cve2ds[ORM.CVESOURCE_DATASOURCE_ID]]))
+ if force:
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ cur_del.execute(sql, (cve2ds[ORM.CVESOURCE_ID],))
+ else:
+ # We are good to go
+ found_mapping = True
+ # Add if missing or deleted as obsolete
+ if not found_mapping:
+ print("Insert new mapping %s,%s" % (cve_name,mitre_ds_list[ds[ORM.DATASOURCE_ID]]))
+ if force:
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))
+
+
+ # Development/debug support
+ cve_count += 1
+ if cmd_skip and (cve_count < cmd_skip): continue
+ if cmd_count and ((cve_count - cmd_skip) > cmd_count): break
+
+ # Progress indicator support
+ if (0 == cve_count % 1000):
+ print('%05d: %-20s\r' % (cve_count,cve_name), end='')
+ if force: conn.commit()
+ print('')
+ pass
+
+ print("CVE COUNT=%d, fix_count=%d" % (cve_count,fix_count))
+ if force: conn.commit()
+
+#
+# Fix CVE records with missing 'cvssV2_severity' values in the top-level CVE records, due to
+# CVE imports before a patch was sent upstream
+#
+# The NIST Modified list is processed first. If any of its CVEs are found in a regular
+# list, that CVE is skipped since it was preempted
+#
+
def fix_severity(datasource_list):
    """Repair missing/stale CVSS V3/V2 score and severity values on orm_cve records.

    Re-reads the cached NIST JSON feed file behind each selected NIST data source
    and pushes the V3/V2 base score/severity values back into the top-level CVE
    records.  Also repairs the CVE-to-datasource link rows (removes obsolete NIST
    links, inserts missing ones).

    datasource_list: comma-separated data source descriptions, "ALL" for all,
    or "MOD" as shorthand for 'NIST Modified Data'.
    Writes happen only when the module-level 'force' flag is set; the module-level
    'cmd_skip'/'cmd_count' values provide development throttling.
    """
    conn = SQL_CONNECT()
    cur_ds = conn.cursor()    # data source scan
    cur_cs = conn.cursor()    # cve-source link scan/insert
    cur_cve = conn.cursor()   # cve lookup/update
    cur_del = conn.cursor()   # cve-source link delete

    cve_count = 0
    fix_count = 0
    nist_ds_list = {}         # datasource id -> description, for every NIST source
    modified_cve_list = []    # CVE names already handled by 'NIST Modified Data'

    # Indexes into the per-CVE value list stored in nist_data_map
    DATA_MAP_V3_Score = 0
    DATA_MAP_V3_Severity = 1
    DATA_MAP_V2_Score = 2
    DATA_MAP_V2_Severity = 3


    # Allow "MOD" as shorthand for the modification datasource
    datasource_list = datasource_list.replace('MOD','NIST Modified Data')

    #
    # Gather the NIST data source list
    #

    cur_ds.execute('SELECT * FROM orm_datasource WHERE source = "nist" ORDER BY key DESC;')
    for i,ds in enumerate(cur_ds):
        nist_ds_list[ds[ORM.DATASOURCE_ID]] = ds[ORM.DATASOURCE_DESCRIPTION]

    #
    # Iterate over the NIST data sources
    #

    cur_ds.execute('SELECT * FROM orm_datasource WHERE source = "nist" ORDER BY key DESC;')
    for i,ds in enumerate(cur_ds):
        # Development/debug support
        if cmd_count and ((cve_count - cmd_skip) > cmd_count):
            break

        # The CWE source has no CVSS data; honor the caller's source filter
        if ds[ORM.DATASOURCE_DESCRIPTION] in ['NIST Common Weakness Enumeration Data']:
            continue
        elif "ALL" == datasource_list:
            pass
        elif not ds[ORM.DATASOURCE_DESCRIPTION] in datasource_list.split(','):
            continue
        print("NIST Source:%s" % ds[ORM.DATASOURCE_DESCRIPTION])
        is_modified_list = ds[ORM.DATASOURCE_DESCRIPTION] == 'NIST Modified Data'

        # Scan the NIST datasource file and extract required values into a map
        # (bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta)
        cve_source_file = ''
        for param in ds[ORM.DATASOURCE_LOOKUP].split(' '):
            if param.startswith('--file='):
                cve_source_file = param.replace('--file=','')
                print(" File:%s" % cve_source_file)
                break
        nist_data_map = {}
        nist_file = os.path.join(srtool_basepath,cve_source_file)

        #
        # Gather the V3/V2 status of all the CVEs in this NIST data sources
        #

        try:
            if not os.path.isfile(nist_file):
                print("ERROR: no such file '%s'" % nist_file)
                exit(1)
            # NOTE(review): file handle is never closed — presumably acceptable
            # for a one-shot maintenance command
            f = open(nist_file, 'r')
            source_dct = json.load(f)
            for item in source_dct["CVE_Items"]:
                # Skip malformed feed entries
                if not 'cve' in item:
                    continue
                if not 'CVE_data_meta' in item['cve']:
                    continue
                if not 'ID' in item['cve']['CVE_data_meta']:
                    continue
                cve_name = item['cve']['CVE_data_meta']['ID']

                # Is this the NIST Modified list?
                if is_modified_list:
                    # Add CVE name to Modified list
                    modified_cve_list.append(cve_name)
                elif cve_name in modified_cve_list:
                    # Skip if already process by Modified list
                    continue

#                # Debugging support
#                if cve_name != "CVE-2016-0887": #"CVE-2020-7470","CVE-2019-15031"
#                    continue

                # Scores are normalized to one decimal place, matching the NIST pages
                cvssV3_baseScore = ''
                cvssV3_baseSeverity = ''
                cvssV2_baseScore = ''
                cvssV2_severity = ''
                if ('impact' in item) and ('baseMetricV3' in item['impact']):
                    cvssV3_baseScore = "%.1f" % float(item['impact']['baseMetricV3']['cvssV3']['baseScore'])
                    cvssV3_baseSeverity = item['impact']['baseMetricV3']['cvssV3']['baseSeverity']
                if ('impact' in item) and ('baseMetricV2' in item['impact']):
                    cvssV2_baseScore = "%.1f" % float(item['impact']['baseMetricV2']['cvssV2']['baseScore'])
                    cvssV2_severity = item['impact']['baseMetricV2']['severity']

                nist_data_map[cve_name] = [cvssV3_baseScore,cvssV3_baseSeverity,cvssV2_baseScore,cvssV2_severity]

        except Exception as e:
            print("ERROR:%s (%s)" % (e,item['impact']['baseMetricV3']))
            return

        #
        # Update the V3/V2 status for all found CVE records in this datasource
        #

        for cve_name in nist_data_map:
            cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve_name)
            cve = cur_cve.fetchone()
            if not cve:
                print("WARNING: MISSING CVE in orm_cve : %s" % cve_name)
                continue
            cve_name = cve[ORM.CVE_NAME]
            if cve_name in nist_data_map:
                # NOTE(review): fix_count is incremented for every mapped CVE,
                # even when no value actually changes — confirm intended
                fix_count += 1
                if (nist_data_map[cve_name][DATA_MAP_V3_Score] != cve[ORM.CVE_CVSSV3_BASESCORE]) or (nist_data_map[cve_name][DATA_MAP_V3_Severity] != cve[ORM.CVE_CVSSV3_BASESEVERITY]) or \
                   (nist_data_map[cve_name][DATA_MAP_V2_Score] != cve[ORM.CVE_CVSSV2_BASESCORE]) or (nist_data_map[cve_name][DATA_MAP_V2_Severity] != cve[ORM.CVE_CVSSV2_SEVERITY ]):
                    print("CHANGE: %s V3(%s to %s,%s to %s)V2(%s to %s,%s to %s) (%s,%s)" % (
                        cve_name,
                        cve[ORM.CVE_CVSSV3_BASESCORE],nist_data_map[cve_name][DATA_MAP_V3_Score],cve[ORM.CVE_CVSSV3_BASESEVERITY],nist_data_map[cve_name][DATA_MAP_V3_Severity],
                        cve[ORM.CVE_CVSSV2_BASESCORE],nist_data_map[cve_name][DATA_MAP_V2_Score],cve[ORM.CVE_CVSSV2_SEVERITY ],nist_data_map[cve_name][DATA_MAP_V2_Severity],
                        ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),cve[ORM.CVE_COMMENTS],
                        ))

                    if force:
                        sql = ''' UPDATE orm_cve
                                  SET cvssV3_baseScore = ?, cvssV3_baseSeverity = ?, cvssV2_baseScore = ?, cvssV2_severity = ?
                                  WHERE id = ?'''
                        cur_cve.execute(sql, (nist_data_map[cve_name][DATA_MAP_V3_Score],nist_data_map[cve_name][DATA_MAP_V3_Severity],nist_data_map[cve_name][DATA_MAP_V2_Score],nist_data_map[cve_name][DATA_MAP_V2_Severity],cve[ORM.CVE_ID],))
#                        print('%05d: %-20s = %-20s' % (j,cve_name,nist_data_map[cve_name]))
            else:
                print("ERROR:CVE_NAME '%s' NOT MAPPED" % cve_name)

            #
            # Repair the data source mappings
            #  * Add missing NIST links
            #  * Replace old NIST links with found links (e.g. Modified datasource preempts regular datasources)
            #

            found_mapping = False
            cur_cs.execute('SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
            for j,cve2ds in enumerate(cur_cs):
                if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in nist_ds_list:
                    # Do we have an obsolete NIST mapping?
                    if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] != ds[ORM.DATASOURCE_ID]:
                        # Delete old mapping
                        print("Delete old mapping %s,%s" % (cve_name,nist_ds_list[cve2ds[ORM.CVESOURCE_DATASOURCE_ID]]))
                        if force:
                            sql = 'DELETE FROM orm_cvesource WHERE id=?'
                            cur_del.execute(sql, (cve2ds[ORM.CVESOURCE_ID],))
                    else:
                        # We are good to go
                        found_mapping = True
            # Add if missing or deleted as obsolete
            if not found_mapping:
                print("Insert new mapping %s,%s" % (cve_name,nist_ds_list[ds[ORM.DATASOURCE_ID]]))
                if force:
                    sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
                    cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))


            # Development/debug support
            cve_count += 1
            if cmd_skip and (cve_count < cmd_skip): continue
            if cmd_count and ((cve_count - cmd_skip) > cmd_count): break

            # Progress indicator support (with interim commit)
            if (0 == cve_count % 1000):
                print('%05d: %-20s\r' % (cve_count,cve_name), end='')
                if force: conn.commit()
                print('')
            pass

    print("CVE COUNT=%d, fix_count=%d" % (cve_count,fix_count))
    if force: conn.commit()
+
+#
+# Trim the V3/V2 scores to one decimal place, in line with NIST public pages
+#
+
def fix_trim_cve_scores():
    """Normalize stored CVSS V3/V2 base scores to one decimal place.

    Brings the database values in line with the presentation used on the
    public NIST pages.  Honors the module-level 'force' (write), 'verbose'
    (chatty), and 'cmd_skip'/'cmd_count' (throttle) settings.
    """
    conn = SQL_CONNECT()
    cur_read = conn.cursor()
    cur_write = conn.cursor()

    def trimmed(raw_score):
        # Empty/None scores stay empty; anything else is reformatted
        return '%02.1f' % float(raw_score) if raw_score else ''

    processed = 0
    changed = 0

    cur_read.execute('SELECT * FROM orm_cve')
    for row_index, cve in enumerate(cur_read):
        if 0 == row_index % 100:
            print("%4d) C=%-30s\r" % (row_index,cve[ORM.CVE_NAME]), end='')

        new_v3score = trimmed(cve[ORM.CVE_CVSSV3_BASESCORE])
        new_v2score = trimmed(cve[ORM.CVE_CVSSV2_BASESCORE])

        needs_update = (new_v3score != cve[ORM.CVE_CVSSV3_BASESCORE]) or (new_v2score != cve[ORM.CVE_CVSSV2_BASESCORE])
        if needs_update:
            changed += 1
            if verbose:
                print("CHANGE:%s:%s to %s,%s to %s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_CVSSV3_BASESCORE],new_v3score,cve[ORM.CVE_CVSSV2_BASESCORE],new_v2score))

            if force:
                sql = ''' UPDATE orm_cve
                          SET cvssV3_baseScore = ?, cvssV2_baseScore = ?
                          WHERE id = ?'''
                cur_write.execute(sql, (new_v3score,new_v2score,cve[ORM.CVE_ID],))

        # Development/debug support
        processed += 1
        if cmd_skip and (processed < cmd_skip):
            continue
        if cmd_count and ((processed - cmd_skip) > cmd_count):
            break

        # Progress indicator plus interim commit
        if 0 == processed % 1000:
            print('%05d: %-20s\r' % (processed,cve[ORM.CVE_NAME]), end='')
            if force: conn.commit()
            print('')

    if force: conn.commit()
    print("CVE COUNT=%d, fix_count=%d" % (processed,changed))
+
+# Sample code that does a CVE lookup data fetch and CVE update
+#def example_datasource_lookup(cve,nist_ds,cvesource,cur):
+# if force:
+# if nist_ds:
+# lookup_command = nist_lookup[ cvesource[ORM.CVESOURCE_DATASOURCE_ID] ].replace('%command%','--cve-detail=%s' % cve[ORM.CVE_NAME])
+# result_returncode,result_stdout,result_stderr = execute_process(lookup_command.split(' '))
+# if 0 != result_returncode:
+# print("ERROR_LOOKUP:%s" % lookup_command)
+# return(1)
+# cvssV2_severity = ''
+# for line in result_stdout.decode("utf-8").splitlines():
+# try:
+# name = line[:line.index('=')]
+# value = line[line.index('=')+1:].replace("[EOL]","\n")
+# except:
+# continue
+# if name == 'cvssV2_severity':
+# cvssV2_severity = value
+# break
+# if cvssV2_severity:
+# fix_count += 1
+# sql = ''' UPDATE orm_cve
+# SET cvssV2_severity = ?
+# WHERE id = ?'''
+# cur.execute(sql, (cvssV2_severity,cve[ORM.CVE_ID],))
+# print('%05d: %-20s = %-20s' % (i,cve[ORM.CVE_NAME],cvssV2_severity))
+## return(0)
+
+
+#################################
+# report_multiple_defects
+#
+# Normally for each CVE there is one defect per
+# product. This report finds all cases of multiple
+# defects per Investigation.
+#
+
def report_multiple_defects():
    """Report investigations that have more than one linked defect.

    Normally each CVE has one defect per product, so an investigation with
    multiple attached defects is worth manual review.  Read-only: prints
    findings and changes nothing.
    """
    conn = SQL_CONNECT()
    cur_i2d = conn.cursor()
    cur_inv = conn.cursor()
    cur_def = conn.cursor()

    cur_inv.execute('SELECT * FROM orm_investigation')
    count = 0
    for i,investigation in enumerate(cur_inv):
        # Progress line.  Fix: index the investigation row with the
        # INVESTIGATION name column (the original used ORM.VULNERABILITY_NAME
        # and a 'V=' tag, inconsistent with report_duplicate_names)
        if 0 == i % 100:
            print("%4d) I=%-30s\r" % (i,investigation[ORM.INVESTIGATION_NAME]), end='')

        cur_i2d.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = "%s"' % investigation[ORM.INVESTIGATION_ID])
        i2d_list = cur_i2d.fetchall()
        if 1 < len(i2d_list):
            count += 1
            for k,i2d in enumerate(i2d_list):
                cur_def.execute('SELECT * FROM orm_defect WHERE id = "%s"' % i2d[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
                defect = cur_def.fetchone()
                # Print the investigation header once, before its first defect
                if 0 == k:
                    print("[%02d] Multiple defects for investigation '%s':" % (count,investigation[ORM.INVESTIGATION_NAME]))
                print(" [%02d] %s: %s (%s)" % (k+1,defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_SUMMARY],ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)))
    SQL_CLOSE_CONN(conn)
+
+#################################
+# report_duplicate_names
+#
+#
+#
+#
+
def report_duplicate_names():
    """Report duplicate 'name' values in the CVE, Vulnerability,
    Investigation, and Defect tables.

    Each table is expected to have unique names; any name seen twice is
    printed with both record ids.  Read-only.
    """
    conn = SQL_CONNECT()
    cur = conn.cursor()

    def _scan_table(table,progress_tag,error_label,name_index,id_index):
        # Scan one table, remembering name -> id; report any repeated name.
        # (Replaces four copy-pasted loops; output is unchanged.)
        cur.execute('SELECT * FROM %s' % table)
        seen = {}
        for i,record in enumerate(cur):
            if 0 == i % 100:
                print(("%4d) " + progress_tag + "=%-30s\r") % (i,record[name_index]), end='')

            name = record[name_index]
            if not name in seen:
                seen[name] = record[id_index]
            else:
                print("\nERROR:Multiple %s names '%s'" % (error_label,name))
                print(" a) id=%d" % seen[name])
                print(" b) id=%d" % record[id_index])
        print('')

    _scan_table('orm_cve'          ,'C','cve'          ,ORM.CVE_NAME          ,ORM.CVE_ID          )
    _scan_table('orm_vulnerability','V','vulnerability',ORM.VULNERABILITY_NAME,ORM.VULNERABILITY_ID)
    _scan_table('orm_investigation','I','investigation',ORM.INVESTIGATION_NAME,ORM.INVESTIGATION_ID)
    _scan_table('orm_defect'       ,'D','defect'       ,ORM.DEFECT_NAME       ,ORM.DEFECT_ID       )

    SQL_CLOSE_CONN(conn)
+
+#################################
+# fix_bad_links
+#
+
def fix_bad_links():
    """Find (and with 'force', delete) orm_cvesource rows with invalid ids,
    and report defects whose product link points at a missing product.

    Pass 1: any cve-source link whose cve id or datasource id is < 1 is
    reported and, when 'force' is set, deleted.
    Pass 2: every defect's product id is checked against the product table
    (report only, no repair).
    """
    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur_del = conn.cursor()
    error_count = 0

    #
    print('\n=== CVE Source Check ===\n')
    #

    # Find the data source mapping (id -> description, for error reports)
    cur.execute('SELECT * FROM orm_datasource;')
    datasource_map = {}
    for datasource in cur:
        datasource_map[datasource[ORM.DATASOURCE_ID]] = datasource[ORM.DATASOURCE_DESCRIPTION]

    cur.execute('SELECT * FROM orm_cvesource')
    is_change = False
    for i,cs in enumerate(cur):
        cveid = cs[ORM.CVESOURCE_CVE_ID]
        srcid = cs[ORM.CVESOURCE_DATASOURCE_ID]
        if 0 == i % 100:
            print("%4d) CVE=%6d,SRC=%6d\r" % (cs[ORM.CVESOURCE_ID],cveid,srcid), end='')
        error = False
        if (1 > cveid): error = True
        if (1 > srcid): error = True

        if error:
            # Fix: a bad srcid is normally absent from datasource_map, so the
            # original direct [] lookup raised KeyError inside the very error
            # report meant to describe it; use .get with a fallback
            print("ERROR: [%4d] CVE=%6d,SRC=%6d (%s)\n" % (cs[ORM.CVESOURCE_ID],cveid,srcid,datasource_map.get(srcid,'<unknown>')))
            error_count += 1
            if force:
                sql = 'DELETE FROM orm_cvesource WHERE id=?'
                cur_del.execute(sql, (cs[ORM.CVESOURCE_ID],))
                is_change = True

    print('')
    if is_change:
        conn.commit()

    #
    print('\n=== Defect to Product Check ===\n')
    #

    # Find all products (renamed local 'id' -> 'prod_id': no builtin shadowing)
    products = {}
    cur.execute('SELECT * FROM orm_product')
    for product in cur:
        prod_id = product[ORM.PRODUCT_ID]
        name = "%s %s %s" % (product[ORM.PRODUCT_NAME],product[ORM.PRODUCT_VERSION],product[ORM.PRODUCT_PROFILE])
        products[prod_id] = name
        print("[%2d] %s" % (prod_id,name))

    # Test product field for all defects
    cur.execute('SELECT * FROM orm_defect')
    i = 0
    for defect in cur:
        i += 1

        # Progress indicator support
        if 99 == i % 100:
            print('%05d: %-20s\r' % (i,defect[ORM.DEFECT_NAME]), end='')
        # Development/debug support
        if cmd_skip and (i < cmd_skip): continue
        if cmd_count and ((i - cmd_skip) > cmd_count): break

        product_id = defect[ORM.DEFECT_PRODUCT_ID]
        if not product_id in products:
            print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
            error_count += 1

    print("\nError count = %d " % error_count)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# fix_bad_score_date
+#
+# Remove the 'score_date' value to repair the migration to '0006_reconcile', allowing
+# the field for new CVEs to be regenerated. The schema for this field is 'models.DateField'
+# but the scoring method in # "srtool_common --score-new-cves" was setting an obsolete
+# date_time value. That crashes Django-2.2 (but not Django-1.11).
+#
+
def fix_bad_score_date():
    """Clear unparseable orm_cve 'score_date' values.

    The schema expects a Django DateField, but an older scorer
    ("srtool_common --score-new-cves") wrote obsolete datetime strings,
    which crash Django 2.2.  Any value that does not parse with
    ORM.DATASOURCE_DATE_FORMAT is reported and, when the module-level
    'force' flag is set, reset to NULL so it can be regenerated.
    """
    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur_fix = conn.cursor()

    #
    print('\n=== CVE fix_bad_score_date Check ===\n')
    #

    cur.execute('SELECT * FROM orm_cve')
    error_count = 0
    last_error = ''
    for i,cve in enumerate(cur):
        # Periodic progress line plus interim commit
        if 999 == (i % 1000) :
            print("%7d: %-20s %20s, %d\r" % (i+1,cve[ORM.CVE_NAME],last_error,error_count),end='')
            if force: conn.commit()

        score_date = cve[ORM.CVE_SCORE_DATE]
        if not score_date:
            continue

        try:
            # Parse-only check; the parsed value itself is not needed
            datetime.strptime(score_date,ORM.DATASOURCE_DATE_FORMAT)
        except (ValueError, TypeError):
            # Narrowed from a bare 'except:' — only parse failures count,
            # so real errors (e.g. KeyboardInterrupt) still propagate
            error_count += 1
            last_error = score_date
            print("DATE_ERROR:%s,%s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_SCORE_DATE]))
            if force:
                sql = ''' UPDATE orm_cve
                          SET score_date = ?
                          WHERE id = ?'''
                cur_fix.execute(sql, (None, cve[ORM.CVE_ID],))

    # NOTE(review): references the last scanned 'cve' row — assumes the
    # orm_cve table is non-empty
    print("Error count = %d (e.g. %s,%s)" % (error_count,cve[ORM.CVE_NAME],last_error))
    if force:
        conn.commit()
+
+#################################
+# fix_inherit_affected_components()
+#
+# Inherit the "Affected Components" from CVEs
+# to the new field of their children VUL/INV/DEF
+
def fix_inherit_affected_components():
    """Propagate each CVE's 'Affected Components' (packages) down to its
    child Vulnerability, Investigation, and Defect records.

    For every CVE with a non-empty packages field, the value is merged
    (space-separated union) into each related vulnerability, then each of
    that vulnerability's investigations, then each investigation's defects.
    Writes happen only when the module-level 'force' flag is set.
    """
    conn = SQL_CONNECT()
    # One cursor per nesting level: the loops iterate several result sets
    # at once, so they must not share a cursor
    cur_cve = conn.cursor()
    cur_cve2vul = conn.cursor()
    cur_vul = conn.cursor()
    cur_vul2inv = conn.cursor()
    cur_inv = conn.cursor()
    cur_inv2def = conn.cursor()
    cur_def = conn.cursor()
    cur_write = conn.cursor()

    def merge_affected_components(alist,blist):
        # Space-separated union of two package lists; a dict preserves
        # first-seen order while de-duplicating
        affected_components = ''
        affected_components_list = {}
        for package in alist.split():
            affected_components_list[package] = True
        for package in blist.split():
            affected_components_list[package] = True
        if affected_components_list:
            affected_components = ' '.join(affected_components_list)
        return(affected_components)

    updates = 0
    cur_cve.execute('SELECT * FROM orm_cve')
    for i,cve in enumerate(cur_cve):
        cve_affect_components = cve[ORM.CVE_PACKAGES]
        # Nothing to inherit from this CVE
        if not cve_affect_components:
            continue
        print("CVE:%s, '%s'" % (cve[ORM.CVE_NAME],cve_affect_components))

        # Find all related Vulnerabilities
        cur_cve2vul.execute('SELECT * FROM orm_cvetovulnerablility WHERE cve_id = %d' % cve[ORM.CVE_ID])
        for cve2vul in cur_cve2vul:
            # Update the Vulnerability status
            cur_vul.execute('SELECT * FROM orm_vulnerability WHERE id = %d' % cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID])
            for vul in cur_vul:
                vul_affected_components = merge_affected_components(cve_affect_components,vul[ORM.VULNERABILITY_PACKAGES])
                if vul_affected_components != vul[ORM.VULNERABILITY_PACKAGES]:
                    updates += 1
                    if force:
                        sql = ''' UPDATE orm_vulnerability
                                  SET packages = ?
                                  WHERE id = ?'''
                        cur_write.execute(sql, (vul_affected_components, vul[ORM.VULNERABILITY_ID],))
                    print("   Vul:%s, '%s' to '%s'" % (vul[ORM.VULNERABILITY_NAME],vul[ORM.VULNERABILITY_PACKAGES],vul_affected_components))

                # Find all related Investigations
                cur_vul2inv.execute('SELECT * FROM orm_vulnerabilitytoinvestigation WHERE vulnerability_id = %d' % vul[ORM.VULNERABILITY_ID])
                for vul2inv in cur_vul2inv:
                    # Update the Investigation status
                    cur_inv.execute('SELECT * FROM orm_investigation WHERE id = %d' % vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID])
                    for inv in cur_inv:
                        inv_affected_components = merge_affected_components(vul_affected_components,inv[ORM.INVESTIGATION_PACKAGES])
                        if inv_affected_components != inv[ORM.INVESTIGATION_PACKAGES]:
                            updates += 1
                            if force:
                                sql = ''' UPDATE orm_investigation
                                          SET packages = ?
                                          WHERE id = ?'''
                                cur_write.execute(sql, (inv_affected_components, inv[ORM.INVESTIGATION_ID],))
                            print("   Inv:%s, '%s' to '%s'" % (inv[ORM.INVESTIGATION_NAME],inv[ORM.INVESTIGATION_PACKAGES],inv_affected_components))

                        # Find all related Defects
                        cur_inv2def.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = %d' % inv[ORM.INVESTIGATION_ID])
                        for inv2def in cur_inv2def:
                            # Update the Defect status
                            cur_def.execute('SELECT * FROM orm_defect WHERE id = %d' % inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
                            for defect in cur_def:
                                defect_affected_components = merge_affected_components(inv_affected_components,defect[ORM.DEFECT_PACKAGES])
                                if defect_affected_components != defect[ORM.DEFECT_PACKAGES]:
                                    updates += 1
                                    if force:
                                        sql = ''' UPDATE orm_defect
                                                  SET packages = ?
                                                  WHERE id = ?'''
                                        cur_write.execute(sql, (defect_affected_components, defect[ORM.DEFECT_ID],))
                                    print("   Defect:%s, '%s' to '%s'" % (defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_PACKAGES],defect_affected_components))

        # Progress indicator plus interim commit (only reached for CVEs
        # with a non-empty packages field, due to the 'continue' above)
        if 999 == (i % 1000) :
            print("%7d: %-20s %6d\r" % (i+1,cve[ORM.CVE_NAME],updates),end='')
            if force: conn.commit()
#        if 60000 < i:
#            break

    if updates and force: conn.commit()
    print("Affected Component Updates = %d" % updates)
+
+################################3
+# fix_notify_access
+#
+# Remove notify-access that do not point to existing notify records
+
def fix_notify_access():
    """Remove orm_notifyaccess rows whose orm_notify record no longer exists.

    Scans every notify-access row, collects the ids of rows that point at a
    missing notify record, and deletes them when the module-level 'force'
    flag is set.  'verbose' prints every row; 'cmd_skip'/'cmd_count' throttle
    the scan for development.
    """
    conn = SQL_CONNECT()
    cur_scan = conn.cursor()
    cur_lookup = conn.cursor()

    orphan_ids = []
    row_num = 0
    cur_scan.execute('SELECT * FROM orm_notifyaccess')
    for row_num, access in enumerate(cur_scan, start=1):
        target_id = access[ORM.NOTIFYACCESS_NOTIFY_ID]
        # A missing notify record makes this access row an orphan
        if not cur_lookup.execute('SELECT * FROM orm_notify WHERE id = %d' % target_id).fetchone():
            orphan_ids.append(access[ORM.NOTIFYACCESS_ID])

        if verbose:
            print("%3d] %d,%d,%d (%d)" % (row_num,access[ORM.NOTIFYACCESS_ID],access[ORM.NOTIFYACCESS_NOTIFY_ID],access[ORM.NOTIFYACCESS_USER_ID],target_id))
        if 999 == (row_num % 1000) :
            print("%7d: \r" % (row_num+1), end='')
        # Development/debug support
        if cmd_skip and (row_num < cmd_skip):
            continue
        if cmd_count and ((row_num - cmd_skip) > cmd_count):
            print("Count return: %s,%s" % (row_num,cmd_count))
            break

    if force:
        for orphan_id in orphan_ids:
            cur_scan.execute('DELETE FROM orm_notifyaccess WHERE id = %d' % orphan_id)
        conn.commit()

    print("Affected Notify Access Updates = %d/%d" % (len(orphan_ids),row_num))
+
+
+################################3
+# fix_cvelocal
+#
+# Find and fix CveLocal duplicate/detached records
+
def fix_cvelocal():
    """Find (and with 'force', delete) duplicate and detached orm_cvelocal records.

    A record is a duplicate when its name was already seen earlier in the
    scan, and detached when no orm_cve row exists with the same name.
    Offending record ids are collected during the scan and deleted in a
    second pass when the module-level 'force' flag is set.
    """
    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur_cve = conn.cursor()

    found_list = []       # names seen so far
    duplicates_list = []  # ids of repeated names
    unattached_list = []  # ids with no matching orm_cve row
    cur.execute('SELECT * FROM orm_cvelocal')
    i = 0
    for cvelocal in cur:
        i += 1

        # Test duplicates
        cvelocal_name = cvelocal[ORM.CVELOCAL_NAME]
        cvelocal_id = cvelocal[ORM.CVELOCAL_ID]
        if cvelocal_name in found_list:
            duplicates_list.append(cvelocal_id)
            print("ERROR:DUPLICATE:%s" % cvelocal_name)
            continue
        else:
            found_list.append(cvelocal_name)

        # Test dettached
        found = False
        try:
            cve = cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cvelocal_name).fetchone()
            if cve:
                found = True
                if cvelocal_name.startswith('SRTCVE'):
                    print("NOTE:ATTACHED:%s" % cvelocal_name)
        except Exception as e:
            print("ERROR:LOOKUP:%s" % e)
        if not found:
            unattached_list.append(cvelocal_id)
            print("ERROR:DETTACHED:'%s'" % cvelocal_name)

        # Development/debug support
        if cmd_skip:
            if i < cmd_skip:
                continue
        if cmd_count:
            if (i - cmd_skip) > cmd_count:
                print("Count return: %s,%s" % (i,cmd_count))
                break

    if force:
        # Re-fetch each record so the delete log can show its name
        for duplicate in duplicates_list:
            cvelocal = cur.execute('SELECT * FROM orm_cvelocal WHERE id = "%s"' % duplicate).fetchone()
            print('DELETE_DUPLICATE(%s):%s' % (duplicate,cvelocal[ORM.CVELOCAL_NAME]))
            cur.execute('DELETE FROM orm_cvelocal WHERE id = %d' % duplicate)
        for unattached in unattached_list:
            cvelocal = cur.execute('SELECT * FROM orm_cvelocal WHERE id = "%s"' % unattached).fetchone()
            print('DELETE_UNATTACHED(%s):%s' % (unattached,cvelocal[ORM.CVELOCAL_NAME]))
            cur.execute('DELETE FROM orm_cvelocal WHERE id = %d' % unattached)
        conn.commit()

    print("Errors found: %d" % (len(duplicates_list) + len(unattached_list)))
+
+################################3
+# fix_cvesource
+#
+# Find and fix CveSource duplicate/detached records
+
def fix_cvesource():
    """Find (and with 'force', delete) duplicate orm_cvesource link records.

    A link is identified by its (cve_id, datasource_id) pair; only the first
    row for each pair is kept.  Deletes happen only when the module-level
    'force' flag is set.
    """
    conn = SQL_CONNECT()
    cur = conn.cursor()

    # Perf fix: a set gives O(1) membership tests; the original list made
    # the scan O(n^2) over a large link table
    found_keys = set()
    duplicates_list = []
    cur.execute('SELECT * FROM orm_cvesource')
    i = 0
    for cvesource in cur:
        i += 1

        # Test duplicates
        cve_id = cvesource[ORM.CVESOURCE_CVE_ID]
        datasource_id = cvesource[ORM.CVESOURCE_DATASOURCE_ID]
        key = '%d_%d' % (cve_id,datasource_id)

        if key in found_keys:
            duplicates_list.append(cvesource[ORM.CVESOURCE_ID])
            print("ERROR:DUPLICATE:%s" % key)
        else:
            found_keys.add(key)

        # Development/debug support
        if 999 == (i % 1000) :
            print("%7d: \r" % (i+1), end='')
        if cmd_skip:
            if i < cmd_skip:
                continue
        if cmd_count:
            if (i - cmd_skip) > cmd_count:
                print("Count return: %s,%s" % (i,cmd_count))
                break

    if force:
        for duplicate in duplicates_list:
            cur.execute('DELETE FROM orm_cvesource WHERE id = %d' % duplicate)
        conn.commit()

    print("Errors found: %d" % len(duplicates_list))
+
+#################################
+# report_cve_status_summary()
+#
+# Report the distribution of the CVE status and V3/V2
+# severities across the years. Used to track trends, and
+# also to validate the data and its repairs/migrations.
+#
+
def report_cve_status_summary():
    """Print a tabular distribution of CVE status and V3/V2 severities.

    One row per CVE year (1999..2020) and one row per SRTool status, each
    with counts bucketed by status and by V3/V2 severity.  Used to track
    trends and to validate data repairs/migrations.  Read-only.
    """

    # Column indexes within a table row
    I_COUNT = 0

    I_NEW = 1
    I_VULNERABLE = 2
    I_INVESTIGATE = 3
    I_NOT_VULNERABLE = 4
    I_NEW_RESERVED = 5
    I_HISTORICAL = 6
    I_STATUS_OTHER = 7

    I_V3_CRITICAL = 8
    I_V3_HIGH = 9
    I_V3_MEDIUM = 10
    I_V3_LOW = 11
    I_V3_EMPTY = 12
    I_V3_OTHER = 13

    I_V2_CRITICAL = 14
    I_V2_HIGH = 15
    I_V2_MEDIUM = 16
    I_V2_LOW = 17
    I_V2_EMPTY = 18
    I_V2_OTHER = 19

    I_MAX = 20

    YEAR_START = 1999
    YEAR_STOP = 2020

    # Row labels for the status-summary section (indexed by status - I_NEW)
    label = [
        'NEW',
        'VUL',
        'INV',
        'NVUL',
        'NEW_R',
        'HIST',
        'OTHER',
        ]

    # Keyed by year (1999..2020) and by status index (1..7); the key ranges
    # do not collide since years are >= 1999
    table = {}

    def blank_row():
        return [0 for i in range(I_MAX)]

    # Prepare the array
    for year in range(YEAR_START,YEAR_STOP+1):
        table[year] = blank_row()
    for status in range(I_NEW,I_STATUS_OTHER+1):
        table[status] = blank_row()
    table['total1'] = blank_row()
    table['total2'] = blank_row()

    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur.execute('SELECT * FROM orm_cve')
    i = 0
    for cve in cur:
        i += 1

        # Year comes from the CVE name ("CVE-YYYY-NNNN")
        year = int(cve[ORM.CVE_NAME].split('-')[1])
        v3_severity = cve[ORM.CVE_CVSSV3_BASESEVERITY].upper().strip()
        v2_severity = cve[ORM.CVE_CVSSV2_SEVERITY].upper().strip()
        status = cve[ORM.CVE_STATUS]

        # Bucket by status; remember the status row for the severity sums
        if status == ORM.STATUS_NEW:
            table[year][I_NEW] += 1
            status_row = I_NEW
        elif status == ORM.STATUS_VULNERABLE:
            table[year][I_VULNERABLE] += 1
            status_row = I_VULNERABLE
        elif status == ORM.STATUS_INVESTIGATE:
            table[year][I_INVESTIGATE] += 1
            status_row = I_INVESTIGATE
        elif status == ORM.STATUS_NOT_VULNERABLE:
            table[year][I_NOT_VULNERABLE] += 1
            status_row = I_NOT_VULNERABLE
        elif status == ORM.STATUS_NEW_RESERVED:
            table[year][I_NEW_RESERVED] += 1
            status_row = I_NEW_RESERVED
        elif status == ORM.STATUS_HISTORICAL:
            table[year][I_HISTORICAL] += 1
            status_row = I_HISTORICAL
        else:
            table[year][I_STATUS_OTHER] += 1
            status_row = I_STATUS_OTHER

        # Bucket by V3 severity, per year and per status
        if v3_severity == 'CRITICAL':
            table[year][I_V3_CRITICAL] += 1
            table[status_row][I_V3_CRITICAL] += 1
        elif v3_severity == 'HIGH':
            table[year][I_V3_HIGH] += 1
            table[status_row][I_V3_HIGH] += 1
        elif v3_severity == 'MEDIUM':
            table[year][I_V3_MEDIUM] += 1
            table[status_row][I_V3_MEDIUM] += 1
        elif v3_severity == 'LOW':
            table[year][I_V3_LOW] += 1
            table[status_row][I_V3_LOW] += 1
        elif v3_severity == '':
            table[year][I_V3_EMPTY] += 1
            table[status_row][I_V3_EMPTY] += 1
        else:
            table[year][I_V3_OTHER] += 1
            table[status_row][I_V3_OTHER] += 1

        # Bucket by V2 severity, per year and per status
        if v2_severity == 'CRITICAL':
            table[year][I_V2_CRITICAL] += 1
            table[status_row][I_V2_CRITICAL] += 1
        elif v2_severity == 'HIGH':
            table[year][I_V2_HIGH] += 1
            table[status_row][I_V2_HIGH] += 1
        elif v2_severity == 'MEDIUM':
            table[year][I_V2_MEDIUM] += 1
            table[status_row][I_V2_MEDIUM] += 1
        elif v2_severity == 'LOW':
            table[year][I_V2_LOW] += 1
            table[status_row][I_V2_LOW] += 1
        elif v2_severity == '':
            table[year][I_V2_EMPTY] += 1
            table[status_row][I_V2_EMPTY] += 1
        else:
            table[year][I_V2_OTHER] += 1
            table[status_row][I_V2_OTHER] += 1

    #
    # Year Summary
    #

    print("")
    print(" |Cve |Status |V3 Severity |V2 Severity |")
    print(" | COUNT| NEW, VUL, INV, NVUL, NEW_R, HIST, OTHR| CRIT, HIGH, MED, LOW, EMPTY, OTHR| CRIT, HIGH, MED, LOW, EMPTY, OTHR|")
    print("-----|------|------------------------------------------------|-----------------------------------------|-----------------------------------------|")

    for year in range(YEAR_START,YEAR_STOP+1):
        # Sum the status counts
        for index in range(I_COUNT,I_STATUS_OTHER+1):
            table[year][I_COUNT] += table[year][index]
        # print the columns
        print("%05d|" % year,end = '')
        for index in range(I_COUNT,I_V2_OTHER+1):
            print("%6d%s" % (table[year][index],'|' if index in [I_STATUS_OTHER,I_V3_OTHER,I_V2_OTHER,I_COUNT] else ','),end = '')
        print("")

    print(" ====|======|================================================|=========================================|=========================================|")
    totals = blank_row()
    # Vertical Totals
    for year in range(YEAR_START,YEAR_STOP+1):
        for index in range(I_COUNT,I_V2_OTHER+1):
            totals[index] += table[year][index]
    print("%-5s|" % 'Total',end = '')
    for index in range(I_COUNT,I_V2_OTHER+1):
        print("%6d%s" % (totals[index],'|' if index in [I_STATUS_OTHER,I_V3_OTHER,I_V2_OTHER,I_COUNT] else ','),end = '')
    print("")

    #
    # Status Summary
    #

    print("-----|-------------------------------------------------------|-----------------------------------------|-----------------------------------------|")
    for status in range(I_NEW,I_STATUS_OTHER+1):
        print("%-5s|" % label[status-I_NEW],end = '')
        print(" |",end = '')
        for index in range(I_V3_CRITICAL,I_V2_OTHER+1):
            print("%6d%s" % (table[status][index],'|' if index in [I_COUNT,I_STATUS_OTHER,I_V3_OTHER,I_V2_OTHER] else ','),end = '')
        print("")

    print(" ====|=======================================================|=========================================|=========================================|")
    # Calculate Status Totals
    totals = blank_row()
    # Vertical Totals
    for status in range(I_NEW,I_STATUS_OTHER+1):
        for index in range(I_V3_CRITICAL,I_V2_OTHER+1):
            totals[index] += table[status][index]
    print("%-5s|" % 'Total',end = '')
    print(" |",end = '')
    for index in range(I_V3_CRITICAL,I_V2_OTHER+1):
        print("%6d%s" % (totals[index],'|' if index in [I_COUNT,I_STATUS_OTHER,I_V3_OTHER,I_V2_OTHER] else ','),end = '')
    print("")
+
+
+#################################
+# report_db_status_summary()
+#
+# Report the distribution of the status across the
+# CVE/VUL/INV/DEF by years. Used to track trends, and
+# also to validate the data and its repairs/migrations.
+#
+
+def report_db_status_summary():
+    # Report the distribution of status and priority values across the
+    # CVE/VUL/INV/DEF objects, summed per-year and per-object-type, and
+    # print the results as fixed-width ASCII tables. Read-only: no
+    # database writes are performed.
+
+    today_str = datetime.today().strftime(ORM.DATASOURCE_DATETIME_FORMAT)
+
+    # NOTE(review): hard-coded year window; a CVE whose name year falls
+    # outside 1999..2020 would raise KeyError on table_status[year] below.
+    YEAR_START = 1999
+    YEAR_STOP = 2020
+
+    I_COUNT = 0
+
+    # Object Status indexes
+    # NOTE(review): several names below are garbled (I_INV_INVNERABLE,
+    # I_DEF_DEFNERABLE, I_DEF_DEFESTIGATE); only the numeric values are
+    # used, but they presumably should read ..._VULNERABLE/_INVESTIGATE.
+    I_CVE_NEW = 1; I_CVE_VULNERABLE = 2; I_CVE_INVESTIGATE = 3; I_CVE_NOT_VULNERABLE = 4; I_CVE_NEW_RESERVED = 5; I_CVE_HISTORICAL = 6; I_CVE_STATUS_OTHER = 7
+    I_VUL_NEW = 8; I_VUL_VULNERABLE = 9; I_VUL_INVESTIGATE = 10; I_VUL_NOT_VULNERABLE = 11; I_VUL_NEW_RESERVED = 12; I_VUL_HISTORICAL = 13; I_VUL_STATUS_OTHER = 14
+    I_INV_NEW = 15; I_INV_INVNERABLE = 16; I_INV_INVESTIGATE = 17; I_INV_NOT_VULNERABLE = 18; I_INV_NEW_RESERVED = 19; I_INV_HISTORICAL = 20; I_INV_STATUS_OTHER = 21
+    I_DEF_NEW = 22; I_DEF_DEFNERABLE = 23; I_DEF_DEFESTIGATE = 24; I_DEF_NOT_VULNERABLE = 25; I_DEF_NEW_RESERVED = 26; I_DEF_HISTORICAL = 27; I_DEF_STATUS_OTHER = 28
+    I_STATUS_MAX = 29
+    # Summary Status indexes
+    I_NEW = 1; I_VULNERABLE = 2; I_INVESTIGATE = 3; I_NOT_VULNERABLE = 4; I_NEW_RESERVED = 5; I_HISTORICAL = 6; I_STATUS_OTHER = 7
+
+    # SRTool Priority
+    I_CVE_CRITICAL = 1; I_CVE_HIGH = 2; I_CVE_MEDIUM = 3; I_CVE_LOW = 4; I_CVE_UNDEFINED = 5; I_CVE_ERROR = 6
+    I_VUL_CRITICAL = 7; I_VUL_HIGH = 8; I_VUL_MEDIUM = 9; I_VUL_LOW = 10; I_VUL_UNDEFINED = 11; I_VUL_ERROR = 12
+    I_INV_CRITICAL = 13; I_INV_HIGH = 14; I_INV_MEDIUM = 15; I_INV_LOW = 16; I_INV_UNDEFINED = 17; I_INV_ERROR = 18
+    I_DEF_CRITICAL = 19; I_DEF_HIGH = 20; I_DEF_MEDIUM = 21; I_DEF_LOW = 22; I_DEF_UNDEFINED = 23; I_DEF_ERROR = 24
+    I_PRIORITY_MAX = 25
+
+    # Pseudo-year keys for the per-object-type summary rows; chosen well
+    # below YEAR_START so they never collide with real year keys.
+    cve_idx = 100
+    vul_idx = 101
+    inv_idx = 102
+    def_idx = 103
+    object_label = {
+        I_NEW:'NEW',I_VULNERABLE:'VUL',I_INVESTIGATE:'INV',I_NOT_VULNERABLE:'NVUL',I_NEW_RESERVED:'NEW_R',I_HISTORICAL:'HIST',I_STATUS_OTHER:'OTHER',
+        cve_idx:'CVE',vul_idx:'VUL',inv_idx:'INV',def_idx:'DEF',
+        }
+
+    # Prepare the sum table
+    def blank_row():
+        # Row wide enough for either the status or the priority index space.
+        return [0 for i in range(max(I_STATUS_MAX,I_PRIORITY_MAX)+1)]
+    table_status = {}
+    table_priority = {}
+    for year in range(YEAR_START,YEAR_STOP+1):
+        table_status[year] = blank_row()
+        table_priority[year] = blank_row()
+    for status_idx in range(I_NEW,I_STATUS_OTHER+1):
+        table_status[status_idx] = blank_row()
+
+    # Accumulate one object's status into its row (keyed by 'year', which
+    # may be a real year or a pseudo-key) and the per-status summary row.
+    # The "inactive" status variants fold into their active counterparts
+    # when merge_inactive is set.
+    def sum_object_status(status,year,obj_offset,merge_inactive=True):
+        offset = obj_offset - I_CVE_NEW
+        if merge_inactive:
+            if status == ORM.STATUS_NEW_INACTIVE:
+                status = ORM.STATUS_NEW
+            elif status == ORM.STATUS_INVESTIGATE_INACTIVE:
+                status = ORM.STATUS_INVESTIGATE
+            elif status == ORM.STATUS_VULNERABLE_INACTIVE:
+                status = ORM.STATUS_VULNERABLE
+            elif status == ORM.STATUS_NOT_VULNERABLE_INACTIVE:
+                status = ORM.STATUS_NOT_VULNERABLE
+        if status == ORM.STATUS_NEW:
+            table_status[year][I_NEW+offset] += 1
+            table_status[I_NEW][I_NEW+offset] += 1
+        elif status == ORM.STATUS_VULNERABLE:
+            table_status[year][I_VULNERABLE+offset] += 1
+            table_status[I_VULNERABLE][I_VULNERABLE+offset] += 1
+        elif status == ORM.STATUS_INVESTIGATE:
+            table_status[year][I_INVESTIGATE+offset] += 1
+            table_status[I_INVESTIGATE][I_INVESTIGATE+offset] += 1
+        elif status == ORM.STATUS_NOT_VULNERABLE:
+            table_status[year][I_NOT_VULNERABLE+offset] += 1
+            table_status[I_NOT_VULNERABLE][I_NOT_VULNERABLE+offset] += 1
+        elif status == ORM.STATUS_NEW_RESERVED:
+            table_status[year][I_NEW_RESERVED+offset] += 1
+            table_status[I_NEW_RESERVED][I_NEW_RESERVED+offset] += 1
+        elif status == ORM.STATUS_HISTORICAL:
+            table_status[year][I_HISTORICAL+offset] += 1
+            table_status[I_HISTORICAL][I_HISTORICAL+offset] += 1
+        else:
+            table_status[year][I_STATUS_OTHER+offset] += 1
+            table_status[I_STATUS_OTHER][I_STATUS_OTHER+offset] += 1
+
+    # Accumulate one object's priority into its row; unknown values count
+    # as "undefined".
+    def sum_object_priority(priority,year,obj_offset):
+        offset = obj_offset - I_CVE_CRITICAL
+        if priority == ORM.PRIORITY_LOW:
+            table_priority[year][I_CVE_LOW+offset] += 1
+        elif priority == ORM.PRIORITY_MEDIUM:
+            table_priority[year][I_CVE_MEDIUM+offset] += 1
+        elif priority == ORM.PRIORITY_HIGH:
+            table_priority[year][I_CVE_HIGH+offset] += 1
+        elif priority == ORM.PRIORITY_CRITICAL:
+            table_priority[year][I_CVE_CRITICAL+offset] += 1
+        else: # priority == ORM.PRIORITY_UNDEFINED:
+            table_priority[year][I_CVE_UNDEFINED+offset] += 1
+
+    conn = SQL_CONNECT()
+    cur_cve = conn.cursor()
+    cur_cve2vul = conn.cursor()
+    cur_vul = conn.cursor()
+    cur_vul2inv = conn.cursor()
+    cur_inv = conn.cursor()
+    cur_inv2def = conn.cursor()
+    cur_def = conn.cursor()
+
+    #
+    # Year-specific table_status
+    #
+
+    i = 0
+    cur_cve.execute('SELECT * FROM orm_cve')
+    for count,cve in enumerate(cur_cve):
+        # Year is taken from the CVE name ("CVE-<year>-<number>").
+        year = int(cve[ORM.CVE_NAME].split('-')[1])
+
+        # Sum the CVE status
+        sum_object_status(cve[ORM.CVE_STATUS],year,I_CVE_NEW)
+        sum_object_priority(cve[ORM.CVE_PRIORITY],year,I_CVE_CRITICAL)
+
+        i += 1
+
+        # Find all related Vulnerabilities (walk CVE -> VUL -> INV -> DEF,
+        # attributing each child to the parent CVE's year)
+        cur_cve2vul.execute('SELECT * FROM orm_cvetovulnerablility WHERE cve_id = %d' % cve[ORM.CVE_ID])
+        for cve2vul in cur_cve2vul:
+            # Sum the Vulnerability status
+            cur_vul.execute('SELECT * FROM orm_vulnerability WHERE id = %d' % cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID])
+            for vul in cur_vul:
+                sum_object_status(vul[ORM.VULNERABILITY_STATUS],year,I_VUL_NEW)
+                sum_object_priority(vul[ORM.VULNERABILITY_PRIORITY],year,I_VUL_CRITICAL)
+
+                # Find all related Investigations
+                cur_vul2inv.execute('SELECT * FROM orm_vulnerabilitytoinvestigation WHERE vulnerability_id = %d' % vul[ORM.VULNERABILITY_ID])
+                for vul2inv in cur_vul2inv:
+                    # Sum the Investigation status
+                    cur_inv.execute('SELECT * FROM orm_investigation WHERE id = %d' % vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID])
+                    for inv in cur_inv:
+                        sum_object_status(inv[ORM.INVESTIGATION_STATUS],year,I_INV_NEW)
+                        sum_object_priority(inv[ORM.INVESTIGATION_PRIORITY],year,I_INV_CRITICAL)
+
+                        # Find all related Defects
+                        cur_inv2def.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = %d' % inv[ORM.INVESTIGATION_ID])
+                        for inv2def in cur_inv2def:
+                            # Sum the Defect status
+                            cur_def.execute('SELECT * FROM orm_defect WHERE id = %d' % inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
+                            for defect in cur_def:
+                                sum_object_status(defect[ORM.DEFECT_SRT_STATUS],year,I_DEF_NEW)
+                                sum_object_priority(defect[ORM.DEFECT_SRT_PRIORITY],year,I_DEF_CRITICAL)
+
+        # Progress indicator, every 1000 CVEs
+        if 1000 == i:
+            print("%7d: %-20s\r" % (count+1,cve[ORM.CVE_NAME]),end='')
+            i = 0
+#        if 10000 < count:
+#            break
+
+    #
+    # Object-specific table_status
+    #
+
+    for object_idx in range(cve_idx,def_idx+1):
+        table_status[object_idx] = blank_row()
+        table_priority[object_idx] = blank_row()
+    table_status['total'] = blank_row()
+    table_priority['total'] = blank_row()
+
+    print("CVEs...")
+    cur_cve.execute('SELECT * FROM orm_cve')
+    for cve in cur_cve:
+        sum_object_status(cve[ORM.CVE_STATUS],cve_idx,I_CVE_NEW)
+        sum_object_priority(cve[ORM.CVE_PRIORITY],cve_idx,I_CVE_CRITICAL)
+    print("Vulnerabilities...")
+    cur_vul.execute('SELECT * FROM orm_vulnerability')
+    for vul in cur_vul:
+        # NOTE(review): I_CVE_NEW/I_CVE_CRITICAL are used as the base
+        # offset here (and below) because sum_object_* computes offsets
+        # relative to them; the per-type column comes from the pseudo-key.
+        sum_object_status(vul[ORM.VULNERABILITY_STATUS],vul_idx,I_CVE_NEW)
+        sum_object_priority(vul[ORM.VULNERABILITY_PRIORITY],vul_idx,I_CVE_CRITICAL)
+    cur_inv.execute('SELECT * FROM orm_investigation')
+    print("Investigations...")
+    for inv in cur_inv:
+        sum_object_status(inv[ORM.INVESTIGATION_STATUS],inv_idx,I_CVE_NEW)
+        sum_object_priority(inv[ORM.INVESTIGATION_PRIORITY],inv_idx,I_CVE_CRITICAL)
+    print("Defects...")
+    cur_def.execute('SELECT * FROM orm_defect')
+    for defect in cur_def:
+        sum_object_status(defect[ORM.DEFECT_SRT_STATUS],def_idx,I_CVE_NEW)
+        sum_object_priority(defect[ORM.DEFECT_SRT_PRIORITY],def_idx,I_CVE_CRITICAL)
+        # NOTE(review): stray counter left over from the progress loop
+        # above; unused from this point on.
+        i += 1
+
+    #
+    # Display Status Summary
+    #
+
+    # Year Summary
+    print("\n=== SRTool Status Summary (%s) (%s) ===\n" % (today_str,srtool_basepath))
+    print("     |Cve    |CVE Status                                       |Vulnerability Status                            |Investigation Status                             |Defect Status                                   |")
+    print("     |  COUNT|    NEW,   VUL,   INV,   NVUL, NEW_R,  HIST,  OTHR|   NEW,   VUL,   INV,   NVUL, NEW_R,  HIST,  OTHR|   NEW,   VUL,   INV,   NVUL, NEW_R,  HIST,  OTHR|   NEW,   VUL,   INV,   NVUL, NEW_R,  HIST,  OTHR|")
+    print("-----|-------|-------------------------------------------------|------------------------------------------------|-------------------------------------------------|------------------------------------------------|")
+
+    for year in range(YEAR_START,YEAR_STOP+1):
+        # Sum the status counts
+        for index in range(I_CVE_NEW,I_CVE_STATUS_OTHER+1):
+            table_status[year][I_COUNT] += table_status[year][index]
+        # print the columns
+        print("%5d|" % year,end = '')
+        for index in range(I_COUNT,I_STATUS_MAX):
+            format = "%7d%s" if index in [I_COUNT,I_CVE_NOT_VULNERABLE,I_VUL_NOT_VULNERABLE,I_INV_NOT_VULNERABLE,I_DEF_NOT_VULNERABLE] else "%6d%s"
+            print(format % (table_status[year][index],'|' if index in [I_COUNT,I_CVE_STATUS_OTHER,I_VUL_STATUS_OTHER,I_INV_STATUS_OTHER,I_DEF_STATUS_OTHER] else ','),end = '')
+        print("")
+
+    print("=====|=======|=================================================|================================================|=================================================|================================================|")
+    totals = blank_row()
+    # Vertical Totals
+    for year in range(YEAR_START,YEAR_STOP+1):
+        for index in range(I_COUNT,I_STATUS_MAX):
+            totals[index] += table_status[year][index]
+    print("%-5s|" % 'Total',end = '')
+    for index in range(I_COUNT,I_STATUS_MAX):
+        format = "%7d%s" if index in [I_COUNT,I_CVE_NOT_VULNERABLE,I_VUL_NOT_VULNERABLE,I_INV_NOT_VULNERABLE,I_DEF_NOT_VULNERABLE] else "%6d%s"
+        print(format % (totals[index],'|' if index in [I_COUNT,I_CVE_STATUS_OTHER,I_VUL_STATUS_OTHER,I_INV_STATUS_OTHER,I_DEF_STATUS_OTHER] else ','),end = '')
+    print("")
+
+    # Count Summary
+    print("")
+    print("     |  COUNT|    NEW,   VUL,   INV,   NVUL, NEW_R,  HIST,  OTHR|")
+    print("-----|-------|-------------------------------------------------|")
+    for object_idx in range(cve_idx,def_idx+1):
+        # Sum the status counts
+        for index in range(I_CVE_NEW,I_CVE_STATUS_OTHER+1):
+            table_status[object_idx][I_COUNT] += table_status[object_idx][index]
+        # print the columns
+        print("%5s|" % object_label[object_idx],end = '')
+        for index in range(I_COUNT,I_CVE_STATUS_OTHER+1):
+            format = "%7d%s" if index in [I_COUNT,I_CVE_NOT_VULNERABLE,I_VUL_NOT_VULNERABLE,I_INV_NOT_VULNERABLE,I_DEF_NOT_VULNERABLE] else "%6d%s"
+            print(format % (table_status[object_idx][index],'|' if index in [I_COUNT,I_CVE_STATUS_OTHER] else ','),end = '')
+        print("")
+    print("-----|-------|-------------------------------------------------|")
+
+    #
+    # Display Priority Summary
+    #
+
+    # Year Summary
+    print("\n=== SRTool Priority Summary (%s) (%s) ===\n" % (today_str,srtool_basepath))
+    print("     |Cve    |CVE Priority                              |Vulnerability Priority                    |Investigation Priority                    |Defect Priority                           |")
+    print("     |  COUNT|  CRIT,  HIGH,   MED,   LOW,  UNDEF, ERROR|  CRIT,  HIGH,   MED,   LOW,  UNDEF, ERROR|  CRIT,  HIGH,   MED,   LOW,  UNDEF, ERROR|  CRIT,  HIGH,   MED,   LOW,  UNDEF, ERROR|")
+    print("-----|-------|------------------------------------------|------------------------------------------|------------------------------------------|------------------------------------------|")
+
+    for year in range(YEAR_START,YEAR_STOP+1):
+        # Sum the status counts
+        for index in range(I_COUNT,I_PRIORITY_MAX):
+            table_priority[year][I_COUNT] += table_priority[year][index]
+        # print the columns
+        print("%5d|" % year,end = '')
+        for index in range(I_COUNT,I_PRIORITY_MAX):
+            format = "%7d%s" if index in [I_COUNT,I_CVE_UNDEFINED,I_VUL_UNDEFINED,I_INV_UNDEFINED,I_DEF_UNDEFINED] else "%6d%s"
+            print(format % (table_priority[year][index],'|' if index in [I_COUNT,I_CVE_ERROR,I_VUL_ERROR,I_INV_ERROR,I_DEF_ERROR] else ','),end = '')
+        print("")
+
+    print("=====|=======|==========================================|==========================================|==========================================|==========================================|")
+    totals = blank_row()
+    # Vertical Totals
+    for year in range(YEAR_START,YEAR_STOP+1):
+        for index in range(I_COUNT,I_PRIORITY_MAX):
+            totals[index] += table_priority[year][index]
+    print("%-5s|" % 'Total',end = '')
+    for index in range(I_COUNT,I_PRIORITY_MAX):
+        format = "%7d%s" if index in [I_COUNT,I_CVE_UNDEFINED,I_VUL_UNDEFINED,I_INV_UNDEFINED,I_DEF_UNDEFINED] else "%6d%s"
+        print(format % (totals[index],'|' if index in [I_COUNT,I_CVE_ERROR,I_VUL_ERROR,I_INV_ERROR,I_DEF_ERROR] else ','),end = '')
+    print("")
+
+    print("")
+    print("     |  COUNT|  CRIT,  HIGH,   MED,   LOW,  UNDEF, ERROR|")
+    print("-----|-------|------------------------------------------|")
+    for object_idx in range(cve_idx,def_idx+1):
+        # Sum the status counts
+        for index in range(I_COUNT,I_CVE_ERROR+1):
+            table_priority[object_idx][I_COUNT] += table_priority[object_idx][index]
+        # print the columns
+        print("%5s|" % object_label[object_idx],end = '')
+        for index in range(I_COUNT,I_CVE_ERROR+1):
+            format = "%7d%s" if index in [I_COUNT,I_CVE_UNDEFINED] else "%6d%s"
+            print(format % (table_priority[object_idx][index],'|' if index in [I_COUNT,I_CVE_ERROR] else ','),end = '')
+        print("")
+    print("-----|-------|------------------------------------------|")
+
+#################################
+# report_unattached_records()
+#
+# Report the VUL unattached to CVE, INV unattached to VUL,
+# and DEF unattached to INV records
+#
+
+def report_unattached_records():
+    # Report child records not attached to a parent: VUL without a CVE,
+    # INV without a VUL, DEF without an INV. The VUL and INV passes are
+    # currently disabled ("if False:"); only the DEF pass runs, followed
+    # by an experimental attempt to re-match orphan defects to CVEs via
+    # the "Security Advisory - ... CVE-..." summary text.
+
+    conn = SQL_CONNECT()
+    cur = conn.cursor()
+    cur_cve = conn.cursor()
+    cur_cve2vul = conn.cursor()
+    cur_vul = conn.cursor()
+    cur_vul2inv = conn.cursor()
+    cur_inv = conn.cursor()
+    cur_inv2def = conn.cursor()
+    cur_def = conn.cursor()
+
+    if False:
+        # Add all Vulnerability record IDs
+        unattached_records = {}
+        cur.execute('SELECT * FROM orm_vulnerability')
+        count = 0
+        for vul in cur:
+            count += 1
+            unattached_records[vul[ORM.VULNERABILITY_ID]] = True
+        print("Count = %d" % count)
+        # Remove Vulnerabilities with mapping to CVEs
+        cur.execute('SELECT * FROM orm_cvetovulnerablility')
+        count = 0
+        for cve2vul in cur:
+            count += 1
+            del unattached_records[cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]]
+        print("Count = %d" % count)
+        print("Unattached VUL to CVE = %d" % len(unattached_records))
+        for key in unattached_records:
+            vul = cur.execute('SELECT * FROM orm_vulnerability WHERE id = %d' % key).fetchone()
+            print(" %-10s [%7d] : %s" % (vul[ORM.VULNERABILITY_NAME],vul[ORM.VULNERABILITY_ID],vul[ORM.VULNERABILITY_DESCRIPTION][:60]))
+
+    if False:
+        # Add all Investigation record IDs
+        unattached_records = {}
+        cur.execute('SELECT * FROM orm_investigation')
+        count = 0
+        for inv in cur:
+            count += 1
+            unattached_records[inv[ORM.INVESTIGATION_ID]] = True
+        print("Count = %d" % count)
+        # Remove Investigations with mapping to Vulnerabilities
+        cur.execute('SELECT * FROM orm_vulnerabilitytoinvestigation')
+        count = 0
+        for vul2inv in cur:
+            count += 1
+            del unattached_records[vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]]
+        print("Count = %d" % count)
+        print("Unattached INV to VUL = %d" % len(unattached_records))
+        for key in unattached_records:
+            # NOTE(review): BUG if this branch is ever re-enabled - the row
+            # is fetched into 'vul' but printed via 'inv', which here is the
+            # stale loop variable from the earlier loop (or undefined).
+            vul = cur.execute('SELECT * FROM orm_investigation WHERE id = %d' % key).fetchone()
+            print(" %-10s [%7d] : %s" % (inv[ORM.INVESTIGATION_NAME],inv[ORM.INVESTIGATION_ID],inv[ORM.INVESTIGATION_DESCRIPTION][:60]))
+
+    # Add all Defect record IDs
+    unattached_records = {}
+    cur.execute('SELECT * FROM orm_defect')
+    count = 0
+    for defect in cur:
+        count += 1
+        unattached_records[defect[ORM.DEFECT_ID]] = True
+    print("Count = %d" % count)
+    # Remove Defects with a mapping Investigations
+    cur.execute('SELECT * FROM orm_investigationtodefect')
+    count = 0
+    for inv2def in cur:
+        count += 1
+        if not inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID] in unattached_records:
+            # Dangling mapping: points at a defect id that does not exist
+            print("INV2DEF: INV(%d) no such DEF(%d)" % (inv2def[ORM.INVESTIGATIONTODEFECT_INVESTIGATION_ID],inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]))
+        else:
+            del unattached_records[inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]]
+    print("Count = %d" % count)
+    print("Unattached DEF to INV = %d" % len(unattached_records))
+    defect_list = []
+    # NOTE(review): '.execute(...).fetchone()' chaining assumes the cursor
+    # object is returned by execute() (sqlite3 behavior) - confirm driver.
+    for i,key in enumerate(unattached_records):
+        defect = cur.execute('SELECT * FROM orm_defect WHERE id = %d' % key).fetchone()
+        defect_list.append(defect)
+#        print(" %-10s [%7d] : %s" % (defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_ID],defect[ORM.DEFECT_SUMMARY][:60]))
+#        if i > 10:
+#            break
+
+    # Sort key: defect name
+    def sortDefectRecord(defect):
+        return defect[ORM.DEFECT_NAME]
+    defect_list.sort(key = sortDefectRecord)
+    defect_related = []
+    defect_nocve = []
+    defect_manycve = []
+    test = 0
+    test_match = 0
+    for i,defect in enumerate(defect_list):
+        name = defect[ORM.DEFECT_NAME]
+        summary = defect[ORM.DEFECT_SUMMARY]
+
+        # Only "Security Advisory - ..." summaries are candidates for CVE matching
+        if not defect[ORM.DEFECT_SUMMARY].startswith("Security Advisory - "):
+            defect_related.append('%s,%s' % (name,summary))
+            continue
+
+        if name.startswith('DEFECT-'):
+            name = name.replace('DEFECT-','')
+
+        # Product is the leading token of the (stripped) defect name
+        product = name.split('-')[0]
+        # NOTE(review): the greedy '.*' prefix means findall returns at most
+        # one capture per line, so the "multiple matches" branch below can
+        # rarely (if ever) trigger - confirm intended.
+        match_set = re.findall(".*(CVE-\d+-\d+).*",summary)
+        if not match_set:
+            defect_nocve.append('%s,%s' % (name,summary))
+#            print("ERROR: NO MATCH : %s,%s" % (name,summary))
+            continue
+        elif 1 < len(match_set):
+            defect_manycve.append('%s,%s' % (name,summary))
+#            print("ERROR: MULTIPLE MATCHES : %s,%s,%s" % (name,summary,match_set))
+            continue
+        else:
+            cve = match_set[0]
+
+        if not product or not cve:
+            print("ERROR: MISSING : %s,%s,%s,%s" % (name,product,cve,summary))
+        test += 1
+
+        print("%s,%s,%s,%s,%s,%s" % (defect[ORM.DEFECT_NAME],product,cve,
+            ORM.get_orm_string(defect[ORM.DEFECT_STATUS],ORM.DEFECT_STATUS_STR),
+            ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR),
+            defect[ORM.DEFECT_SUMMARY][:60],))
+
+        # Walk CVE -> VUL -> INV -> DEF looking for a sibling defect whose
+        # product prefix matches this orphan's product.
+        # NOTE(review): the loop below rebinds both 'cve' (string -> row)
+        # and 'defect' (outer loop row -> inner row) - works, but fragile.
+        match = False
+        cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve)
+        for cve in cur_cve:
+            cur_cve2vul.execute('SELECT * FROM orm_cvetovulnerablility WHERE cve_id = %d' % cve[ORM.CVE_ID])
+            for cve2vul in cur_cve2vul:
+                cur_vul2inv.execute('SELECT * FROM orm_vulnerabilitytoinvestigation WHERE vulnerability_id = %d' % cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID])
+                for vul2inv in cur_vul2inv:
+                    # Find all related Defects
+                    cur_inv2def.execute('SELECT * FROM orm_investigationtodefect WHERE investigation_id = %d' % vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID])
+                    for inv2def in cur_inv2def:
+                        cur_def.execute('SELECT * FROM orm_defect WHERE id = %d' % inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID])
+                        for defect in cur_def:
+                            product_this = defect[ORM.DEFECT_NAME].replace('DEFECT-','').split('-')[0]
+                            if product == product_this:
+                                match = True
+                                print("   DEFECT:%s,%s,%s,%s" % (defect[ORM.DEFECT_NAME],
+                                    ORM.get_orm_string(defect[ORM.DEFECT_STATUS],ORM.DEFECT_STATUS_STR),
+                                    ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR),
+                                    defect[ORM.DEFECT_SUMMARY][:60],
+                                    ))
+        if match:
+            test_match += 1
+
+#        if test > 10:
+#            break
+
+    print("Related     = %d" % len(defect_related))
+    print("NoCVE       = %d" % len(defect_nocve))
+    print("ManyCVE     = %d" % len(defect_manycve))
+    print("Testable    = %d" % test)
+    print("TestMatches = %d" % test_match)
+
+# extract product,cve, defect [Defect Status,Defect Resolution]
+# see if CVE has VUL has INV for the product
+
+#################################
+# fix_duplicate_notifications
+#
+# Remove older duplicate notifications
+#
+
+def fix_duplicate_notifications():
+    # Remove older duplicate notification records, keeping the newest of
+    # each distinct description (rows are scanned newest-first). Dry-run
+    # by default: deletion only happens when the global 'force' flag is
+    # set (the "-f" command-line option).
+
+    conn = SQL_CONNECT()
+    cur = conn.cursor()
+    cur_del = conn.cursor()
+
+    notify_descriptions = {}
+    delete_list = []
+    delete_count = 0
+
+    # Newest first, so the first occurrence of a description is the keeper
+    cur.execute('SELECT * FROM orm_notify ORDER BY srt_created DESC;')
+    for i,notify in enumerate(cur):
+        description = notify[ORM.NOTIFY_DESCRIPTION]
+        if description in notify_descriptions:
+            delete_count += 1
+            delete_list.append(notify[ORM.NOTIFY_ID])
+        else:
+            notify_descriptions[description] = True
+
+        # Progress indicator support
+        if (0 == i % 5000):
+            print('%05d:%05d\r' % (i,delete_count), end='')
+
+    print("")
+    if force:
+        print("Deleting %d..." % len(delete_list))
+        # NOTE(review): '?' is the sqlite3 parameter style - confirm the
+        # configured SQL driver accepts it.
+        for i,id in enumerate(delete_list):
+            sql = 'DELETE FROM orm_notify WHERE id=?'
+            ret = cur_del.execute(sql, (id,))
+            if (0 == i % 1000):
+                print('%05d:\r' % (i), end='')
+            # Periodically pause and commit to keep the database responsive
+            if (0 == i % 10000):
+                time.sleep(0.1)
+                conn.commit()
+        conn.commit()
+
+    print("")
+    # NOTE(review): 'i' here is the last loop index; this line raises
+    # NameError if orm_notify is empty - confirm acceptable.
+    print('Delete count = %d of %d, Unique = %d' % (delete_count,i,len(notify_descriptions)))
+    #print(notify_descriptions)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# cve_trace
+# $ ./bin/common/srtool_utils.py --cve-trace CVE-2024-23180
+#
+
+def cve_trace(cve_name):
+    # Trace a CVE's related records (sources, access, history, packages,
+    # references, vulnerability links) to debug FOREIGN KEY failures, then
+    # optionally delete selected related records when the global 'force'
+    # flag is set. SQL is built by %-interpolation of cve_name; safe only
+    # for trusted command-line input.
+    conn = SQL_CONNECT()
+    cur = conn.cursor()
+    cur_check = conn.cursor()
+    cur_del = conn.cursor()
+
+    # Preload the data source table (id -> key) for display
+    datasource_table = {}
+    for ds in cur.execute('SELECT * FROM orm_datasource').fetchall():
+        datasource_table[ds[ORM.DATASOURCE_ID]] = ds[ORM.DATASOURCE_KEY]
+
+    cve = cur.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve_name).fetchone()
+    if not cve:
+        print("ERROR: could not find CVE '%s'" % cve_name)
+        exit(1)
+    print("CVE found:\n  Cve[%s]='%s'" % (cve[ORM.CVE_ID],cve[ORM.CVE_NAME]))
+    cve_id = cve[ORM.CVE_ID]
+
+    cve_local = cur.execute('SELECT * FROM orm_cvelocal WHERE name = "%s"' % cve_name).fetchone()
+    if cve_local:
+        print("CVE LOCAL found:\n  '%s' at %s" % (cve_local[ORM.CVELOCAL_NAME],cve_local[ORM.CVELOCAL_ID]))
+
+    print("CveSource found:")
+    cvesource_list = cur.execute('SELECT * FROM orm_cvesource WHERE cve_id = "%s"' % cve_id)
+    for cvesource in cvesource_list:
+        print("  CveSource=[%s] CVE=%s DataSource=%s (%s)" % (cvesource[ORM.CVESOURCE_ID],cvesource[ORM.CVESOURCE_CVE_ID],cvesource[ORM.CVESOURCE_DATASOURCE_ID],datasource_table[cvesource[ORM.CVESOURCE_DATASOURCE_ID]]))
+        # Check the other foreign key
+        check = cur_check.execute('SELECT * FROM orm_datasource WHERE id = "%s"' % cvesource[ORM.CVESOURCE_DATASOURCE_ID]).fetchone()
+        if not check:
+            print("ERROR: DataSource not found at %s" % cvesource[ORM.CVESOURCE_DATASOURCE_ID])
+
+
+    print("CveAccess found:")
+    cveaccess_list = cur.execute('SELECT * FROM orm_cveaccess WHERE cve_id = "%s"' % cve_id)
+    for cveaccess in cveaccess_list:
+        print("  CVE=%s User=%s" % (cveaccess[ORM.CVEACCESS_CVE_ID],cveaccess[ORM.CVEACCESS_USER_ID]))
+        # Check the other foreign key
+        # NOTE(review): this looks up the user id in orm_cveaccess itself;
+        # presumably it should query the users table - confirm.
+        check = cur_check.execute('SELECT * FROM orm_cveaccess WHERE id = "%s"' % cveaccess[ORM.CVEACCESS_USER_ID]).fetchone()
+        if not check:
+            print("ERROR: User not found at %s" % cveaccess[ORM.CVEACCESS_USER_ID])
+
+    print("CveHistory found:")
+    cvehistory_list = cur.execute('SELECT * FROM orm_cvehistory WHERE cve_id = "%s"' % cve_id)
+    for cvehistory in cvehistory_list:
+        print("  CVE=%s History='%s...'" % (cvehistory[ORM.CVEHISTORY_CVE_ID],cvehistory[ORM.CVEHISTORY_COMMENT][:60]))
+
+    print("PackageToCve found:")
+    packagetocve_list = cur.execute('SELECT * FROM orm_packagetocve WHERE cve_id = "%s"' % cve_id)
+    for packagetocve in packagetocve_list:
+        # NOTE(review): ORM.CVEHISTORY_PACKAGE_ID looks like a copy/paste
+        # from the history block; a PACKAGETOCVE_* index is presumably
+        # intended here - confirm the constant's value.
+        print("  CVE=%s Package=%s" % (packagetocve[ORM.PACKAGETOCVE_CVE_ID],packagetocve[ORM.CVEHISTORY_PACKAGE_ID]))
+
+    print("CveReference found:")
+    cvereference_list = cur.execute('SELECT * FROM orm_cvereference WHERE cve_id = "%s"' % cve_id)
+    for cvereference in cvereference_list:
+        print("  CVE=%s DataSource=%s" % (cvereference[ORM.CVEREFERENCE_CVE_ID],cvereference[ORM.CVEREFERENCE_DATASOURCE_ID]))
+
+    print("CveToVulnerablility found:")
+    cvetovulnerablility_list = cur.execute('SELECT * FROM orm_cvetovulnerablility WHERE cve_id = "%s"' % cve_id)
+    for cvetovulnerablility in cvetovulnerablility_list:
+        print("  CVE=%s Vulnerability=%s" % (cvetovulnerablility[ORM.CVETOVULNERABLILITY_CVE_ID],cvetovulnerablility[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]))
+
+    # Trial delete: only performed with "-f"; the hard-coded if True/False
+    # gates below select which record types are actually removed.
+    if not force:
+        return
+
+    if False:
+        cvehistory_list = cur.execute('SELECT * FROM orm_cvehistory WHERE cve_id = "%s"' % cve_id)
+        for cvehistory in cvehistory_list:
+            print("DELETE CveHistory: CVE=%s History='%s...'" % (cvehistory[ORM.CVEHISTORY_CVE_ID],cvehistory[ORM.CVEHISTORY_COMMENT][:60]))
+        sql = 'DELETE FROM orm_cvehistory WHERE cve_id=?'
+        cur_del.execute(sql, (cve_id,))
+
+    if True:
+        cvesource_list = cur.execute('SELECT * FROM orm_cvesource WHERE cve_id = "%s"' % cve_id)
+        for cvesource in cvesource_list:
+            print("DELETE Cvesource: CVE=%s DataSource=%s" % (cvesource[ORM.CVESOURCE_CVE_ID],cvesource[ORM.CVESOURCE_DATASOURCE_ID]))
+        sql = 'DELETE FROM orm_cvesource WHERE cve_id=?'
+        cur_del.execute(sql, (cve_id,))
+
+    if True:
+        if cve_local:
+            print("DELETE CVE LOCAL: '%s' at %s" % (cve_local[ORM.CVELOCAL_NAME],cve_local[ORM.CVELOCAL_ID]))
+            sql = 'DELETE FROM orm_cvelocal WHERE name=?'
+            cur_del.execute(sql, (cve_name,))
+
+    if False:
+        print("DELETE CVE: '%s' at %s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_ID]))
+        sql = 'DELETE FROM orm_cve WHERE id=?'
+        cur_del.execute(sql, (cve_id,))
+
+    conn.commit()
+    SQL_CLOSE_CONN(conn)
+
+
+#################################
+# report_cve_comments_to_recipes
+#
+# Try to merge comments and packages in orm_cve table.
+#
+
+def report_cve_comments_to_recipes():
+    # Propose a merge of the free-text "comments" field into the
+    # "packages" field for CVE records, writing the proposals (and a
+    # review-needed flag) to report_cve_comments_to_recipes.csv for
+    # human review. Read-only with respect to the database.
+    conn = SQL_CONNECT()
+    cur = conn.cursor()
+
+    """
+    comments:
+        Exclude all containing '%wr%' or '%do not include%' or "%n't%":
+            not (comments like "" or comments like '%wr%' or comments like '%do not include%' or comments like "%n't%")
+        Starting from 6 words, it's not purely product names any more: length(comments) - length(replace(comments, ' ', '')) + 1 = 6
+    packages:
+        Exclude all containing '%wr%' and this should have covered all cases; all these cases have empty comments
+    """
+    cur.execute("SELECT id, comments, packages FROM orm_cve WHERE NOT (comments = '')")
+
+    total = 0
+    review_needed = 0
+    # key: ORM.CVE_ID
+    # value: [curr_comments, curr_packages, proposed_comments, proposed_packages, review_needed]
+    dct_cve_obj = {}
+    # all cve_obj has non-empty comments
+    for cve_obj in cur:
+        id = cve_obj[0]
+        comments = cve_obj[1].lower()
+        packages = cve_obj[2].lower()
+        lst_comments = comments.strip('. ').split(' ')
+        lst_packages = packages.split(' ')
+        # NOTE(review): substring test - matches any package text merely
+        # containing "wr", not just a "wr" marker token; confirm intended.
+        if 'wr' in packages:
+            dct_cve_obj[id] = ['', cve_obj[2], cve_obj[2], '', 0]
+            total += 1
+            continue
+        elif 'wr' in comments or 'do not include' in comments or "n't" in comments:
+            if packages != '' and packages != 'reject':
+                dct_cve_obj[id] = [cve_obj[1], cve_obj[2], cve_obj[1] + ' | packages:' + packages, 'reject:' + packages, 0]
+            total += 1
+            continue
+        # True when every comment word already appears inside packages
+        comments_strict_in_packages = True
+        for comment in lst_comments:
+            if comment not in packages:
+                comments_strict_in_packages = False
+                break
+        if comments_strict_in_packages:
+            dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages, 0]
+            total += 1
+            continue
+        # if there is only a single word in comments
+        elif ' ' not in comments:
+            if packages == '':
+                dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', comments, 0]
+                total += 1
+                continue
+            if comments in packages:
+                dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages, 0]
+                total += 1
+                continue
+            merged = False
+            for i, package in enumerate(lst_packages):
+                # NOTE(review): BUG - 'comment' here is the stale last value
+                # from the 'for comment in lst_comments' loop above; this
+                # presumably should test/assign 'comments' (the single-word
+                # comment string) instead. Confirm before relying on output.
+                if package in comment:
+                    lst_packages[i] = comment
+                    dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', ' '.join(lst_packages), 0]
+                    merged = True
+                    break
+            if not merged:
+                dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages + ' ' + comments, 0]
+            total += 1
+            continue
+        elif len(lst_comments) < 6 and packages == '':
+            dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', comments, 0]
+            total += 1
+            continue
+        else:
+            # Too ambiguous to merge automatically; flag for human review
+            dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', '', 1]
+            review_needed += 1
+            total += 1
+
+    print("Automatically merged", str(round((total - review_needed)/total * 100, 2)) + '%', 'of data')
+
+    # NOTE(review): mode 'x' raises FileExistsError when the report file
+    # already exists - confirm that is the desired behavior.
+    with open('report_cve_comments_to_recipes.csv', 'x', newline='') as f:
+        csvwriter = csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+        csvwriter.writerow(['id', 'curr_comments', 'curr_packages', 'proposed_comments', 'proposed_packages', 'review_needed'])
+        for id in dct_cve_obj:
+            csvwriter.writerow([id] + dct_cve_obj[id])
+
+###############################################################
+#
+# reset scores: reset new CVEs so that they can be rescored
+#
+
+def reset_scores():
+    # Clear score_date on CVEs in NEW status so the scoring pass will
+    # rescore them. Honors the global cmd_skip/cmd_count debug limits
+    # (and additionally caps the run at 100 records), reports progress,
+    # and commits in batches. SQL writes are traced when debug_sql is set.
+    conn = SQL_CONNECT()
+    cur = conn.cursor()
+    cur_write = conn.cursor()
+    cur_ds = conn.cursor()
+    is_change = False
+    write_count = 0
+    progress_set_max(cmd_count if cmd_count else 100)
+
+    if debug_sql:
+        SQL_DEBUG(True,'RST')
+
+    # Scan the open CVEs that have already been scored
+    sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NOT NULL;" % (ORM.STATUS_NEW)
+    cur.execute(sql)
+    for i,cve in enumerate(cur):
+        cve_name = cve[ORM.CVE_NAME]
+        progress_show(cve_name)
+
+        # Progress indicator support
+        if 0 == i % 10:
+            print('%04d: %20s\r' % (i,cve_name), end='')
+        # Batch commit every 200 records (with a breather for readers)
+        if (0 == i % 200) and (not cmd_skip) and is_change:
+            SQL_COMMIT(conn)
+            print("%4d: COMMIT" % i)
+            sleep(2)
+            is_change = False
+        # Development/debug support
+        if cmd_skip > i:
+            continue
+        # Test short count, cap at 100
+        if (cmd_count < (i - cmd_skip)) or (100 < (i - cmd_skip)):
+            print("Count return: %s,%s" % (i,cmd_count))
+            break
+
+        # NULL the score date so the scoring pass picks this CVE up again
+        sql = ''' UPDATE orm_cve
+                  SET score_date = ?
+                  WHERE id = ?'''
+        SQL_EXECUTE(cur_write,sql, (None, cve[ORM.CVE_ID]))
+        write_count += 1
+        is_change = True
+
+    if is_change:
+        print("COMMIT")
+        SQL_COMMIT(conn)
+    print("\nUpdated CVEs=%d" % (write_count))
+    # End progress
+    progress_done('Done')
+    # Dump the SQL transaction data
+    if debug_sql:
+        SQL_DUMP()
+
+#################################
# main loop
#
+
def main(argv):
global verbose
global cmd_skip
global cmd_count
+ global force
+ global srtDbName
+ global debug_sql
# setup
- parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')
+ parser = argparse.ArgumentParser(description='srtool_utils.py: manage/repair the SRTool database')
parser.add_argument('--sources', '-s', nargs='?', const='display', help='SRTool Sources')
parser.add_argument('--reset-sources', '-r', action='store_const', const='reset_sources', dest='command', help='Reset SRTool Sources')
parser.add_argument('--settings', '-S', action='store_const', const='settings', dest='command', help='Show the SRT Settings')
parser.add_argument('--remove-app-sources', dest='remove_app_sources', help='Remove data sources for a previous app')
+ # One shot database repair routines
+
+ parser.add_argument('--fix-name-sort', action='store_const', const='fix_name_sort', dest='command', help='Recalulate the CVE name sort values')
+ parser.add_argument('--fix-cve-recommend', action='store_const', const='fix_cve_recommend', dest='command', help='Fix the empty CVE recommend values')
+ parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
+ parser.add_argument('--fix-new-tags', action='store_const', const='fix_new_tags', dest='command', help='Reset new cve.tags')
+ parser.add_argument('--fix-srt-datetime', dest='fix_srt_datetime', help='Fix SRT dates to datetimes [all|c|v|i|d|history|ch|vh|ih|dh]')
+ parser.add_argument('--fix-reset-nist-to-create', dest='fix_reset_nist_to_create', help='Bulk reset CVE [prefix*] srt_create dates to NIST release dates')
+ parser.add_argument('--fix-missing-create-dates', action='store_const', const='fix_missing_create_dates', dest='command', help='Reset CVE srt_create dates to NIST release dates')
+ parser.add_argument('--fix-public-reserved', action='store_const', const='fix_public_reserved', dest='command', help='Reset CVE NEW_RESERVED if now public')
+ parser.add_argument('--fix-remove-bulk-cve-history', action='store_const', const='fix_remove_bulk_cve_history', dest='command', help='Remove a specific/accidental set of bulk CVE history updates ')
+ parser.add_argument('--fix-bad-mitre-init', action='store_const', const='fix_bad_mitre_init', dest='command', help='Fix MITRE "New" that should be "New-Reserved"')
+ parser.add_argument('--fix-bad-mitre-descr', dest='fix_bad_mitre_descr', help='Fix MITRE that were created with empty descriptions')
+ parser.add_argument('--fix-bad-score-date', action='store_const', const='fix_bad_score_date', dest='command', help='Clear score dates to fix obsolete formats')
+ parser.add_argument('--fix-trim-cve-scores', action='store_const', const='fix_trim_cve_scores', dest='command', help='Trim V3/V2 scores to one decimal place standard')
+ parser.add_argument('--fix-inherit-affected-components', action='store_const', const='fix_inherit_affected_components', dest='command', help='Inherit the affected components field from CVE to its children')
+ parser.add_argument('--fix-notify-access', action='store_const', const='fix_notify_access', dest='command', help='Remove notify-access that do not point to existing notify records')
+ parser.add_argument('--fix-cvelocal', action='store_const', const='fix_cvelocal', dest='command', help='Find and fix CveLocal duplicate/dettached records')
+ parser.add_argument('--fix-cvesource', action='store_const', const='fix_cvesource', dest='command', help='Find and fix CveSource duplicate/dettached records')
+
+ # Continuous maintenance validation and repair routines
+
+ parser.add_argument('--fix-bad-links', action='store_const', const='fix_bad_links', dest='command', help='Find bad links, e.g. "orm_cvesource" (add "-f" to fix)')
+ parser.add_argument('--fix-severity', dest='fix_severity', help='Find bad score/severity values, broken cve source links {ALL|"NIST 2020[,...]*"} (add "-f" to fix)')
+ parser.add_argument('--fix-duplicate-notifications', action='store_const', const='fix_duplicate_notifications', dest='command', help='Removed older duplicate notifications')
+
+ parser.add_argument('--report-multiple-defects', action='store_const', const='report_multiple_defects', dest='command', help='Report multiple defects per investigations')
+ parser.add_argument('--report-duplicate-names', action='store_const', const='report_duplicate_names', dest='command', help='Report duplicate names for CVE,VUL,INV,DEF')
+ parser.add_argument('--report-defects-to-products', action='store_const', const='report_defects_to_products', dest='command', help='Report defects without product link')
+ parser.add_argument('--report-cve-status-summary', action='store_const', const='report_cve_status_summary', dest='command', help='Report the CVE status summary')
+ parser.add_argument('--report-db-status-summary', action='store_const', const='report_db_status_summary', dest='command', help='Report the database status summary')
+ parser.add_argument('--report-unattached-records', action='store_const', const='report_unattached_records', dest='command', help='Report VUL/INV/DEF unattached to parent CVE/VUL/INV')
+ parser.add_argument('--report-cve-comments-to-recipes', action='store_const', const='report_cve_comments_to_recipes', dest='command', help='Report the "comments" field in the CVE table')
+
+ # Other
+
+ parser.add_argument('--cve-trace', dest='cve_trace', help='Trace a CVE (FOREIGN KEY constraint failed)')
+ parser.add_argument('--reset-scores', action='store_const', const='reset_scores', dest='command', help='Reset new CVEs for score test')
+
+ # Options
+
+ parser.add_argument('--database', '-D', dest='database', help='Select specific alternate database file (e.g. a backup)')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
-
- parser.add_argument('--fix-new-reserved', action='store_const', const='fix_new_reserved', dest='command', help='Reset new reserved CVEs to NEW_RESERVED')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
- master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
+ master_log = open(os.path.join(srtool_basepath, "update_logs/master_log.txt"), "a")
verbose = args.verbose
if None != args.skip:
cmd_skip = int(args.skip)
if None != args.count:
cmd_count = int(args.count)
+ force = args.force
+ debug_sql = args.debug_sql
+ progress_set_on(args.do_progress)
+
+ # Test for example the backup databases
+ if args.database:
+ srtDbName = args.database
+ import_orm_schema(os.path.dirname(srtDbName) )
if args.sources:
if args.sources.startswith('s'):
@@ -226,14 +3033,76 @@ def main(argv):
sources('reset')
elif 'settings' == args.command:
settings()
- elif 'fix_new_reserved' == args.command:
- fix_new_reserved()
+
elif args.remove_app_sources:
remove_app_sources(args.remove_app_sources)
+
+ elif 'fix_name_sort' == args.command:
+ fix_name_sort()
+ elif 'fix_cve_recommend' == args.command:
+ fix_cve_recommend()
+ elif 'fix_new_reserved' == args.command:
+ fix_new_reserved()
+ elif 'fix_new_tags' == args.command:
+ fix_new_tags()
+ elif args.fix_srt_datetime:
+ fix_srt_datetime(args.fix_srt_datetime)
+ elif args.fix_reset_nist_to_create:
+ fix_reset_nist_to_create(args.fix_reset_nist_to_create)
+ elif 'fix_missing_create_dates' == args.command:
+ fix_missing_create_dates()
+ elif 'fix_public_reserved' == args.command:
+ fix_public_reserved()
+ elif 'fix_remove_bulk_cve_history' == args.command:
+ fix_remove_bulk_cve_history()
+ elif 'report_defects_to_products' == args.command:
+ report_defects_to_products()
+ elif 'fix_bad_mitre_init' == args.command:
+ fix_bad_mitre_init()
+ elif args.fix_bad_mitre_descr:
+ fix_bad_mitre_descr(args.fix_bad_mitre_descr)
+ elif 'fix_bad_score_date' == args.command:
+ fix_bad_score_date()
+ elif 'fix_inherit_affected_components' == args.command:
+ fix_inherit_affected_components()
+ elif 'fix_notify_access' == args.command:
+ fix_notify_access()
+ elif 'fix_cvelocal' == args.command:
+ fix_cvelocal()
+ elif 'fix_cvesource' == args.command:
+ fix_cvesource()
+
+ elif args.fix_severity:
+ fix_severity(args.fix_severity)
+ elif 'fix_trim_cve_scores' == args.command:
+ fix_trim_cve_scores()
+ elif 'fix_duplicate_notifications' == args.command:
+ fix_duplicate_notifications()
+
+ elif 'report_multiple_defects' == args.command:
+ report_multiple_defects()
+ elif 'report_duplicate_names' == args.command:
+ report_duplicate_names()
+ elif 'fix_bad_links' == args.command:
+ fix_bad_links()
+ elif 'report_cve_status_summary' == args.command:
+ report_cve_status_summary()
+ elif 'report_db_status_summary' == args.command:
+ report_db_status_summary()
+ elif 'report_unattached_records' == args.command:
+ report_unattached_records()
+ elif 'report_cve_comments_to_recipes' == args.command:
+ report_cve_comments_to_recipes()
+
+ elif args.cve_trace:
+ cve_trace(args.cve_trace)
+ elif 'reset_scores' == args.command:
+ reset_scores()
+
else:
print("Command not found")
master_log.close()
if __name__ == '__main__':
- script_pathname = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
diff --git a/bin/cve_checker/datasource.json b/bin/cve_checker/datasource.json
new file mode 100755
index 00000000..5fc14da8
--- /dev/null
+++ b/bin/cve_checker/datasource.json
@@ -0,0 +1,25 @@
+{
+ "srtsetting" : [
+ {
+ "name" : "_SRTOOL_CVE_MESSAGE",
+ "helptext" : "CVE Checker MOD",
+ "value" : "CVe Checker MOD"
+ }
+ ],
+ "datasource" : [
+ {
+ "key" : "0201-cvechecker-update",
+ "data" : "cvechecker",
+ "source" : "cvechecker",
+ "name" : "cvechecker_Updates",
+ "description" : "Datasource for scheduled updates",
+ "cve_filter" : "",
+ "init" : "",
+ "update" : "",
+ "lookup" : "",
+ "_comment_" : "2 = Daily, 5 = OnDemand (disabled), Update on Saturdays at 2:00 am",
+ "update_frequency" : "2",
+ "update_time" : "{\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/cve_checker/patcher.json b/bin/cve_checker/patcher.json
new file mode 100755
index 00000000..a0c96733
--- /dev/null
+++ b/bin/cve_checker/patcher.json
@@ -0,0 +1,41 @@
+{
+ "_comments_" : "Blank values indicate defaults",
+ "label" : "ACME",
+ "patcher_dir" : "bin/acme/patcher",
+ "patch_set" : [
+ {
+ "_comments_" : "The ACME custom version of the Jira integration script",
+ "original" : "bin/common/srtool_jira_template.py",
+ "custom" : "bin/acme/srtool_jira_acme.py",
+ "patch" : "",
+ "options" : ""
+ },
+ {
+ "original" : "bin/srt",
+ "custom" : "bin/acme/patcher/inplace/bin/srt",
+ "patch" : "",
+ "options" : "INPLACE DISABLE"
+ }
+ ],
+ "custom_directories" : [
+ {
+ "dir" : "bin/acme"
+ },
+ {
+ "dir" : "lib/acme"
+ }
+ ],
+ "documentation" : [
+ {
+ "help_original" : "the location of the original mainline file",
+ "help_custom" : "the location of the derived and customized file",
+ "help_original_INPLACE" : "In the INPLACE mode, this is the location of the mainline file that has been customized",
+ "help_custom_INPLACE" : "In the INPLACE mode, this is the stash location of the customized file",
+ "help_patch" : "optional location of extracted patch file, default is '$patcher_dir/$filename.patch'",
+ "help_options" : "When empty, indicates the default workflow of a custom file in custom app directory derived from a mainline template file (e.g. bin/common/srtool_jira_template.py)",
+ "help_options_INPLACE" : "Add the 'INPLACE' key if the file is patched in place in the mainline code",
+ "help_options_DISABLE" : "Add the 'DISABLE' key to make this mapping inactive",
+ "help_custom_directories" : "These are the custom directories that are overlaid on the SRTool code, and are ignored for in-place scanning"
+ }
+ ]
+}
diff --git a/bin/cve_checker/srtool_cvechecker.py b/bin/cve_checker/srtool_cvechecker.py
new file mode 100755
index 00000000..6144cb5a
--- /dev/null
+++ b/bin/cve_checker/srtool_cvechecker.py
@@ -0,0 +1,950 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import argparse
+import json
+import subprocess
+import logging
+import pytz
+import time
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+# data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+REMOTE_PATH = ''
+LOCAL_DIR = 'data/cve_checker/yocto-metrics'
+BRANCH = ''
+
+# Import Channel support
+CK_LOCAL_DIR = 'data/cve_checker'
+
+from datetime import datetime
+# From lib/cve_check/views.py
+CK_UNDEFINED = 0
+CK_UNPATCHED = 1
+CK_IGNORED = 2
+CK_PATCHED = 3
+
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = int(os.environ['SRTDBG_LVL']) if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Fetch the key for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ try:
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if srtsetting:
+ if is_dict:
+ return(srtsetting['value'])
+ else:
+ return(srtsetting[ORM.SRTSETTING_VALUE])
+ except Exception as e:
+ print(f"ERROR:{e}")
+ return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Set the key value for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if not srtsetting:
+ sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+ SQL_EXECUTE(cur, sql, (key,'',value))
+ if verbose: print(f"INSERT:{key}:{value}:")
+ else:
+ if verbose: print(f"UPDATE[{srtsetting[ORM.SRTSETTING_ID]}]:{key}:{value}:")
+ sql = ''' UPDATE orm_srtsetting
+ SET value=?
+ WHERE id=?'''
+ if is_dict:
+ SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+ else:
+ SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+ SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+ os.chdir(newdir)
+ # WARNING: we need a pause else the chdir will break
+ # subsequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+ try:
+ os.makedirs(newdir)
+ except:
+ # dir already exists
+ pass
+ # WARNING: we need a pause else the makedirs could break
+ # subsequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*args):
+ # Only string-type parameters allowed
+ cmd_list = []
+ for arg in args:
+ if not arg: continue
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ if not a: continue
+ cmd_list.append(str(a))
+ else:
+ cmd_list.append(str(arg))
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def execute_commmand(cmnd,path=''):
+ print(f"FOO1:EXECUTE_COMMMAND:{cmnd}:{path}:")
+ cwd = os.getcwd()
+ if path:
+ do_chdir(path,0.4)
+ print(f"FOO2:EXECUTE_COMMMAND:{os.getcwd()}:")
+ result_returncode,result_stdout,result_stderr = execute_process(cmnd)
+ if 0 != result_returncode:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ print(f"ERROR({result_returncode}):{result_stderr}")
+ return(1)
+ if verbose:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ if path:
+ do_chdir(cwd)
+
+# Insure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+ if True or verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+ if not os.path.isdir(repo_dir):
+ repo_parent_dir = os.path.dirname(repo_dir)
+ do_makedirs(repo_parent_dir)
+ print(f"= Clone '{REMOTE_URL}' ... =")
+ cmnd=['git','clone',REMOTE_URL]
+ execute_commmand(cmnd,repo_parent_dir)
+ else:
+ print(f"= Clone '{REMOTE_URL}' skip ... =")
+
+ if branch:
+ print("= Checkout branch '{BRANCH}' ... =")
+ cmnd=['git','-C',repo_dir,'checkout',branch]
+ execute_commmand(cmnd)
+
+ # Get the latest data with a safety pull
+ print("= Pull ... =")
+ cmnd=['git','-C',repo_dir,'pull']
+ execute_commmand(cmnd)
+
+# Compute a sortable CVE name
+def get_name_sort(cve_name):
+ try:
+ a = cve_name.split('-')
+ cve_name_sort = '%s-%s-%07d' % (a[0],a[1],int(a[2]))
+ except:
+ cve_name_sort = cve_name
+ return cve_name_sort
+
+def score2cve_score(score):
+ try:
+ return(float(score))
+ except:
+ return(0.0)
+
+def cve_score2severity(score):
+ score_num = score2cve_score(score)
+ if score_num < 2.5:
+ severity = "Low"
+ elif score_num < 5.0:
+ severity = "Medium"
+ elif score_num < 7.5:
+ severity = "High"
+ else :
+ severity = "Critical"
+ return(severity)
+
+def cve_scores2priority(score_v2,score_v3):
+ score_num = max(score2cve_score(score_v2),score2cve_score(score_v3))
+ if score_num < 2.5:
+ priority = ORM.PRIORITY_LOW
+ elif score_num < 5.0:
+ priority = ORM.PRIORITY_MEDIUM
+ elif score_num < 7.5:
+ priority = ORM.PRIORITY_HIGH
+ else :
+ priority = ORM.PRIORITY_CRITICAL
+ return(priority)
+
+def status2orm_ck(status):
+ if 'Unpatched' == status:
+ return (CK_UNPATCHED,ORM.STATUS_VULNERABLE)
+ elif 'Patched' == status:
+ return (CK_PATCHED,ORM.STATUS_NOT_VULNERABLE)
+ elif 'Ignored' == status:
+ return (CK_IGNORED,ORM.STATUS_NOT_VULNERABLE)
+ else:
+ return (CK_UNDEFINED,ORM.STATUS_NEW)
+
+def count_ck_records(cur):
+ def count_rows(table_name):
+ cur.execute(f"SELECT COUNT(*) FROM {table_name}")
+ return(cur.fetchone()[0])
+ Ck_Audit_cnt = count_rows('cve_checker_Ck_Audit')
+ Ck_Package_cnt = count_rows('cve_checker_Ck_Package')
+ Ck_Product_cnt = count_rows('cve_checker_Ck_Product')
+ Ck_Layer_cnt = count_rows('cve_checker_Ck_Layer')
+ CkPackage2Cve_cnt = count_rows('cve_checker_CkPackage2Cve')
+ CkPackage2CkProduct_cnt = count_rows('cve_checker_CkPackage2CkProduct')
+ return(Ck_Audit_cnt, Ck_Package_cnt, Ck_Product_cnt, Ck_Layer_cnt, CkPackage2Cve_cnt, CkPackage2CkProduct_cnt)
+
+#################################
+# Check Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+# Unit tests:
+# bin/cve_checker/srtool_cvechecker.py --validate-cvechk-ab master -v
+# bin/cve_checker/srtool_cvechecker.py --validate-cvechk-ab dunfell -v
+#
+
+def validate_cvechk_ab(release):
+ repo_dir = os.path.join(srtool_basepath,LOCAL_DIR)
+ LOCAL_PATH = f'cve-check/{release}'
+
+ # Insure that the repo is present and updated
+ prepare_git(repo_dir,REMOTE_URL,BRANCH)
+
+ # Find the JSON file
+ json_dir = os.path.join(repo_dir,LOCAL_PATH)
+ file_list = []
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ file_list.append(file)
+ print(f"CVKCHK JSON file count = {len(file_list)}")
+
+ progress_set_max(len(file_list))
+ # Scan the JSON files
+ print(f"Release = {release}")
+ for i,json_file in enumerate(file_list):
+
+ # Debugging support
+ if cmd_skip and (i < cmd_skip):
+ continue
+ if cmd_count and (i > (cmd_skip + cmd_count)):
+ continue
+
+ with open(os.path.join(json_dir,json_file)) as json_data:
+ progress_show(json_file)
+ try:
+ dct = json.load(json_data)
+ except Exception as e:
+ print(f"ERROR:JSON_FILE_LOAD:{json_file}:{e}", file=sys.stderr)
+ continue
+
+ if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+
+ for elem in dct:
+ print(f"TOP ELEM:{elem}")
+
+ multiple_products = []
+ mismatch_products = []
+ mismatch_iscves = []
+
+ elem_packages = dct['package']
+ print(f"PACKAGE COUNT:{len(elem_packages)}")
+ for package in elem_packages:
+ name = package['name']
+ short_name = name.replace('-native','')
+
+ package_products = package['products']
+ if 1 != len(package_products):
+ s = f"{name}={len(package_products)}"
+ for product in package_products:
+ s += f":{product['product']}"
+ multiple_products.append(s)
+
+ is_cves = ''
+ for product in package_products:
+ if not is_cves:
+ is_cves = product['cvesInRecord']
+ if short_name != product['product']:
+ mismatch_products.append(f"{name}!={product['product']}")
+ if is_cves != product['cvesInRecord']:
+ mismatch_iscves.append(f"{name}:{is_cves} != {product['cvesInRecord']}")
+
+ print(f"multiple_products:{len(multiple_products)}")
+ for i,mp in enumerate(multiple_products):
+ print(f" {mp}")
+ if i > 5: break
+ print(f"mixed_products:{len(mismatch_products)}")
+ for i,mp in enumerate(mismatch_products):
+ print(f" {mp}")
+ if i > 5: break
+ print(f"mixed_iscves:{len(mismatch_iscves)}")
+ for i,mp in enumerate(mismatch_iscves):
+ print(f" {mp}")
+ if i > 5: break
+ progress_done('Done')
+
+#################################
+# Import Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+# Unit tests:
+# bin/cve_checker/srtool_cvechecker.py --import-cvechk 7,nanbield,nanbield -v (7 = AB repo)
+# bin/cve_checker/srtool_cvechecker.py --import-cvechk 6,master,<none> -v (6 = SSH import)
+#
+
+def import_cvechk(key,audit_name):
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+
+ ck_import_id,ck_audit_key,ck_import_select = key.split(',')
+
+ _log("Prepare Import channel")
+ sql = """SELECT * FROM cve_checker_CkUploadManager WHERE id = ?"""
+ ck_import = SQL_EXECUTE(cur, sql, (ck_import_id,)).fetchone()
+ if not ck_import:
+ print(f"ERROR: ck_import not found '{ck_import_id}'")
+ exit(1)
+
+ ck_json_list = []
+ if 'Repo' == ck_import['import_mode']:
+ # Isolate the repo's directory name from the local path (first dir)
+ repo_dir_name = ck_import['path']
+ pos = repo_dir_name.find('/')
+ if pos > 0:
+ repo_dir_name = repo_dir_name[0:pos]
+ repo_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,repo_dir_name)
+ repo_url = ck_import['repo']
+ repo_branch = ck_import['branch']
+
+ # Insure that the repo is present and updated
+ _log("Prepare repo")
+ print(f"FOO:prepare_git({repo_dir},{repo_url},{repo_branch})")
+ prepare_git(repo_dir,repo_url,repo_branch)
+
+ # Is the selector a file?
+ if ck_import_select.endswith('.json'):
+ ck_json_list.append(os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'],ck_import_select))
+ else:
+ # Gather files from this sub-directory
+ json_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'],ck_import_select)
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ ck_json_list.append(os.path.join(json_dir,file))
+ if not ck_json_list:
+ print(f"ERROR: no JSON files found in '{json_dir}'")
+ exit(1)
+ else:
+ print(f"CVKCHK JSON file count = {len(ck_json_list)}")
+ elif 'SSL' == ck_import['import_mode']:
+ host,path = ck_import['path'].split(':')
+ path = os.path.join(path,ck_import_select)
+ ck_ssl_cp_list = []
+ if path.endswith('.json'):
+ ck_ssl_cp_list.append(path)
+ else:
+ cmnd = ['ssh','-i', ck_import['pem'], host, 'ls', path+'/*.json']
+ exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
+ for i,line in enumerate(exec_stdout.splitlines()):
+ line = line.strip()
+ ck_ssl_cp_list.append(line)
+ print(f"FOUND_SSL_JSON={ck_ssl_cp_list}:")
+ local_import_dir = os.path.join(srtool_basepath,'data/cve_checker/ssl')
+ do_makedirs(local_import_dir)
+ cmnd = ['scp','-i', ck_import['pem'], f"{host}:{path}"+"/*", local_import_dir]
+ exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
+ for file in ck_ssl_cp_list:
+ ck_json_list.append(os.path.join(local_import_dir,os.path.basename(file)))
+
+ elif 'Upload' == ck_import['import_mode']:
+ print(f"FOO:UPLOAD:{ck_import_select}:")
+ # Is the selector a file?
+ if ck_import_select.endswith('.json'):
+ print(f"FOO1:{ck_import_select}")
+ ck_json_list.append(ck_import_select)
+ else:
+ print(f"ERROR: Upload: not a JSON file '{ck_import_select}'")
+ exit(1)
+
+ elif 'File' == ck_import['import_mode']:
+ print(f"FOO:{ck_import['path']}:{ck_import_select}:")
+ # Is the selector a file?
+ if ck_import['path'].endswith('.json'):
+ print(f"FOO1:{ck_import['path']}")
+ ck_json_list.append(ck_import['path'])
+ else:
+ # Gather files from this sub-directory
+ json_dir = os.path.join(ck_import['path'],ck_import_select)
+ print(f"FOO2:CHECK:{json_dir}")
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ ck_json_list.append(os.path.join(json_dir,file))
+ if not ck_json_list:
+ print(f"ERROR: no JSON files found in '{json_dir}'")
+ exit(1)
+ else:
+ print(f"CVKCHK JSON file count = {len(ck_json_list)}")
+ else:
+ print(f"ERROR: import mode not recognized '{ck_import['import_mode']}'")
+ exit(1)
+ print(f"FOUND_JSON={ck_json_list}:")
+
+ _log("Prepare ORM Products")
+ sql = """SELECT * FROM orm_product WHERE `key` = ?"""
+ orm_product = SQL_EXECUTE(cur, sql, (ck_audit_key,)).fetchone()
+ if not orm_product:
+ print(f"ERROR: release not found '{ck_audit_key}'")
+ exit(1)
+
+ # Find or create audit, just one per day per release
+ _log("Prepare Audit record")
+ audit_date = datetime.now()
+ if not audit_name:
+ audit_name = f"audit_{audit_date.strftime('%Y%m%d')}_{orm_product['key']}_"
+ sql = f"""SELECT * FROM cve_checker_ck_audit WHERE `name` = ?"""
+ found_audit = SQL_EXECUTE(cur, sql, (audit_name,)).fetchone()
+ if found_audit:
+ ck_audit_id = int(found_audit['id'])
+ # Preclear audit's packages and their indexes
+ sql = f"""SELECT * FROM cve_checker_ck_package WHERE ck_audit_id = ?"""
+ for ck_package in SQL_EXECUTE(cur, sql, params=(ck_audit_id,)).fetchall():
+ sql = f"""DELETE FROM cve_checker_ckpackage2cve WHERE ck_package_id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ sql = f"""DELETE FROM cve_checker_ckpackage2ckproduct WHERE ck_package_id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ sql = f"""DELETE FROM cve_checker_ck_package WHERE id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ SQL_COMMIT(conn)
+ else:
+ # Create a parent audit record
+ sql = ''' INSERT INTO cve_checker_ck_audit (name, orm_product_id,create_time) VALUES (?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (audit_name,orm_product['id'],audit_date,))
+ ck_audit_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ if verbose: print(f"ck_audit_id={ck_audit_id}")
+
+ # Scan the JSON files
+ print(f"Release = {ck_audit_key}")
+ if verbose:
+ Ck_Audit_org, Ck_Package_org, Ck_Product_org, Ck_Layer_org, CkPackage2Cve_org, CkPackage2CkProduct_org = count_ck_records(cur)
+
+ layer_id_cache = {}
+ product_id_cache = {}
+ cve_id_cache = {}
+ layer_id_cache_hit = 0
+ product_id_cache_hit = 0
+ cve_id_cache_hit = 0
+ added_cve = 0
+ issue_cnt = 0
+
+ # Prefetch the existing CVE IDs
+ _log("Prepare CVE pre-fetch")
+ print(f"Prefetch CVE IDs ...")
+ sql = f"""SELECT id,name FROM orm_cve"""
+ orm_cves = SQL_EXECUTE(cur, sql, ).fetchall()
+ for orm_cve in orm_cves:
+ cve_id_cache[orm_cve['name']] = orm_cve['id']
+
+ for json_file in ck_json_list:
+ with open(json_file) as json_data:
+ try:
+ dct = json.load(json_data)
+ except Exception as e:
+ print(f"ERROR:JSON_FILE_LOAD:{json_file}:{e}", file=sys.stderr)
+ continue
+
+ elem_packages = dct['package']
+ print(f"PACKAGE COUNT:{len(elem_packages)}")
+ progress_set_max(len(elem_packages))
+ for i,package in enumerate(elem_packages):
+ # Debugging support
+ if cmd_skip and (i < cmd_skip):
+ continue
+ if cmd_count and (i > (cmd_skip + cmd_count)):
+ continue
+ if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+
+ #
+ # Extract the ck_package records
+ #
+
+ package_name = package['name']
+ package_version = package['version']
+ ck_layer_name = package['layer']
+ progress_show(package_name)
+
+ # Fetch or create the ck_layer
+ ck_layer_id = 0
+ if ck_layer_name in layer_id_cache:
+ ck_layer_id = layer_id_cache[ck_layer_name]
+ layer_id_cache_hit += 1
+ if not ck_layer_id:
+ sql = f"""SELECT * FROM cve_checker_ck_layer WHERE "name" = ?"""
+ ck_layer = SQL_EXECUTE(cur, sql, params=(ck_layer_name,)).fetchone()
+ if ck_layer:
+ ck_layer_id = ck_layer['id']
+ if not ck_layer_id:
+ # Create layer record
+ sql = ''' INSERT INTO cve_checker_ck_layer (name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (ck_layer_name,))
+ ck_layer_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ layer_id_cache[ck_layer_name] = ck_layer_id
+
+ # Create ck_package record
+ sql = ''' INSERT INTO cve_checker_ck_package (name,version,ck_layer_id,unpatched_cnt,ignored_cnt,patched_cnt,ck_audit_id) VALUES (?, ?, ?, ?, ?, ?, ?)'''
+ params = (package_name,package_version,ck_layer_id,0,0,0,ck_audit_id)
+ SQL_EXECUTE(cur, sql, params)
+ ck_package_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+
+ # Fetch or create the ck_products
+ for product in package['products']:
+ ck_product_name = product['product']
+ ck_cvesInRecord = product['cvesInRecord']
+
+ ck_product_id = 0
+ if ck_product_name in product_id_cache:
+ ck_product_id = product_id_cache[ck_product_name]
+ product_id_cache_hit += 1
+ if not ck_product_id:
+ sql = f"""SELECT * FROM cve_checker_ck_product WHERE "name" = ?"""
+ ck_product = SQL_EXECUTE(cur, sql, params=(ck_product_name,)).fetchone()
+ if ck_product:
+ ck_product_id = ck_product['id']
+ if not ck_product_id:
+ # Create layer record
+ sql = ''' INSERT INTO cve_checker_ck_product (name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (ck_product_name,))
+ ck_product_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ sql = f"""SELECT * FROM cve_checker_ck_product WHERE "name" = ?"""
+ ck_product = SQL_EXECUTE(cur, sql, params=(ck_product_name,)).fetchone()
+ product_id_cache[ck_product_name] = ck_product_id
+
+ # Create CkPackage2CkProduct
+ sql = ''' INSERT INTO cve_checker_ckpackage2ckproduct (ck_package_id,ck_product_id,cvesInRecord) VALUES (?, ?, ?)'''
+ params = (ck_package_id,ck_product_id,('Yes'==ck_cvesInRecord))
+ SQL_EXECUTE(cur, sql, params)
+ SQL_COMMIT(conn)
+
+ # Fetch or create CVE records for issues
+ unpatched_cnt = 0
+ ignored_cnt = 0
+ patched_cnt = 0
+ for issue in package['issue']:
+ issue_cnt += 1
+ issue_id = issue['id']
+ ck_status,orm_status = status2orm_ck(issue['status'])
+ orm_comments = ''
+ orm_packages = ''
+ srtool_today = datetime.now()
+ print(f"CVE={issue_id}:Package={package_name}")
+
+ # increment status sums
+ if CK_UNPATCHED == ck_status:
+ unpatched_cnt += 1
+ elif CK_IGNORED == ck_status:
+ ignored_cnt += 1
+ elif CK_PATCHED == ck_status:
+ patched_cnt += 1
+
+ orm_cve_id = 0
+ if issue_id in cve_id_cache:
+ orm_cve_id = cve_id_cache[issue_id]
+ cve_id_cache_hit += 1
+ if not orm_cve_id:
+ sql = f"""SELECT * FROM orm_cve WHERE "name" = ?"""
+ orm_cve = SQL_EXECUTE(cur, sql, params=(issue_id,)).fetchone()
+ if orm_cve:
+ orm_cve_id = orm_cve['id']
+ if not orm_cve_id:
+ # Create a placehold CVE record until is it published and imported from NVD
+ sql_elements = [
+ 'name',
+ 'name_sort',
+ 'priority',
+ 'status',
+ 'comments',
+ 'comments_private',
+ 'tags',
+ 'cve_data_type',
+ 'cve_data_format',
+ 'cve_data_version',
+ 'public',
+ 'publish_state',
+ 'publish_date',
+ 'acknowledge_date',
+ 'description',
+ 'publishedDate',
+ 'lastModifiedDate',
+ 'recommend',
+ 'recommend_list',
+ 'cvssV3_baseScore',
+ 'cvssV3_baseSeverity',
+ 'cvssV2_baseScore',
+ 'cvssV2_severity',
+ 'packages',
+ 'srt_updated',
+ 'srt_created',
+ ]
+ sql_qmarks = []
+ for i in range(len(sql_elements)):
+ sql_qmarks.append('?')
+ sql_values = (
+ issue_id,
+ get_name_sort(issue_id),
+ cve_scores2priority(issue['scorev2'],issue['scorev3']),
+ orm_status,
+ orm_comments,
+ '',
+ '',
+ '',
+ '',
+ '',
+ True,
+ ORM.PUBLISH_UNPUBLISHED,
+ '',
+ None,
+ issue['summary'],
+ '',
+ '',
+ '',
+ '',
+ issue['scorev3'],
+ cve_score2severity(issue['scorev3']),
+ issue['scorev2'],
+ cve_score2severity(issue['scorev2']),
+ orm_packages,
+ srtool_today,
+ srtool_today
+ )
+ sql, params = 'INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values
+ SQL_EXECUTE(cur, sql, params)
+ orm_cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ added_cve += 1
+ # Commit the new CVE and history
+ SQL_COMMIT(conn)
+
+ # Update package status sums
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from CVE Checker')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (orm_cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_COMMIT(conn)
+
+ cve_id_cache[issue_id] = orm_cve_id
+
+ # Create CkPackage2Cve
+ sql = ''' INSERT INTO cve_checker_ckpackage2cve (ck_package_id,orm_cve_id,ck_status,ck_audit_id) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (ck_package_id,orm_cve_id,ck_status,ck_audit_id,))
+
+ # Update counts in the CK_Package
+ sql = ''' UPDATE cve_checker_ck_package
+ SET unpatched_cnt = ?, ignored_cnt = ?, patched_cnt = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (unpatched_cnt,ignored_cnt,patched_cnt,ck_package_id))
+
+ # Commit these records
+ SQL_COMMIT(conn)
+
+
+ if verbose:
+ Ck_Audit_cnt, Ck_Package_cnt, Ck_Product_cnt, Ck_Layer_cnt, CkPackage2Cve_cnt, CkPackage2CkProduct_cnt = count_ck_records(cur)
+ print(f"Packages = {len(elem_packages)}")
+ print(f"Ck_Audit diff = {Ck_Audit_cnt - Ck_Audit_org}")
+ print(f"Ck_Package diff = {Ck_Package_cnt - Ck_Package_org}")
+ print(f"Ck_Product diff = {Ck_Product_cnt - Ck_Product_org}")
+ print(f"Ck_Layer diff = {Ck_Layer_cnt - Ck_Layer_org}")
+ print(f"CkPackage2Cve diff = {Ck_Audit_cnt - Ck_Layer_org}")
+ print(f"CkPackage2CkProduct diff = {Ck_Audit_cnt - Ck_Audit_org}")
+ print(f"Issue count = {issue_cnt}")
+ print(f"Added Orm_CVE records = {added_cve}")
+ print(f"layer_id_cache_hit = {layer_id_cache_hit}")
+ print(f"product_id_cache_hit = {product_id_cache_hit}")
+ print(f"cve_id_cache_hit = {cve_id_cache_hit}")
+
+ progress_done('Done')
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# update_imports
+#
+
+def update_imports():
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+ now = datetime.now(pytz.utc)
+
+ _log("Update Import channel lists")
+ sql = """SELECT * FROM cve_checker_CkUploadManager"""
+ for ck_import in SQL_EXECUTE(cur, sql, ).fetchall():
+ # 2023-11-20T07:19:47.033Z
+ select_refresh = ck_import['select_refresh'][:26]
+ print(f"FOO1:{select_refresh}")
+ select_refresh = datetime.strptime(select_refresh,'%Y-%m-%d %H:%M:%S.%f')
+ select_refresh = select_refresh.replace(tzinfo=pytz.utc)
+ # Update no more that every 10 minutes
+ delta = now - select_refresh
+ print(f"FOO2:{delta} = {now} - {select_refresh}")
+ if (1 > delta.days) and ((10 * 60) > delta.seconds) and (not force_update):
+ continue
+
+ ck_list = []
+ if 'Repo' == ck_import['import_mode']:
+ # Isolate the repo's directory name from the local path (first dir)
+ repo_dir_name = ck_import['path']
+ pos = repo_dir_name.find('/')
+ if pos > 0:
+ repo_dir_name = repo_dir_name[0:pos]
+ repo_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,repo_dir_name)
+ repo_url = ck_import['repo']
+ repo_branch = ck_import['branch']
+
+ # Insure that the repo is present and updated
+ _log("Prepare repo")
+ print(f"FOO:prepare_git({repo_dir},{repo_url},{repo_branch})")
+ prepare_git(repo_dir,repo_url,repo_branch)
+
+ # Is the selector a file?
+ if ck_import['path'].endswith('.json'):
+ pass
+ else:
+ # Gather files from this sub-directory
+ json_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'])
+ for root, dirs, files in os.walk(json_dir,topdown=True):
+ print(f"BAR:{dirs}:{files}:")
+ for i,dir in enumerate(dirs):
+ ck_list.append(dir)
+ for i,file in enumerate(files):
+ if file.endswith('.json'):
+ ck_list.append(file)
+ # Only the first level
+ break
+
+ elif 'SSL' == ck_import['import_mode']:
+ host,path = ck_import['path'].split(':')
+ cmnd = ['ssh','-i', ck_import['pem'], host, 'ls', path]
+ exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
+ for i,line in enumerate(exec_stdout.splitlines()):
+ line = line.strip()
+ ck_list.append(line)
+
+ elif 'File' == ck_import['import_mode']:
+ # Is the selector a file?
+ if ck_import['path'].endswith('.json'):
+ # Put the file's name in the list
+ ck_list.append(os.path.basename(ck_import['path']))
+ else:
+ # Gather files from this sub-directory
+ json_dir = ck_import['path']
+ for root, dirs, files in os.walk(json_dir,topdown=True):
+ print(f"BAR:{dirs}:{files}:")
+ for i,dir in enumerate(dirs):
+ ck_list.append(dir)
+ for i,file in enumerate(files):
+ if file.endswith('.json'):
+ ck_list.append(file)
+ # Only the first level
+ break
+
+ if ck_list:
+ ck_list.sort()
+ print(f"FOUND_SELECTS[{ck_import['id']}]={ck_list}:")
+ sql = ''' UPDATE cve_checker_CkUploadManager
+ SET select_list=?, select_refresh = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, ('|'.join(ck_list),now,ck_import['id']))
+
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# new_to_historical
+#
+# For boot strapping an installation,
+# set triage CVE set going forward
+#
+# Range is either 'all', or all CVEs
+# before a given end published date
+#
+
+def new_to_historical(end_date):
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+
+ is_all = True
+ if 'all' != end_date:
+ is_all = False
+ try:
+ pub_date = datetime.strptime(end_date,'%Y-%m-%d')
+ except:
+ print(f"ERROR: pub date not in YYYY-MM-DD: '{end_date}'")
+ exit(1)
+
+ # SRTool Status
+ HISTORICAL = 0
+ NEW = 1
+ status_changes = {}
+
+ sql = """SELECT name,publishedDate,id FROM orm_cve where status = ?"""
+ for cve in SQL_EXECUTE(cur, sql, (NEW,) ).fetchall():
+ name = cve['name']
+
+ cve_year = name[:name.find('-',5)]
+ if not cve_year in status_changes:
+ status_changes[cve_year] = [0,0,0]
+ status_changes[cve_year][0] += 1
+
+ if (not is_all) and (cve['publishedDate'] > end_date):
+ status_changes[cve_year][2] += 1
+ continue
+ status_changes[cve_year][1] += 1
+
+ if not test:
+ sql = ''' UPDATE orm_cve
+ SET status=?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (HISTORICAL,cve['id']))
+
+ print("\n Results")
+ print("Year Found Changed Kept")
+ for cve_year in sorted(status_changes.keys()):
+ print(f"{cve_year}: {status_changes[cve_year][0]:7} {status_changes[cve_year][1]:7} {status_changes[cve_year][2]:7}")
+
+ if not test:
+ SQL_COMMIT(conn)
+ else:
+ print(f"NOTE: changes not committed due to 'test' flag")
+
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global test
+ global force_update
+ global cmd_count
+ global cmd_skip
+
+ parser = argparse.ArgumentParser(description='srtool_cve_checker.py: CVE Checker results import')
+
+ parser.add_argument('--import-cvechk', '-i', dest='import_cvechk', help='Import an audit channel')
+ parser.add_argument('--audit-name', '-n', dest='audit_name', help='Name for audit')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
+
+ parser.add_argument('--update-imports', '-u', action='store_true', dest='update_imports', help='Update the import lists')
+ parser.add_argument('--new-to-historical', dest='new_to_historical', help="Change 'new' cves to 'historical' for 'all' or since pub date [all|yyyy-mm-dd]")
+
+ # Test
+ parser.add_argument('--validate-cvechk-ab', '-V', dest='validate_cvechk_ab', help='Validate the AB cve-checker JSON file')
+
+ # Debugging support
+ parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+ parser.add_argument('--local-job', action='store_true', dest='local_job', help='Use local job')
+ args = parser.parse_args()
+
+ ret = 0
+ verbose = args.verbose
+ test = args.test
+ force_update = args.force_update
+ cmd_count = int(args.count) if args.count else 0
+ cmd_skip = int(args.skip) if args.skip else 0
+ progress_set_on(args.do_progress)
+
+ if args.validate_cvechk_ab:
+ validate_cvechk_ab(args.validate_cvechk_ab)
+ elif args.import_cvechk:
+ import_cvechk(args.import_cvechk,args.audit_name)
+ elif args.update_imports:
+ update_imports()
+ elif args.new_to_historical:
+ new_to_historical(args.new_to_historical)
+
+ # NOTE(review): removed dead 'elif args.drop_ck_tables:' branch — no
+ # '--drop-ck-tables' argument is defined above, so it raised AttributeError.
+
+ else:
+ print("srtool_cve_checker.py:Command not found")
+ ret = 1
+
+ progress_done('Done')
+ return(ret)
+
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ exit( main(sys.argv[1:]) )
+
diff --git a/bin/cve_checker/srtool_cvechecker_util.py b/bin/cve_checker/srtool_cvechecker_util.py
new file mode 100755
index 00000000..68498653
--- /dev/null
+++ b/bin/cve_checker/srtool_cvechecker_util.py
@@ -0,0 +1,465 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# NOTE: this is a fix-it file, to fix broken tables and late global
+# changes. The fix-its are preserved for reference and re-use.
+#
+
+import os
+import sys
+import argparse
+import json
+import subprocess
+import logging
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+# Base install dir (exported by the app environment) and report output dir
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+# Upstream cve-check results repo and its local clone location, e.g.
+# data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+REMOTE_PATH = ''
+LOCAL_DIR = 'data/cve_checker/yocto-metrics'
+BRANCH = ''
+
+# Issue status codes, mirrored from lib/cve_check/views.py
+CK_UNDEFINED = 0
+CK_UNPATCHED = 1
+CK_IGNORED = 2
+CK_PATCHED = 3
+
+# Command-line state; set from parsed arguments in main()
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*args):
+ # Only string-type parameters allowed
+ cmd_list = []
+ for arg in args:
+ if not arg: continue
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ if not a: continue
+ cmd_list.append(str(a))
+ else:
+ cmd_list.append(str(arg))
+
+ srt_base_dir = os.environ.get('SRT_BASE_DIR')
+ if srt_base_dir and (srt_base_dir != os.getcwd()):
+ os.chdir(srt_base_dir)
+ if cmd_list[0].startswith('bin/') or cmd_list[0].startswith('./bin'):
+ cmd_list[0] = os.path.join(srt_base_dir,cmd_list[0])
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    """Fetch the value of the SrtSetting row named 'key'.
+
+    is_dict selects row access by column name (dict-style cursor) versus
+    positional ORM index. Returns 'default_value' when the setting is
+    missing or the query raises.
+    """
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        print(f"ERROR:{e}")
+    # Not found (or query failed): fall back to the caller's default
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Set the key value for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if not srtsetting:
+ sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+ SQL_EXECUTE(cur, sql, (key,'',value))
+ print("INSERT:{key}:{value}:")
+ else:
+ print("UPDATE[{srtsetting[ORM.SRTSETTING_ID]}]:{key}:{value}:")
+ sql = ''' UPDATE orm_srtsetting
+ SET value=?
+ WHERE id=?'''
+ if is_dict:
+ SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+ else:
+ SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+ SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+ os.chdir(newdir)
+ # WARNING: we need a pause else the chdir will break
+ # susequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+ try:
+ os.makedirs(newdir)
+ except:
+ # dir already exists
+ pass
+ # WARNING: we need a pause else the makedirs could break
+ # susequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def execute_commmand(cmnd,path=''):
+ cwd = os.getcwd()
+ if path:
+ do_chdir(path)
+ result_returncode,result_stdout,result_stderr = execute_process(cmnd)
+ if 0 != result_returncode:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ print("ERROR({result_returncode}):{result_stderr}")
+ return(1)
+ if verbose:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ if path:
+ do_chdir(cwd)
+
+# Insure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+ if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+ if not os.path.isdir(repo_dir):
+ repo_parent_dir = os.path.dirname(repo_dir)
+ do_makedirs(repo_parent_dir)
+ print(f"= Clone '{REMOTE_URL}' ... =")
+ cmnd=['git','clone',repo_url]
+ execute_commmand(cmnd,repo_parent_dir)
+ else:
+ print(f"= Clone '{REMOTE_URL}' skip ... =")
+
+ if branch:
+ print("= Checkout branch '{BRANCH}' ... =")
+ cmnd=['git','-C',repo_dir,'checkout',branch]
+ execute_commmand(cmnd)
+
+ # Get the latest data with a safety pull
+ print("= Pull ... =")
+ cmnd=['git','-C',repo_dir,'pull']
+ execute_commmand(cmnd)
+
+# Compute a sortable CVE name
+def get_name_sort(cve_name):
+ try:
+ a = cve_name.split('-')
+ cve_name_sort = '%s-%s-%07d' % (a[0],a[1],int(a[2]))
+ except:
+ cve_name_sort = cve_name
+ return cve_name_sort
+
+def score2cve_score(score):
+ try:
+ return(float(score))
+ except:
+ return(0.0)
+
+def cve_score2severity(score):
+ score_num = score2cve_score(score)
+ if score_num < 2.5:
+ severity = "Low"
+ elif score_num < 5.0:
+ severity = "Medium"
+ elif score_num < 7.5:
+ severity = "High"
+ else :
+ severity = "Critical"
+ return(severity)
+
+def cve_scores2priority(score_v2,score_v3):
+ score_num = max(score2cve_score(score_v2),score2cve_score(score_v3))
+ if score_num < 2.5:
+ priority = ORM.PRIORITY_LOW
+ elif score_num < 5.0:
+ priority = ORM.PRIORITY_MEDIUM
+ elif score_num < 7.5:
+ priority = ORM.PRIORITY_HIGH
+ else :
+ priority = ORM.PRIORITY_CRITICAL
+ return(priority)
+
+def status2orm_ck(status):
+ if 'Unpatched' == status:
+ return (CK_UNPATCHED,ORM.STATUS_VULNERABLE)
+ elif 'Patched' == status:
+ return (CK_PATCHED,ORM.STATUS_NOT_VULNERABLE)
+ elif 'Ignored' == status:
+ return (CK_IGNORED,ORM.STATUS_NOT_VULNERABLE)
+ else:
+ return (CK_UNDEFINED,ORM.STATUS_NEW)
+
+def count_ck_records(cur):
+ def count_rows(table_name):
+ cur.execute(f"SELECT COUNT(*) FROM {table_name}")
+ return(cur.fetchone()[0])
+ Ck_Audit_cnt = count_rows('cve_checker_Ck_Audit')
+ Ck_Package_cnt = count_rows('cve_checker_Ck_Package')
+ Ck_Product_cnt = count_rows('cve_checker_Ck_Product')
+ Ck_Layer_cnt = count_rows('cve_checker_Ck_Layer')
+ CkPackage2Cve_cnt = count_rows('cve_checker_CkPackage2Cve')
+ CkPackage2CkProduct_cnt = count_rows('cve_checker_CkPackage2CkProduct')
+ return(Ck_Audit_cnt, Ck_Package_cnt, Ck_Product_cnt, Ck_Layer_cnt, CkPackage2Cve_cnt, CkPackage2CkProduct_cnt)
+
+#################################
+# drop_ck_tables
+#
+# Drop it all and start over
+#
+
+def drop_ck_tables():
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+
+ def drop(cur, table_name):
+ SQL_EXECUTE(cur, f"DROP TABLE {table_name}")
+ drop(cur, 'cve_checker_Ck_Audit')
+ drop(cur, 'cve_checker_Ck_Package')
+ drop(cur, 'cve_checker_Ck_Product')
+ drop(cur, 'cve_checker_Ck_Layer')
+ drop(cur, 'cve_checker_CkPackage2Cve')
+ drop(cur, 'cve_checker_CkPackage2CkProduct')
+
+ SQL_EXECUTE(cur, f"DELETE FROM django_migrations WHERE app = ?",("cve_checker",))
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# fix_orm_cvehistory
+#
+# Some orm_cvehistory records were created with the CVE name instead of
+# the CVE's record ID
+#
+
+def fix_orm_cvehistory():
+    """Fix-it: repair orm_cvehistory rows whose cve_id holds a CVE *name*.
+
+    Some history records were created with the CVE name ('CVE-...')
+    instead of the CVE record's numeric id. Rewrite each such row with the
+    id looked up from orm_cve. Commits only when the module-level
+    force_update flag is set (dry-run otherwise). Also appends each
+    affected CVE name to ck_add_cve.txt for later review.
+    """
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+
+    ck_add_cve = open("ck_add_cve.txt", "a")
+
+    # Bad rows are identifiable because a name starts with 'CVE'
+    sql = f"""SELECT id,cve_id FROM orm_cvehistory WHERE cve_id LIKE ? || '%'"""
+    cvehistories = SQL_EXECUTE(cur, sql, params=('CVE',)).fetchall()
+    for i,cvehistory in enumerate(cvehistories):
+        # Lightweight progress indicator every 20 rows
+        if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+
+        cve_name = cvehistory['cve_id']
+        ck_add_cve.write(cve_name+'\n')
+
+        # Map the name back to the CVE record's id
+        sql = f"""SELECT id,name FROM orm_cve WHERE name = ?"""
+        cve = SQL_EXECUTE(cur, sql, params=(cve_name,)).fetchone()
+        if cve:
+            sql = ''' UPDATE orm_cvehistory
+                SET cve_id=?
+                WHERE id=?'''
+            SQL_EXECUTE(cur, sql, (cve['id'],cvehistory['id']))
+        else:
+            print(f"ERROR: could not find CVE '{cve_name}'")
+
+    print(f"Bad cvehistories = {len(cvehistories)}")
+
+    # Dry-run unless --force was given
+    if force_update:
+        SQL_COMMIT(conn)
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+    ck_add_cve.close()
+
+#################################
+# fix_issue_to_audit
+#
+# The audit link for CkPackage2Cve records was added late. Catch up the existing records.
+#
+
+def fix_issue_to_audit():
+    """Fix-it: backfill ck_audit_id on CkPackage2Cve issue records.
+
+    The audit link was added to the schema late; walk every audit ->
+    package -> issue chain and stamp the owning audit id onto issues that
+    do not have one yet. Commits only when force_update is set.
+    """
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+
+    i = 0
+    issue_cnt = 0
+    issues_to_fix = 0
+    sql = f"""SELECT id FROM cve_checker_ck_audit"""
+    for ck_audit in SQL_EXECUTE(cur, sql, ).fetchall():
+        ck_audit_id = ck_audit['id']
+        print(f"\nAUDIT #{ck_audit_id} ...")
+
+        # All packages belonging to this audit
+        sql = f"""SELECT id FROM cve_checker_ck_package WHERE ck_audit_id = ?"""
+        for ck_package in SQL_EXECUTE(cur, sql, (ck_audit_id,)).fetchall():
+            ck_package_id = ck_package['id']
+
+            # Lightweight progress indicator every 20 packages
+            if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+            i += 1
+
+            # All issues for this package; skip ones already linked
+            sql = f"""SELECT id,ck_audit_id FROM cve_checker_ckpackage2cve WHERE ck_package_id = ?"""
+            for ckpackage2cve in SQL_EXECUTE(cur, sql, (ck_package_id,)).fetchall():
+                issue_cnt += 1
+
+                if ckpackage2cve['ck_audit_id']:
+                    continue
+
+                issues_to_fix += 1
+                sql = ''' UPDATE cve_checker_ckpackage2cve
+                    SET ck_audit_id=?
+                    WHERE id=?'''
+                SQL_EXECUTE(cur, sql, (ck_audit_id,ckpackage2cve['id'],))
+
+    # Dry-run unless --force was given
+    if force_update:
+        SQL_COMMIT(conn)
+
+    print(f"issue_cnt = {issue_cnt}")
+    print(f"issues_to_fix = {issues_to_fix}")
+
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# add_status_counts_to_packages
+#
+# Populate the added status counts to package records
+#
+
+def add_status_counts_to_packages():
+    """Fix-it: populate the (late-added) per-status count columns on each
+    CkPackage record by counting its CkPackage2Cve issues.
+
+    Commits only when force_update is set (dry-run otherwise); cmd_count
+    limits the number of packages processed for debugging.
+    """
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+
+    issue_cnt = 0
+    issues_to_fix = 0
+    sql = f"""SELECT id,name FROM cve_checker_ck_package"""
+    for i,ck_package in enumerate(SQL_EXECUTE(cur, sql, ).fetchall()):
+        # Debugging support
+        if cmd_count and (i > cmd_count):
+            break
+
+        # Count this package's issues in a given CK status
+        def get_status_count(status):
+            cur.execute(f"SELECT COUNT(*) FROM cve_checker_ckpackage2cve WHERE ck_package_id = ? AND ck_status = ?", (ck_package['id'],status,))
+            return(cur.fetchone()[0])
+
+        unpatched_cnt = get_status_count(CK_UNPATCHED)
+        ignored_cnt = get_status_count(CK_IGNORED)
+        patched_cnt = get_status_count(CK_PATCHED)
+
+        if verbose:
+            print(f"PACKAGE:{ck_package['name']}:{unpatched_cnt}:{ignored_cnt}:{patched_cnt}:")
+
+        sql = ''' UPDATE cve_checker_ck_package
+            SET unpatched_cnt=?, ignored_cnt=?, patched_cnt=?
+            WHERE id=?'''
+        SQL_EXECUTE(cur, sql, (unpatched_cnt,ignored_cnt,patched_cnt,ck_package['id'],))
+
+    # Dry-run unless --force was given
+    if force_update:
+        SQL_COMMIT(conn)
+
+    print("Done.")
+
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# main loop
+#
+
+def main(argv):
+    """Command-line entry point: parse arguments, publish flags to module
+    globals, and dispatch exactly one fix-it command.
+
+    NOTE(review): 'argv' is accepted but parse_args() reads sys.argv —
+    confirm whether parse_args(argv) was intended.
+    """
+    global verbose
+    global test
+    global force_update
+    global cmd_count
+    global cmd_skip
+
+    parser = argparse.ArgumentParser(description='srtool_cve_checker_util.py: Fix CVE Checker results import')
+
+    parser.add_argument('--drop-ck-tables', action='store_true', dest='drop_ck_tables', help='Drop Cve_Check tables and start again')
+    parser.add_argument('--fix-orm-cvehistory', action='store_true', dest='fix_orm_cvehistory', help='Fix cvehistory created with CVE name instead of id')
+    parser.add_argument('--fix-issue-to-audit', action='store_true', dest='fix_issue_to_audit', help='Populate added audit ID to issue records')
+    parser.add_argument('--add-counts-to-packages', action='store_true', dest='counts_to_packages', help='Populate status counts in package records')
+
+    # Debugging support
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
+    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+    args = parser.parse_args()
+
+    # Publish the flags to module globals used by the worker routines
+    ret = 0
+    verbose = args.verbose
+    test = args.test
+    force_update = args.force_update
+    if None != args.count:
+        cmd_count = int(args.count)
+    if None != args.skip:
+        cmd_skip = int(args.skip)
+
+    # Dispatch exactly one command per invocation
+    if args.drop_ck_tables:
+        drop_ck_tables()
+    elif args.fix_orm_cvehistory:
+        fix_orm_cvehistory()
+    elif args.fix_issue_to_audit:
+        fix_issue_to_audit()
+    elif args.counts_to_packages:
+        add_status_counts_to_packages()
+
+
+    else:
+        print("srtool_cve_checker_util.py:Command not found")
+        ret = 1
+
+    progress_done('Done')
+    return(ret)
+
+
+if __name__ == '__main__':
+    # Resolve the SRT install base (three levels above this script's path)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    exit( main(sys.argv[1:]) )
+
diff --git a/bin/cve_checker/srtool_env.sh b/bin/cve_checker/srtool_env.sh
new file mode 100755
index 00000000..e8cb3ffa
--- /dev/null
+++ b/bin/cve_checker/srtool_env.sh
@@ -0,0 +1,4 @@
+# Main application shell settings
+
+
+
diff --git a/bin/debian/srtool_debian.py b/bin/debian/srtool_debian.py
index a8d8b3d4..094deda6 100755
--- a/bin/debian/srtool_debian.py
+++ b/bin/debian/srtool_debian.py
@@ -203,7 +203,9 @@ def main(argv):
# parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/dev_tools/db_migration_config_sample.yml b/bin/dev_tools/db_migration_config_sample.yml
new file mode 100755
index 00000000..5d5c9e37
--- /dev/null
+++ b/bin/dev_tools/db_migration_config_sample.yml
@@ -0,0 +1,42 @@
+source:
+ name: sqlite_production
+ type: sqlite
+destination:
+ name: postgres_production
+ type: postgres
+
+sqlite_production:
+ path: srt.sqlite
+
+sqlite_development:
+ path: srt_dev.sqlite
+
+postgres_production:
+ host: localhost
+ user: admin
+ password: password
+ database: srtool
+ port: 5432
+
+postgres_development:
+ host: localhost
+ user: admin
+ password: password
+ database: srtool_dev
+ port: 5432
+
+mysql_production:
+ host: localhost
+ user: admin
+ password: password
+ db: srtool
+ port: 3306
+
+mysql_development:
+ host: localhost
+ user: admin
+ password: password
+ db: srtool_dev
+ port: 3306
+
+
diff --git a/bin/dev_tools/db_migrations.py b/bin/dev_tools/db_migrations.py
new file mode 100755
index 00000000..1fb65562
--- /dev/null
+++ b/bin/dev_tools/db_migrations.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+
+# Module Imports
+import sys
+import sqlite3
+try:
+ import MySQLdb
+except:
+ print("NOTE: 'MySQLdb' not currently installed")
+try:
+ import psycopg2
+except:
+ print("NOTE: 'psycopg2' not currently installed")
+##from tqdm import tqdm
+import time
+from progress.bar import Bar
+from pick import pick
+import yaml
+import os
+import argparse
+
+# Global variables
+verbose = False
+cmd_skip = 0
+cmd_count = 0
+
+def get_connection(config, db_type):
+ if db_type == "sqlite":
+ return sqlite3.connect(config['path'])
+ elif db_type == "mysql":
+ return MySQLdb.connect(**config)
+ else:
+ return psycopg2.connect(**config)
+
+def get_connections(config):
+ source_conn = get_connection(config[config['source']['name']], config['source']['type'])
+ dest_conn = get_connection(config[config['destination']['name']], config['destination']['type'])
+ return source_conn, dest_conn
+
+# Returns foreign key list for a given table
+def get_foreign_key_list(conn, table, source_type="sqlite"):
+ cur = conn.cursor()
+ if verbose: print("TABLE:%s" % table)
+ if source_type == "sqlite":
+ sql = f"""PRAGMA foreign_key_list({table});"""
+ else:
+ print(f"""ERROR: foreign key search for '{source_type}' databases not yet supported""")
+ exit(1)
+ cur.execute(sql)
+ foreign_keys = []
+ for foreign_key in cur:
+ #Example sqlite: (0, 0, 'users_srtuser', 'user_id', 'id', 'NO ACTION', 'NO ACTION', 'NONE')
+ if verbose: print(" KEY:%s" % str(foreign_key))
+ foreign_keys.append(foreign_key[2])
+ return foreign_keys
+
+# returns dictionary with keys as table names, and values as dictionary with column names and count from source and dest conn
+def get_db_info(conn, dest_conn=None, source_type="sqlite", mysql_db=None):
+    """Build a per-table schema/count map from the source connection.
+
+    Returns {table: {'columns': [...], 'types': [...], 'source_count': n,
+    'foreign_keys': [...]}} plus a 'dest_count' per table when dest_conn
+    is supplied. mysql_db names the schema for the mysql catalog query.
+    """
+    # Column-catalog queries per backend (sqlite / mysql / postgres)
+    sqlite_sql = """SELECT m.name as table_name, p.name as column_name, p.type as type FROM sqlite_master AS m JOIN pragma_table_info(m.name) AS p where table_name != 'sqlite_sequence' ORDER BY m.name, p.cid"""
+    mysql_sql = f"""SELECT * FROM information_schema.columns where table_name like '%%' and table_schema = '{mysql_db}' order by table_name,ordinal_position"""
+    pg_sql = """SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position"""
+    cur = conn.cursor()
+    sql = sqlite_sql if source_type == "sqlite" else mysql_sql if source_type == "mysql" else pg_sql
+    cur.execute(sql)
+    columns = cur.description
+    # Zip cursor description names with row values into per-row dicts
+    results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+    if source_type != "sqlite":
+        # Normalize information_schema column names to the sqlite shape
+        results = [{'column_name': col['COLUMN_NAME'], 'table_name': col['TABLE_NAME'], 'type': col['DATA_TYPE']}for col in results]
+    tables = {}
+    for i in results:
+        if i['table_name'] not in tables:
+            tables[i['table_name']] = {'columns': [], 'types': []}
+        tables[i['table_name']]['columns'].append(i['column_name'])
+        tables[i['table_name']]['types'].append(i['type'])
+    # Row counts and foreign keys from the source side
+    for table in tables:
+        cur = conn.cursor()
+        sql = f"SELECT count(*) from {table}"
+        cur.execute(sql)
+        results = cur.fetchone()[0]
+        tables[table]['source_count'] = results
+        tables[table]['foreign_keys'] = get_foreign_key_list(conn, table, source_type)
+    # Optional row counts from the destination side
+    if dest_conn is not None:
+        for table in tables:
+            cur = dest_conn.cursor()
+            sql = f"SELECT count(*) from {table}"
+            cur.execute(sql)
+            results = cur.fetchone()[0]
+            tables[table]['dest_count'] = results
+    return tables
+
+# Orders the table list from no foreign key dependencies to all satisfied
+def gen_table_order_sql(source_conn, tables):
+    """Topologically order table names so that every table appears after
+    the tables its foreign keys reference.
+
+    Repeatedly promotes tables whose remaining dependencies are all
+    resolved; exits with an error if a dependency cycle prevents
+    progress. django_migrations is deliberately excluded (never
+    overwritten by the transfer).
+    """
+    # Ordered table names: goal state
+    table_names_ordered = []
+    # As yet un-ordered table names: initial state
+    table_names_unordered = []
+    for table in tables:
+        # Never overwrite the migrations table
+        if 'django_migrations' == table:
+            continue
+        table_names_unordered.append([table,tables[table]['foreign_keys'].copy()])
+
+    if verbose: print("Len(table_names_unordered) = %d" % len(table_names_unordered))
+    interation = 0
+    while len(table_names_unordered):
+        change = False
+        interation += 1
+        # Walk backwards so deletions do not disturb the iteration
+        for i in range(len(table_names_unordered),0,-1):
+            i_index = i-1
+            if verbose: print("Pass %s:(%s)=%s" % (interation,i_index,str(table_names_unordered[i_index])))
+            table_name = table_names_unordered[i_index][0]
+            # NOTE: 'foreign_keys' aliases the entry's list, so the del
+            # below mutates the entry in place
+            foreign_keys = table_names_unordered[i_index][1]
+            # If newly satisfied dependency, remove dependency
+            if foreign_keys:
+                for j in range(len(foreign_keys),0,-1):
+                    j_index = j-1
+                    # Found in resolved ordered list
+                    if foreign_keys[j_index] in table_names_ordered:
+                        del table_names_unordered[i_index][1][j_index]
+                        change = True
+            # If no pending dependencies, promote
+            if not foreign_keys:
+                # No pending dependencies, so move
+                table_names_ordered.append(table_name)
+                # Remove old name from unordered list
+                del table_names_unordered[i_index]
+                change = True
+                if verbose: print(" * Promote:%s" % table_name)
+        # Sanity Check for unresolvable loops
+        if not change:
+            print("ERROR: Unresolvable table dependency loop")
+            for t in table_names_ordered:
+                print("  Resolved:%s" % t)
+            for t in table_names_unordered:
+                print("  Unresolved:%s" % str(t))
+            exit(1)
+    return table_names_ordered
+
+# Pre-clear the destination tables, in reverse dependency order (most to least)
+def clear_dest_tables(dest_conn, table_names_ordered, tables, destination_type):
+ bar = Bar('Pre-clearing destination tables', max=len(table_names_ordered))
+ success = True
+ cur = dest_conn.cursor()
+ for i in range(len(table_names_ordered),0,-1):
+ i_index = i-1
+ sql = "DELETE from %s;" % table_names_ordered[i_index]
+ try:
+ cur.execute(sql, None)
+ bar.next()
+ except Exception as e:
+ success = False
+ print(f"\n\nException:\n{e}\n\nSQL: {sql}\nparams: None")
+ break
+ bar.finish()
+ if success:
+ dest_conn.commit()
+
+# Transfer the tables, one by one
+def transfer_sql(source_conn, dest_conn, table_names_ordered, tables, source_type, destination_type):
+    """Copy every table's rows from source to destination, in dependency
+    order, committing in batches of 100.
+
+    Applies two data fixups in flight: invalid orm_cve acknowledge dates
+    become NULL, and integer booleans are converted to real booleans for
+    postgres destinations. On the first failed INSERT for a table the
+    error is printed and that table's pending batch is not committed.
+    """
+    source_cur = source_conn.cursor()
+    dest_cur = dest_conn.cursor()
+
+    print("Transfer_sql...")
+
+    # Progress granularity: per-row when verbose, else per-table
+    if verbose:
+        bar_max = 0
+        for table in tables:
+            bar_max += int(tables[table]['source_count'])
+    else:
+        bar_max = len(table_names_ordered)
+    bar = Bar('Transfering data by table', max=bar_max)
+
+    for table in table_names_ordered:
+        success = True
+        count = 0
+
+        # Identifier quoting differs: backticks except for postgres
+        q = '`' if destination_type != "postgres" else '"'
+        tables[table]['columns'] = [f'{q}{i}{q}' for i in tables[table]['columns']]
+        sql = f"""SELECT {','.join(tables[table]['columns'])} from {table};"""
+        source_cur.execute(sql)
+        for entry_count,entry in enumerate(source_cur):
+            # Development/debug support
+            if cmd_skip and (entry_count < cmd_skip): continue
+            if cmd_count and ((entry_count - cmd_skip) > cmd_count): break
+
+            entry = list(entry)
+            if table == "orm_cve":
+                # Second-to-last column is the acknowledge date
+                if entry[-2] == '' or (entry[-2] is not None and 'RESERVED' in entry[-2]):
+                    entry[-2] = None # set acknowledge date to None if currently value is invalid
+            if destination_type == "postgres":
+                for i in range(len(entry)): # handle lack of booleans in non postgres
+                    if "bool" in tables[table]['types'][i]:
+                        entry[i] = entry[i] != 0
+
+            sql = f"""INSERT INTO {table} ({','.join(tables[table]['columns'])}) VALUES ({','.join(['%s'] * len(entry))});"""
+            try:
+                dest_cur.execute(sql, entry)
+                if verbose: bar.next()
+                # Commit batches as we go
+                count += 1
+                if 0 == (count % 100):
+                    dest_conn.commit()
+            except Exception as e:
+                success = False
+                print(f"\n\nException:\n{e}\n\nSQL: {sql}\nparams: {entry}")
+                break
+
+        # Commit the balance of this table
+        if not verbose:
+            bar.next()
+        if success:
+            dest_conn.commit()
+    bar.finish()
+
+def run_tests(tables, source_conn, dest_conn):
+    """Post-migration sanity checks.
+
+    First compares per-table row counts between source and destination;
+    then, if orm_cve counts match, compares description/comment lengths
+    row-by-row (in batches of 1000, ordered by NAME) and reports the
+    first mismatch. Closes both connections before returning.
+    """
+    print('running tests!')
+    matching_counts = 0
+    mismatched_tables = []
+    for table in tables:
+        table_info = tables[table]
+        if table_info['source_count'] == table_info['dest_count']:
+            matching_counts += 1
+        else:
+            mismatched_tables.append(table)
+
+    print(f'Matching Tables Counts between source and destination out of total tables:{matching_counts}/{len(tables)}')
+    print(f'Mismatched tables: {mismatched_tables}')
+    source_count = tables['orm_cve']['source_count']
+    dest_count = tables['orm_cve']['dest_count']
+    if source_count != dest_count:
+        # Lengths comparison is meaningless when the row sets differ
+        print('orm_cve count does not match between source and destination, not checkin description lengths')
+        source_conn.close()
+        dest_conn.close()
+        return
+
+    source_curr = source_conn.cursor()
+    dest_curr = dest_conn.cursor()
+    # Same query runs on both sides; only the OFFSET varies per batch
+    query = 'select length(description) as dl, length(comments) as cl from orm_cve order by NAME LIMIT 1000 OFFSET '
+    mismatch = False
+    bars = source_count // 1000 + 1
+    print(f"Numbers of rows in orm_cve: {source_count}")
+    bar = Bar('Checking description lengths in batches of 1000', max=bars)
+    for i in range(bars):
+        offset_query = f'{query}{i * 1000}'
+        source_curr.execute(offset_query)
+        dest_curr.execute(offset_query)
+        columns = source_curr.description
+
+        # Materialize both batches as dicts keyed by the aliases dl/cl
+        source = [{columns[index][0]:column for index, column in enumerate(value)} for value in source_curr.fetchall()]
+        dest = [{columns[index][0]:column for index, column in enumerate(value)} for value in dest_curr.fetchall()]
+        mismatch = False
+        for i in range(len(source)):
+            if source[i]['dl'] != dest[i]['dl'] or source[i]['cl'] != dest[i]['cl']:
+                print(f'source:\n{source[i]}\n\ndestination: {dest[i]}\n\n')
+                mismatch = True
+                break
+        bar.next()
+        if mismatch:
+            break
+    bar.finish()
+    if mismatch:
+        print("Error: mismatched length of description in orm_cve")
+    else:
+        print("Success: Description and comment length matches for every row in orm_cve")
+    source_conn.close()
+    dest_conn.close()
+
+def repair_sequences_postgres(tables, dest_conn):
+ bar = Bar('Repairing table sequences', max=len(tables))
+ for table in tables:
+ id = 'id'
+ if table in ['django_session']:
+ bar.next()
+ continue
+ sql = f"SELECT setval(pg_get_serial_sequence('{table}', '{id}'), (SELECT MAX({id}) FROM {table})+1);"
+ cur = dest_conn.cursor()
+ try:
+ cur.execute(sql)
+ bar.next()
+ except Exception as e:
+ print(f"\n\nException:\n{e}\n\nSQL: {sql}\n")
+ break
+ bar.finish()
+
+def main(config, test=False, repair=False, show_order=False):
+    """Drive the migration: inspect schemas, optionally repair sequences
+    or run verification tests, otherwise clear and transfer the tables.
+
+    'repair' and 'test' are early-exit modes; 'show_order' prints the
+    computed dependency order without transferring anything.
+    """
+    source_conn, dest_conn = get_connections(config)
+    mysql_db_name = config[config['source']['name']]['db'] if config['source']['type'] == "mysql" else None
+    tables = get_db_info(source_conn, dest_conn, config['source']['type'], mysql_db_name)
+    if repair:
+        repair_sequences_postgres(tables, dest_conn)
+        source_conn.close()
+        dest_conn.close()
+        return
+    if test:
+        run_tests(tables, source_conn, dest_conn)
+        return
+    # Interactive selection of the table subset to migrate
+    _, select_table = pick(('all tables', 'select tables'), "Would you like to copy all tables, or specific tables for transfer?")
+    if select_table: # filter tables
+        selection = pick(list(tables.items()), f"Please Select which of {len(tables)} tables to copy (use space key to select).\nFormat: Table Name(Current Source Count:Current Destination Count)", multiselect=True, min_selection_count=1, options_map_func= lambda option: f"{option[0]}({option[1]['source_count']}:{option[1]['dest_count']})")
+        selection = [value[0] for value in selection ]
+        tables = {item[0]:item[1] for item in selection}
+
+    # Order the table names by foreign key dependecies
+    table_names_ordered = gen_table_order_sql(source_conn, tables)
+    if show_order:
+        print("Ordered Data Tables: %s" % len(table_names_ordered))
+        for i,table_name in enumerate(table_names_ordered):
+            print("%2d) %-30s %s" % (i+1, table_name, str(tables[table_name]['foreign_keys'])))
+        return
+    # Pre-clear the destination tables to remove obsolete data
+    clear_dest_tables(dest_conn, table_names_ordered, tables, config['destination']['type'])
+    # Transfer the tables, one by one
+    transfer_sql(source_conn, dest_conn, table_names_ordered, tables, config['source']['type'], config['destination']['type'])
+    # Fix up the table sequences
+    repair_sequences_postgres(tables, dest_conn)
+    source_conn.close()
+    dest_conn.close()
+
+if __name__ == "__main__":
+ my_parser = argparse.ArgumentParser(description='DB Migration Script (Postgres/Sqlite/MySql)')
+ my_parser.add_argument('--path',default="db_migration_config.yml", type=str,help='the path to configuration file, default is ./db_migration_config.yml')
+ my_parser.add_argument('--test',default=False, action="store_true", help='Whether to test migration')
+ my_parser.add_argument('--repair', default=False, action="store_true", help="Whether to repair postgres sequences if destination is postgres database")
+ my_parser.add_argument('--show-order', '-o', default=False, action="store_true", dest="show_order", help="Show tables in least to most dependency order")
+ my_parser.add_argument('--verbose', '-v', default=False, action="store_true", dest="verbose", help="Verbose information")
+ my_parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ my_parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ args = my_parser.parse_args()
+
+ verbose = args.verbose
+ if args.skip:
+ cmd_skip = int(args.skip)
+ if args.count:
+ cmd_count = int(args.count)
+
+ with open(args.path, "r") as ymlfile:
+ config = yaml.safe_load(ymlfile)
+ main(config, test=args.test, repair=args.repair, show_order=args.show_order)
+
diff --git a/bin/dev_tools/dump_jason.py b/bin/dev_tools/dump_jason.py
new file mode 100755
index 00000000..979dce67
--- /dev/null
+++ b/bin/dev_tools/dump_jason.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+
+# Pretty-print the JSON file named on the command line
+with open(sys.argv[1], 'r') as handle:
+    parsed = json.load(handle)
+    print(json.dumps(parsed, indent=4, sort_keys=True)) \ No newline at end of file
diff --git a/bin/dev_tools/dump_jason.sh b/bin/dev_tools/dump_jason.sh
new file mode 100755
index 00000000..5e57b088
--- /dev/null
+++ b/bin/dev_tools/dump_jason.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+
+# NOTE(review): despite the .sh extension this is a Python script, and
+# the input path is hard-coded; dump_jason.py is the argv-driven variant
+with open('filename.txt', 'r') as handle:
+    parsed = json.load(handle)
+    print(json.dumps(parsed, indent=4, sort_keys=True)) \ No newline at end of file
diff --git a/bin/dev_tools/history.py b/bin/dev_tools/history.py
new file mode 100755
index 00000000..654555ff
--- /dev/null
+++ b/bin/dev_tools/history.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+import argparse
+from datetime import datetime, date, timedelta
+from common.srtool_sql import *
+import re
+import subprocess
+
+# load the srt.sqlite schema indexes
+if os.path.isdir('bin'):
+ dir_path = 'bin'
+else:
+ dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+
+# Setup:
+verbose = False
+is_trial = False
+
+#######################################################################
+# Helper Routines
+# stamp = ['d|W',directory,timestamp]
+#
+
+def backup_list():
+ def sort_key(elem):
+ return elem[0]+elem[2]
+
+ stamps = []
+ for directory in os.listdir(os.path.join(srtool_basepath, 'backups')):
+ prefix = 'W' if 10 < len(directory) else 'd'
+ directory = os.path.join(srtool_basepath, 'backups', directory)
+ with open(os.path.join(directory,'timestamp.txt'), 'r') as file:
+ line = file.read().strip()
+ #print("DIR=%s,%s" % (directory,line))
+ stamps.append([prefix, directory, line])
+
+ # Add the current database (now)
+ prefix = 'n'
+ directory = srtool_basepath
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ stamps.append([prefix, directory, stamp_str])
+
+ # Sort my time and return
+ stamps.sort(key=sort_key)
+ return stamps
+
+def run_command(cmnd):
+ print("Command:%s" % cmnd)
+ if not is_trial:
+ p = subprocess.Popen(cmnd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ for line in p.stdout.readlines():
+ if 0 < line.find(b'\r'):
+ continue
+ print(line)
+ retval = p.wait()
+
+#######################################################################
+# init_timestamps
+#
+
+def init_timestamps():
+
+ backup_dir = os.path.join(srtool_basepath, 'backups')
+ for directory in os.listdir(backup_dir):
+ directory = os.path.join(backup_dir, directory)
+ statinfo = os.stat(os.path.join(directory, 'srt.sqlite'))
+ mod_timestamp = datetime.fromtimestamp(statinfo.st_mtime)
+ stamp_str = mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')
+ with open(os.path.join(directory,'timestamp.txt'), 'w') as file:
+ file.write('%s\n' % stamp_str)
+ print("DIR=%s,%s" % (directory,mod_timestamp.strftime('%Y-%m-%d %H:%M:%S | %A, %B %d %Y')))
+
+
+#######################################################################
+# list_history
+#
+
+def list_history():
+ stamps = backup_list()
+ for stamp in stamps:
+ print("DIR=%s,%-14s,%s" % (stamp[0],os.path.basename(stamp[1]),stamp[2]))
+
+#######################################################################
+# trace
+#
+
+def trace(item):
+ stamps = backup_list()
+ for stamp in stamps:
+ srtDbName = os.path.join(stamp[1],'srt.sqlite')
+ #print("db=%s" % srtDbName)
+
+ stamp_date = re.sub(' .*','',stamp[2])
+ stamp_day = re.sub('.*\| ','',stamp[2])
+ stamp_day = re.sub(',.*','',stamp_day)
+ stamp_text = '%s,%-9s %8s' % (stamp[0],stamp_day,stamp_date)
+
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+
+ if item.startswith('CVE-'):
+ cur.execute('SELECT * FROM orm_cve WHERE name = "%s"' % item)
+ for cve in cur:
+ status = ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)
+ print("%s] %-16s, %s, %s %s , %s %s " % (stamp_text, cve[ORM.CVE_NAME], status, cve[ORM.CVE_CVSSV3_BASESCORE],cve[ORM.CVE_CVSSV3_BASESEVERITY],cve[ORM.CVE_CVSSV2_BASESCORE],cve[ORM.CVE_CVSSV2_SEVERITY]))
+
+ SQL_CLOSE_CONN(conn)
+
+#######################################################################
+# replay_nist
+#
+
+def replay_nist():
+ stamps = backup_list()
+
+ # Read base database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ index = input("Which backup? ")
+ if not index:
+ return
+ try:
+ index = int(index)
+ except:
+ print("Not a number '%s'" % index)
+ return
+ if (index>=1) and (index<len(stamps)):
+ print("You selected base:%s " % stamps[index-1][2])
+ else:
+ print("Out of range '%d'" % index)
+ return
+
+ # Read replay database
+ for i,stamp in enumerate(stamps):
+ print("%2d: [%s]%s" % (i+1,stamp[0],stamp[2]))
+ replay_index = input("Which backup? ")
+ if not replay_index:
+ return
+ try:
+ replay_index = int(replay_index)
+ except:
+ print("Not a number '%s'" % replay_index)
+ return
+ if (replay_index>=1) and (replay_index<len(stamps)):
+ print("You selected replay:%s " % stamps[replay_index-1][2])
+ else:
+ print("Out of range '%d'" % replay_index)
+ return
+
+ # Stop the SRTool server
+ cmnd = './bin/srt_stop.sh'
+ run_command(cmnd)
+
+ # Create restore backup
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if not os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (srtDbName,restore_db)
+ run_command(cmnd)
+
+ # Copy in the replay database
+ cmnd = 'cp %s/srt.sqlite .' % stamps[index-1][1]
+ run_command(cmnd)
+
+ # Replay the NIST data
+# cmnd = "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=%s/data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta --force --force-cache" % stamps[replay_index-1][1]
+ cmnd = "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=%s/data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta --force --force-cache" % stamps[replay_index-1][1]
+
+ run_command(cmnd)
+
+ # Restart the SRTool server
+ cmnd = './bin/srt_start.sh'
+ run_command(cmnd)
+
+#######################################################################
+# restore
+#
+
+def restore():
+ srtDbName = os.path.join(srtool_basepath, 'srt.sqlite')
+ restore_db = os.path.join(srtool_basepath, 'srt.sqlite.restore')
+ if os.path.isfile(restore_db):
+ cmnd = 'cp %s %s' % (restore_db,srtDbName)
+ run_command(cmnd)
+ else:
+ print("No restore database found")
+
+#######################################################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global is_trial
+
+ parser = argparse.ArgumentParser(description='history.py: manage the history database')
+ parser.add_argument('--init-timestamps', '-I', action='store_const', const='init_timestamps', dest='command', help='Initialize the backup directory timestamps')
+ parser.add_argument('--list-history', '-l', action='store_const', const='list_history', dest='command', help='List the backup directory timestamps')
+ parser.add_argument('--trace', '-t', dest='trace', help='Trace an item')
+
+ parser.add_argument('--replay-nist', '-r', action='store_const', const='replay_nist', dest='command', help='Replay NIST update')
+ parser.add_argument('--restore', '-R', action='store_const', const='restore', dest='command', help='Restore database')
+
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
+    parser.add_argument('--trial', '-T', action='store_true', dest='is_trial', help='Trial run (print commands without executing them)')
+
+ args = parser.parse_args()
+ verbose = args.verbose
+ is_trial = args.is_trial
+
+ if 'init_timestamps' == args.command:
+ init_timestamps()
+ elif 'list_history' == args.command:
+ list_history()
+ elif args.trace:
+ trace(args.trace)
+ elif 'replay_nist' == args.command:
+ replay_nist()
+ elif 'restore' == args.command:
+ restore()
+
+
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.abspath(sys.argv[0]))
+ main(sys.argv[1:])
diff --git a/bin/dev_tools/lssrt.sh b/bin/dev_tools/lssrt.sh
index f9404d06..5a8259ee 100755
--- a/bin/dev_tools/lssrt.sh
+++ b/bin/dev_tools/lssrt.sh
@@ -4,7 +4,7 @@
declare -A srts
IFS=$'\n' # make newlines the only separator
-for p in $(ps -e -o pid,cmd | grep "manage.py runserver" | grep 'srt' ) ; do
+for p in $(ps -e -o pid,cmd | grep "\(manage.py runserver\|srtool_update.py\)" | grep 'srt' ) ; do
if [ "${p}" != "${p/grep/}" ] ; then
continue
fi
@@ -14,4 +14,3 @@ for p in $(ps -e -o pid,cmd | grep "manage.py runserver" | grep 'srt' ) ; do
pid=${p%% *} # strip python path
echo "[$pid]($d)"
done
-
diff --git a/bin/dev_tools/master_app.sh b/bin/dev_tools/master_app.sh
index bf11d7f1..6abbdfd4 100755
--- a/bin/dev_tools/master_app.sh
+++ b/bin/dev_tools/master_app.sh
@@ -138,14 +138,19 @@ for p in $(find bin -name srtool_env.sh -exec grep -l "SRT_MAIN_APP" {} \;) ; do
else
echo "DISABLE_MASTER:$p"
mv -f $ds_dir/datasource.json $ds_dir/datasource.json_sample 2> /dev/null
- # Remove old app's datasources
- prev_app=$(basename $ds_dir)
- bin/common/srtool_utils.py --remove-app-sources $prev_app
+ # Remove old app's datasources, if database
+ if [ -f "bin/common/srt_schema.py" ] ; then
+ prev_app=$(basename $ds_dir)
+ bin/common/srtool_utils.py --remove-app-sources $prev_app
+ fi
fi
done
if [ "yp" = "$master_app" ] ; then
echo "SET_MASTER:./bin/$master_app"
else
- echo bin/common/srtool_utils.py --remove-app-sources yp
+ # Remove YP's datasources, if database
+ if [ -f "bin/common/srt_schema.py" ] ; then
+ echo bin/common/srtool_utils.py --remove-app-sources yp
+ fi
fi
diff --git a/bin/dev_tools/migrate.sh b/bin/dev_tools/migrate.sh
new file mode 100755
index 00000000..49e923c0
--- /dev/null
+++ b/bin/dev_tools/migrate.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# Update the model migration file(s)
+# Run generally, or select a specific model.py
+./bin/srt manage makemigrations $1
diff --git a/bin/dev_tools/nohup_start.sh b/bin/dev_tools/nohup_start.sh
new file mode 100755
index 00000000..71a77232
--- /dev/null
+++ b/bin/dev_tools/nohup_start.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# SRTool helper script to start the instance with nohup
+
+if [ -z "$SRT_PORT" ] ; then
+ SRT_PORT=9000
+fi
+
+# Accept parameters (like 'noautoupdate')
+mv -f nohup.out nohup_prev.out
+nohup ./bin/srt start webport=0.0.0.0:$SRT_PORT $*
+cat nohup.out
diff --git a/bin/dev_tools/prepare_environment.sh b/bin/dev_tools/prepare_environment.sh
new file mode 100755
index 00000000..5536c3e1
--- /dev/null
+++ b/bin/dev_tools/prepare_environment.sh
@@ -0,0 +1,64 @@
+#!/bin/echo ERROR: This script needs to be sourced. Please run as .
+
+#
+# Prepare virtual environment for SRTool
+#
+# $ . prepare_environment.sh
+#
+
+# Prepare local venv for Python
+echo "* Python venv ..."
+if [ ! -d ".venv" ] ; then
+ # sudo apt install python3.10-venv
+ python3 -m venv .venv
+fi
+
+# Start Venv
+source .venv/bin/activate
+
+# Source the standard SRT environment settings
+echo "* srt_env.sh ..."
+if [ ! -f "./srt_env.sh" ] ; then
+ cp bin/dev_tools/* .
+fi
+
+# Set the fundamental environment settings
+# Specify environment (even if cwd is in the "lib" directory due to Apache)
+echo "* SRTool basic environment ..."
+export SRT_BASE_DIR="$(pwd)"
+export SRT_BASE_DIR="$(echo $SRT_BASE_DIR | sed -e 's|lib/||')"
+export SRT_MODE="Studio_Prodution"
+# Python 3 support
+if [ -z "$TZ" ] ; then
+ export TZ="America/Los_Angeles"
+fi
+
+# HTTPS support (Apache)
+#export SRT_CSRF_TRUSTED_ORIGINS="https://hostname.company.com"
+
+# Quick development log
+export SRTDBG_LOG="$SRT_BASE_DIR/srt_dbg.log"
+
+# Override the standard settings
+echo "* SRTool custom development environment ..."
+export SRT_PORT="9000"
+export SRT_SKIP_AUTOUPDATE=0
+export SRTDBG_MINIMAL_DB=0
+export SRTDBG_SKIP_DEFECT_IMPORT=1
+export SRTDBG_SKIP_CVE_IMPORT=0
+export SRTDBG_SKIP_CPE_IMPORT=1
+
+# Email credentials
+export SRT_EMAIL_SMTP="smtp.org.com"
+export SRT_EMAIL_PASSWD="temp_password"
+export SRT_EMAIL_USER="temp_user"
+export SRT_EMAIL_FROM="temp_user@org.com"
+
+# Defect (e.g. Jira) credentials
+export SRT_DEFECT_PASSWD="temp_password"
+export SRT_DEFECT_USER="temp_user"
+
+echo "* SRTool Python requirements ..."
+pip3 install -r bin/srtool-requirements.txt
+
+echo "* SRTool running on port $SRT_PORT"
diff --git a/bin/dev_tools/quick_find.sh b/bin/dev_tools/quick_find.sh
index cf200106..c900f2ec 100755
--- a/bin/dev_tools/quick_find.sh
+++ b/bin/dev_tools/quick_find.sh
@@ -1,7 +1,24 @@
#!/bin/bash
+#
+# Helper script to quickly find strings in the source
-# SRTool helper script to quickly find strings in the source
+show="-l"
+if [ "show" = "$1" ] ; then
+ show=""
+ shift
+fi
-find bin -exec grep -l "$1" {} \; 2> /dev/null
-find lib -exec grep -l "$1" {} \; 2> /dev/null
+dir=''
+if [ -d "$1" ] ; then
+ dir="$1"
+ shift
+fi
+if [ -z "$dir" ] ; then
+ echo "find bin -exec grep $show \"$1\" {} \; 2> /dev/null | grep -v __pycache__"
+ find bin -exec grep $show "$1" {} \; 2> /dev/null | grep -v __pycache__
+ find lib -exec grep $show "$1" {} \; 2> /dev/null | grep -v __pycache__
+else
+ echo "find $dir -exec grep $show \"$1\" {} \; 2> /dev/null| grep -v __pycache__"
+ find $dir -exec grep $show "$1" {} \; 2> /dev/null| grep -v __pycache__
+fi
diff --git a/bin/dev_tools/restart.sh b/bin/dev_tools/restart.sh
index f8a00a30..fc7cdbac 100755
--- a/bin/dev_tools/restart.sh
+++ b/bin/dev_tools/restart.sh
@@ -1,3 +1,3 @@
-./stop.sh
-./start.sh
+./stop.sh $*
+./start.sh $*
diff --git a/bin/dev_tools/srt_env.sh b/bin/dev_tools/srt_env.sh
index f36a2329..07bfdf6e 100755
--- a/bin/dev_tools/srt_env.sh
+++ b/bin/dev_tools/srt_env.sh
@@ -7,50 +7,55 @@ if [ "$0" = "$BASH_SOURCE" ]; then
fi
mode="$1"
-echo "mode=|$mode|"
-if [ "debug" != "$mode" ] ; then
- # Standard Environment
- echo "=== SRTool STANDARD MODE ==="
- export SRT_PORT=9000
- # Quick development log
- export SRTDBG_LOG=`pwd`/srt_dbg.log
- # Development/debugging flags
- export SRTDBG_MINIMAL_DB=0
- export SRTDBG_SKIP_DEFECT_IMPORT=0
- export SRTDBG_SKIP_CVE_IMPORT=0
- export SRTDBG_SKIP_CPE_IMPORT=0
- export SRT_SKIP_AUTOUPDATE=0
- # Email credentials
- export SRT_EMAIL_SMTP=smtp.org.com
- export SRT_EMAIL_PASSWD=temp_password
- export SRT_EMAIL_USER=temp_user
- export SRT_EMAIL_FROM=temp_user@org.com
- # Defect (e.g. Jira) credentials
- export SRT_DEFECT_PASSWD=temp_password
- export SRT_DEFECT_USER=temp_user
-else
- # Minimal Development Environment
- # Standard Environment
- echo "=== SRTool DEBUG MODE ==="
+# Standard Base Environment
+if [ -z "$mode" ] ; then
+ msg="=== SRTool STANDARD MODE ==="
+fi
+export SRT_PORT=9000
+export SRT_MODE=""
+# Quick development log
+export SRTDBG_LOG=`pwd`/srt_dbg.log
+# Development/debugging flags
+export SRTDBG_MINIMAL_DB=0
+export SRTDBG_SKIP_DEFECT_IMPORT=0
+export SRTDBG_SKIP_CVE_IMPORT=0
+export SRTDBG_SKIP_CPE_IMPORT=0
+export SRT_SKIP_AUTOUPDATE=0
+# Email credentials
+export SRT_EMAIL_SMTP=smtp.org.com
+export SRT_EMAIL_PASSWD=temp_password
+export SRT_EMAIL_USER=temp_user
+export SRT_EMAIL_FROM=temp_user@org.com
+# Defect (e.g. Jira) credentials
+export SRT_DEFECT_PASSWD=temp_password
+export SRT_DEFECT_USER=temp_user
+
+if [ "secure" == "$mode" ] ; then
+ msg="=== SRTool SECURE MODE ==="
+ # Lock out 'other' permissions
+ umask 007
+ chmod -R o-rwx .
+ export SRT_MODE="SECURE"
+elif [ "devel" == "$mode" ] ; then
+ msg="=== SRTool DEVELOPMENT MODE ==="
+ # Alternate port from main
+ export SRT_PORT=9020
+ export SRT_MODE="DEVEL"
+ # Disable defect system queries (except on demand)
+ export SRTDBG_SKIP_DEFECT_IMPORT=1
+elif [ "debug" == "$mode" ] ; then
+ msg="=== SRTool DEBUG MODE ==="
+ # Minimal debug bootstrap environment
+ # with development and debugging flags
export SRT_PORT=9990
- # Quick development log
- export SRTDBG_LOG=`pwd`/srt_dbg.log
- # Development/debugging flags
+ export SRT_MODE="DEBUG"
export SRTDBG_MINIMAL_DB=1
export SRTDBG_SKIP_DEFECT_IMPORT=1
export SRTDBG_SKIP_CVE_IMPORT=0
export SRTDBG_SKIP_CPE_IMPORT=0
export SRT_SKIP_AUTOUPDATE=1
- # Email credentials
- export SRT_EMAIL_SMTP=smtp.org.com
- export SRT_EMAIL_PASSWD=temp_password
- export SRT_EMAIL_USER=temp_user
- export SRT_EMAIL_FROM=temp_user@org.com
- # Defect (e.g. Jira) credentials
- export SRT_DEFECT_PASSWD=temp_password
- export SRT_DEFECT_USER=temp_user
fi
+echo $msg
echo "SRT_PORT=$SRT_PORT ; Change it with: export SRT_PORT=9123"
-
diff --git a/bin/dev_tools/start.sh b/bin/dev_tools/start.sh
index 6d6515ba..f7997ea7 100755
--- a/bin/dev_tools/start.sh
+++ b/bin/dev_tools/start.sh
@@ -6,5 +6,8 @@ if [ -z "$SRT_PORT" ] ; then
SRT_PORT=9000
fi
-./bin/srt start webport=0.0.0.0:$SRT_PORT
+# Accept parameters (like 'noautoupdate')
+./bin/srt start webport=0.0.0.0:$SRT_PORT $*
+# Show external access link
+echo "External access: $(hostname -i):$SRT_PORT"
diff --git a/bin/dev_tools/stop.sh b/bin/dev_tools/stop.sh
index 37722fed..cf5a62c3 100755
--- a/bin/dev_tools/stop.sh
+++ b/bin/dev_tools/stop.sh
@@ -2,5 +2,4 @@
# SRTool helper script to stop the instance
-./bin/srt stop
-
+./bin/srt stop $*
diff --git a/bin/dev_tools/tail.sh b/bin/dev_tools/tail.sh
index 4c44cd47..8b18325a 100755
--- a/bin/dev_tools/tail.sh
+++ b/bin/dev_tools/tail.sh
@@ -2,6 +2,12 @@
# SRTool helper script to quickly dump the log files
+tag=0
+if [ "tag" == "$1" ] ; then
+ tag=1
+ shift
+fi
+
CONTEXT=$1
if [ -n "$CONTEXT" ] ; then
CONTEXT="-n $CONTEXT"
@@ -13,6 +19,12 @@ if [ -z "$SRTDBG_LOG" ] ; then
SRTDBG_LOG=/tmp/srt_dbg.log
fi
+if [ 1 -eq $tag ] ; then
+ echo "===TAG `date` TAG===" >> srt_web.log
+ echo "===TAG `date` TAG===" >> $SRTDBG_ERR_LOG
+ echo "===TAG `date` TAG===" >> $SRTDBG_LOG
+fi
+
echo "--- srt_web.log --------------------"
tail srt_web.log $CONTEXT
echo "--- $SRTDBG_ERR_LOG --------------------"
diff --git a/bin/dev_tools/update_status.sh b/bin/dev_tools/update_status.sh
new file mode 100755
index 00000000..5cca0769
--- /dev/null
+++ b/bin/dev_tools/update_status.sh
@@ -0,0 +1,48 @@
+#!/bin/sh
+
+#
+# Helper routine to see if any active update commands are executing
+# in addition to showing that the background updater is running.
+#
+# Sample result:
+# $ ./update_status.sh
+# 18149 python3 /opt/srt/bin/common/srtool_update.py --cron-start
+# Update:2019-03-16 12:29:21,bin/common/srtool_common.py --score-new-cves NEW --count=100
+# Done:2019-03-16 12:29:49,bin/common/srtool_common.py --score-new-cves NEW --count=100
+#
+# An "Update" without a "Done" is an running tack
+#
+
+# Test if the background updater is running
+if [ -f .srtupdate.pid ] ; then
+ pid=`cat .srtupdate.pid`
+ updater=`ps -e -o pid,cmd | grep $pid | grep -v grep | grep cron`
+else
+ echo "No updater pid file found"
+ updater=""
+fi
+if [ -z "$updater" ] ; then
+ echo "!!! WARNING: UPDATER IS NOT RUNNING !!!"
+ cat .srtupdate.task
+ exit 1
+else
+ echo "UPDATER:$updater"
+ echo ""
+fi
+
+# Display status log
+tail -n 20 "update_logs/update_status.log"
+echo ""
+
+# Test if there is an open update in progress
+cat .srtupdate.task
+is_start=`grep "^Update" .srtupdate.task | grep -v "<cron_start>"`
+is_stop=`grep "^Done" .srtupdate.task`
+if [ -z "$is_stop" ] ; then
+ echo "!!! UPDATE JOB RUNNING !!!"
+ exit 1
+else
+ echo "UPDATE PAUSED BETWEEN JOBS."
+ exit 0
+fi
+
diff --git a/bin/mitre/datasource_2010.json b/bin/mitre/datasource_2010.json
new file mode 100755
index 00000000..89d82041
--- /dev/null
+++ b/bin/mitre/datasource_2010.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2010",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2010",
+ "cve_filter" : "CVE-2010",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2010.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2011.json b/bin/mitre/datasource_2011.json
new file mode 100755
index 00000000..14a41e0b
--- /dev/null
+++ b/bin/mitre/datasource_2011.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2011",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2011",
+ "cve_filter" : "CVE-2011",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2011.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2012.json b/bin/mitre/datasource_2012.json
new file mode 100755
index 00000000..de42723f
--- /dev/null
+++ b/bin/mitre/datasource_2012.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2012",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2012",
+ "cve_filter" : "CVE-2012",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2012.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2013.json b/bin/mitre/datasource_2013.json
new file mode 100755
index 00000000..1995fa6d
--- /dev/null
+++ b/bin/mitre/datasource_2013.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2013",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2013",
+ "cve_filter" : "CVE-2013",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2013.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2014.json b/bin/mitre/datasource_2014.json
new file mode 100755
index 00000000..d2cba168
--- /dev/null
+++ b/bin/mitre/datasource_2014.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2014",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2014",
+ "cve_filter" : "CVE-2014",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2014.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2015.json b/bin/mitre/datasource_2015.json
index 0ce89f12..5e5a24c6 100755
--- a/bin/mitre/datasource_2015.json
+++ b/bin/mitre/datasource_2015.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2015.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2016.json b/bin/mitre/datasource_2016.json
index 36ca814f..c2bc1906 100755
--- a/bin/mitre/datasource_2016.json
+++ b/bin/mitre/datasource_2016.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2016.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2017.json b/bin/mitre/datasource_2017.json
index 2b326bf4..f3cfdf54 100755
--- a/bin/mitre/datasource_2017.json
+++ b/bin/mitre/datasource_2017.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2017.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2018.json b/bin/mitre/datasource_2018.json
index ebb6eff2..d8b28c0a 100755
--- a/bin/mitre/datasource_2018.json
+++ b/bin/mitre/datasource_2018.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2018.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2019.json b/bin/mitre/datasource_2019.json
index 7113aa95..e07cf377 100755
--- a/bin/mitre/datasource_2019.json
+++ b/bin/mitre/datasource_2019.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/mitre/srtool_mitre.py -I --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
- "update" : "bin/mitre/srtool_mitre.py -u --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2019.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2020.json b/bin/mitre/datasource_2020.json
new file mode 100755
index 00000000..f26f3b2f
--- /dev/null
+++ b/bin/mitre/datasource_2020.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2020",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2020",
+ "cve_filter" : "CVE-2020",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2020.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2021.json b/bin/mitre/datasource_2021.json
new file mode 100755
index 00000000..72379b01
--- /dev/null
+++ b/bin/mitre/datasource_2021.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2021",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2021",
+ "cve_filter" : "CVE-2021",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2021' --file=data/allitems-cvrf-year-2021.xml --url-file=allitems-cvrf-year-2021.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2021' --file=data/allitems-cvrf-year-2021.xml --url-file=allitems-cvrf-year-2021.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2021.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2022.json b/bin/mitre/datasource_2022.json
new file mode 100755
index 00000000..608acecc
--- /dev/null
+++ b/bin/mitre/datasource_2022.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2022",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2022",
+ "cve_filter" : "CVE-2022",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2022' --file=data/allitems-cvrf-year-2022.xml --url-file=allitems-cvrf-year-2022.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2022' --file=data/allitems-cvrf-year-2022.xml --url-file=allitems-cvrf-year-2022.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2022.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2023.json b/bin/mitre/datasource_2023.json
new file mode 100755
index 00000000..ca3cdaba
--- /dev/null
+++ b/bin/mitre/datasource_2023.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2023",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2023",
+ "cve_filter" : "CVE-2023",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2023' --file=data/allitems-cvrf-year-2023.xml --url-file=allitems-cvrf-year-2023.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2023' --file=data/allitems-cvrf-year-2023.xml --url-file=allitems-cvrf-year-2023.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2023.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/srtool_mitre.py b/bin/mitre/srtool_mitre.py
index 3c6af89d..cdf6ff6e 100755
--- a/bin/mitre/srtool_mitre.py
+++ b/bin/mitre/srtool_mitre.py
@@ -31,8 +31,7 @@ import sys
import xml.etree.ElementTree as ET
import argparse
import shutil
-import sqlite3
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
import pytz
from urllib.request import urlopen
@@ -41,10 +40,13 @@ from urllib.request import urlopen
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
# Setup:
srtDbName = 'srt.sqlite'
srtErrorLog = 'srt_errors.txt'
+COMMIT_DELAY = 64
mitre_cvrf_url = 'https://cve.mitre.org/data/downloads'
mitre_cvrf_xml = 'data/allitems-cvrf-year-2018.xml'
@@ -56,6 +58,8 @@ mitre_cache_dir = 'data/cache/mitre'
# Debugging support
verbose = False
+cmd_skip = 0
+cmd_count = 0
# Development support
overrides = {}
@@ -88,18 +92,17 @@ def srt_error_log(msg):
f1.close()
-# Newly discovered or updated CVEs default to NEW for triage
-# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+# Newly discovered CVEs default to NEW_RESERVED if reserved, else NEW for triage
init_new_date = None
def get_cve_default_status(is_init,publishedDate,description):
global init_new_date
if None == init_new_date:
# Precalculate and cache the relative 'new' date for efficiency
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
- CVE_INIT_NEW_DELTA = cur.execute(sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ CVE_INIT_NEW_DELTA = SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
if CVE_INIT_NEW_DELTA is None:
cve_init_new_delta = 30
else:
@@ -110,18 +113,10 @@ def get_cve_default_status(is_init,publishedDate,description):
#print("\nPreset new data = %s" % init_new_date.strftime("%Y-%m-%d"))
init_new_date = init_new_date.strftime("%Y-%m-%d")
- if is_init:
- # Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
- #print("INIT status: %s versus %s" % (init_new_date,publishedDate))
- if not publishedDate or (publishedDate > init_new_date):
- # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
- reserved_pos = description.find('** RESERVED **')
- if (0 <= reserved_pos) and (20 > reserved_pos):
- return ORM.STATUS_NEW_RESERVED
- else:
- return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+ # Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
+ reserved_pos = description.find('** RESERVED **')
+ if (0 <= reserved_pos) and (20 > reserved_pos):
+ return ORM.STATUS_NEW_RESERVED
else:
return ORM.STATUS_NEW
@@ -173,6 +168,11 @@ def fetch_cve(cve_name,cvrf_xml_file):
datasource_xml = os.path.join(srtool_basepath,cvrf_xml_file)
cache_file = os.path.join(srtool_basepath,mitre_cache_dir,"%s.txt" % cve_name)
+ # Insure that the original data file exists
+ if not os.path.isfile(datasource_xml):
+ print("description=There is no loaded Mitre data.")
+ return
+
# Insure the cache dir exists
cache_dir = os.path.join(srtool_basepath,mitre_cache_dir)
if not os.path.isdir(cache_dir):
@@ -268,36 +268,52 @@ def append_cve_database(is_init,file_xml):
tree = ET.parse(file_xml)
root = tree.getroot()
- # Max count for development cycle
- cmd_count = 20 if get_override('SRTDBG_MINIMAL_DB') else 0
-
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
cur_ds = conn.cursor()
datasource_id = 0
+ srtool_today = date.today()
+ version_date = ''
+
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+ progress_set_max(len(root))
i = 0
for child in root:
i += 1
+
+ # Extract document date record
+ # <DocumentTracking>
+ # <Version>2020.01.14.22</Version>
+ if 'DocumentTracking' in child.tag:
+ for child_d in child:
+ if 'Version' in child_d.tag:
+ version_date = datetime.strptime(child_d.text, '%Y.%m.%d.%H')
+
+ # Find the Vulnerability records
if not 'Vulnerability' in child.tag:
continue
+
summary = _extract_text(child)
cve_name = summary['CVE']
+ progress_show(cve_name,force_newline=True)
# Progress indicator support
- if 0 == i % 10:
- print('%04d: %20s \r' % (i,cve_name), end='')
- if (0 == i % 200):
- conn.commit()
+ if 0 == (i % COMMIT_DELAY):
+ SQL_COMMIT(conn)
print('')
+ sys.stdout.flush()
if cmd_count and (i > cmd_count):
break
# Find the datasource matching these CVE prefixes
if 0 == datasource_id:
sql = "SELECT * FROM orm_datasource WHERE data = ? AND source = ?"
- cur_ds.execute(sql, ('cve','mitre',))
+ SQL_EXECUTE(cur_ds, sql, ('cve','mitre',))
for ds in cur_ds:
if ds[ORM.DATASOURCE_CVE_FILTER] and cve_name.startswith(ds[ORM.DATASOURCE_CVE_FILTER]):
datasource_id = ds[ORM.DATASOURCE_ID]
@@ -309,28 +325,45 @@ def append_cve_database(is_init,file_xml):
# Define the CVE (if not already there - e.g. not defined by NIST)
sql = ''' SELECT * FROM orm_cve WHERE name = ?'''
- cve = cur_write.execute(sql, (cve_name,)).fetchone()
+ cve = SQL_EXECUTE(cur_write, sql, (cve_name,)).fetchone()
if cve:
cve_id = cve[ORM.CVE_ID]
- print("MITRE:FOUND %20s\r" % cve_name, end='')
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print("MITRE:FOUND %20s" % cve_name, end='\r')
else:
# Get the default CVE status
status = get_cve_default_status(is_init,summary['Published'],summary['Description'])
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, packages)
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(),''))
- cve_id = cur.lastrowid
- print("MITRE:ADDED %20s\r" % cve_name)
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, "publishedDate", "lastModifiedDate", recommend, recommend_list, "cvssV3_baseScore", "cvssV3_baseSeverity", "cvssV2_baseScore", "cvssV2_severity", srt_updated, srt_created, packages)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', 'CVE', 'MITRE', '', True, ORM.PUBLISH_UNPUBLISHED, '', None, summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(), datetime.now(),''))
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print("MITRE:ADDED %20s" % cve_name, end='\r')
+
+ # Also create CVE history entry
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from MITRE')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today,ORM.USER_SRTOOL_NAME,) )
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- if not cur_ds.execute(sql, (cve_id,datasource_id)).fetchone():
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- cur_ds.execute(sql, (cve_id,datasource_id))
+ if not SQL_EXECUTE(cur_ds, sql, (cve_id,datasource_id)).fetchone():
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur_ds, sql, (cve_id,datasource_id))
+
+    # Update the datasource's lastModifiedDate after successfully updating it
+ if datasource_id:
+ print("\nVersion Date=%s" % str(version_date))
+ sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % datasource_id
+ SQL_EXECUTE(cur, sql, (str(version_date),))
- conn.commit()
+ SQL_COMMIT(conn)
print("\nTotal = %5d\n" % i)
+ # End progress
+ progress_done('Done')
#################################
# test dump
@@ -391,8 +424,9 @@ def dump(file_xml):
print("OTHER TOP TAG=%s" % child.tag)
i += 1
- if (0 == (i % 20)):
- print("%5d\r" % i,end = '')
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ if (0 == (i % 20)):
+ print("%5d" % i,end = '\r')
print("\nTotal = %5d\n" % i)
#################################
@@ -401,25 +435,46 @@ def dump(file_xml):
def main(argv):
global verbose
+ global cmd_skip
+ global cmd_count
# setup
parser = argparse.ArgumentParser(description='srtool_mitre.py: manage Mitre CVE data')
- parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file')
+ parser.add_argument('--initialize', '-I', action='store_const', const='init_mitre', dest='command', help='Download the Mitre source CVE file, add CVEs')
parser.add_argument('--update', '-u', action='store_const', const='update_mitre', dest='command', help='Update the Mitre source CVE file')
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
+ parser.add_argument('--download-only', action='store_const', const='download_mitre', dest='command', help='Download the Mitre source CVE file only')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
+
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
parser.add_argument('--dump', '-D', action='store_const', const='dump', dest='command', help='test dump data')
parser.add_argument('--dump2', '-2', action='store_const', const='dump2', dest='command', help='test dump data')
args = parser.parse_args()
+ # fetch any environment overrides
+ set_override('SRTDBG_MINIMAL_DB')
+
if args.is_verbose:
verbose = True
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ if None != args.count:
+ cmd_count = int(args.count)
+ elif get_override('SRTDBG_MINIMAL_DB'):
+ cmd_count = 20
+ if args.debug_sql:
+ SQL_DEBUG(True,'MTR')
+ progress_set_on(args.do_progress)
if 'dump' == args.command:
dump(mitre_cvrf_xml)
@@ -438,8 +493,6 @@ def main(argv):
fetch_cve(args.cve_detail,args.cve_file)
return
- # fetch any environment overrides
- set_override('SRTDBG_MINIMAL_DB')
# Required parameters to continue
if not args.source:
@@ -449,15 +502,22 @@ def main(argv):
print("ERROR: missing --url_file parameter")
exit(1)
+    # Currently no difference between initialize and update actions
if 'init_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(True,args.cve_file)
elif 'update_mitre' == args.command:
init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
append_cve_database(False,args.cve_file)
+ elif 'download_mitre' == args.command:
+ init_mitre_file(args.source,args.url_file,args.cve_file,args.force_update)
else:
print("Command not found")
+ # Dump the SQL transaction data
+ if args.debug_sql:
+ SQL_DUMP()
+
if __name__ == '__main__':
srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
diff --git a/bin/nist/datasource.json b/bin/nist/datasource.json
index 45210e40..8bc33f8c 100644
--- a/bin/nist/datasource.json
+++ b/bin/nist/datasource.json
@@ -20,13 +20,14 @@
"source" : "nist",
"name" : "NIST",
"description" : "NIST Modified Data",
+ "attributes" : "PREVIEW-SOURCE",
"cve_filter" : "",
"init" : "",
- "update" : "bin/nist/srtool_nist.py -i --source='NIST Modified Data' --file=data/nvdcve-1.0-modified.json --url-file=nvdcve-1.0-modified.json.gz --url-meta=nvdcve-1.0-modified.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-modified.json %command%",
+ "update" : "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=data/nvdcve-1.1-modified.json --url-file=nvdcve-1.1-modified.json.gz --url-meta=nvdcve-1.1-modified.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-modified.json %command%",
"update_frequency" : "2",
- "_comment_" : "Update at 7:00 am",
- "update_time" : "{\"hour\":\"7\"}"
+ "_comment_" : "Update at 1:00 pm",
+ "update_time" : "{\"hour\":\"13\"}"
}
]
}
diff --git a/bin/nist/datasource_2002.json b/bin/nist/datasource_2002.json
new file mode 100755
index 00000000..6b29436a
--- /dev/null
+++ b/bin/nist/datasource_2002.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2002",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2002",
+ "cve_filter" : "CVE-2002",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2002.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2003.json b/bin/nist/datasource_2003.json
new file mode 100755
index 00000000..ad301b57
--- /dev/null
+++ b/bin/nist/datasource_2003.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2003",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2003",
+ "cve_filter" : "CVE-2003",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2003.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2004.json b/bin/nist/datasource_2004.json
new file mode 100755
index 00000000..c4e4f838
--- /dev/null
+++ b/bin/nist/datasource_2004.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2004",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2004",
+ "cve_filter" : "CVE-2004",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2004.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2005.json b/bin/nist/datasource_2005.json
new file mode 100755
index 00000000..fad1bbac
--- /dev/null
+++ b/bin/nist/datasource_2005.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2005",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2005",
+ "cve_filter" : "CVE-2005",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2005.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2006.json b/bin/nist/datasource_2006.json
new file mode 100755
index 00000000..af8fbc72
--- /dev/null
+++ b/bin/nist/datasource_2006.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2006",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2006",
+ "cve_filter" : "CVE-2006",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2006.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2007.json b/bin/nist/datasource_2007.json
new file mode 100755
index 00000000..5f46571d
--- /dev/null
+++ b/bin/nist/datasource_2007.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2007",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2007",
+ "cve_filter" : "CVE-2007",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2007.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2008.json b/bin/nist/datasource_2008.json
new file mode 100755
index 00000000..8923a4b6
--- /dev/null
+++ b/bin/nist/datasource_2008.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2008",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2008",
+ "cve_filter" : "CVE-2008",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2008.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2009.json b/bin/nist/datasource_2009.json
new file mode 100755
index 00000000..edca168b
--- /dev/null
+++ b/bin/nist/datasource_2009.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "_comment_" : "Download the NIST source CVE file, load CVEs on demand only",
+ "key" : "0010-nist-2009",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2009",
+ "cve_filter" : "CVE-2009",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2009.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2010.json b/bin/nist/datasource_2010.json
new file mode 100755
index 00000000..562fd8c5
--- /dev/null
+++ b/bin/nist/datasource_2010.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2010",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2010",
+ "cve_filter" : "CVE-2010",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2010.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2011.json b/bin/nist/datasource_2011.json
new file mode 100755
index 00000000..7f50b3e0
--- /dev/null
+++ b/bin/nist/datasource_2011.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2011",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2011",
+ "cve_filter" : "CVE-2011",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2011.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2012.json b/bin/nist/datasource_2012.json
new file mode 100755
index 00000000..6505a244
--- /dev/null
+++ b/bin/nist/datasource_2012.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2012",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2012",
+ "cve_filter" : "CVE-2012",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2012.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2013.json b/bin/nist/datasource_2013.json
new file mode 100755
index 00000000..b7768906
--- /dev/null
+++ b/bin/nist/datasource_2013.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2013",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2013",
+ "cve_filter" : "CVE-2013",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2013.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2014.json b/bin/nist/datasource_2014.json
new file mode 100755
index 00000000..59cd83f7
--- /dev/null
+++ b/bin/nist/datasource_2014.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2014",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2014",
+ "cve_filter" : "CVE-2014",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2014.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2015.json b/bin/nist/datasource_2015.json
index ccca2f3f..49a942b2 100755
--- a/bin/nist/datasource_2015.json
+++ b/bin/nist/datasource_2015.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2015' --file=data/nvdcve-1.0-2015.json --url-file=nvdcve-1.0-2015.json.gz --url-meta=nvdcve-1.0-2015.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2015.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2015.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2016.json b/bin/nist/datasource_2016.json
index 9c87ef92..c2ce8401 100755
--- a/bin/nist/datasource_2016.json
+++ b/bin/nist/datasource_2016.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2016' --file=data/nvdcve-1.0-2016.json --url-file=nvdcve-1.0-2016.json.gz --url-meta=nvdcve-1.0-2016.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2016.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2016.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2017.json b/bin/nist/datasource_2017.json
index 40695ef5..38703954 100755
--- a/bin/nist/datasource_2017.json
+++ b/bin/nist/datasource_2017.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2017' --file=data/nvdcve-1.0-2017.json --url-file=nvdcve-1.0-2017.json.gz --url-meta=nvdcve-1.0-2017.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2017.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2017.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2018.json b/bin/nist/datasource_2018.json
index cf87ca2a..799c9b4a 100755
--- a/bin/nist/datasource_2018.json
+++ b/bin/nist/datasource_2018.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2018' --file=data/nvdcve-1.0-2018.json --url-file=nvdcve-1.0-2018.json.gz --url-meta=nvdcve-1.0-2018.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2018.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2018.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2019.json b/bin/nist/datasource_2019.json
index f3315526..9e5ba11d 100755
--- a/bin/nist/datasource_2019.json
+++ b/bin/nist/datasource_2019.json
@@ -7,9 +7,9 @@
"name" : "NIST",
"description" : "NIST 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/nist/srtool_nist.py -I --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "update" : "bin/nist/srtool_nist.py -n --source='NIST 2019' --file=data/nvdcve-1.0-2019.json --url-file=nvdcve-1.0-2019.json.gz --url-meta=nvdcve-1.0-2019.meta",
- "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2019.json %command%",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2019.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
diff --git a/bin/nist/datasource_2020.json b/bin/nist/datasource_2020.json
new file mode 100755
index 00000000..3f88e2bf
--- /dev/null
+++ b/bin/nist/datasource_2020.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2020",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2020",
+ "cve_filter" : "CVE-2020",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2020.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2021.json b/bin/nist/datasource_2021.json
new file mode 100755
index 00000000..1fea6d0f
--- /dev/null
+++ b/bin/nist/datasource_2021.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2021",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2021",
+ "cve_filter" : "CVE-2021",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2021' --file=data/nvdcve-1.1-2021.json --url-file=nvdcve-1.1-2021.json.gz --url-meta=nvdcve-1.1-2021.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2021' --file=data/nvdcve-1.1-2021.json --url-file=nvdcve-1.1-2021.json.gz --url-meta=nvdcve-1.1-2021.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2021.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2022.json b/bin/nist/datasource_2022.json
new file mode 100755
index 00000000..6aae8e44
--- /dev/null
+++ b/bin/nist/datasource_2022.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2022",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2022",
+ "cve_filter" : "CVE-2022",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2022' --file=data/nvdcve-1.1-2022.json --url-file=nvdcve-1.1-2022.json.gz --url-meta=nvdcve-1.1-2022.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2022' --file=data/nvdcve-1.1-2022.json --url-file=nvdcve-1.1-2022.json.gz --url-meta=nvdcve-1.1-2022.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2022.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2023.json b/bin/nist/datasource_2023.json
new file mode 100755
index 00000000..85b52c0c
--- /dev/null
+++ b/bin/nist/datasource_2023.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2023",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2023",
+ "cve_filter" : "CVE-2023",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2023' --file=data/nvdcve-1.1-2023.json --url-file=nvdcve-1.1-2023.json.gz --url-meta=nvdcve-1.1-2023.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2023' --file=data/nvdcve-1.1-2023.json --url-file=nvdcve-1.1-2023.json.gz --url-meta=nvdcve-1.1-2023.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2023.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index 37116140..44de0074 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,38 +21,58 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
### Usage Examples (run from top level directory)
-# Updating a specific NIST feed: ./bin/srtool.py -u "NIST JSON Data 2017"
-# Updating with the NIST incremental feed: ./bin/srtool.py -U
+# Updating a specific NIST feed: ./bin/nist/srtool_nist.py -u "NIST JSON Data 2017"
+# Updating with the NIST incremental feed: ./bin/nist/srtool_nist.py -U
import os
import sys
import re
import argparse
-import sqlite3
import json
from datetime import datetime, date, timedelta
import pytz
from urllib.request import urlopen, URLError
+import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
+from common.srtool_common import log_error
# Setup:
lookupTable = []
cveIndex = {}
db_change = False
-srtDbName = 'srt.sqlite'
+count_read = 0
+count_create = 0
+count_update = 0
+
+ACTION_INIT = 'Initialize'
+ACTION_UPDATE = 'Update'
+ACTION_INCREMENT = 'Increment'
+ACTION_DOWNLOAD = 'Download'
+ACTION_UPDATE_CVE = 'Update_Cve'
+
srtErrorLog = 'srt_errors.txt'
verbose = False
+force_update = False
+force_cache = False
+update_skip_history = False
+cmd_skip = 0
+cmd_count = 0
+COMMIT_DELAY = 64
-nist_cve_url_base = 'https://static.nvd.nist.gov/feeds/json/cve/1.0'
-nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.0'
+nist_datasources = {}
+
+nist_cve_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
+nist_meta_url_base = 'https://nvd.nist.gov/feeds/json/cve/1.1'
nist_cache_dir = 'data/cache/nist'
-#################################
+#######################################################################
# Helper methods
#
@@ -78,6 +98,18 @@ def srt_error_log(msg):
f1.write("|" + msg + "|\n" )
f1.close()
+# quick development/debugging support
+def _log(msg):
+    DBG_LVL = int(os.environ['SRTDBG_LVL']) if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Compute a sortable CVE name
def get_name_sort(cve_name):
try:
a = cve_name.split('-')
@@ -86,154 +118,519 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
-# Newly discovered or updated CVEs default to NEW for triage
-# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+# Extract the source file path from the "Lookup" command
+def get_file_from_lookup(lookup):
+ for param in lookup.split(' '):
+ if param.startswith('--file='):
+ return(param.replace('--file=',''))
+ return('')
+
+
+#######################################################################
+# CVE_ItemToSummary: Translate a CVE_Item JSON node to a dictionary
+
+def do_nist_scan_configuration_or(cpe_or_node, name, and_enum, key):
+ cpe_list = ''
+ for cpe in cpe_or_node[key]:
+ cpe23Uri = cpe['cpe23Uri']
+ if 'cpeMatchString' in cpe:
+ cpeMatchString = cpe['cpeMatchString']
+ else:
+ cpeMatchString = ''
+ if 'versionEndIncluding' in cpe:
+ versionEndIncluding = cpe['versionEndIncluding']
+ else:
+ versionEndIncluding = ''
+ cpe_list += '%s,%s,%s,%s|' % (cpe['vulnerable'],cpe23Uri,cpeMatchString,versionEndIncluding)
+ return cpe_list
+
+def nist_scan_configuration_or(cpe_or_node, name, and_enum):
+ cpe_list = '[or]|'
+ found = 0
+ if 'cpe' in cpe_or_node:
+ #if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe")
+ cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe')
+ found += 1
+ if 'cpe_match' in cpe_or_node:
+ #if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")
+ cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe_match')
+ found += 1
+ cpe_list += '[/or]|'
+
+ if verbose and (not found):
+ print("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s (%s)" % (cpe_or_node,name))
+ srt_error_log("WARNING:NIST_SCAN_CONFIGURATION_OR:NO CPE|CPE_MATCH:%s (%s)" % (cpe_or_node,name))
+ return cpe_list
+
+def fixscore(score):
+ if not score:
+ return ''
+ return '%02.1f' % float(score)
+
+# Parse NIST JSON record to a summary dict
+def CVE_ItemToSummary(CVE_Item,header_only=False):
+ summary = {}
+
+ #
+ # Assure that all fields are at least defined as empty string
+ #
+
+ # Header info
+ summary['name'] = CVE_Item['cve']['CVE_data_meta']['ID']
+ summary['cve_data_type'] = CVE_Item['cve']['data_type']
+ summary['cve_data_format'] = CVE_Item['cve']['data_format']
+ summary['cve_data_version'] = CVE_Item['cve']['data_version']
+
+ summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
+ summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
+ summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
+ summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % summary['name']
+ summary['url_title'] = 'NIST Link'
+
+ # cvssV3
+ is_v3 = ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact'])
+ baseMetricV3 = CVE_Item['impact']['baseMetricV3'] if is_v3 else ''
+ summary['cvssV3_baseScore'] = baseMetricV3['cvssV3']['baseScore'] if is_v3 else ''
+ summary['cvssV3_baseSeverity'] = baseMetricV3['cvssV3']['baseSeverity'] if is_v3 else ''
+ summary['cvssV3_vectorString'] = baseMetricV3['cvssV3']['vectorString'] if is_v3 else ''
+ summary['cvssV3_exploitabilityScore'] = baseMetricV3['exploitabilityScore'] if is_v3 else ''
+ summary['cvssV3_impactScore'] = baseMetricV3['impactScore'] if is_v3 else ''
+ summary['cvssV3_attackVector'] = baseMetricV3['cvssV3']['attackVector'] if is_v3 else ''
+ summary['cvssV3_attackComplexity'] = baseMetricV3['cvssV3']['attackComplexity'] if is_v3 else ''
+ summary['cvssV3_privilegesRequired'] = baseMetricV3['cvssV3']['privilegesRequired'] if is_v3 else ''
+ summary['cvssV3_userInteraction'] = baseMetricV3['cvssV3']['userInteraction'] if is_v3 else ''
+ summary['cvssV3_scope'] = baseMetricV3['cvssV3']['scope'] if is_v3 else ''
+ summary['cvssV3_confidentialityImpact'] = baseMetricV3['cvssV3']['confidentialityImpact'] if is_v3 else ''
+ summary['cvssV3_integrityImpact'] = baseMetricV3['cvssV3']['integrityImpact'] if is_v3 else ''
+ summary['cvssV3_availabilityImpact'] = baseMetricV3['cvssV3']['availabilityImpact'] if is_v3 else ''
+
+ # cvssV2
+ is_v2 = ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact'])
+ baseMetricV2 = CVE_Item['impact']['baseMetricV2'] if is_v2 else ''
+ summary['cvssV2_baseScore'] = baseMetricV2['cvssV2']['baseScore'] if is_v2 else ''
+ summary['cvssV2_severity'] = baseMetricV2['severity'] if is_v2 else ''
+ summary['cvssV2_vectorString'] = baseMetricV2['cvssV2']['vectorString'] if is_v2 else ''
+ summary['cvssV2_exploitabilityScore'] = baseMetricV2['exploitabilityScore'] if is_v2 else ''
+    summary['cvssV2_impactScore'] = baseMetricV2['impactScore'] if is_v2 else ''
+ summary['cvssV2_accessVector'] = baseMetricV2['cvssV2']['accessVector'] if is_v2 else ''
+ summary['cvssV2_accessComplexity'] = baseMetricV2['cvssV2']['accessComplexity'] if is_v2 else ''
+ summary['cvssV2_authentication'] = baseMetricV2['cvssV2']['authentication'] if is_v2 else ''
+ summary['cvssV2_confidentialityImpact'] = baseMetricV2['cvssV2']['confidentialityImpact'] if is_v2 else ''
+ summary['cvssV2_integrityImpact'] = baseMetricV2['cvssV2']['integrityImpact'] if is_v2 else ''
+
+ # SRTool specific meta data
+ summary['priority'] = '0'
+ summary['status'] = '0'
+ summary['comments'] = ''
+ summary['comments_private'] = ''
+ summary['tags'] = ''
+ summary['public'] = '1' # Always true since NIST is public source
+ summary['recommend'] = '0'
+ summary['recommend_list'] = ''
+ summary['publish_state'] = ORM.PUBLISH_UNPUBLISHED
+ summary['publish_date'] = ''
+ summary['acknowledge_date'] = None
+ summary['packages'] = ''
+
+ # Fix score to sortable string value
+ summary['cvssV3_baseScore'] = '%02.1f' % float(summary['cvssV3_baseScore']) if summary['cvssV3_baseScore'] else ''
+ summary['cvssV2_baseScore'] = '%02.1f' % float(summary['cvssV2_baseScore']) if summary['cvssV2_baseScore'] else ''
+
+ # The CVE table only needs the header, CVE details needs the rest
+ if header_only:
+ summary['cpe_list'] = ''
+ summary['ref_list'] = ''
+ return summary
+
+ configurations = CVE_Item['configurations']
+ is_first_and = True
+ summary['cpe_list'] = ''
+ for i, config in enumerate(configurations['nodes']):
+ summary['cpe_list'] += '[config]|'
+ summary['cpe_list'] += '[and]|'
+ if "AND" == config['operator']:
+ # create AND record
+ if not is_first_and:
+ summary['cpe_list'] += '[/and]|'
+ summary['cpe_list'] += '[and]|'
+ #is_first_and = False
+ if 'children' in config:
+ for j, cpe_or_node in enumerate(config['children']):
+ if "OR" == cpe_or_node['operator']:
+ summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, summary['name'], j)
+ else:
+ print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
+ log_error("ERROR: NIST (%s) CONFIGURE:OR_OP?:%s" % (summary['name'],cpe_or_node['operator']))
+ elif "OR" == config['operator']:
+ summary['cpe_list'] += nist_scan_configuration_or(config, summary['name'], 0)
+ else:
+ print("ERROR CONFIGURE:OP?:%s" % config['operator'])
+ log_error("ERROR: NIST (%s) CONFIGURE:OP?:%s" % (summary['name'],config['operator']))
+ summary['cpe_list'] += '[/and]|'
+ summary['cpe_list'] += '[/config]|'
+
+ summary['ref_list'] = ''
+ for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
+ summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'] if ref['refsource'] else '-')
+
+ return summary
+
+#######################################################################
+# get_cve_default_status: bootstrap initial CVE states
+# Newly discovered or updated CVEs default to NEW for triage
+# Inited CVEs default to HISTORICAL, unless they are within the courtesy CVE_INIT_NEW_DELTA
+
init_new_date = None
-def get_cve_default_status(is_init,publishedDate):
+def get_cve_default_status(action,publishedDate):
global init_new_date
if None == init_new_date:
# Precalculate and cache the relative 'new' date for efficiency
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
- CVE_INIT_NEW_DELTA = cur.execute(sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ CVE_INIT_NEW_DELTA = SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ #SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',))
+ #CVE_INIT_NEW_DELTA = SQL_FETCH_ONE(cur)
if CVE_INIT_NEW_DELTA is None:
cve_init_new_delta = 30
else:
cve_init_new_delta = int(CVE_INIT_NEW_DELTA[ORM.SRTSETTING_VALUE])
-
date_delta = timedelta(days=cve_init_new_delta)
init_new_date = datetime.now(pytz.utc) - date_delta
#print("\nPreset new data = %s" % init_new_date.strftime("%Y-%m-%d"))
init_new_date = init_new_date.strftime("%Y-%m-%d")
- if is_init:
+ if ACTION_INIT == action:
# Note: the NIST 'published date' is in the format "2017-05-11", so do a simple string compare
#print("INIT status: %s > %s" % (publishedDate, init_new_date))
- if not publishedDate or (publishedDate > init_new_date):
+# if not publishedDate or (publishedDate > init_new_date):
+ if True:
return ORM.STATUS_NEW
- else:
- return ORM.STATUS_HISTORICAL
+# else:
+# return ORM.STATUS_HISTORICAL
else:
return ORM.STATUS_NEW
+#######################################################################
+# cwe and cve2cwe
+#
+# Generates and executes appropriate SQLite query for a new CWE
+# returns CWE_ID
-#################################
-# check for updates and apply if any
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cwe_query(conn, value):
+ CWE_ID = 0
+ CWE_VULNERABLE_COUNT = 6
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cwetable WHERE name=?'''
+ cwe = SQL_EXECUTE(cur, sql, (value,)).fetchone()
+ if cwe is None:
+        # "1" is True for both SQLite and Postgres
+ sql = '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)'''
+ SQL_EXECUTE(cur, sql, (value,))
+ cwe_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_CLOSE_CUR(cur)
+ return cwe_id
+ else:
+ sql = ''' UPDATE orm_cwetable
+ SET vulnerable_count = ?
+ WHERE id = ?'''
+ SQL_EXECUTE(cur, sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ return cwe[CWE_ID]
+
+#generates and executes appropriate SQLite query for new CVE to CWE relation
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cve2cwe_query(conn, cve_id, cwe_id):
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?'''
+ cve2cwe = SQL_EXECUTE(cur, sql, (cve_id, cwe_id)).fetchone()
+ if cve2cwe is None:
+ sql = '''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id, cwe_id))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+
+#######################################################################
+#
+# Generates and executes appropriate SQLite query for CVE depending on situation
+# new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
+# returns (CVE_ID, BOOL) tuple, True if insert or update executed
#
-# Change orm_datasource schema to make LastModifiedDate a datetime object
-# datetime and urllib imports may be in an inappropriate location (top of file currently)
-#gets CVE-Modified feed, determines if we are out of date, and applies updates if true
-#tracks history in update_log.txt
-#incremental argument is boolean that idicates if bulk updating or incremental updating.
-def update_nist(is_init,datasource_description, url_file, url_meta, cve_file, incremental, force_update):
+### THIS DOES NOT CALL CONNECTION.COMMIT()
+def sql_cve_query(action, conn, summary, log):
+ global count_create
+ global count_update
- nist_cve_url = '%s/%s' % (nist_cve_url_base,url_file)
- nist_meta_url = '%s/%s' % (nist_meta_url_base,url_meta)
- nist_file = os.path.join(srtool_basepath,cve_file)
+ is_change = False
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_cve WHERE name=?'''
+ cve_current = SQL_EXECUTE(cur, sql, (summary['name'],)).fetchone()
+ cve_id = -1
+ srtool_today = datetime.today()
+ if cve_current is None:
+ count_create += 1
- #update log (1=Monday, 7= Sunday)
- today = datetime.today()
- weeknum = today.strftime("%W")
- weekday = today.isoweekday()
- log = open(os.path.join(srtool_basepath,"update_logs/update_nist_log_%s_%s.txt" % (weeknum, weekday)), "a")
+ # Get the default CVE status
+        summary['status'] = get_cve_default_status(action,summary['publishedDate'])
+
+ sql_elements = [
+ 'name',
+ 'name_sort',
+ 'priority',
+ 'status',
+ 'comments',
+ 'comments_private',
+ 'tags',
+ 'cve_data_type',
+ 'cve_data_format',
+ 'cve_data_version',
+ 'public',
+ 'publish_state',
+ 'publish_date',
+ 'acknowledge_date',
+ 'description',
+ 'publishedDate',
+ 'lastModifiedDate',
+ 'recommend',
+ 'recommend_list',
+ 'cvssV3_baseScore',
+ 'cvssV3_baseSeverity',
+ 'cvssV2_baseScore',
+ 'cvssV2_severity',
+ 'packages',
+ 'srt_updated',
+ 'srt_created',
+ ]
+ sql_qmarks = []
+ for i in range(len(sql_elements)):
+ sql_qmarks.append('?')
+ sql_values = (
+ summary['name'],
+ get_name_sort(summary['name']),
+ summary['priority'],
+ summary['status'],
+ summary['comments'],
+ summary['comments_private'],
+ summary['tags'],
+ summary['cve_data_type'],
+ summary['cve_data_format'],
+ summary['cve_data_version'],
+ summary['public'],
+ summary['publish_state'],
+ summary['publish_date'],
+ summary['acknowledge_date'],
+ summary['description'],
+ summary['publishedDate'],
+ summary['lastModifiedDate'],
+ summary['recommend'],
+ summary['recommend_list'],
+ summary['cvssV3_baseScore'],
+ summary['cvssV3_baseSeverity'],
+ summary['cvssV2_baseScore'],
+ summary['cvssV2_severity'],
+ summary['packages'],
+ srtool_today,
+ srtool_today
+ )
+
+ #print('INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
+ sql, params = 'INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values
+ SQL_EXECUTE(cur, sql, params)
+ is_change = True
+ cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ if log: log.write("\tINSERTED '%s'\n" % summary['name'])
- #ensure cache folder exists (clear cache during "run_all_updates()" from "srtool_utils.py")
- path = os.path.join(srtool_basepath, nist_cache_dir)
- try:
- os.makedirs(path)
- except:
- pass
+ # Also create CVE history entry
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from NIST')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
- # Set up database connection
- conn = sqlite3.connect(srtDbName)
- c = conn.cursor()
+ elif (cve_current[ORM.CVE_LASTMODIFIEDDATE] < summary['lastModifiedDate']) or force_update:
+ count_update += 1
- sql = "SELECT * FROM orm_datasource WHERE description='%s'" % datasource_description
- c.execute(sql)
- for ds in c:
- try:
- f = urlopen(nist_meta_url) #Note: meta files are not in json format, hence manual parse
- content = f.readline().decode('UTF-8')
+ cve_id = cve_current[ORM.CVE_ID]
- # These times are all UTC (only the logging uses local time)
- # Note: 'content' format - 'lastModifiedDate:2018-11-08T03:06:21-05:00\r\n'
- # trim the UTC offset to avoid time zone and day light savings glitches
- content = content[:content.rfind('-')]
- date_new = datetime.strptime(content, 'lastModifiedDate:%Y-%m-%dT%H:%M:%S')
- if not ds[ORM.DATASOURCE_LASTMODIFIEDDATE]:
- # Force update if no registed modified date for datasource (e.g. Init)
- date_past = date_new-timedelta(days=1)
- else:
- date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
+        # If CVE was 'reserved', promote to 'new'
+ if cve_current[ORM.CVE_STATUS] in (ORM.STATUS_NEW_RESERVED,):
+ summary['status'] = ORM.STATUS_NEW
+ else:
+ summary['status'] = cve_current[ORM.CVE_STATUS]
- log.write("BEGINNING NIST %s\n" % ('INITS' if is_init else 'UPDATES'))
- #determine if we are out of date and apply updates if true
- if (date_new > date_past) or force_update:
- pre_update_time = datetime.now() #used for logging purposes only
+ # If CVE is "new', reset score date so that it will be rescanned
+ if summary['status'] == ORM.STATUS_NEW:
+ summary['score_date'] = None
+ else:
+ summary['score_date'] = cve_current[ORM.CVE_SCORE_DATE]
- nist_json(is_init,nist_cve_url, ds[ORM.DATASOURCE_ID], nist_file, log, date_new, incremental)
- log.write("began %s: %s\n" % ( 'init' if is_init else 'updates', str(pre_update_time) ))
- log.write("finished %s: %s\n" % ( 'init' if is_init else 'updates', str(datetime.now()) ))
- log.write("=============================================================================\n")
- log.write("\n")
+ ### TO-DO
+ ### Capture CPE changes
+ ###
- #update datasource's lastModifiedDate after successsfuly updating it
- sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
- c.execute(sql, (str(date_new),))
- conn.commit()
- else:
- log.write("No %s needed\n" % ('init' if is_init else 'update'))
- log.write("Checked: %s\n" % datetime.now())
- log.write("=============================================================================\n")
- log.write("\n")
- print("NO %s NEEDED" % ('INIT' if is_init else 'UPDATE'))
+ # Update the CVE record
+ srt_updated = srtool_today if not update_skip_history else cve_current[ORM.CVE_SRT_UPDATED]
+ sql = ''' UPDATE orm_cve
+ SET recommend = ?,
+ recommend_list = ?,
+ cve_data_type = ?,
+ cve_data_format = ?,
+ cve_data_version = ?,
+ status = ?,
+ description = ?,
+ "publishedDate" = ?,
+ "lastModifiedDate" = ?,
+ "cvssV3_baseScore" = ?,
+ "cvssV3_baseSeverity" = ?,
+ "cvssV2_baseScore" = ?,
+ "cvssV2_severity" = ?,
+ score_date = ?,
+ srt_updated = ?
+ WHERE id = ?'''
+ sql_values = (
+ summary['recommend'],
+ summary['recommend_list'],
+ summary['cve_data_type'],
+ summary['cve_data_format'],
+ summary['cve_data_version'],
+ summary['status'],
+ summary['description'],
+ summary['publishedDate'],
+ summary['lastModifiedDate'],
+ summary['cvssV3_baseScore'],
+ summary['cvssV3_baseSeverity'],
+ summary['cvssV2_baseScore'],
+ summary['cvssV2_severity'],
+ summary['score_date'],
+ srt_updated,
+ cve_id)
+ SQL_EXECUTE(cur, sql, sql_values)
+ is_change = True
- # Reset datasource's lastModifiedDate as today
- sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
- c.execute(sql, (datetime.today().strftime(ORM.DATASOURCE_DATETIME_FORMAT),) )
- conn.commit()
+ if log: log.write("\tUPDATED '%s'\n" % summary['name'])
+ #print('UPDATED: %s (%s)' % (sql,sql_values))
+
+ # Prepare the history comment
+ if not update_skip_history:
+ history_update = []
+ if (cve_current[ORM.CVE_CVSSV3_BASESCORE].strip() != summary['cvssV3_baseScore'].strip() ) or \
+ (cve_current[ORM.CVE_CVSSV3_BASESEVERITY].strip() != summary['cvssV3_baseSeverity'].strip()):
+ history_update.append(ORM.UPDATE_SEVERITY_V3 % (
+ "%s %s" % (cve_current[ORM.CVE_CVSSV3_BASESCORE],cve_current[ORM.CVE_CVSSV3_BASESEVERITY]),
+ "%s %s" % (summary['cvssV3_baseScore'],summary['cvssV3_baseSeverity'])))
+ if (cve_current[ORM.CVE_CVSSV2_BASESCORE].strip() != summary['cvssV2_baseScore'].strip()) or \
+ (cve_current[ORM.CVE_CVSSV2_SEVERITY].strip() != summary['cvssV2_severity'].strip() ):
+ history_update.append(ORM.UPDATE_SEVERITY_V2 % (
+ "%s %s" % (cve_current[ORM.CVE_CVSSV2_BASESCORE],cve_current[ORM.CVE_CVSSV2_SEVERITY]),
+ "%s %s" % (summary['cvssV2_baseScore'],summary['cvssV2_severity'])))
+ if cve_current[ORM.CVE_DESCRIPTION].strip() != summary['description'].strip():
+ history_update.append(ORM.UPDATE_DESCRIPTION)
+ if cve_current[ORM.CVE_LASTMODIFIEDDATE] != summary['lastModifiedDate']:
+ history_update.append(ORM.UPDATE_LASTMODIFIEDDATE % (cve_current[ORM.CVE_LASTMODIFIEDDATE],summary['lastModifiedDate']))
+ if history_update:
+ # Add update to history
+ update_comment = "%s%s" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_CVE,';'.join(history_update))
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
- #######
- ## TESTING PURPOSES ONLY: reset lastModifiedDate so will always need update!
- #######
- # sql = '''UPDATE orm_datasource
- # SET lastModifiedDate = "0001-01-01 01:01:01"
- # WHERE description="NIST JSON Modified Data 2017" '''
- # c.execute(sql)
- # conn.commit()
+ ### TO-DO
+ ### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
+ ###
- f.close()
- except URLError as e:
- raise Exception("Failed to open %s: %s" % (nist_meta_url, e.reason))
- log.close()
- c.close()
- conn.close()
+ else:
+ # CVE found but is already up to date
+ cve_id = cve_current[ORM.CVE_ID]
+ is_change = False
+ if log: log.write("\tSKIPPED '%s'\n" % summary['name'])
+ SQL_CLOSE_CUR(cur)
+ return (cve_id, is_change)
-def file_date(filename,utc=False):
- t = os.path.getmtime(filename)
- file_datetime = datetime.fromtimestamp(t)
- if utc:
- # convert file time to UTC time using simple diff
- now = datetime.now()
- utc_now = datetime.utcnow()
- file_datetime = file_datetime+(utc_now-now)
- return file_datetime
+#######################################################################
+# prescan_modified()
+# Gather all the CVEs in the "Modified" NIST data source
+#
-#parses JSON, creates CVE object, and updates database as necessary. Commits to database on success
-#will EITHER create new record in orm_cve if cve does not exist OR overwrite every field if existing cve out-of-date OR ignore cve
-#requires json to be formatted with NIST Json schema (https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema)
-def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, date_new, incremental):
- import traceback
+def prescan_modified(cve_filter):
+
+ modify_datasource = None
+ cve_skip_list = []
+
+ for id in nist_datasources:
+ if nist_datasources[id][ORM.DATASOURCE_DESCRIPTION] == 'NIST Modified Data':
+ modify_datasource = nist_datasources[id]
+ break
+ if not modify_datasource:
+ print("ERROR: 'NIST Modified Data' not found")
+ log_error("ERROR: 'NIST Modified Data' not found")
+ return cve_skip_list
+
+ nist_file = os.path.join(srtool_basepath,get_file_from_lookup(modify_datasource[ORM.DATASOURCE_LOOKUP]))
+ try:
+ if not os.path.isfile(nist_file):
+ print("ERROR: no such file '%s'" % nist_file)
+ log_error("ERROR: no such file '%s'" % nist_file)
+ exit(1)
+ f = open(nist_file, 'r')
+ source_dct = json.load(f)
+ for item in source_dct["CVE_Items"]:
+ if not 'cve' in item:
+ continue
+ if not 'CVE_data_meta' in item['cve']:
+ continue
+ if not 'ID' in item['cve']['CVE_data_meta']:
+ continue
+ cve_name = item['cve']['CVE_data_meta']['ID']
+ if cve_name.startswith(cve_filter):
+ cve_skip_list.append(cve_name)
+ if verbose: print("MODSKIP:%s:1ADDMOD" % cve_name)
+ except Exception as e:
+ print("ERROR:%s" % e)
+ log_error("ERROR: NIST: prescan_modified(%s) '%s'" % (cve_filter,e))
+
+ return(cve_skip_list)
+
+#######################################################################
+# nist_json: parses JSON, creates CVE object, and updates database as necessary. Commits to database on success
+#
+# Will EITHER create new record in orm_cve if cve does not exist OR overwrite
+# every field if existing cve out-of-date OR ignore cve
+# Requires json to be formatted with NIST Json schema:
+# https://csrc.nist.gov/schema/nvd/feed/0.1/nvd_cve_feed_json_0.1_beta.schema
+
+def nist_json(action, summary_json_url, datasource, datasource_file, log, date_new):
import gzip
+ global count_read
+
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+
+ # Special handling around the NIST Modified Source
+ is_modified_source = ("PREVIEW-SOURCE" in datasource[ORM.DATASOURCE_ATTRIBUTES])
+
+ # If this is a volatile preview source:
+ # (a) Fetch the existing CveSource matches into a list
+ # (b) Remove found matches from that list
+ # (c) Delete remaining obsolete CveSource entries
+ preview_dict = {}
+ cve_skip_list = []
+ if is_modified_source:
+ sql = '''SELECT * FROM orm_cvesource WHERE datasource_id=? '''
+ for d2c in SQL_EXECUTE(cur, sql, (datasource[ORM.DATASOURCE_ID],)):
+ preview_dict[d2c[ORM.CVESOURCE_CVE_ID]] = d2c[ORM.CVESOURCE_ID]
+ if verbose: print("MODCHK:%8d:1ADDPREV" % d2c[ORM.CVESOURCE_CVE_ID])
+ else:
+        # If normal source but "force_update" flag is set, pre-fetch the CVEs
+ # that are in the "Modified" source so that they can be skipped.
+ if force_update:
+ cve_skip_list = prescan_modified(datasource[ORM.DATASOURCE_CVE_FILTER])
# If we have already cached a current version of the NIST file, read from it directly
# The value 'date_new' is in UTC, so convert the fetched file date
- if (not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True)):
+ if (not force_cache) and ((not datasource_file) or (not os.path.isfile(datasource_file)) or (date_new > file_date(datasource_file,True))):
# Fetch and/or refresh upstream CVE file
response = urlopen(summary_json_url)
dct = json.loads(gzip.decompress(response.read()).decode('utf-8')) #uncompress and decode json.gz
@@ -246,65 +643,69 @@ def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, dat
with open(datasource_file) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
- c = conn.cursor()
+ # Download the upstream CVE source file only
+ if ACTION_DOWNLOAD == action:
+ return
CVE_Items = dct['CVE_Items']
total = len(CVE_Items)
- v = Cve()
+
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+ progress_set_max(total)
cache_path = os.path.join(srtool_basepath, nist_cache_dir)
#begin parsing each cve in the JSON data
for i, CVE_Item in enumerate(CVE_Items):
+ count_read += 1
+
# Development support
if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
break
- references = CVE_Item['cve']['references']['reference_data']
- CVE_data_meta = CVE_Item['cve']['CVE_data_meta']['ID']
-
- #if cve exists in cache, delete it
- cve_path = os.path.join(cache_path, CVE_data_meta + ".json")
- if (os.path.isfile(cve_path)):
- os.remove(cve_path)
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
#print('.', end='', flush=True)
- print('[%4d]%30s\r' % ((i * 100)/ total, CVE_data_meta), end='', flush=True)
try:
- v.name = CVE_data_meta
-
- v.cve_data_type = CVE_Item['cve']['data_type']
- v.cve_data_format = CVE_Item['cve']['data_format']
- v.cve_data_version = CVE_Item['cve']['data_version']
+ # Translate a CVE_Item JSON node
+ summary = CVE_ItemToSummary(CVE_Item)
- v.description = CVE_Item['cve']['description']['description_data'][0]['value']
- v.publishedDate = re.sub('T.*','',CVE_Item['publishedDate'])
- v.lastModifiedDate = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- v.public = True # Always true since NIST is public source
-
- # We do not know yet if this has been published to the SRTool management
- v.publish_state = ORM.PUBLISH_UNPUBLISHED
- v.publish_date = ''
-
- if ('impact' in CVE_Item) and ('baseMetricV3' in CVE_Item['impact']):
- baseMetricV3 = CVE_Item['impact']['baseMetricV3']
- v.cvssV3_baseScore = baseMetricV3['cvssV3']['baseScore']
- v.cvssV3_baseSeverity = baseMetricV3['cvssV3']['baseSeverity']
- if ('impact' in CVE_Item) and ('baseMetricV2' in CVE_Item['impact']):
- baseMetricV2 = CVE_Item['impact']['baseMetricV2']
- v.cvssV2_baseScore = baseMetricV2['cvssV2']['baseScore']
-
- #check if cve object `v` need to be uploaded to database (cases: new cve, modified cve, or no changes)
- #if true, apply changes. Else ignore and continue
- v_id, is_change = sql_cve_query(conn, v, is_init,log)
-
-
- #if incremental update and CVE changed, save json copy of the cve to cache
- if incremental and is_change:
- file = open(cve_path, 'w+')
- file.write(json.dumps(CVE_Item))
-
- #if CVE `v` updates, must check and update associated records (CWEs, references, and CVE2CWE)
+ # Skip this CVE (Modified preemption)?
+ if not is_modified_source:
+ if summary['name'] in cve_skip_list:
+ if verbose: print("MODSKIP:%s:2SKIPMOD" % summary['name'])
+ continue
+ else:
+ if verbose: print("MODSKIP:%s:3PROCESS" % summary['name'])
+ pass
+
+ # Indicate progress
+ progress_show(summary['name'],force_newline=True)
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print('[%4d]%30s' % ((i * 100)/ total, summary['name']), end='\r', flush=True)
+ if verbose:
+ # Remove this progress from the verbose lines (allows sorting by cve_id)
+ print('')
+
+ #if cve exists in cache, delete it
+ cve_path = os.path.join(cache_path, '%s.json' % summary['name'])
+ if (os.path.isfile(cve_path)):
+ os.remove(cve_path)
+
+ # Check if cve object need to be uploaded to database (cases: new cve, modified cve, or no changes)
+ # if true, apply changes. Else ignore and continue
+ cve_id, is_change = sql_cve_query(action, conn, summary, log)
+
+ # Remove this found CVE from the preview check list, if present
+ if is_modified_source:
+ preview_dict.pop(cve_id,None)
+ if verbose: print("MODCHK:%8d:2POP" % cve_id)
+
+ # If CVE updates, must check and update associated records (CWEs, references, and CVE2CWE)
#sql_cwe_query, and sql_cve2cwe_query require valid CVE record primary key at some point during their execution, therefore must always be after call to sql_cve_query
if is_change:
problem_list = CVE_Item['cve']['problemtype']['problemtype_data']
@@ -313,189 +714,200 @@ def nist_json(is_init,summary_json_url, datasource_id, datasource_file, log, dat
for description_Item in description_list:
value = description_Item['value']
cwe_id = sql_cwe_query(conn, value)
- sql_cve2cwe_query(conn, v_id, cwe_id)
+ sql_cve2cwe_query(conn, cve_id, cwe_id)
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- exists = c.execute(sql, (v_id,datasource_id)).fetchone()
+ exists = SQL_EXECUTE(cur, sql, (cve_id,datasource[ORM.DATASOURCE_ID])).fetchone()
if exists is None:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- c.execute(sql, (v_id,datasource_id))
+ # If volatile source, first remove all existing (potentially obsolete) NIST datasources to CVE
+ if is_modified_source:
+ if verbose: print("MODCHK:%8d:3aREM_OLD_CVESOURCE %s" % (cve_id,summary['name']))
+ sql = '''SELECT * FROM orm_cvesource WHERE cve_id=?'''
+ for cve2ds in SQL_EXECUTE(cur, sql, (cve_id, )):
+ if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in nist_datasources:
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ SQL_EXECUTE(cur, sql, (cve2ds[ORM.CVESOURCE_ID],))
+ if verbose: print("MODCHK:%8d:3bREM_FROM_CVESOURCE DS:%d" % (cve_id,cve2ds[ORM.CVESOURCE_DATASOURCE_ID]))
+
+ # Now, add found NIST datasource to CVE
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,datasource[ORM.DATASOURCE_ID]))
+ if verbose: print("MODCHK:%8d:4ADD_TO_CVESOURCE" % cve_id)
+ else:
+ if verbose: print("MODCHK:%8d:4NO_CHANGE_CVESOURCE" % cve_id)
+ pass
+
+ # Safety commit as we go
+ if 0 == (i % COMMIT_DELAY):
+ SQL_COMMIT(conn)
+ print('')
except Exception as e:
print(traceback.format_exc())
print("UPDATE FAILED")
- c.close()
- conn.close()
- return
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ raise Exception("Failed to import CVEs %s: %s" % (datasource_file, e))
print()
log.write("total number of CVEs checked: %s\n" % total)
- conn.commit()
- c.close()
- conn.close()
-#################################
-# cve class
+ # Now delete any un-matched obsolete CveSource entries
+ if is_modified_source:
+ if verbose: print("MODCHK:%8d:5REMOVE DEAD LINKS" % 0)
+ for cve_id in preview_dict.keys():
+ # First, remove volatile and obsolete CveSource reference
+ sql = 'DELETE FROM orm_cvesource WHERE id=?'
+ SQL_EXECUTE(cur, sql, (preview_dict[cve_id],))
+ if verbose: print("MODCHK:%8d:6REMOVE DEAD LINK" % cve_id)
+ # Second, reattach to normal CveSource reference
+ cve = SQL_EXECUTE(cur, 'SELECT * FROM orm_cve WHERE id = "%s"' % cve_id).fetchone()
+ if cve:
+ for ds_id in nist_datasources:
+ datasource_cve_filter = nist_datasources[ds_id][ORM.DATASOURCE_CVE_FILTER]
+ if datasource_cve_filter and cve[ORM.CVE_NAME].startswith(datasource_cve_filter):
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,ds_id))
+ if verbose: print("MODCHK:%8d:7MOVE TO NORMAL %d" % (cve_id,ds_id))
+ break
+ else:
+ msg = "ERROR: missing CVE record '%d' when reattaching obsolete CveSource reference" % cve_id
+ print(msg)
+ log.write(msg)
+ log_error(msg)
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ # End progress
+ progress_done('Done')
+
+#######################################################################
+# check for updates and apply if any
#
-class Cve():
- # index - primary key
- id = -1
-
- name = ''
-
- priority = 0
- status = ORM.STATUS_HISTORICAL
-
- comments = ''
- comments_private = ''
-
- cve_data_type = ''
- cve_data_format = ''
- cve_data_version = ''
-
- public = False
- publish_state = ORM.PUBLISH_UNPUBLISHED
- publish_date = ''
-
- description = ''
- publishedDate = ''
- lastModifiedDate = ''
- problemtype = ''
-
- # cpe_list = ''
-
- cvssV3_baseScore = ''
- cvssV3_baseSeverity = ''
- # cvssV3_vectorString = ''
- # cvssV3_exploitabilityScore = ''
- # cvssV3_impactScore = ''
- # cvssV3_attackVector = ''
- # cvssV3_attackComplexity = ''
- # cvssV3_privilegesRequired = ''
- # cvssV3_userInteraction = ''
- # cvssV3_scope = ''
- # cvssV3_confidentialityImpact = ''
- # cvssV3_integrityImpact = ''
- # cvssV3_availabilityImpact = ''
-
- cvssV2_baseScore = ''
- cvssV2_severity = ''
- # cvssV2_vectorString = ''
- # cvssV2_exploitabilityScore = ''
- # cvssV2_impactScore = ''
- # cvssV2_accessVector = ''
- # cvssV2_accessComplexity = ''
- # cvssV2_authentication = ''
- # cvssV2_confidentialityImpact = ''
- # cvssV2_integrityImpact = ''
-
- recommend = 0
- recommend_list = ''
-
-#generates and executes appropriate SQLite query for CVE depending on situation
-#new CVE -> INSERT || modified CVE -> UPDATE || no change -> ignore and return
-#returns (CVE_ID, BOOL) tuple, True if insert or update executed
-### THIS DOES NOT CALL CONNECTION.COMMIT()
-def sql_cve_query(conn, cve, is_init, log):
- is_change = False
+# Change orm_datasource schema to make LastModifiedDate a datetime object
+# datetime and urllib imports may be in an inappropriate location (top of file currently)
+#
+# Gets CVE-Modified feed, determines if we are out of date, and applies updates if true
+# tracks history in update_log.txt
+
def update_nist(action,datasource_description, url_file, url_meta, cve_file):
    """Check a NIST datasource for upstream changes and apply them if needed.

    action                 : command passed through to nist_json() (e.g. init,
                             update, download-only)
    datasource_description : orm_datasource 'description' value selecting the
                             registered NIST source(s) to process
    url_file               : file portion of the upstream CVE JSON URL
    url_meta               : file portion of the upstream .meta URL
    cve_file               : local CVE JSON path (absolute, or relative to
                             srtool_basepath)

    Compares the upstream feed's 'lastModifiedDate' (first line of the .meta
    file) against the datasource's recorded date, and runs nist_json() when
    the upstream is newer or 'force_update' is set. Activity is appended to
    a per-week/day log file under update_logs/.
    Raises Exception when the upstream .meta URL cannot be opened.
    """
    global nist_datasources

    nist_cve_url = '%s/%s' % (nist_cve_url_base,url_file)
    nist_meta_url = '%s/%s' % (nist_meta_url_base,url_meta)
    # Accept absolute paths as-is, otherwise anchor at the SRTool base dir
    nist_file = os.path.join(srtool_basepath,cve_file) if not cve_file.startswith('/') else cve_file

    #update log (1=Monday, 7= Sunday)
    today = datetime.today()
    weeknum = today.strftime("%W")
    weekday = today.isoweekday()
    log = open(os.path.join(srtool_basepath,"update_logs/update_nist_log_%s_%s.txt" % (weeknum, weekday)), "a")


    #ensure cache folder exists (clear cache during "run_all_updates()" from "srtool_utils.py")
    path = os.path.join(srtool_basepath, nist_cache_dir)
    try:
        os.makedirs(path)
    except:
        # Directory already exists
        pass

    # Set up database connection
    conn = SQL_CONNECT()
    cur = conn.cursor()

    # Prefetch the NIST data sources to assist MODIFIED <-> NORMAL transitions
    sql = "SELECT * FROM orm_datasource WHERE source = 'nist'"
    SQL_EXECUTE(cur, sql)
    nist_datasources = {}
    for ds in cur:
        nist_datasources[ds[ORM.DATASOURCE_ID]] = ds

    sql = "SELECT * FROM orm_datasource WHERE description='%s'" % datasource_description
    SQL_EXECUTE(cur, sql)
    for ds in cur.fetchall():
        try:
            f = urlopen(nist_meta_url) #Note: meta files are not in json format, hence manual parse
            content = f.readline().decode('UTF-8')

            # These times are all UTC (only the logging uses local time)
            # Note: 'content' format - 'lastModifiedDate:2018-11-08T03:06:21-05:00\r\n'
            # trim the UTC offset to avoid time zone and day light savings glitches
            content = content[:content.rfind('-')]
            date_new = datetime.strptime(content, 'lastModifiedDate:%Y-%m-%dT%H:%M:%S')
            if not ds[ORM.DATASOURCE_LASTMODIFIEDDATE]:
                # Force update if no registered modified date for datasource (e.g. Init)
                date_past = date_new-timedelta(days=1)
            else:
                date_past = datetime.strptime(ds[ORM.DATASOURCE_LASTMODIFIEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)

            log.write("BEGINNING NIST %s\n" % action)
            #determine if we are out of date and apply updates if true
            if (date_new > date_past) or force_update:
                pre_update_time = datetime.now() #used for logging purposes only
                if verbose: print("NIST: EXECUTING ACTION %s" % action)
                nist_json(action,nist_cve_url, ds, nist_file, log, date_new)
                log.write("began %s: %s\n" % ( action, str(pre_update_time) ))
                log.write("finished %s: %s\n" % ( action, str(datetime.now()) ))
                log.write("=============================================================================\n")
                log.write("\n")

                #update datasource's lastModifiedDate after successfully updating it
                sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % ds[ORM.DATASOURCE_ID]
                SQL_EXECUTE(cur, sql, (str(date_new),))
                SQL_COMMIT(conn)
            else:
                if verbose: print("NIST: NO %s NEEDED" % action)
                log.write("No %s needed\n" % action)
                log.write("Checked: %s\n" % datetime.now())
                log.write("=============================================================================\n")
                log.write("\n")

                # Reset datasource's lastModifiedDate as today
                sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % ds[ORM.DATASOURCE_ID]
                SQL_EXECUTE(cur, sql, (datetime.today().strftime(ORM.DATASOURCE_DATETIME_FORMAT),) )
                SQL_COMMIT(conn)

            #######
            ## TESTING PURPOSES ONLY: reset lastModifiedDate so will always need update!
            #######
            # sql = '''UPDATE orm_datasource
            #     SET "lastModifiedDate" = "0001-01-01 01:01:01"
            #     WHERE description="NIST JSON Modified Data 2017" '''
            # SQL_EXECUTE(cur, sql)
            # SQL_COMMIT(conn)

            f.close()
        except URLError as e:
            raise Exception("Failed to open %s: %s" % (nist_meta_url, e))
    log.close()
    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
-#################################
-# main loop
def file_date(filename,utc=False):
    """Return the modification time of *filename* as a naive datetime.

    When *utc* is True, the local timestamp is shifted into UTC by applying
    the instantaneous local-vs-UTC offset.
    """
    modified = datetime.fromtimestamp(os.path.getmtime(filename))
    if not utc:
        return modified
    # Shift the local file time into UTC using a simple now-vs-utcnow delta
    local_now = datetime.now()
    utc_now = datetime.utcnow()
    return modified + (utc_now - local_now)
+
+#######################################################################
+# fetch_cve: extract and return the meta data for a specific CVE
#
-def fetch_cve(cve_name,cve_source_file):
+def do_fetch_cve(cve_name,cve_source_file,use_cache=True):
+
# Fetch cached data, else extract data from datasource file
cache_path = os.path.join(srtool_basepath, nist_cache_dir)
cve_cache_path = os.path.join(cache_path, cve_name + ".json")
#check if in cache, and use if exists. Else fetch from appropriate CVE JSON feed file
CVE_Item = None
- if (os.path.isfile(cve_cache_path)):
+ if use_cache and os.path.isfile(cve_cache_path):
try:
f = open(cve_cache_path, 'r')
CVE_Item = json.load(f)
@@ -503,8 +915,9 @@ def fetch_cve(cve_name,cve_source_file):
print("Description=ERROR reading CVE summary file '%s':%s" % (cve_cache_path,e))
return
elif cve_source_file:
+ nist_file = os.path.join(srtool_basepath,cve_source_file) if not cve_source_file.startswith('/') else cve_source_file
try:
- f = open(os.path.join(srtool_basepath, cve_source_file), 'r')
+ f = open(nist_file, 'r')
source_dct = json.load(f)
for item in source_dct["CVE_Items"]:
if not 'cve' in item:
@@ -520,149 +933,371 @@ def fetch_cve(cve_name,cve_source_file):
os.makedirs(cache_path)
except:
pass
- cve_cache_file = open(cve_cache_path, "w+") #write the cve to json file in cache
- cve_cache_file.write(json.dumps(CVE_Item))
+ if use_cache:
+ cve_cache_file = open(cve_cache_path, "w+") #write the cve to json file in cache
+ cve_cache_file.write(json.dumps(CVE_Item))
break
except Exception as e:
print("Description=ERROR creating CVE cache file '%s':%s" % (cve_source_file,e))
return
else:
# No data source for details
- return
+ return None
if not CVE_Item:
+ # Not found
+ return None
+ else:
+ # Return translated CVE_Item JSON node
+ return(CVE_ItemToSummary(CVE_Item))
+
def fetch_cve(cve_name,cve_source_file):
    """Print the CVE's metadata as 'key=value' lines, or a not-found message.

    Looks up cve_name via do_fetch_cve() (cache first, then the given NIST
    feed file).
    """
    summary = do_fetch_cve(cve_name,cve_source_file)
    if summary:
        # Emit one "key=value" line per summary field
        for key, value in summary.items():
            print('%s=%s' % (key, str(value).strip()))
        return
    print("description=There is no CVE record for %s in the loaded NIST public CVE database." % cve_name)
+
def cve_summary(cve_name):
    """Print a console summary of a CVE from the NIST feeds and the database.

    cve_name may be a CVE name (e.g. 'CVE-2019-0001') or a numeric orm_cve
    record id. Shows (a) values parsed directly from the matching NIST base
    and 'Modified' feed files, (b) the current orm_cve record values, and
    (c) the data sources currently attached to that CVE.
    """
    cve_name = cve_name.upper()

    # Index positions within the datasource_map values built below
    DSMAP_FILE = 0
    DSMAP_DESC = 1
    DSMAP_MOD = 2
    DSMAP_UPDATE = 3

    conn = SQL_CONNECT()
    cur_ds = conn.cursor()
    cur_cve = conn.cursor()
    base_id = -1
    modified_id = -1

    def description_summary(description):
        # Truncate long descriptions and append a character-sum fingerprint
        # so two descriptions can be compared at a glance
        desc_sum = 0
        for ch in description:
            desc_sum += ord(ch)
        if 37 < len(description):
            description = "%-37s..." % description[:37]
        return("%-40s [sum=%d]" % (description,desc_sum))

    def show_summary(key,cve_name,datasource_map,datasource_id):
        # Print the CVE values parsed straight from one NIST feed file
        # (cache is bypassed so the feed file itself is read)
        if datasource_id in datasource_map:
            data_map = datasource_map[datasource_id]
            summary = do_fetch_cve(cve_name,data_map[DSMAP_FILE],False)
            if summary:
                print(" %s: %s in %s (%s,%s)" % (key,summary['name'],data_map[DSMAP_FILE],data_map[DSMAP_MOD],data_map[DSMAP_UPDATE]))
                print(" description :%s" % description_summary(summary['description']))
                print(" cvssV3_baseScore :%s" % summary['cvssV3_baseScore'])
                print(" cvssV3_baseSeverity:%s" % summary['cvssV3_baseSeverity'])
                print(" cvssV2_baseScore :%s" % summary['cvssV2_baseScore'])
                print(" cvssV2_severity :%s" % summary['cvssV2_severity'])
                print(" publishedDate :%s" % summary['publishedDate'])
                print(" lastModifiedDate :%s" % summary['lastModifiedDate'])
            else:
                print(" %s: There is no CVE record for %s in %s" % (key,cve_name,data_map[DSMAP_FILE]))
        else:
            # NOTE(review): formats cve_name where the sibling branches
            # format 'key' first — looks like it was meant to be "%s: ..."
            # with key; confirm intended
            print(" %s: There is no matching datasource" % cve_name)

    # Support CVE record IDs in addition to CVE names
    cve = None
    if cve_name[0].isdigit():
        cve = SQL_EXECUTE(cur_cve, 'SELECT * FROM orm_cve WHERE id = %s' % cve_name).fetchone()
        if not cve:
            print("CVE Summary:")
            print(" CVE : There is no CVE record for this ID %s in orm_cve" % (cve_name))
            return
        cve_name = cve[ORM.CVE_NAME]
    else:
        cve = SQL_EXECUTE(cur_cve, 'SELECT * FROM orm_cve WHERE name = "%s"' % cve_name).fetchone()

    # Build the per-datasource map, remembering which NIST source is the
    # CVE's base (per-year) feed and which is the volatile Modified feed
    SQL_EXECUTE(cur_ds, 'SELECT * FROM orm_datasource;')
    datasource_map = {}
    for datasource in cur_ds:
        #print("Datasource[%d]='%s'" % (datasource[ORM.DATASOURCE_ID],datasource[ORM.DATASOURCE_DESCRIPTION]))

        # DataSource Map is [cve_file,ds_desc,ds_lastmodifieddate,ds_lastupdateddate]
        datasource_map[datasource[ORM.DATASOURCE_ID]] = ['',datasource[ORM.DATASOURCE_DESCRIPTION],datasource[ORM.DATASOURCE_LASTMODIFIEDDATE],datasource[ORM.DATASOURCE_LASTUPDATEDDATE]]
        if ('nist' == datasource[ORM.DATASOURCE_SOURCE]) and ('NIST Modified Data' == datasource[ORM.DATASOURCE_DESCRIPTION]):
            datasource_map[datasource[ORM.DATASOURCE_ID]][DSMAP_FILE] = get_file_from_lookup(datasource[ORM.DATASOURCE_LOOKUP])
            modified_id = datasource[ORM.DATASOURCE_ID]
        elif ('nist' == datasource[ORM.DATASOURCE_SOURCE]) and datasource[ORM.DATASOURCE_CVE_FILTER] and cve_name.startswith(datasource[ORM.DATASOURCE_CVE_FILTER]):
            datasource_map[datasource[ORM.DATASOURCE_ID]][DSMAP_FILE] = get_file_from_lookup(datasource[ORM.DATASOURCE_LOOKUP])
            base_id = datasource[ORM.DATASOURCE_ID]

    # Return the NIST results
    print("NIST Summary:")
    show_summary("BASE",cve_name,datasource_map,base_id)
    show_summary("MOD ",cve_name,datasource_map,modified_id)
    if cve:
        SQL_EXECUTE(cur_ds, 'SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
        # Return the CVE record's current values
        print("CVE Summary:")
        print(" CVE [%s]: %s " % (cve[ORM.CVE_ID],cve[ORM.CVE_NAME],))
        print(" description :%s" % description_summary(cve[ORM.CVE_DESCRIPTION]))
        print(" cvssV3_baseScore :%s" % cve[ORM.CVE_CVSSV3_BASESCORE])
        print(" cvssV3_baseSeverity:%s" % cve[ORM.CVE_CVSSV3_BASESEVERITY])
        print(" cvssV2_baseScore :%s" % cve[ORM.CVE_CVSSV2_BASESCORE])
        print(" cvssV2_severity :%s" % cve[ORM.CVE_CVSSV2_SEVERITY])
        print(" public_notes :%s" % cve[ORM.CVE_COMMENTS])
        print(" status :%s" % ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR))
        print(" publishedDate :%s" % cve[ORM.CVE_PUBLISHEDDATE])
        print(" lastModifiedDate :%s" % cve[ORM.CVE_LASTMODIFIEDDATE])
        # Return the DataSource mapping results
        print("DataSource Summary:")
        for j,cs in enumerate(cur_ds):
            datasource_id = cs[ORM.CVESOURCE_DATASOURCE_ID]
            if datasource_id in datasource_map:
                print(" [%2d] %s" % (j+1,datasource_map[cs[ORM.CVESOURCE_DATASOURCE_ID]][DSMAP_DESC]))
            else:
                print(" [%2d] Unknown DataSource ID %d" % (j+1,cs[ORM.CVESOURCE_DATASOURCE_ID]))
    else:
        print("CVE Summary:")
        print(" CVE : There is no CVE record for %s in orm_cve" % (cve_name))
- summary = {}
- summary['name'] = cve_name
- summary['cve_data_type'] = CVE_Item['cve']['data_type']
- summary['cve_data_format'] = CVE_Item['cve']['data_format']
- summary['cve_data_version'] = CVE_Item['cve']['data_version']
+#######################################################################
+# update_cve_list: Update CVE records for a list of CVEs
+#
+# This can be used for forcing the instantiation and/or update
+# for specific CVEs on demand, for example instantiating CVEs found in
+# the defect system that may be from older NIST years which are registered
+# as data sources that are on-demand only
+#
- summary['description'] = CVE_Item['cve']['description']['description_data'][0]['value']
- summary['publishedDate'] = re.sub('T.*','',CVE_Item['publishedDate'])
- summary['lastModifiedDate'] = re.sub('T.*','',CVE_Item['lastModifiedDate'])
- summary['url'] = 'https://nvd.nist.gov/vuln/detail/%s' % cve_name
- summary['url_title'] = 'NIST Link'
def update_cve_list(action,cve_string_list,conn=None):
    """Instantiate and/or update specific CVE records on demand.

    action          : database action passed through to sql_cve_query()
    cve_string_list : comma-separated CVE names, or an absolute path ('/...')
                      to a file with one CVE name per line
    conn            : optional existing database connection; a private one
                      is opened (and closed) when not supplied

    Scans the registered NIST CVE feed files for the requested CVEs, with
    the 'NIST Modified Data' feed searched first so its entries win over
    the regular per-year feeds.
    """
    # Accept either a file of CVE names (absolute path) or a comma list
    if '/' == cve_string_list[0]:
        # Read list from file
        cve_list = []
        with open(cve_string_list, 'r') as file_fd:
            for line in file_fd.readlines():
                if line:
                    cve_list.append(line.strip())
    else:
        # Read list from comma string
        cve_list = cve_string_list.split(',')

    # Index positions within the datasource_table rows built below
    DS_MODIFIED_SOURCE = 0
    DS_CVEFILTER = 1
    DS_LOOKUP = 2
    DS_ID = 3
    DS_SOURCE_FILE = 4

    # Set up database connection (only close it here if we opened it here)
    do_close = False
    if not conn:
        conn = SQL_CONNECT()
        do_close = True
    cur = conn.cursor()

    # Gather the CVE prefix to lookup commands
    sql = "SELECT * FROM orm_datasource"
    SQL_EXECUTE(cur, sql)
    datasource_table = []
    datasource_nist_ids = {}
    for datasource in cur:
        if ('nist' != datasource[ORM.DATASOURCE_SOURCE]) or ('cve' != datasource[ORM.DATASOURCE_DATA]) :
            # Only consider NIST CVE datasources
            continue

        # Track the IDs for NIST sources
        datasource_nist_ids[datasource[ORM.DATASOURCE_ID]] = True

        # Always put the Modified source first
        if 'NIST Modified Data' == datasource[ORM.DATASOURCE_DESCRIPTION]:
            datasource_table.insert(0,[True,datasource[ORM.DATASOURCE_CVE_FILTER], datasource[ORM.DATASOURCE_LOOKUP], datasource[ORM.DATASOURCE_ID], get_file_from_lookup(datasource[ORM.DATASOURCE_LOOKUP]) ])
        else:
            datasource_table.append([False,datasource[ORM.DATASOURCE_CVE_FILTER], datasource[ORM.DATASOURCE_LOOKUP], datasource[ORM.DATASOURCE_ID], get_file_from_lookup(datasource[ORM.DATASOURCE_LOOKUP]) ])

    update = False
    fd = None
    source_dct = []
    try:
        for datasource in datasource_table:
            # Simple caching: drop the previous datasource's parsed feed
            if fd:
                fd.close()
                fd = None
                source_dct = []
            # Find at least one CVE that is in this datasource, and always scan the Modified source
            has_matches = False
            for cve_name in cve_list:
                if datasource[DS_MODIFIED_SOURCE] or cve_name.startswith(datasource[DS_CVEFILTER]):
                    has_matches = True
                    break
            if not has_matches:
                continue
            # Find the CVEs in this datasource

            # bin/nist/srtool_nist.py --file=data/nvdcve-1.0-2002.json %command%
            cve_source_file = datasource[DS_SOURCE_FILE]
            if verbose: print("NIST_SOURCE:%s %s" % (cve_source_file,cve_name))
            try:
                if not fd:
                    # Simple caching
                    fd = open(os.path.join(srtool_basepath, cve_source_file), 'r')
                    source_dct = json.load(fd)
                for item in source_dct["CVE_Items"]:
                    # Defensively skip records missing the expected NIST JSON keys
                    if not 'cve' in item:
                        continue
                    if not 'CVE_data_meta' in item['cve']:
                        continue
                    if not 'ID' in item['cve']['CVE_data_meta']:
                        continue

                    # Iterate over a COPY so the Modified pass can safely remove
                    # found CVEs from the main list while we loop (the original
                    # aliased the list, mutating the sequence mid-iteration)
                    cve_list_local = list(cve_list)
                    for cve_name in cve_list_local:
                        if item['cve']['CVE_data_meta']['ID'] == cve_name:
                            if verbose: print(" NIST_TRANSLATE:%s %s" % (cve_source_file,cve_name))

                            # If found in the Modified List, remove it from further consideration by regular sources
                            if datasource[DS_MODIFIED_SOURCE]:
                                cve_list.remove(cve_name)
                                if verbose: print(" NIST_FOUND_MODIFIED_REMOVE_NAME:%s" % cve_name)

                            # Translate the CVE content
                            summary = CVE_ItemToSummary(item,True)
                            # Commit the CVE content
                            cve_id, is_change = sql_cve_query(action, conn, summary, None)
                            if is_change:
                                update = True

                            # First, remove all existing (potentially obsolete) NIST datasources to CVE
                            sql = '''SELECT * FROM orm_cvesource WHERE cve_id=?'''
                            for cve2ds in SQL_EXECUTE(cur, sql, (cve_id, )):
                                if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in datasource_nist_ids:
                                    sql = 'DELETE FROM orm_cvesource WHERE id=?'
                                    SQL_EXECUTE(cur, sql, (cve2ds[ORM.CVESOURCE_ID],))
                                    if verbose: print(" NIST_REMOVE_OLDSOURCE:%s" % (cve2ds[ORM.CVESOURCE_DATASOURCE_ID]))
                            # Second, add found NIST datasource to CVE
                            sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
                            SQL_EXECUTE(cur, sql, (cve_id,datasource[DS_ID],))
                            # Note, CVE top record was updated with found values (NIST wins over other sources)
                            # when sql_cve_query() executed

                            if verbose: print(" NIST_QUERIED:%s %s" % (cve_source_file,cve_name))
            except Exception as e:
                print("Description=ERROR CVE list load '%s':%s" % (cve_source_file,e))
                print(traceback.format_exc())
                return
    finally:
        # Always release the last opened feed file
        # (the original leaked it after the loop and on the error return)
        if fd:
            fd.close()

    if update:
        SQL_COMMIT(conn)
    SQL_CLOSE_CUR(cur)
    if do_close:
        SQL_CLOSE_CONN(conn)
-def nist_scan_configuration_or(cpe_or_node, name, and_enum):
- cpe_list = '[or]|'
- found = 0
- if 'cpe' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe')
- found += 1
- if 'cpe_match' in cpe_or_node:
- if verbose: print("NOTE:NIST_SCAN_CONFIGURATION_OR:cpe_match")
- cpe_list += do_nist_scan_configuration_or(cpe_or_node, name, and_enum,'cpe_match')
- found += 1
- cpe_list += '[/or]|'
def update_existing_cves(action,cve_prefix):
    """Refresh all existing orm_cve records whose name starts with cve_prefix.

    action     : database action passed through to update_cve_list()
    cve_prefix : CVE name prefix, e.g. 'CVE-2014' (matched with SQL LIKE)

    Sends matching CVE names to update_cve_list() in batches of 20 sharing
    one database connection. Honors the global cmd_skip/cmd_count debug
    limits.
    """
    # Set up database connection
    conn = SQL_CONNECT()
    cur = conn.cursor()

    # Gather matching CVE names; parameterized query instead of string
    # concatenation (the original interpolated cve_prefix into the SQL)
    sql = "SELECT * FROM orm_cve WHERE name LIKE ?"
    SQL_EXECUTE(cur, sql, (cve_prefix + '%',))
    cve_table = []
    i = 0
    for cve in cur:
        i += 1
        # Development/debug support
        if cmd_skip and (i < cmd_skip): continue
        if cmd_count and ((i - cmd_skip) > cmd_count): break

        if verbose: print("FOUND:%s" % cve[ORM.CVE_NAME])
        cve_table.append(cve[ORM.CVE_NAME])

        # Flush a batch of 20 names to bound the comma-list size
        if 19 == (i % 20):
            print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
            update_cve_list(action,','.join(cve_table),conn)
            cve_table = []

    # Flush any remaining partial batch
    if cve_table:
        print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
        update_cve_list(action,','.join(cve_table),conn)

    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+
+#######################################################################
# main loop
#
def main(argv):
global verbose
+ global force_update
+ global force_cache
+ global update_skip_history
+ global cmd_skip
+ global cmd_count
+
parser = argparse.ArgumentParser(description='srtool_cve.py: manage the CVEs within SRTool database')
parser.add_argument('--init_nist', '-I', action='store_const', const='init_nist', dest='command', help='Initialize nvd.nist.gov/vuln/data-feeds for a specified datasource')
parser.add_argument('--update_nist', '-n', action='store_const', const='update_nist', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates on a specified datasource')
+ parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
+ parser.add_argument('--download-only', action='store_const', const='download_nist', dest='command', help='Download the NIST source CVE file(s), load CVEs on demand only')
+ parser.add_argument('--update-cve-list', '-l', dest='update_cve_list', help='Update list of CVEs to database')
+ parser.add_argument('--update-existing-cves', '-L', dest='update_existing_cves', help='Update list of existing CVEs to database')
+
+ parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+ parser.add_argument('--cve-summary', '-S', dest='cve_summary', help='Quick summary of CVE data [[cvename|cve_id]*|ask]')
+
parser.add_argument('--source', dest='source', help='Local CVE source file')
parser.add_argument('--url-file', dest='url_file', help='CVE URL extension')
parser.add_argument('--url-meta', dest='url_meta', help='CVE URL meta extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
- parser.add_argument('--update_nist_incremental', '-i', action='store_const', const='update_nist_incremental', dest='command', help='Check nvd.nist.gov/vuln/data-feeds for updates')
- parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Lookup CVE data')
+
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
- parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Force update')
+ parser.add_argument('--force-cache', action='store_true', dest='force_cache', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
+
verbose = args.verbose
+ force_update = args.force_update
+ force_cache = args.force_cache
+ update_skip_history = args.update_skip_history
+ cmd_skip = 0
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ cmd_count = 0
+ if None != args.count:
+ cmd_count = int(args.count)
+ if args.debug_sql:
+ SQL_DEBUG(True,'NST')
+ progress_set_on(args.do_progress)
#srt_error_log("DEBUG:srtool_nist:%s" % args)
+ # Update CVE list
+ if args.update_cve_list:
+ update_cve_list(ACTION_UPDATE_CVE,args.update_cve_list)
+ return
+ elif args.update_existing_cves:
+ update_existing_cves(ACTION_UPDATE_CVE,args.update_existing_cves)
+ return
+ elif args.cve_summary:
+ if 'ask' == args.cve_summary.lower():
+ print("Next CVE [name|id]: ",end='')
+ cve = input()
+ while cve:
+ cve_summary(cve)
+ print("Next CVE [name|id]: ",end='')
+ cve = input()
+ else:
+ for cve in args.cve_summary.split(','):
+ cve_summary(args.cve_summary)
+ return
+
# Required parameters to continue
if not args.cve_file:
print("ERROR: missing --cve_file parameter")
@@ -693,30 +1328,49 @@ def main(argv):
ret = 0
if ('init_nist' == args.command) or ('update_nist' == args.command):
- is_init = ('init_nist' == args.command)
+ if ('init_nist' == args.command):
+ action = ACTION_INIT
+ else:
+ action = ACTION_UPDATE
try:
- print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % ('INIT' if is_init else 'UPDATES'))
- update_nist(is_init, args.source, args.url_file, args.url_meta, args.cve_file, False, args.force_update)
- master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, "INIT'ED" if is_init else 'UPDATED'))
- print("DATABASE %s FINISHED\n" % ('INIT' if is_init else 'UPDATE'))
+ print ("BEGINNING NIST %s PLEASE WAIT ... this can take some time" % action)
+ update_nist(action, args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:%s Done:\t\t\t...\t\t\t%s\n" % (date.today(), args.source, action))
+ print("DATABASE %s FINISHED\n" % action)
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE %s FAILED ... %s" % ('INIT' if is_init else 'UPDATE',e))
+ print("ERROR:DATABASE %s FAILED ... %s" % (action,e))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
+ log_error("ERROR:NIST:DATABASE %s FAILED ... '%s'" % (action,e))
ret = 1
elif 'update_nist_incremental' == args.command:
try:
- print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
- update_nist(False,args.source, args.url_file, args.url_meta, args.cve_file, True, args.force_update)
+ print ("BEGINNING NIST INCREMENTAL UPDATE PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_INCREMENT,args.source, args.url_file, args.url_meta, args.cve_file)
master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
print("DATABASE UPDATE FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE INCREMENT FAILED ... %s" % e)
+ print("ERROR:DATABASE INCREMENT FAILED ... %s" % e)
+ print(" :%s" % traceback.format_exc())
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
+ log_error("ERROR:NIST:DATABASE INCREMENT %s FAILED ... '%s'" % (action,e))
ret = 1
+ elif 'download_nist' == args.command:
+ print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
+ update_nist(ACTION_DOWNLOAD,args.source, args.url_file, args.url_meta, args.cve_file)
+ master_log.write("SRTOOL:%s:'NIST JSON Modified Data':\t\t\t...\t\t\tUPDATED\n" % date.today())
+ print("DATABASE UPDATE FINISHED\n")
+ print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
else:
ret = 1
print("Command not found")
master_log.close()
+ # Dump the SQL transaction data
+ if args.debug_sql:
+ SQL_DUMP()
if 0 != ret:
exit(ret)
diff --git a/bin/redhat/srtool_redhat.py b/bin/redhat/srtool_redhat.py
index f570965c..32aa12f1 100755
--- a/bin/redhat/srtool_redhat.py
+++ b/bin/redhat/srtool_redhat.py
@@ -31,11 +31,13 @@ import sys
import re
import json
import argparse
+from datetime import datetime, date, timedelta
from urllib.request import urlopen, Request
# Setup:
srtDbName = 'srt.sqlite'
+REDHAT_STALE_DAYS = 4
redhat_cache_dir = 'data/cache/redhat'
redhat_cve_url = "https://access.redhat.com/labs/securitydataapi/cve"
@@ -64,6 +66,12 @@ def get_override(key):
return 'yes' == overrides[key]
return False
+srtErrorLog = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srtool_dbg.log'
+def _log(msg):
+ f1=open(srtErrorLog, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
#################################
# Fetch a CVE record from Red Hat
# REST API, cache the results
@@ -109,9 +117,22 @@ def fetch_cve(cve_name):
except:
pass
- if not os.path.isfile(datasource_file):
- if verbose: print("REDHAT:URLOPEN:%s" % datasource_url)
+ if os.path.isfile(datasource_file):
+ # See if the cache file is stale
+ now = datetime.now()
+ file_time = datetime.fromtimestamp(os.path.getmtime(datasource_file))
+ days_diff = (now - file_time).days
+ if days_diff > REDHAT_STALE_DAYS:
+ os.remove(datasource_file)
+ _log("REDHAT_STALE:%s:%s" % (days_diff,datasource_file))
+ if os.path.isfile(datasource_file):
+ # Use cached CVE file
+ if verbose: print("REDHAT:CACHE:%s" % datasource_file)
+ with open(datasource_file) as json_data:
+ dct = json.load(json_data)
+ if verbose: print("REDHAT:URLOPEN:%s" % datasource_url)
+ else:
# Fetch and/or refresh upstream CVE file
# NOTE: Setting a known browser user agent to accomodate mod_security or some similar server security feature,
# which blocks known spider/bot user agents at the Red Hat site
@@ -133,11 +154,6 @@ def fetch_cve(cve_name):
# Cache the record
datasource_file_fd = open(datasource_file, 'w+')
datasource_file_fd.write(json.dumps(dct))
- else:
- # Use cached CVE file
- if verbose: print("REDHAT:CACHE:%s" % datasource_file)
- with open(datasource_file) as json_data:
- dct = json.load(json_data)
extract_json(dct,'',10)
# for key in summary.keys():
@@ -180,7 +196,9 @@ def main(argv):
parser = argparse.ArgumentParser(description='srtool_redhat.py: manage Red Hat CVE data')
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
args = parser.parse_args()
diff --git a/bin/srt b/bin/srt
index ac358d43..ddc3b6a7 100755
--- a/bin/srt
+++ b/bin/srt
@@ -3,7 +3,7 @@
# SRTool - shell script to start "Security Response Tool"
# Copyright (C) 2013-2015 Intel Corp.
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2023 Wind River Systems
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -23,9 +23,22 @@ Usage 1: bin/srt start|stop [webport=<address:port>]
Optional arguments:
[webport] Set the SRTool server port (default: localhost:8000)
[noautoupdate] Disable the auto update server
-Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|checksettings|collectstatic|...]
+ [start_update] Start only the update server (when main server is service/SSL)
+Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|makemigrations|checksettings|collectstatic|...]
"
+preset_basic_directories()
+{
+ # create working directories for srtool
+ mkdir -p $SRT_BASE_DIR/backups
+ mkdir -p $SRT_BASE_DIR/data
+ mkdir -p $SRT_BASE_DIR/data/cache
+ mkdir -p $SRT_BASE_DIR/logs
+ mkdir -p $SRT_BASE_DIR/reports
+ mkdir -p $SRT_BASE_DIR/update_logs
+ touch $SRT_BASE_DIR/update_logs/master_log.txt
+}
+
databaseCheck()
{
retval=0
@@ -48,18 +61,31 @@ databaseCheck()
}
get_srt_env_settings() {
+ echo "## Inherited SRT environment settings" > $ENV_FILE
+ echo "SRT_BASE_DIR=$SRT_BASE_DIR" >> $ENV_FILE
+ echo "SRTDBG_LOG=$SRTDBG_LOG" >> $ENV_FILE
+ if [ -z "$TZ" ] ; then
+ export TZ=America/Chicago
+ echo "TZ=America/Chicago" >> $ENV_FILE
+ fi
+
mainapp="yp"
# Apply all shell settings except default app 'yp'
# Only look in directories with proper 'datasource.json' files
- for envscript in $(find ./bin -name "datasource.json") ; do
- envscript=${envscript/datasource.json/srtool_env.sh}
+ for envscript in $(find $SRT_BASE_DIR/bin -name "datasource.json") ; do
+ envscript="${envscript/datasource.json/srtool_env.sh}"
if [ -f "$envscript" -a "$envscript" = "${envscript/bin\/yp/}" ] ; then
. $envscript
+ echo "## Inherit: $envscript" >> $ENV_FILE
+ cat $envscript >> $ENV_FILE
fi
done
# if no main app, default to 'yp'
if [ -z "$SRT_MAIN_APP" ] ; then
- . ./bin/yp/srtool_env.sh
+ envscript="$SRT_BASE_DIR/bin/yp/srtool_env.sh"
+ . $envscript
+ echo "## Inherit: $envscript" >> $ENV_FILE
+ cat $envscript >> $ENV_FILE
fi
echo "SRT_MAIN_APP=$SRT_MAIN_APP"
}
@@ -79,10 +105,8 @@ webserverKillAll()
fi
done
- # Stop the Update app
- if [ 0 -eq $no_auto_update ] ; then
- ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
- fi
+ # Stop the Update app (even if start was disabled)
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
}
webserverStartAll()
@@ -99,9 +123,10 @@ webserverStartAll()
databaseCheck || return 1
echo "Starting SRTool webserver..."
+ echo "RUN: $MANAGE runserver --noreload $ADDR_PORT </dev/null >> ${SRT_BASE_DIR}/srt_web.log 2>&1 & echo \$! >${SRT_BASE_DIR}/.srtmain.pid"
$MANAGE runserver --noreload "$ADDR_PORT" \
- </dev/null >>${SRT_BASE_DIR}/srt_web.log 2>&1 \
+ </dev/null >> ${SRT_BASE_DIR}/srt_web.log 2>&1 \
& echo $! >${SRT_BASE_DIR}/.srtmain.pid
sleep 1
@@ -116,12 +141,24 @@ webserverStartAll()
# Start the Update app
if [ 0 -eq $no_auto_update ] ; then
- ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start > /dev/null 2>&1 &
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start $UPDATE_FOLLOW_PID_FILE >> ${SRT_BASE_DIR}/srt_update.log 2>&1 &
echo "SRTool update service started at PID $!"
fi
return $retval
}
+update_start_all()
+{
+ # Start the Update app
+ if [ 0 -eq $no_auto_update ] ; then
+ echo " First stop any running updater"
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
+ echo " Now (re)start updater"
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start $UPDATE_FOLLOW_PID_FILE -v >> ${SRT_BASE_DIR}/srt_update.log 2>&1 &
+ echo " SRTool update service started at PID $!"
+ fi
+}
+
INSTOPSYSTEM=0
# define the stop command
@@ -216,19 +253,23 @@ export SRT_BASE_DIR=$(dirname $SRT)
SRT_BASE_DIR=$(readlink -f $SRT_BASE_DIR)
SRT_BASE_DIR=$(dirname $SRT_BASE_DIR)
MANAGE="python3 $SRT_BASE_DIR/lib/manage.py"
+ENV_FILE=$SRT_BASE_DIR/.env_vars.env
+
+# Pre-set the local database configuration file if not yet done
+if [ ! -f "$SRT_BASE_DIR/srt_dbconfig.yml" ] ; then
+ cp "$SRT_BASE_DIR/bin/srt_dbconfig.yml" "$SRT_BASE_DIR/srt_dbconfig.yml"
+fi
-# Fetch the datasource environent settings
+# Fetch the datasource environment settings and copy current environment variables to txt for wsgi.py to read
get_srt_env_settings
# insure basic directories are present
-mkdir -p $SRT_BASE_DIR/data
-mkdir -p $SRT_BASE_DIR/data/cache
-mkdir -p $SRT_BASE_DIR/update_logs
-touch $SRT_BASE_DIR/update_logs/master_log.txt
+preset_basic_directories
ADDR_PORT="localhost:8000"
unset CMD
manage_cmd=""
+UPDATE_FOLLOW_PID_FILE=""
if [ "1" = "$SRT_SKIP_AUTOUPDATE" ] ; then
no_auto_update=1
else
@@ -245,6 +286,15 @@ for param in $*; do
manage )
CMD=$param
;;
+ export_env )
+ CMD=$param
+ ;;
+ start_update )
+ CMD=$param
+ ;;
+ update_follow_pid=*)
+ UPDATE_FOLLOW_PID_FILE="--follow-pid-file=${param#*=}"
+ ;;
webport=*)
ADDR_PORT="${param#*=}"
# Split the addr:port string
@@ -307,10 +357,6 @@ case $CMD in
echo "Failed ${CMD}."
exit 4
fi
- # create working directories for srtool
- mkdir -p $SRT_BASE_DIR/update_logs
- mkdir -p $SRT_BASE_DIR/backups
- mkdir -p $SRT_BASE_DIR/reports
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
@@ -325,6 +371,17 @@ case $CMD in
$MANAGE $manage_cmd
;;
+ export_env )
+ echo "#export variables"
+ cat $ENV_FILE
+ exit 0
+ ;;
+
+ start_update )
+ echo "start update service"
+ update_start_all
+ echo "update service started"
+ ;;
esac
diff --git a/bin/srt_dbconfig.yml b/bin/srt_dbconfig.yml
new file mode 100644
index 00000000..5d3c7c6b
--- /dev/null
+++ b/bin/srt_dbconfig.yml
@@ -0,0 +1,41 @@
+dbselect: sqlite_prodution
+
+sqlite_prodution:
+ dbtype: sqlite
+ path: srt.sqlite
+
+sqlite_development:
+ dbtype: sqlite
+ path: srt_dev.sqlite
+
+postgres_prodution:
+ dbtype: postgres
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool
+ port: 5432
+
+postgres_development:
+ dbtype: postgres
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool_dev
+ port: 5432
+
+mysql_prodution:
+ dbtype: mysql
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool
+ port: 3306
+
+mysql_development:
+ dbtype: mysql
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool_dev
+ port: 3306
diff --git a/bin/srtool-requirements.txt b/bin/srtool-requirements.txt
index ab2d4895..2dc9517b 100755
--- a/bin/srtool-requirements.txt
+++ b/bin/srtool-requirements.txt
@@ -1,3 +1,11 @@
-Django>1.11.1,<2.3
+Django==4.0
pytz
requests
+jira
+pyyaml
+progress
+pick
+openpyxl
+python-dotenv
+#psycopg2==2.8.6
+#mysqlclient
diff --git a/bin/ubuntu_trivy/datasource.json b/bin/ubuntu_trivy/datasource.json
new file mode 100755
index 00000000..9b58d88e
--- /dev/null
+++ b/bin/ubuntu_trivy/datasource.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0041-ubuntu-trivy",
+ "data" : "cve",
+ "source" : "UBUNTU_Trivy",
+ "name" : "UBUNTU_Trivy",
+ "description" : "Ubuntu Trivy Repo",
+ "attributes" : "ALT-SOURCE",
+ "cve_filter" : "CVE-",
+ "init" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py --initialize",
+ "update" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py --update",
+ "lookup" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/ubuntu_trivy/license.txt b/bin/ubuntu_trivy/license.txt
new file mode 100755
index 00000000..f917c53d
--- /dev/null
+++ b/bin/ubuntu_trivy/license.txt
@@ -0,0 +1,4 @@
+[ Ubuntu ]
+
+The ubuntu-cve is PUBLIC.
+
diff --git a/bin/ubuntu_trivy/srtool_ubuntu_trivy.py b/bin/ubuntu_trivy/srtool_ubuntu_trivy.py
new file mode 100755
index 00000000..619c3bf3
--- /dev/null
+++ b/bin/ubuntu_trivy/srtool_ubuntu_trivy.py
@@ -0,0 +1,295 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2013 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the ubuntu_trivy based CVE data
+#
+
+import os
+import sys
+import argparse
+import shutil
+import subprocess
+import time
+from datetime import datetime
+from urllib.request import urlopen
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+ubuntu_trivy_cve_url = 'git://git.launchpad.net/ubuntu-cve-tracker'
+ubuntu_trivy_repo_dir = 'data/ubuntu_trivy/ubuntu-cve-tracker'
+ubuntu_trivy_cve_dir = 'data/ubuntu_trivy/ubuntu-cve-tracker'
+ubuntu_trivy_cve_subdir = ('active','ignored','retired')
+
+# Globals
+verbose = False
+
+#################################
+# Helper Functions
+#
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*cmd_list):
+    # Run 'cmd_list' (argv-style strings) as a subprocess and return a
+    # (returncode, stdout, stderr) tuple, both streams decoded as UTF-8.
+    result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    # Return the value of the SrtSetting row named 'key', or 'default_value'
+    # if the row is missing or the lookup fails.
+    # 'is_dict' selects row access by column name (dict-style cursor rows)
+    # versus by ORM column index (plain tuple rows).
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        # Best effort: report the error and fall through to the default
+        print(f"ERROR:{e}")
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+    # Insert or update the SrtSetting row named 'key' with 'value'.
+    # 'is_dict' selects row access by column name (dict-style cursor rows)
+    # versus by ORM column index (plain tuple rows).
+    cur = SQL_CURSOR(conn)
+    # Set the key value for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+    if not srtsetting:
+        sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+        SQL_EXECUTE(cur, sql, (key,'',value))
+        if verbose: print(f"INSERT:{key}:{value}:")
+    else:
+        # BUGFIX: the debug print was a plain literal (missing 'f' prefix);
+        # also pick the row-id accessor per 'is_dict', since indexing a
+        # dict-style row with the ORM integer index would raise
+        if verbose:
+            rowid = srtsetting['id'] if is_dict else srtsetting[ORM.SRTSETTING_ID]
+            print(f"UPDATE[{rowid}]:{key}:{value}:")
+        sql = ''' UPDATE orm_srtsetting
+            SET value=?
+            WHERE id=?'''
+        if is_dict:
+            SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+        else:
+            SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+    SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+    # Change the process working directory, then pause briefly.
+    os.chdir(newdir)
+    # WARNING: we need a pause else the chdir will break
+    # subsequent commands (e.g. 'git clone' and 'git checkout')
+    time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+    # Create 'newdir' (including parents), then pause briefly.
+    # BUGFIX: the bare 'except: pass' intended only to tolerate an existing
+    # directory, but it also hid real failures (e.g. permissions);
+    # exist_ok=True expresses the intent without masking other errors.
+    os.makedirs(newdir, exist_ok=True)
+    # WARNING: we need a pause else the makedirs could break
+    # subsequent commands (e.g. 'git clone' and 'git checkout')
+    time.sleep(delay)
+
+def execute_commmand(cmnd,path=''):
+    # Run 'cmnd' (argv list) via execute_process, optionally from directory
+    # 'path' (restoring the original cwd afterwards).
+    # Returns 1 on failure, None on success (callers ignore the value).
+    cwd = os.getcwd()
+    if path:
+        do_chdir(path,delay=1.0)
+    result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+    if 0 != result_returncode:
+        print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+        # BUGFIX: missing 'f' prefix meant the placeholders printed verbatim
+        print(f"ERROR({result_returncode}):{result_stderr}")
+        # BUGFIX: restore the original cwd on the failure path too
+        if path:
+            do_chdir(cwd,delay=1.0)
+        return(1)
+    if verbose:
+        print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+    if path:
+        do_chdir(cwd,delay=1.0)
+
+# For Jobs, with captured output
+# NOTE(review): os.system itself captures nothing; presumably the Job
+# runner captures this process's stdout/stderr - confirm
+def execute_system(cmnd):
+    # Run 'cmnd' through the shell from SRT_BASE_DIR (re:WSGI context),
+    # returning the os.system exit status.
+    srt_base_dir = os.environ.get('SRT_BASE_DIR')
+    if srt_base_dir and (srt_base_dir != os.getcwd()):
+        os.chdir(srt_base_dir)
+    return os.system(cmnd)
+
+# Ensure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+    # Clone 'repo_url' into 'repo_dir' if not already present, optionally
+    # check out 'branch', then pull to pick up the latest data.
+    repo = os.path.basename(repo_url)
+    if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+    if not os.path.isdir(repo_dir):
+        # Clone into the repo's parent directory
+        repo_parent_dir = os.path.dirname(repo_dir)
+        do_makedirs(repo_parent_dir)
+        if verbose: print(f"= Clone '{repo}' ... =")
+        cmnd=['git','clone',repo_url]
+        execute_commmand(cmnd,repo_parent_dir)
+    else:
+        if verbose: print(f"= Clone '{repo}' skip ... =")
+
+    if branch:
+        # BUGFIX: missing 'f' prefix; '{branch}' was printed verbatim
+        if verbose: print(f"= Checkout branch '{branch}' ... =")
+        cmnd=['git','-C',repo_dir,'checkout',branch]
+        execute_commmand(cmnd)
+
+    # Get the latest data with a safety pull
+    if verbose: print("= Pull ... =")
+    cmnd=['git','-C',repo_dir,'pull']
+    execute_commmand(cmnd)
+
+#################################
+# Initialize and/or refresh the Ubuntu Trivy repo
+#
+
+def init_ubuntu_trivy():
+    # Clone the Ubuntu CVE tracker repo on first use; thereafter refresh it
+    # (git pull) at most once per day, tracked by stamping today's date into
+    # the 'UBUNTU_TRIVY_UPDATE' SrtSetting.
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+    today = datetime.now().strftime("%Y-%m-%d")
+    repo_update = srtsetting_get(conn,"UBUNTU_TRIVY_UPDATE","")
+
+    if not os.path.isdir(ubuntu_trivy_cve_dir):
+        # First use: clone. The date stamp is not set here, so the next
+        # call will still pull and then record the date.
+        prepare_git(ubuntu_trivy_repo_dir,ubuntu_trivy_cve_url,'')
+    elif today != repo_update:
+        prepare_git(ubuntu_trivy_repo_dir,ubuntu_trivy_cve_url,'')
+        srtsetting_set(conn,"UBUNTU_TRIVY_UPDATE",today)
+
+    SQL_COMMIT(conn)
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# Fetch a CVE record from ubuntu_trivy
+# REST API, cache the results
+#
+
+def fetch_cve(cve_name):
+    # Print a "description=..." record for 'cve_name', assembled from the
+    # Patches_* sections of the matching Ubuntu CVE tracker file, searched
+    # across the active/ignored/retired subdirectories.
+    # Refresh the repo if needed
+    init_ubuntu_trivy()
+
+    msg = 'description='
+    found = False
+    stop_after_linux = False
+    for subdir in ubuntu_trivy_cve_subdir:
+        datasource_file = os.path.join(srtool_basepath,ubuntu_trivy_cve_dir,subdir,cve_name)
+        if os.path.isfile(datasource_file):
+            with open(datasource_file, 'r') as fp:
+                patches_found = False
+                for line_patch in fp:
+
+                    line_patch = line_patch.strip()
+                    # Copy every line from the first 'Patches_' header onward
+                    if line_patch.startswith('Patches_'):
+                        patches_found = True
+                    if patches_found:
+                        msg += f"{line_patch}[EOL]"
+                    # For kernel, only accept the first section
+                    if line_patch.startswith('Patches_linux:'):
+                        stop_after_linux = True
+                    # A blank line ends the Patches_linux section
+                    if stop_after_linux and (not line_patch):
+                        break
+            found = True
+            break
+    if not found:
+        msg += 'Ubuntu Trivy record not found.'
+
+    print(msg)
+
+#################################
+# comparibles
+#
+#
+
+def comparibles(cve_list_file):
+    # For each CVE name (one per line) in 'cve_list_file', print a
+    # "<cve_name>||<patch data>" line extracted from the Ubuntu CVE tracker
+    # records (same Patches_* extraction as fetch_cve). Relative paths are
+    # resolved against srtool_basepath.
+    # Returns 1 (with a message on stderr) if the list file is missing.
+    # NOTE(review): unlike fetch_cve(), this does not refresh the repo
+    # first - confirm that is intended.
+    if not cve_list_file.startswith('/'):
+        cve_list_file = os.path.join(srtool_basepath,cve_list_file)
+    if not os.path.isfile(cve_list_file):
+        print(f"ERROR: missing CVE list file '{cve_list_file}'", file=sys.stderr)
+        return(1)
+    with open(cve_list_file, 'r') as list_fp:
+        for line in list_fp:
+            msg = ''
+            cve_name = line.strip()
+            found = False
+            stop_after_linux = False
+            for subdir in ubuntu_trivy_cve_subdir:
+                datasource_file = os.path.join(srtool_basepath,ubuntu_trivy_cve_dir,subdir,cve_name)
+                if not os.path.isfile(datasource_file):
+                    continue
+                # BUGFIX: the record file handle no longer shadows the list
+                # file handle (both were named 'fp')
+                with open(datasource_file, 'r') as record_fp:
+                    patches_found = False
+                    for line_patch in record_fp:
+                        line_patch = line_patch.strip()
+                        if line_patch.startswith('Patches_'):
+                            patches_found = True
+                        if patches_found:
+                            msg += f"{line_patch}[EOL]"
+                        # For kernel, only accept the first section
+                        if line_patch.startswith('Patches_linux:'):
+                            stop_after_linux = True
+                        if stop_after_linux and (not line_patch):
+                            break
+                found = True
+                break
+            if not found:
+                msg = 'Ubuntu Trivy record not found.'
+            print(f"{cve_name}||{msg}")
+
+#################################
+# main loop
+#
+
+def main(argv):
+    # Command-line entry point: parse arguments and dispatch to the
+    # init/update/cve-detail/comparibles handlers.
+    # NOTE(review): 'argv' is unused; parse_args() reads sys.argv - confirm
+    global verbose
+
+    # setup
+
+    parser = argparse.ArgumentParser(description='srtool_ubuntu_trivy.py: manage ubuntu_trivy CVE data')
+    parser.add_argument('--initialize', '-i', action='store_const', const='init_ubuntu_trivy', dest='command', help='Download the Ubuntu Trivy repo')
+    parser.add_argument('--update', '-u', action='store_const', const='update_ubuntu_trivy', dest='command', help='Update the Ubuntu Trivy repo')
+    parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
+    parser.add_argument('--comparibles', dest='comparibles', help='Return ubuntu-trivy data for list of CVEs')
+
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
+    args = parser.parse_args()
+
+    if args.is_verbose:
+        verbose = True
+
+    if 'init_ubuntu_trivy' == args.command:
+        init_ubuntu_trivy()
+    elif 'update_ubuntu_trivy' == args.command:
+        # No difference from init at this time
+        init_ubuntu_trivy()
+    elif args.cve_detail:
+        fetch_cve(args.cve_detail)
+    elif args.comparibles:
+        comparibles(args.comparibles)
+
+    else:
+        print("Command not found")
+
+if __name__ == '__main__':
+    # srtool_basepath = the SRT base directory (three levels above this script)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    main(sys.argv[1:])
diff --git a/bin/wr_trivy/datasource.json b/bin/wr_trivy/datasource.json
new file mode 100755
index 00000000..a0150e0f
--- /dev/null
+++ b/bin/wr_trivy/datasource.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0040-wr-trivy",
+ "data" : "cve",
+ "source" : "WR_Trivy",
+ "name" : "WR_Trivy",
+ "description" : "Wind River Trivy Repo",
+ "attributes" : "ALT-SOURCE",
+ "cve_filter" : "CVE-",
+ "init" : "bin/wr_trivy/srtool_wr_trivy.py --initialize",
+ "update" : "bin/wr_trivy/srtool_wr_trivy.py --update",
+ "lookup" : "bin/wr_trivy/srtool_wr_trivy.py %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/wr_trivy/license.txt b/bin/wr_trivy/license.txt
new file mode 100755
index 00000000..4a6aaab3
--- /dev/null
+++ b/bin/wr_trivy/license.txt
@@ -0,0 +1,4 @@
+[ Wind River ]
+
+The wrlinux-cve is PUBLIC.
+
diff --git a/bin/wr_trivy/srtool_wr_trivy.py b/bin/wr_trivy/srtool_wr_trivy.py
new file mode 100755
index 00000000..64b78a65
--- /dev/null
+++ b/bin/wr_trivy/srtool_wr_trivy.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2013 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the wr_trivy based CVE data
+#
+
+import os
+import sys
+import argparse
+import shutil
+import subprocess
+import time
+from datetime import datetime
+from urllib.request import urlopen
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+#wr_trivy_cve_url_file = 'https://salsa.wr_trivy.org/security-tracker-team/security-tracker/blob/master/data/CVE/list'
+wr_trivy_cve_url = 'https://distro.windriver.com/git/windriver-cve-tracker.git'
+wr_trivy_repo_dir = 'data/wr_trivy/windriver-cve-tracker'
+wr_trivy_cve_dir = 'data/wr_trivy/windriver-cve-tracker/active'
+
+# Globals
+verbose = False
+
+#################################
+# Helper Functions
+#
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*cmd_list):
+    """Run 'cmd_list' as a sub-process; return (rc, stdout_text, stderr_text)."""
+    result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    """Return the value of SrtSetting 'key', else 'default_value'.
+
+    'is_dict' selects dict-style row access versus positional access
+    through the ORM schema indexes.
+    """
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        print(f"ERROR:{e}")
+    # Fall through when the key is absent or the query failed
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Set the key value for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if not srtsetting:
+ sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+ SQL_EXECUTE(cur, sql, (key,'',value))
+ if verbose: print(f"INSERT:{key}:{value}:")
+ else:
+ if verbose: print("UPDATE[{srtsetting[ORM.SRTSETTING_ID]}]:{key}:{value}:")
+ sql = ''' UPDATE orm_srtsetting
+ SET value=?
+ WHERE id=?'''
+ if is_dict:
+ SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+ else:
+ SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+ SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+    """Change the working directory to 'newdir', then pause briefly."""
+    os.chdir(newdir)
+    # WARNING: we need a pause else the chdir will break
+    # subsequent commands (e.g. 'git clone' and 'git checkout')
+    time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+ try:
+ os.makedirs(newdir)
+ except:
+ # dir already exists
+ pass
+ # WARNING: we need a pause else the makedirs could break
+ # susequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def execute_commmand(cmnd,path=''):
+ cwd = os.getcwd()
+ if path:
+ do_chdir(path)
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+ if 0 != result_returncode:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ print("ERROR({result_returncode}):{result_stderr}")
+ return(1)
+ if verbose:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ if path:
+ do_chdir(cwd)
+
+# Insure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+ repo = os.path.basename(repo_url)
+ if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+ if not os.path.isdir(repo_dir):
+ # Clone into the repo's parent directory
+ repo_parent_dir = os.path.dirname(repo_dir)
+ do_makedirs(repo_parent_dir)
+ if verbose: print(f"= Clone '{repo}' ... =")
+ cmnd=['git','clone',repo_url]
+ execute_commmand(cmnd,repo_parent_dir)
+ else:
+ if verbose: print(f"= Clone '{repo}' skip ... =")
+
+ if branch:
+ if verbose: print("= Checkout branch '{branch}' ... =")
+ cmnd=['git','-C',repo_dir,'checkout',branch]
+ execute_commmand(cmnd)
+
+ # Get the latest data with a safety pull
+ if verbose: print("= Pull ... =")
+ cmnd=['git','-C',repo_dir,'pull']
+ execute_commmand(cmnd)
+
+#################################
+# Initialize and/or refresh the Wind River Trivy repo
+#
+
+def init_wr_trivy():
+    """Clone the Wind River Trivy CVE repo on first use, and refresh it
+    at most once per day thereafter (tracked via the WR_TRIVY_UPDATE
+    setting)."""
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+    today = datetime.now().strftime("%Y-%m-%d")
+    repo_update = srtsetting_get(conn,"WR_TRIVY_UPDATE","")
+
+    if not os.path.isdir(wr_trivy_cve_dir):
+        # First-time clone
+        # NOTE(review): this branch does not record WR_TRIVY_UPDATE, so
+        # the next call today will pull again -- confirm if intentional
+        prepare_git(wr_trivy_repo_dir,wr_trivy_cve_url,'')
+    elif today != repo_update:
+        # Refresh at most once per day
+        prepare_git(wr_trivy_repo_dir,wr_trivy_cve_url,'')
+        srtsetting_set(conn,"WR_TRIVY_UPDATE",today)
+
+    SQL_COMMIT(conn)
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# Fetch a CVE record from wr_trivy
+# REST API, cache the results
+#
+
+def fetch_cve(cve_name):
+ # Refresh the repo if needed
+ init_wr_trivy()
+
+ msg = 'description='
+ datasource_file = os.path.join(srtool_basepath,wr_trivy_cve_dir,cve_name)
+ if os.path.isfile(datasource_file):
+ with open(datasource_file, 'r') as fp:
+ for line in fp:
+ msg += "%s[EOL]" % line.replace("\n","")
+ else:
+ msg += 'WR Trivy record not found. Unless this CVE was published in the last week, it is presumed to be Not-Vulnerable'
+
+ if verbose: print("wr_trivy:FILEOPEN:%s" % datasource_text)
+
+ print(msg)
+
+#################################
+# comparibles
+#
+#
+
+def comparibles(cve_list_file):
+ if not cve_list_file.startswith('/'):
+ cve_list_file = os.path.join(srtool_basepath,cve_list_file)
+ if os.path.isfile(cve_list_file):
+ with open(cve_list_file, 'r') as fp:
+ for line in fp:
+ msg = ''
+ cve_name = line.strip()
+ cve_file_name = os.path.join(srtool_basepath,wr_trivy_cve_dir,cve_name)
+ if os.path.isfile(cve_file_name):
+ with open(cve_file_name, 'r') as fp:
+ patches_found = False
+ for line_patch in fp:
+ line_patch = line_patch.strip()
+ if line_patch.startswith('Patches_'):
+ patches_found = True
+ if patches_found:
+ msg += f"{line_patch}[EOL]"
+ else:
+ msg = 'Assumed Not-Vulnerable unless very recent'
+
+ print(f"{cve_name}||{msg}")
+ else:
+ print(f"ERROR: missing CVE list file '{cve_list_file}'", file=sys.stderr)
+ return(1)
+
+#################################
+# main loop
+#
+
+def main(argv):
+    """Command-line entry point: dispatch --initialize/--update/--cve-detail/
+    --comparibles for the Wind River Trivy datasource."""
+    global verbose
+
+    # setup
+
+    parser = argparse.ArgumentParser(description='srtool_wr_trivy.py: manage wr_trivy CVE data')
+    parser.add_argument('--initialize', '-i', action='store_const', const='init_wr_trivy', dest='command', help='Download the Wind River Trivy repo')
+    parser.add_argument('--update', '-u', action='store_const', const='update_wr_trivy', dest='command', help='Update the Wind River Trivy repo')
+    parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
+    parser.add_argument('--comparibles', dest='comparibles', help='Return wr-trivy data for list of CVEs')
+
+    # NOTE(review): --force and --update-skip-history are accepted for
+    # datasource-API compatibility but are not used below
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
+    args = parser.parse_args()
+
+    if args.is_verbose:
+        verbose = True
+
+    if 'init_wr_trivy' == args.command:
+        init_wr_trivy()
+    elif 'update_wr_trivy' == args.command:
+        # No difference from init at this time
+        init_wr_trivy()
+    elif args.cve_detail:
+        fetch_cve(args.cve_detail)
+    elif args.comparibles:
+        comparibles(args.comparibles)
+
+    else:
+        print("Command not found")
+
+if __name__ == '__main__':
+    # SRTool base dir is two levels above this script's directory (bin/wr_trivy)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    main(sys.argv[1:])
diff --git a/bin/yp/datasource.json b/bin/yp/datasource.json
index 97486956..abb2e663 100755
--- a/bin/yp/datasource.json
+++ b/bin/yp/datasource.json
@@ -37,6 +37,11 @@
"helptext" : "Text schema of an example defect",
"value" : "54321"
},
+ {
+ "name" : "SRTOOL_DEFECT_DOESNOTIMPACT",
+ "helptext" : "Comment message to mark CVEs that do not affect the products",
+ "value" : "It doesn't impact Yocto Project"
+ },
{
"name" : "SRTOOL_DEFECT_TOOL",
"helptext" : "The registered script to manage defects",
diff --git a/bin/yp/srtool_cve_checker.py b/bin/yp/srtool_cve_checker.py
new file mode 100755
index 00000000..36edf28f
--- /dev/null
+++ b/bin/yp/srtool_cve_checker.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2021-2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import argparse
+import json
+import subprocess
+import logging
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+is_job = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Sub Process calls
+def execute_process(*args):
+ cmd_list = []
+ for arg in args:
+ if not arg: continue
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ if not a: continue
+ cmd_list.append(a)
+ else:
+ cmd_list.append(arg)
+
+ if verbose: print(f"EXECUTE_PROCESS:{cmd_list}:PWD={os.getcwd()}")
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8')
+
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    """Return the value of SrtSetting 'key', else 'default_value'.
+
+    'is_dict' selects dict-style row access versus positional access
+    through the ORM schema indexes.
+    """
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        print("ERROR:%s" % (e))
+    # Fall through when the key is absent or the query failed
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+    """Insert or update SrtSetting 'key' with 'value' and commit."""
+    cur = SQL_CURSOR(conn)
+    # Set the key value for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+    if not srtsetting:
+        sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+        SQL_EXECUTE(cur, sql, (key,'',value))
+        print("INSERT:%s:%s:" % (key,value))
+    else:
+        # NOTE(review): this uses the positional ORM index even when
+        # 'is_dict' rows are in effect -- confirm against the cursor type
+        print("UPDATE[%d]:%s:%s:" % (srtsetting[ORM.SRTSETTING_ID],key,value))
+        sql = ''' UPDATE orm_srtsetting
+            SET value=?
+            WHERE id=?'''
+        if is_dict:
+            SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+        else:
+            SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+    SQL_COMMIT(conn)
+
+
+#################################
+# scan Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+
+def validate_cvechk_ab():
+    """Scan the Autobuilder cve-check JSON files under data/cve_checker
+    (yocto-metrics repo layout) and report packages whose 'products'
+    entries are multiple, mismatched, or have inconsistent cvesInRecord.
+
+    NOTE(review): repo clone/refresh is disabled ('if False') and relies
+    on helpers (github_action, execute_commmand, time) not defined in
+    this file -- the JSON files must already exist locally.
+    """
+#    conn = SQL_CONNECT(column_names=True)
+#    cur = SQL_CURSOR(conn)
+
+    # data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+    REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+    REMOTE_PATH = ''
+    LOCAL_DIR = 'data/cve_checker'
+    BRANCH = 'master'
+    LOCAL_PATH = 'yocto-metrics/cve-check/master'
+
+    # git@gitlab.aws-eu-north-1.devstar.cloud:pbahrs/studio-developer-image-updater.git
+    # data/wr-studio-conductor/windshare_migration/containers/*.json
+    repo_dir = os.path.join(srtool_basepath,LOCAL_DIR)
+    if False:
+        if not os.path.isdir(repo_dir):
+            print(f"= Clone '{REMOTE_URL}' ... =")
+            github_action([f"clone {REMOTE_URL}"],os.path.dirname(repo_dir),True)
+
+        if BRANCH:
+            print("= Checkout branch '%s' ... =" % BRANCH)
+            cmnd=['git','-C',repo_dir,'checkout',BRANCH]
+            if verbose: print(f"CMND:{cmnd}")
+            execute_commmand(cmnd)
+            time.sleep(0.200)
+
+        # Get the latest data with a safety pull
+        print("= Pull ... =")
+        github_action(['pull'],repo_dir)
+
+    # Find the JSON file
+    json_dir = os.path.join(repo_dir,LOCAL_PATH)
+    file_list = []
+    for root, dirs, files in os.walk(json_dir):
+        for i,file in enumerate(files):
+            if not file.endswith('.json'):
+                continue
+            file_list.append(file)
+    print("CVKCHK JSON file count = %d" % len(file_list))
+
+    progress_set_max(len(file_list))
+    # Scan the JSON files
+    for i,json_file in enumerate(file_list):
+
+        # Debugging support
+        if cmd_skip and (i < cmd_skip):
+            continue
+        if cmd_count and (i > (cmd_skip + cmd_count)):
+            continue
+
+        with open(os.path.join(json_dir,json_file)) as json_data:
+            progress_show(json_file)
+            try:
+                dct = json.load(json_data)
+            except Exception as e:
+                print("ERROR:JSON_FILE_LOAD:%s:%s" % (json_file,e), file=sys.stderr)
+                continue
+
+            if 0 == (i % 20): print("%4d\r" % i,end='',flush=True)
+
+            for elem in dct:
+                print(f"TOP ELEM:{elem}")
+
+            # Anomaly accumulators, reset for each JSON file
+            multiple_products = []
+            mismatch_products = []
+            mismatch_iscves = []
+
+            elem_packages = dct['package']
+            print(f"PACKAGE COUNT:{len(elem_packages)}")
+            for package in elem_packages:
+                name = package['name']
+                short_name = name.replace('-native','')
+
+                package_products = package['products']
+                if 1 != len(package_products):
+                    s = f"{name}={len(package_products)}"
+                    for product in package_products:
+                        s += f":{product['product']}"
+                    multiple_products.append(s)
+
+                is_cves = ''
+                for product in package_products:
+                    if not is_cves:
+                        is_cves = product['cvesInRecord']
+                    if short_name != product['product']:
+                        mismatch_products.append(f"{name}!={product['product']}")
+                    if is_cves != product['cvesInRecord']:
+                        mismatch_iscves.append(f"{name}:{is_cves} != {product['cvesInRecord']}")
+
+    # NOTE(review): these summaries reflect only the last file processed
+    # (the accumulators are reset per file); 'i' is also reused here
+    print(f"multiple_products:{len(multiple_products)}")
+    for i,mp in enumerate(multiple_products):
+        print(f"   {mp}")
+        if i > 5: break
+    print(f"mismatch_products:{len(mismatch_products)}")
+    for i,mp in enumerate(mismatch_products):
+        print(f"   {mp}")
+        if i > 5: break
+    print(f"mismatch_iscves:{len(mismatch_iscves)}")
+    for i,mp in enumerate(mismatch_iscves):
+        print(f"   {mp}")
+        if i > 5: break
+
+
+#################################
+# main loop
+#
+
+def main(argv):
+    """Command-line entry point for the CVE Checker results importer.
+
+    Returns 0 on success, 1 for an unknown command.
+    """
+    global verbose
+    global test
+    global force_update
+    global cmd_count
+    global cmd_skip
+
+    parser = argparse.ArgumentParser(description='srtool_cve_checker.py: CVE Checker results import')
+
+    # Test
+    parser.add_argument('--validate-cvechk-ab', '-V', action='store_const', const='validate_cvechk_ab', dest='command', help='Validate the JSON file')
+
+    # Debugging support
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
+    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+    # NOTE(review): --local-job is parsed but not consumed anywhere below
+    parser.add_argument('--local-job', action='store_true', dest='local_job', help='Use local job')
+    args = parser.parse_args()
+
+    ret = 0
+    verbose = args.verbose
+    test = args.test
+    force_update = args.force_update
+    if None != args.count:
+        cmd_count = int(args.count)
+    if None != args.skip:
+        cmd_skip = int(args.skip)
+
+    if 'validate_cvechk_ab' == args.command:
+        validate_cvechk_ab()
+
+    else:
+        print("srtool_cve_checker.py:Command not found")
+        ret = 1
+
+    progress_done('Done')
+    return(ret)
+
+
+if __name__ == '__main__':
+    # SRTool base dir is two levels above this script's directory (bin/yp)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    exit( main(sys.argv[1:]) )
+
diff --git a/bin/yp/srtool_defect.py b/bin/yp/srtool_defect.py
index 0e189a3a..b976cf46 100755
--- a/bin/yp/srtool_defect.py
+++ b/bin/yp/srtool_defect.py
@@ -26,13 +26,13 @@
import os
import sys
import argparse
-import sqlite3
import json
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
master_log = ''
@@ -133,7 +133,7 @@ class Defect:
#
def new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -147,7 +147,7 @@ def new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "DEFECT-%s-%05d" % (product_prefix,index)
return defect_name
diff --git a/bin/yp/srtool_publish.py b/bin/yp/srtool_publish.py
new file mode 100755
index 00000000..cabc7452
--- /dev/null
+++ b/bin/yp/srtool_publish.py
@@ -0,0 +1,1052 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2020 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+#
+#
+#
+
+import os
+import sys
+import re
+import csv
+import argparse
+from common.srtool_sql import *
+from datetime import datetime
+import time
+import glob
+import traceback
+
+# The Jira integration script owns the translations
+#from srtool_jira import translate_status
+from common.srtool_common import get_name_sort
+
+lookupTable = []
+cveIndex = {}
+jiraIndex = {}
+db_change = False
+
+# Setup:
+verbose = False
+cmd_truncate = False
+cmd_skip = 0
+cmd_count = 0
+force = False
+
+srtoolDBName = 'srt.sqlite'
+srtUpdateName = 'srt_update_db.csv'
+srtSchemaName = 'srt_schema.py'
+
+# Generate output files
+prev2srtUpdateName = 'data/publish/srt_diff_update_db.csv'
+prev2srtNvName = 'data/publish/srt_diff_nv_db.csv'
+prev2srtNoDefectName = 'data/publish/srt_diff_nodefect_db.csv'
+prev2srtOpenName = 'data/publish/srt_diff_open_db.csv'
+prev2srtSVNSName = 'data/publish/cve-svns-srtool-%s-%s.csv'
+
+#################################
+# Helper methods
+#
+
+overrides = {}
+
+def set_override(key,value=None):
+    """Record an override flag ('yes'/'no') for 'key'.
+
+    With an explicit 'value' it is stored as-is; otherwise the flag is
+    'yes' when the like-named environment variable starts with '1',
+    else 'no'.  Active ('yes') overrides are announced on stdout.
+    """
+    if not value is None:
+        overrides[key] = value
+    elif key in os.environ.keys():
+        overrides[key] = 'yes' if os.environ[key].startswith('1') else 'no'
+    else:
+        overrides[key] = 'no'
+    if 'yes' == overrides[key]:
+        print("OVERRIDE: %s = %s" % (key,overrides[key]))
+
+def get_override(key):
+ if key in overrides.keys():
+ return 'yes' == overrides[key]
+ return False
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/toaster.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Trigger verbose then stop for a path taken
+def trigger_verbose_stop(msg):
+    """Debug aid: on first call, print 'msg', enable global verbose output,
+    and set cmd_count=1 so processing stops shortly after the interesting
+    code path is hit."""
+    global verbose
+    global cmd_count
+    if not verbose:
+        print(msg)
+        verbose = True
+        cmd_count = 1
+
+
+
+#################################
+# ORM mapping for the given database file
+#
+
+# ORM mapping for the given database file
+class ORM_Class(object):
+ # Members will be added dynamically
+
+ # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')
+ @staticmethod
+ def get_orm_string(value,string_set):
+ if None == value: return('None')
+ string_list = string_set.split(',')
+ string_count = len(string_list)
+ value = int(value)
+ if (value < 0) or (value >= string_count):
+ print("ERROR: value '%d' out of range of '%s'" % (value,string_set))
+ return '<error>'
+ return string_list[value]
+
+# Instantiate the ORM class object
+ORM = ORM_Class()
+
+# Attach the specific database schema attibutes and values
+def import_orm_schema(databaseDir):
+ global ORM
+
+ schema = os.path.join(databaseDir,srtSchemaName)
+ # Generate the schema file if not found
+ if not os.path.isfile(schema):
+ ret = os.system("%s --generate-schema-header-dir %s" % (os.path.join(srtool_basepath,'bin/common/srtool_common.py'),databaseDir))
+
+ with open(schema) as fp:
+ for line in fp:
+ try:
+ name = line[:line.index('=')].strip()
+ value = line[line.index('=')+1:].strip()
+ if '"' == value[0]:
+ value = value[1:-1]
+ elif "'" == value[0]:
+ value = value[1:-1]
+ else:
+ value = int(value)
+ except:
+ continue
+ setattr(ORM, name, value)
+
+
+#################################
+# Product list and attributes
+#
+#
+
+class ProductListClass(object):
+ products = []
+ custom_key_list = []
+
+ SORTBY_ORDER = 0x0001
+ SORTBY_KEY = 0x0002
+ SORTBY_CUSTOM = 0x0004
+ SORTBY_REVERSE = 0x0010
+ INCLUDE_PUBLIC = 0x0100
+ INCLUDE_PUBLIC_NO = 0x0200
+ INCLUDE_MODE_DEVELOP = 0x1000
+ INCLUDE_MODE_SUPPORT = 0x2000
+ INCLUDE_MODE_EOL = 0x4000
+ INCLUDE_ALL = 0xff00
+
+ def fetch_products(self,conn,filter):
+ def get_dict_tag(tag,dict_str,default=None):
+ dict = json.loads(dict_str)
+ if tag in dict:
+ return dict[tag]
+ return default
+
+ cur = conn.cursor()
+ sql = "SELECT * FROM orm_product"
+ for product_item in cur.execute(sql):
+ # Filter the product list
+ skip = True
+ mode = get_dict_tag('mode',product_item[ORM.PRODUCT_PRODUCT_TAGS],'')
+ public_status = get_dict_tag('public_status',product_item[ORM.PRODUCT_PRODUCT_TAGS],'yes')
+ if (filter & self.INCLUDE_MODE_DEVELOP) and (mode == 'develop'): skip = False
+ if (filter & self.INCLUDE_MODE_SUPPORT) and (mode == 'support'): skip = False
+ if (filter & self.INCLUDE_MODE_EOL ) and (mode == 'eol' ): skip = False
+ if (filter & self.INCLUDE_PUBLIC ) and (public_status != 'no'): skip = False
+ if (filter & self.INCLUDE_PUBLIC_NO) and (public_status == 'no'): skip = False
+ if skip:
+ continue
+
+ product = {}
+ product['id'] = product_item[ORM.PRODUCT_ID]
+ product['order'] = product_item[ORM.PRODUCT_ORDER]
+ product['key'] = product_item[ORM.PRODUCT_KEY]
+ product['name'] = product_item[ORM.PRODUCT_NAME]
+ if product_item[ORM.PRODUCT_VERSION]:
+ product['name'] += " %s" product_item[ORM.PRODUCT_VERSION]
+ if product_item[ORM.PRODUCT_PROFILE]:
+ product['name'] += " %s" product_item[ORM.PRODUCT_PROFILE]
+ self.products.append(product)
+
+ def set_custom_key_list(self,key_list):
+ self.custom_key_list = key_list
+
+ def get_product_list(self,sortby):
+ def sortByOrder(val):
+ return val['order']
+ def sortByKey(val):
+ return val['key']
+
+ if (self.SORTBY_ORDER & sortby):
+ plist = self.products
+ plist.sort(key = sortByOrder, reverse = (self.SORTBY_REVERSE == (self.SORTBY_REVERSE & sortby)))
+ elif (self.SORTBY_KEY & sortby):
+ plist = self.products
+ plist.sort(key = sortByKey, reverse = (self.SORTBY_REVERSE == (self.SORTBY_REVERSE & sortby)))
+ elif (self.SORTBY_CUSTOM & sortby):
+ plist = []
+ for key in self.custom_key_list:
+ for product in products:
+ if key == product['key']:
+ plist.append(product)
+ continue
+ return(plist)
+
+# Instantiate the class
+productList = ProductListClass()
+
+
+#################################
+# publish charts
+#
+#
+
+# Extracted update CSV file schema
+# Column indexes for the extracted update CSV (one column per product
+# release).  NOTE(review): I_OUT_MAX and I_LIN19 are both 16 -- OUT_MAX
+# appears to mark the last column included in some outputs while I_MAX
+# is the full count; confirm against the report generators.
+I_NAME = 0
+I_V2SEVERITY = 1
+I_V3SEVERITY = 2
+I_LIN5 = 3
+I_CGP5 = 4
+I_OVP = 5
+I_LIN6 = 6
+I_CGP6 = 7
+I_SCP6 = 8
+I_LIN7 = 9
+I_CGP7 = 10
+I_SCP7 = 11
+I_LIN8 = 12
+I_LIN9 = 13
+I_LIN10 = 14
+I_LIN18 = 15
+I_OUT_MAX = 16
+I_LIN19 = 16
+I_LINCCM = 17
+I_REJECT = 18
+I_MAX = 19
+
+# (index, header label) pairs in column order
+COLUMN_LABELS = (
+    (I_NAME       ,'NAME'),
+    (I_V2SEVERITY ,'V2SEVERITY'),
+    (I_V3SEVERITY ,'V3SEVERITY'),
+    (I_LIN5       ,'LIN5'),
+    (I_CGP5       ,'CGP5'),
+    (I_OVP        ,'OVP'),
+    (I_LIN6       ,'LIN6'),
+    (I_CGP6       ,'CGP6'),
+    (I_SCP6       ,'SCP6'),
+    (I_LIN7       ,'LIN7'),
+    (I_CGP7       ,'CGP7'),
+    (I_SCP7       ,'SCP7'),
+    (I_LIN8       ,'LIN8'),
+    (I_LIN9       ,'LIN9'),
+    (I_LIN10      ,'LIN10'),
+    (I_LIN18      ,'LIN18'),
+    (I_OUT_MAX    ,'OUT_MAX'),
+    (I_LIN19      ,'LIN19'),
+    (I_LINCCM     ,'LINCCM'),
+    (I_REJECT     ,'REJECT'),
+)
+
+
+def OBSOLETE_map_productId_productPrefix(conn):
+    """OBSOLETE debug helper: build a {product_id: key} table from a
+    hard-coded product key list, exiting the process when a key is not
+    found in orm_product."""
+    cur = conn.cursor()
+
+    sql = "SELECT * FROM orm_product"
+    for product in cur.execute(sql):
+        print("[%d]'%s'" % (product[ORM.PRODUCT_ID],product[ORM.PRODUCT_KEY]))
+
+    product_prefix_table = {}
+    for key in ('LIN5','CGP5','OVP' ,'LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018','LIN1019','LINCCM'):
+#    for key in ('LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018','LIN1019','LINCCM'):
+        sql = "SELECT * FROM orm_product WHERE key='%s'" % key
+        print("FOO11: '%s'" % sql)
+        product = cur.execute(sql).fetchone()
+        if product:
+            product_prefix_table[product[ORM.PRODUCT_ID]] = key
+        else:
+            print("ERROR: could not match key '%s' to product" % key)
+            # Developer must fix database before continuing
+            exit(1)
+    return product_prefix_table
+
+# Name;V2Severity;V3Severity;LIN5;CGP5;OVP;LIN6;CGP6;SCP6;LIN7;CGP7;SCP7;LIN8;LIN9;LIN10;LIN18;LIN19;LINCCM
+def srt2update(srtDatabasePath):
+
+ srt_database_file = os.path.join(srtDatabasePath,srtoolDBName)
+ if not os.path.isfile(srt_database_file) and not force:
+ print("ERROR: Missing database file '%s'" % srt_database_file)
+ return()
+ srtfile_name = os.path.join(srtDatabasePath,srtUpdateName)
+ if os.path.isfile(srtfile_name) and not force:
+ print("Note: Update file '%s' already present" % srtfile_name)
+ return()
+ print("Extracting update information from '%s' to '%s'" % (srt_database_file,srtfile_name))
+
+ conn = SQL_CONNECT()
+ cur_cve = conn.cursor()
+ cur_vul = conn.cursor()
+ cur_inv = conn.cursor()
+ cur_i2d = conn.cursor()
+ cur_def = conn.cursor()
+
+ def product_append(srt_row,map,key):
+ if key in map:
+ srt_row.append('/'.join(product_map[key]))
+ else:
+ srt_row.append('')
+
+ # Prefetch product table, make fast lookup dict (ID to Key)
+ productList.fetch_products(conn,ProductListClass.INCLUDE_PUBLIC|ProductListClass.INCLUDE_MODE_SUPPORT)
+ product_list = productList.get_product_list(ProductListClass.SORTBY_ORDER|ProductListClass.SORTBY_REVERSE)
+ product_prefix_table = {}
+ for product in product_list:
+ product_prefix_table[product['id']] = product['key']
+
+ with open(srtfile_name, 'w') as srtfile:
+ # Write header
+ sustaining_row = []
+ sustaining_row.append('Name')
+ sustaining_row.append('V2Severity')
+ sustaining_row.append('V3Severity')
+ # now process the releases into investigations
+ for j_index in range(I_MAX):
+ sustaining_row.append(COLUMN_LABELS[j_index][1])
+ sustaining_row.append('Reject')
+ srtfile.write("%s\n" % ';'.join(sustaining_row))
+
+ for i,cve in enumerate(cur_cve.execute("SELECT * FROM orm_cve")):
+ srt_row = []
+ try:
+ srt_row.append('%s:%s:%s:%s:%s' % (cve[ORM.CVE_NAME],ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),cve[ORM.CVE_PUBLISHEDDATE],cve[ORM.CVE_LASTMODIFIEDDATE],cve[ORM.CVE_COMMENTS].replace(':',' - ').replace(';',' - ')))
+ srt_row.append(cve[ORM.CVE_CVSSV2_SEVERITY].upper())
+ srt_row.append(cve[ORM.CVE_CVSSV3_BASESEVERITY].upper())
+ except Exception as e:
+ # was intermittent error one day
+ _log("DATAERROR:%s:" % (e))
+ exit(1)
+
+ product_map = {}
+
+ sql = "SELECT * FROM orm_cvetovulnerablility WHERE cve_id='%s'" % cve[ORM.CVE_ID]
+ for c2v in cur_vul.execute(sql):
+ vulnerability_id = c2v[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+ sql = "SELECT * FROM orm_investigation WHERE vulnerability_id='%s'" % vulnerability_id
+ for investigation in cur_inv.execute(sql):
+ investigation_id = investigation[ORM.INVESTIGATION_ID]
+ sql = "SELECT * FROM orm_investigationtodefect WHERE investigation_id='%s'" % investigation_id
+ is_defects = False
+ for i2d in cur_i2d.execute(sql):
+ is_defects = True
+ defect_id = i2d[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
+ sql = "SELECT * FROM orm_defect WHERE id='%s'" % defect_id
+ defect = cur_def.execute(sql).fetchone()
+ defect_name = defect[ORM.DEFECT_NAME].strip()
+ defect_rcpl = defect[ORM.DEFECT_RELEASE_VERSION].strip()
+ defect_status = ORM.get_orm_string(defect[ORM.DEFECT_SRT_STATUS],ORM.STATUS_STR)
+ defect_resolution = ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)
+ # Account for broken duplicate links
+ if not defect_rcpl and defect[ORM.DEFECT_DUPLICATE_OF].startswith('missing'):
+ defect_resolution = 'DUP_MISSING_PARENT'
+ # Extract the Jira last update if present
+ try:
+ # 2019-01-02T18:41:00.000-0800
+ defect_updated = re.sub('T.*','',defect[ORM.DEFECT_DATE_UPDATED])
+ except:
+ defect_updated = ''
+
+# print("FOO4:%s" % defect_name)
+
+ product_prefix = re.sub('-.*','',defect_name)
+ if not product_prefix in product_map:
+ product_map[product_prefix] = ['%s:%s:%s:%s' % (defect_name,defect_rcpl if defect_rcpl else defect_status,defect_resolution,defect_updated)]
+ else:
+ product_map[product_prefix].append('%s:%s:%s:%s' % (defect_name,defect_rcpl if defect_rcpl else defect_status,defect_resolution,defect_updated))
+ if (not is_defects) and (investigation[ORM.INVESTIGATION_PRODUCT_ID] in product_prefix_table):
+ try:
+ product_prefix = product_prefix_table[investigation[ORM.INVESTIGATION_PRODUCT_ID]]
+ product_map[product_prefix] = ['%s:%s:%s:%s' % ('no_defects',ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR),'','')]
+ except:
+ print("\nBAR1:%s|%s|%s" % (srtfile_name,investigation[ORM.INVESTIGATION_PRODUCT_ID],product_prefix_table))
+
+ # Name;V2Severity;V3Severity;<product1>;<product2>;<product3>;...
+ fir product in product_list:
+ product_append(srt_row,product_map,product['key'])
+
+ # Add reject flag
+ if ('** REJECT **' in cve[ORM.CVE_DESCRIPTION]):
+ srt_row.append('REJECT')
+ else:
+ srt_row.append('')
+
+ srtfile.write("%s\n" % ';'.join(srt_row))
+
+ # Debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+ # Progress indicator support
+ if 0 == i % 100:
+ print('%04d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+
+ SQL_CLOSE_CONN(conn)
+
+
+
+#Name V2Severity V3Severity LIN5_RCPL CGP5_RCPL OVP_RCPL LIN6_RCPL CGP6_RCPL SCP6_RCPL LIN7_RCPL CGP7_RCPL SCP7_RCPL LIN8_RCPL LIN9_RCPL LIN10_RCPL LIN18_RCPL LIN19_RCPL LINCCM_RCPL
+def validate_update(prev_path,current_path,report_start_date,report_stop_date,do_svsn):
+ print("\n=== Generate Update Review/Validation Report, %s to %s ===\n" % (report_start_date,report_stop_date))
+
+ product_prefix = {}
+ product_prefix[I_NAME] = 'NAME'
+ product_prefix[I_V2SEVERITY] = 'V2SEVERITY'
+ product_prefix[I_V3SEVERITY] = 'V3SEVERITY'
+ product_prefix[I_LIN5] = 'LIN5'
+ product_prefix[I_CGP5] = 'CGP5'
+ product_prefix[I_OVP] = 'OVP'
+ product_prefix[I_LIN6] = 'LIN6'
+ product_prefix[I_CGP6] = 'CGP6'
+ product_prefix[I_SCP6] = 'SCP6'
+ product_prefix[I_LIN7] = 'LIN7'
+ product_prefix[I_CGP7] = 'CGP7'
+ product_prefix[I_SCP7] = 'SCP7'
+ product_prefix[I_LIN8] = 'LIN8'
+ product_prefix[I_LIN9] = 'LIN9'
+ product_prefix[I_LIN10] = 'LIN10'
+ product_prefix[I_LIN18] = 'LIN18'
+ product_prefix[I_LIN19] = 'LIN19'
+ product_prefix[I_LINCCM] = 'LINCCM'
+ product_prefix[I_REJECT] = 'REJECT'
+
+ enable_data = {}
+ enable_data[I_NAME] = True
+ enable_data[I_V2SEVERITY] = True
+ enable_data[I_V3SEVERITY] = True
+ enable_data[I_LIN5] = False
+ enable_data[I_CGP5] = False
+ enable_data[I_OVP] = False
+ enable_data[I_LIN6] = False
+ enable_data[I_CGP6] = False
+ enable_data[I_SCP6] = False
+ enable_data[I_LIN7] = True
+ enable_data[I_CGP7] = True
+ enable_data[I_SCP7] = True
+ enable_data[I_LIN8] = True
+ enable_data[I_LIN9] = True
+ enable_data[I_LIN10] = True
+ enable_data[I_LIN18] = True
+ enable_data[I_LIN19] = True
+ enable_data[I_LINCCM] = False
+ enable_data[I_REJECT] = False
+
+ # Filter start date
+# report_start_date = '2019-02-16'
+# report_stop_date = '2019-04-30'
+ srtool_today = datetime.today().strftime('%Y-%m-%d')
+
+ # Print SVNS header
+ def print_svns_header(writer):
+ if not writer:
+ return
+ header = [
+ 'CVE Number',
+ 'Priority',
+ 'Version',
+ 'CVSSv3_Severity',
+ 'CVSSv3_Score',
+ 'CVE Description',
+ 'SRT Comments',
+ 'Modifications',
+ 'Created Date',
+ 'Modified Date',
+ 'SRT Acknowledged Date',
+ ]
+ # Append the product columns
+ # Fix-up names
+ prodname = {}
+ prodname[I_LIN19] = 'WRLinux LTS 19'
+ prodname[I_LIN18] = 'WRLinux LTS 18'
+ prodname[I_LIN10] = 'WRLinux LTS 17'
+ prodname[I_LIN9] = 'WRLinux 9.0.0'
+ prodname[I_LIN8] = 'WRLinux 8.0.0'
+ prodname[I_OVP] = 'WRLinux OVP'
+ prodname[I_LIN7] = 'WRLinux 7.0.0'
+ prodname[I_SCP7] = 'WRLinux SCP 7.0.0'
+ prodname[I_CGP7] = 'WRLinux CGP 7.0.0'
+ prodname[I_LIN6] = 'WRLinux 6.0.0'
+ prodname[I_CGP6] = 'WRLinux CGP 6.0.0'
+ prodname[I_SCP6] = 'WRLinux SCP 6.0.0'
+ prodname[I_LIN5] = 'WRLinux 5.0.1'
+ prodname[I_CGP5] = 'WRLinux CGP 5.0.1'
+ for index in range(I_LIN5,I_LIN18+1):
+ header.append('Status %s' % prodname[index])
+ header.append('CQ/Jira Case')
+ writer.writerow(header)
+
+ # Print SVNS row
+ def write_svns_row_on_change(writer,row,modify,cur_cve,last_jira):
+ if not writer:
+ return
+
+ cve_name = row[I_NAME].split(':')[0]
+ sql = "SELECT * FROM orm_cve WHERE name='%s'" % cve_name
+ cve = cur_cve.execute(sql).fetchone()
+ if not cve:
+ print("ERROR_LOOKUP:|%s|" % row[I_NAME])
+ exit(1)
+ print_row = []
+ print_row.append(cve[ORM.CVE_NAME])
+ print_row.append(cve[ORM.CVE_CVSSV2_SEVERITY])
+ print_row.append(cve[ORM.CVE_CVSSV2_BASESCORE])
+ print_row.append(cve[ORM.CVE_CVSSV3_BASESEVERITY])
+ print_row.append(cve[ORM.CVE_CVSSV3_BASESCORE])
+
+ if cmd_truncate:
+ print_row.append(cve[ORM.CVE_DESCRIPTION][:20])
+ else:
+ print_row.append(cve[ORM.CVE_DESCRIPTION])
+
+ cve_comments = cve[ORM.CVE_COMMENTS]
+ if not cve_comments:
+ cve_comments = cve[ORM.CVE_PACKAGES]
+ print_row.append(cve_comments)
+
+ # Use publish date if acknowledge date not available
+ try:
+ acknowledge_date = cve[ORM.CVE_ACKNOWLEDGE_DATE]
+# if not acknowledge_date:
+# acknowledge_date = datetime.strptime(cve[ORM.CVE_PUBLISHEDDATE], '%Y-%m-%d')
+ if acknowledge_date:
+ # NO ACK_DATE:CVE-2013-2516:2019-03-27 07:18:03.982215,2019-02-15:unconverted data remains: .982215
+ acknowledge_date = re.sub('\..*','',acknowledge_date)
+ acknowledge_date = datetime.strptime(acknowledge_date, '%Y-%m-%d %H:%M:%S')
+ acknowledge_date = acknowledge_date.strftime('%Y-%m-%d')
+ else:
+ acknowledge_date = ''
+ except Exception as e:
+ acknowledge_date = ''
+ print("NO ACK_DATE:%s:%s,%s:%s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_ACKNOWLEDGE_DATE],cve[ORM.CVE_PUBLISHEDDATE],e))
+
+ print_row.append(modify)
+ print_row.append(' ' + cve[ORM.CVE_PUBLISHEDDATE]) # Block automatic date conversions
+ print_row.append(' ' + cve[ORM.CVE_LASTMODIFIEDDATE])
+ print_row.append(' ' + acknowledge_date)
+
+ for index in range(I_LIN5,I_LIN18+1):
+ if not row[index] or ('Not_Vulnerable' == row[index]):
+ print_row.append('Not vulnerable')
+ else:
+ # unmark the "()" inactive status decorators
+ print_row.append(row[index].replace('(','').replace(')',''))
+
+ if 0 <= last_jira.find(';'):
+ jira = re.sub(';.*','',last_jira)
+ print_row.append(jira)
+ if verbose:
+ jiras = re.sub('.*;','',last_jira)
+ print_row.append(jiras)
+ else:
+ print_row.append(last_jira)
+
+ writer.writerow(print_row)
+ return 1
+
+ # Print the row if any item after the CVE name is filled
+ def write_row_on_change(fd,key,row,last_jira):
+ for index in range(I_V2SEVERITY,len(row)):
+ if row[index]:
+ print_row = []
+ for i,col in enumerate(row):
+ if enable_data[i]:
+ print_row.append(col)
+ if 0 <= last_jira.find(';'):
+ jira = re.sub(';.*','',last_jira)
+ print_row.append(jira)
+ jiras = re.sub('.*;','',last_jira)
+ print_row.append(jiras)
+ else:
+ print_row.append(last_jira)
+ fd.write("%s;%s\n" % (key,';'.join(print_row)))
+ return 1
+ return 0
+
+ # Compute the Jira defect from this CVE's latest release
+ def get_latest_jira(srt_row):
+ latest_jira = ''
+ foo = []
+# for index in range(I_LIN5,I_LIN18+1):
+ # Assert that base products (LIN6,LIN7) win over related profiles
+ for index in (I_CGP7,I_SCP7,I_LIN7,I_LIN8,I_LIN9,I_LIN10,I_LIN18):
+ for item in srtEntry[index].split('/'):
+ if not item or item.startswith('no_defects'):
+ continue
+ foo.append(item)
+ srt_defect,status,resolution,defect_updated = item.split(':')
+ if (status in ('Vulnerable','Investigate')) or status[0].isdigit():
+ latest_jira = srt_defect
+ return latest_jira + ";%s" % ','.join(foo)
+
+
+ # Create output files
+ prev2srtUpdateFile = open(prev2srtUpdateName, 'w')
+ prev2srtNvFile = open(prev2srtNvName, 'w')
+ prev2srtNoDefectFile = open(prev2srtNoDefectName, 'w')
+ prev2srtOpenFile = open(prev2srtOpenName, 'w')
+ prev2srtSVNSNameFull = prev2srtSVNSName % (report_start_date.replace('-',''),report_stop_date.replace('-',''))
+ if do_svsn:
+ prev2srtSVNSFile = open(prev2srtSVNSNameFull, 'w')
+ prev2srtWriter = csv.writer(prev2srtSVNSFile, delimiter=';', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ srt_database_file = os.path.join(current_path,srtoolDBName)
+ _log("FOO:validate_update:db=%s" % srt_database_file)
+ conn = SQL_CONNECT()
+ cur_cve = conn.cursor()
+ else:
+ prev2srtSVNSFile = None
+ prev2srtWriter = None
+ conn = None
+ cur_cve = None
+
+ # Create the headers
+ update_reason = []
+ for index in range(I_NAME,I_LIN19):
+ if enable_data[index]:
+ update_reason.append(product_prefix[index])
+ prev2srtUpdateFile.write("Type;%s\n" % ';'.join(update_reason))
+ prev2srtNvFile.write("Type;%s\n" % ';'.join(update_reason))
+ print_svns_header(prev2srtWriter)
+
+ # Ensure that summary files are in place
+ srt2update(prev_path)
+ srt2update(current_path)
+
+ # Load the Previous dataset in to memory
+ is_first_row = True
+ prevlookupTable = []
+ cveIndex = {}
+ index = 0
+ prev_update_file = os.path.join(prev_path,srtUpdateName)
+ print("Loading previous update information from %s" % prev_update_file)
+ with open(prev_update_file, newline='') as prevfile:
+ CVE_reader = csv.reader(prevfile, delimiter=';', quotechar='"')
+ for i,row in enumerate(CVE_reader):
+ if is_first_row or not len(row):
+ is_first_row = False
+ continue
+
+ dbEntry=[]
+ for col in row:
+ dbEntry.append(col.strip())
+ prevlookupTable.append(dbEntry)
+
+ cve_name = dbEntry[I_NAME].split(':')[0]
+ cveIndex[cve_name] = index
+ index += 1
+
+# if dbEntry[I_NAME].startswith('CVE-2018-12384'):
+# print("BOO2")
+
+# if 'CVE-2018-12384' in cveIndex:
+# print("SOO2!")
+
+ # Load the SRTool data, line by line
+ is_first_row = True
+ count = 0
+ cve_update_count = 0
+ cve_nv_count = 0
+ cve_other_count = 0
+ cve_new_count = 0
+ current_update_file = os.path.join(current_path,srtUpdateName)
+ print("Comparing current update information from %s" % current_update_file)
+
+ with open(current_update_file, newline='') as srtfile:
+ CVE_reader = csv.reader(srtfile, delimiter=';', quotechar='"')
+ for i_row,row in enumerate(CVE_reader):
+ if is_first_row or not len(row):
+ is_first_row = False
+ continue
+ count += 1
+
+ # Load record
+ srtEntry=[]
+ for col in row:
+ srtEntry.append(col)
+
+ # Extract CVE name, status
+ cve_name,cve_status,cve_published,cve_lastmodifieddate,cve_comments = srtEntry[I_NAME].split(':')
+
+ # Preset the validation rows
+ update_reason = []
+ nv_rcpl_reason = []
+ other_reason = []
+ svns_reason = []
+ for i in range(I_MAX):
+ update_reason.append('')
+ nv_rcpl_reason.append('')
+ other_reason.append('')
+ svns_reason.append('')
+ update_reason[I_NAME] = cve_name
+ nv_rcpl_reason[I_NAME] = cve_name
+ other_reason[I_NAME] = cve_name
+ svns_reason[I_NAME] = cve_name
+
+# if ('CVE-2015-1006' != cve_name):
+# continue
+# print("FOO1:%s" % cve_name)
+
+ # Find matching Prev CVE entry
+ is_new = False
+ if not cve_name in cveIndex:
+ is_new = True
+ else:
+ prevEntry = prevlookupTable[cveIndex[cve_name]]
+ # Extract previous CVE name, status
+ prev_cve_name,prev_cve_status,prev_cve_published,prev_cve_lastmodifieddate,prev_cve_comments = prevEntry[I_NAME].split(':')
+ # Sanity Test
+ if not prev_cve_name == cve_name:
+ print("Lookup mismatch:(%s,%s)" % (prevEntry[I_NAME],cve_name))
+ exit(1)
+ # Previous 'New_Reserved' placeholder CVE entries do not count in 'new' test
+ if prev_cve_status in ('New_Reserved'):
+ is_new = True
+
+ if is_new:
+## print("FOO2")
+ # New!
+
+ # In range for "New"?
+ if (report_start_date <= cve_published) and (cve_published <= report_stop_date):
+
+ # ('Historical','New','New_Reserved','Investigate','Vulnerable','Not_Vulnerable','(New)','(Investigate)','(Vulnerable)','(Not Vulnerable)')
+ if cve_status not in ('Investigate','Vulnerable','Not_Vulnerable'):
+ continue
+
+ # New CVE record
+ for index in range(I_V2SEVERITY,I_LIN5):
+ update_reason[index] = srtEntry[index]
+ for index in range(I_LIN5,I_LIN19):
+ if not srtEntry[index].strip():
+ update_reason[index] = ''
+ continue
+
+ try:
+ if srtEntry[index].startswith('REJECT'):
+ continue
+ srt_defect,status,resolution,defect_updated = srtEntry[index].split(':')
+ except Exception as e:
+ print("ERROR:%s:%s:%s" % (cve_name,srtEntry[index],e))
+ exit(1)
+
+ resolution_na = resolution in ('Withdrawn','Rejected','Not Applicable','Replaced By Requirement','Cannot Reproduce')
+ if not srt_defect:
+ update_reason[index] = ''
+ elif srt_defect.startswith('no_defects') and prevEntry[index] and prevEntry[index][0].isdigit():
+ update_reason[index] = ''
+ elif status.startswith('Invalid Version') or status.startswith('unknown'):
+ update_reason[index] = ''
+ elif prevEntry[index].startswith('Not_Vulnerable') and status and status[0].isdigit():
+ # (LIN5-16501:Not_Vulnerable,5.0), (LIN5-11686:Not_Vulnerable,5.0.1.9), (LIN5-10025:Not_Vulnerable,5.0)
+ if resolution_na:
+ update_reason[index] = ''
+ else:
+ update_reason[index] = status
+ else:
+ update_reason[index] = status
+ write_svns_row_on_change(prev2srtWriter,update_reason,'New',cur_cve,get_latest_jira(srtEntry))
+ #
+ update_reason[I_NAME] = '%s;%s;%s;%s' % (cve_name,cve_published,cve_status,cve_comments)
+ cve_new_count += write_row_on_change(prev2srtOpenFile,'~New',update_reason,get_latest_jira(srtEntry))
+ continue
+ elif cve_status in ('Investigate','Vulnerable','Not_Vulnerable'):
+ # Not "New" for selected range, but may have "updated" product entries
+ # Create empty Prev record to test for SRT updates
+ prevEntry = []
+ for i in range(I_MAX):
+ prevEntry.append('')
+ # CVE-2010-0006:Not_Vulnerable:2010-01-26:2018-11-13:Linux
+ prevEntry[I_NAME] = '%s::::' % cve_name
+ else:
+ continue
+
+ # Check unexpected SRTool status for Previous tracked CVE
+ # ('Historical','New','New_Reserved','Investigate','Vulnerable','Not_Vulnerable','(New)','(Investigate)','(Vulnerable)','(Not Vulnerable)')
+ if cve_status not in ('Investigate','Vulnerable','Not_Vulnerable'):
+ prev2srtNoDefectFile.write("ODDSTATUS;%s:%s\n" % (cve_name,cve_status))
+## print("ODDSTATUS;%s:%s" % (cve_name,cve_status))
+ continue
+
+# print("QWERTY:%s,%s" % (prevEntry[I_NAME],srtEntry[I_NAME]))
+
+ # Preset the SVNS row
+ svns_reason = []
+ for index in range(I_MAX):
+ svns_reason.append(prevEntry[index])
+
+ # Start validation
+ if prevEntry[I_V2SEVERITY] != srtEntry[I_V2SEVERITY]:
+ if not srtEntry[I_V2SEVERITY] and srtEntry[I_REJECT]:
+ # UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ pass
+ else:
+ update_reason[I_V2SEVERITY] = "(%s,%s)" % (prevEntry[I_V2SEVERITY],srtEntry[I_V2SEVERITY])
+ svns_reason[I_V2SEVERITY] = srtEntry[I_V2SEVERITY]
+
+ if prevEntry[I_V3SEVERITY] != srtEntry[I_V3SEVERITY]:
+ if not srtEntry[I_V3SEVERITY] and srtEntry[I_REJECT]:
+ # UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ pass
+ else:
+ update_reason[I_V3SEVERITY] = "(%s,%s)" % (prevEntry[I_V3SEVERITY],srtEntry[I_V3SEVERITY])
+ svns_reason[I_V3SEVERITY] = srtEntry[I_V3SEVERITY]
+
+ for index in range(I_LIN5,I_LIN19):
+ if ('Not_Vulnerable' == prevEntry[index]) and ('' == srtEntry[index]):
+ continue
+## print("FOO1:%s" % srtEntry[index])
+ # Group multple defects into one column entry
+ defect_list = []
+ defect_nv_list = []
+ svns_list = []
+##
+ try:
+ if prevEntry[index]:
+ prev_srt_defect,prev_status,prev_resolution,prev_defect_updated = prevEntry[index].split('/')[0].split(':')
+# print("FOOBAR3:%s,%s" % (prev_status,prevEntry[index]))
+ else:
+ prev_status = ''
+ except Exception as e:
+# print("FOOBAR9:%s,%s" % (e,prevEntry[index]))
+ exit(1)
+##
+# print("FEI1:%s,%s,%s" % (prevEntry[index] == srtEntry[index],prevEntry[index],srtEntry[index]))
+
+ for item in srtEntry[index].split('/'):
+## print(" 2:%s" % item)
+ # We want to see defects in Previous that are not in SRT
+ if not item:
+ item = '::FOO:%s' % report_start_date
+ srt_defect,status,resolution,defect_updated = item.split(':')
+ status = status.replace('_OBSOLETE','')
+ resolution_na = resolution in ('Withdrawn','Rejected','Not Applicable','Replaced By Requirement','Cannot Reproduce')
+
+ if (report_start_date > defect_updated) or (defect_updated > report_stop_date):
+ continue
+
+ if prev_status != status:
+ if not srt_defect:
+ #UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ other_reason[index] = "(%s:%s,%s)" % ('no_defects?',prevEntry[index],'release')
+ elif srt_defect.startswith('no_defects') and prevEntry[index] and prevEntry[index][0].isdigit():
+ # (no_defects:5.0.1.4,Vulnerable)
+ prev2srtNoDefectFile.write("NODEFECT;%s;%s;%s\n" % (cve_name,product_prefix[index],prevEntry[index]))
+ elif status.startswith('Invalid Version') or status.startswith('unknown'):
+ # (CGP5-1122#Fixed:5.0.1.14,Invalid Version)
+ prev2srtNoDefectFile.write("BADRCPL;%s;%s;%s\n" % (cve_name,srt_defect,prevEntry[index]))
+ elif prevEntry[index].startswith('Not_Vulnerable') and status and status[0].isdigit():
+ # (LIN5-16501:Not_Vulnerable,5.0), (LIN5-11686:Not_Vulnerable,5.0.1.9), (LIN5-10025:Not_Vulnerable,5.0)
+ if resolution_na:
+ defect_nv_list.append("(%s:%s,%s#%s)" % (srt_defect,prevEntry[index],status,resolution)) #product_prefix[index]
+ else:
+ defect_list.append("(%s:%s,%s#%s)" % (srt_defect,prevEntry[index],status,resolution)) #product_prefix[index]
+ svns_list.append(status)
+ elif (I_OVP == index) and '/' in prevEntry[index]:
+ # (OVP-2382:6.0.0.19/7.0.0.0,7.0), (OVP-2342:6.0.0.19/7.0.0.0,6.0.0.19)
+ # Skip combo OVP for now
+ pass
+ elif 'Historical' == status:
+ # (LIN8-7944:Not_Vulnerable,Historical)
+ prev2srtNoDefectFile.write("HISTORICAL;%s;%s;%s\n" % (cve_name,srt_defect,prevEntry[index]))
+ elif 'DUP_MISSING_PARENT' == resolution:
+ # (LIN6-10042:6.0.0.22,Not_Vulnerable)
+ # Review Skip duplicates with broken links for now
+ prev2srtNoDefectFile.write("NOPARENT;%s;%s;%s:%s\n" % (cve_name,srt_defect,prevEntry[index],status))
+ elif 'Replaced By Requirement' == resolution:
+ # (LIN8-7944:Not_Vulnerable,Historical)
+ prev2srtNoDefectFile.write("REPLACED;%s;%s;%s:%s\n" % (cve_name,srt_defect,prevEntry[index],status))
+ else:
+ defect_list.append("(%s:%s,%s)" % (srt_defect,prevEntry[index],status)) #product_prefix[index]
+ svns_list.append(status)
+ else:
+ svns_list.append(prev_status)
+## svns_reason[index] = prev_status
+
+ # Update the cell, empty or not
+ update_reason[index] = ', '.join(defect_list)
+ nv_rcpl_reason[index] = ', '.join(defect_nv_list)
+
+ # Update the SVNS cell if change
+ if svns_list:
+ svns_reason[index] = ', '.join(svns_list)
+# print("FOOBAR2:%s,%s" % (index,svns_list))
+ else:
+ svns_reason[index] = '%s' % prev_status
+# svns_reason[index] = '*%s' % prev_status
+
+ is_severity_change = False
+ is_release_change = False
+ for index in (I_V2SEVERITY,I_V3SEVERITY):
+ if update_reason[index]:
+ is_severity_change = True
+ for index in range(I_LIN7,I_LIN18+1):
+ if update_reason[index]:
+ is_release_change = True
+
+ # Print results
+ if (report_start_date <= cve_lastmodifieddate) and (cve_lastmodifieddate <= report_stop_date):
+ if is_release_change:
+## print("FOOBAR1")
+ cve_update_count += write_row_on_change(prev2srtUpdateFile,'UPDATE',update_reason,get_latest_jira(srtEntry))
+ write_svns_row_on_change(prev2srtWriter,svns_reason,'Updated',cur_cve,get_latest_jira(srtEntry))
+ elif is_severity_change:
+## print("FOOBAR2")
+ cve_update_count += write_row_on_change(prev2srtUpdateFile,'SEVERITY',update_reason,get_latest_jira(srtEntry))
+ write_svns_row_on_change(prev2srtWriter,svns_reason,'Updated',cur_cve,get_latest_jira(srtEntry))
+ cve_nv_count += write_row_on_change(prev2srtNvFile,'NV_RCPL',nv_rcpl_reason,'')
+ cve_other_count += write_row_on_change(prev2srtUpdateFile,'OTHER',other_reason,'')
+
+ # Debug support
+ if cmd_skip and (cve_update_count < cmd_skip): continue
+ if cmd_count and ((cve_update_count - cmd_skip) > cmd_count): break
+ # Progress indicator support
+ if 0 == i_row % 100:
+ print('%04d: %20s\r' % (i_row,cve_name), end='')
+ if 0 == i_row % 200:
+ time.sleep(0.1)
+
+ print("Count=%d,CVE_Update=%d,CVE_NV=%d,CVE_Other=%s,CVE_New=%s" % (count,cve_update_count,cve_nv_count,cve_other_count,cve_new_count))
+ prev2srtUpdateFile.close()
+ prev2srtNvFile.close()
+ prev2srtNoDefectFile.close()
+ prev2srtOpenFile.close()
+ if prev2srtSVNSFile:
+ prev2srtSVNSFile.close()
+ SQL_CLOSE_CONN(conn)
+ print("SVNS file: " + prev2srtSVNSNameFull)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global cmd_skip
+ global cmd_count
+ global cmd_truncate
+ global force
+
+ # setup
+
+ parser = argparse.ArgumentParser(description='srtool_publish.py: manage SRTool publish table diffs')
+
+ parser.add_argument('--srt2update', dest='srt2update', help='Directory of SRTool database extract update data')
+ parser.add_argument('--validate-update', action='store_const', const='validate_update', dest='command', help='Compare and export update dbs')
+ parser.add_argument('--validate-update-svns', action='store_const', const='validate_update_svns', dest='command', help='Compare and export update dbs and SVNS')
+ parser.add_argument('--start', dest='report_start_date', help='Report start date (default="2019-02-16")')
+ parser.add_argument('--stop', dest='report_stop_date', help='Report stop date (default=<today>)')
+
+ parser.add_argument('--previous', '-p', dest='prev_path', help='Directory of previous update extract')
+ parser.add_argument('--current', '-c', dest='current_path', help="Directory of current update extract [default='.']")
+
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
+ parser.add_argument('--truncate', action='store_true', dest='truncate', help='Truncate output')
+ parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force Update')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+
+ args = parser.parse_args()
+
+ verbose = args.verbose
+ cmd_truncate = args.truncate
+ cmd_skip = 0
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ cmd_count = 0
+ if None != args.count:
+ cmd_count = int(args.count)
+ if get_override('SRTDBG_MINIMAL_DB'):
+ cmd_count = 40
+ force = args.force
+
+ # Paths to update file directories
+ prev_path = None
+ if args.prev_path:
+ prev_path = args.prev_path
+ current_path = '.'
+ if args.current_path:
+ current_path = args.current_path
+
+ report_start_date = '2019-02-16'
+ report_stop_date = datetime.today().strftime('%Y-%m-%d') # '2019-04-30'
+ if args.report_start_date:
+ report_start_date = args.report_start_date
+ if args.report_stop_date:
+ report_stop_date = args.report_stop_date
+
+ if args.srt2update:
+ import_orm_schema(args.srt2update)
+ srt2update(args.srt2update)
+ return()
+
+ if not prev_path:
+ print("ERROR: previous path required '--previous <previous_path>'")
+ exit(1)
+ if 'validate_update' == args.command:
+ import_orm_schema(current_path)
+ validate_update(prev_path,current_path,report_start_date,report_stop_date,False)
+ return()
+ elif 'validate_update_svns' == args.command:
+ import_orm_schema(current_path)
+ validate_update(prev_path,current_path,report_start_date,report_stop_date,True)
+ return()
+
+ else:
+ print("Command not found")
+
+if __name__ == '__main__':
+ # fetch any environment overrides
+
+ _log("MAIN:|%s|" % sys.argv)
+ set_override('SRTDBG_MINIMAL_DB')
+
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ main(sys.argv[1:])
diff --git a/bin/yp/srtool_yp.py b/bin/yp/srtool_yp.py
index 338d4467..3703ab4e 100755
--- a/bin/yp/srtool_yp.py
+++ b/bin/yp/srtool_yp.py
@@ -27,13 +27,13 @@
import os
import sys
import argparse
-import sqlite3
import json
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
srtDbName = 'srt.sqlite'
@@ -76,7 +76,7 @@ def init_products(source_file):
with open(source_doc) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
Product_Items = dct['Product_Items']
@@ -94,7 +94,7 @@ def init_products(source_file):
product = cur.execute(sql).fetchone()
if product is None:
# NOTE: 'order' is a reserved SQL keyword, so we have to quote it
- sql = ''' INSERT into orm_product ("order", key, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
+ sql = ''' INSERT INTO orm_product (`order`, `key`, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
cur.execute(sql, (order, key, name, version, profile, cpe, defect_tags, product_tags))
else:
sql = ''' UPDATE orm_product
@@ -115,6 +115,7 @@ def main(argv):
parser.add_argument('--file', dest='file', help='Source file')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
args = parser.parse_args()
diff --git a/bin/yp/yocto-project-products.json b/bin/yp/yocto-project-products.json
index b9688747..e8164ba6 100755
--- a/bin/yp/yocto-project-products.json
+++ b/bin/yp/yocto-project-products.json
@@ -1,54 +1,67 @@
{
"Product_Items" : [
- {
- "order" : "1",
- "key" : "Warrior",
- "name" : "Yocto Project Linux",
- "version" : "2.7",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"warrior\"}",
- "product_tags" : "{\"key\":\"warrior\"}"
- },
+
+ {
+ "order" : "1",
+ "key" : "master",
+ "name" : "Yocto Project",
+ "version" : "master",
+ "profile" : "",
+ "cpe" : "cpe:2.3:o:yoctoproject:*:*:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"master\"}",
+ "product_tags" : "{\"key\":\"master\",\"public_status\":\"no\",\"mode\":\"develop\"}"
+ },
+
+
{
"order" : "2",
- "key" : "Thud",
- "name" : "Yocto Project Linux",
- "version" : "2.6",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "key" : "nanbield",
+ "name" : "Yocto Project",
+ "version" : "Nanbield",
+ "profile" : "4.3",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:3.0:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"zeus\"}",
+ "product_tags" : "{\"key\":\"zeus\",\"mode\":\"support\"}"
},
{
"order" : "3",
- "key" : "Sumo",
- "name" : "Yocto Project Linux",
- "version" : "2.5",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.5:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"sumo\"}",
- "product_tags" : "{\"key\":\"sumo\"}"
+ "key" : "mickledore",
+ "name" : "Yocto Project",
+ "version" : "Mickledore",
+ "profile" : "4.2",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"warrior\"}",
+ "product_tags" : "{\"key\":\"warrior\",\"mode\":\"support\"}"
},
{
"order" : "4",
- "key" : "Rocko",
- "name" : "Yocto Project Linux",
- "version" : "2.4",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.4:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"rocko\"}",
- "product_tags" : "{\"key\":\"rocko\"}"
+ "key" : "langdale",
+ "name" : "Yocto Project",
+ "version" : "Langdale",
+ "profile" : "4.1",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"thud\"}",
+ "product_tags" : "{\"key\":\"thud\",\"mode\":\"support\"}"
},
{
"order" : "5",
- "key" : "Pyro",
- "name" : "Yocto Project Linux",
- "version" : "2.3",
- "profile" : "",
- "cpe" : "cpe:2.3:o:yoctoproject:linux:2.3:*:*:*:*:*:*:*",
- "defect_tags" : "{\"key\":\"pyro\"}",
- "product_tags" : "{\"key\":\"pyro\"}"
+ "key" : "kirkstone",
+ "name" : "Yocto Project",
+ "version" : "Kirkstone",
+ "profile" : "4.0",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"thud\"}",
+ "product_tags" : "{\"key\":\"thud\",\"mode\":\"support\"}"
+ },
+ {
+ "order" : "6",
+ "key" : "dunfell",
+ "name" : "Yocto Project",
+ "version" : "Dunfell",
+ "profile" : "3.1",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"thud\"}",
+ "product_tags" : "{\"key\":\"thud\",\"mode\":\"support\"}"
}
]
}
diff --git a/data/notify-categories.json b/data/notify-categories.json
index dc658fea..0deb631b 100755
--- a/data/notify-categories.json
+++ b/data/notify-categories.json
@@ -13,6 +13,10 @@
},
{
+ "name" : "CVE_DUPLICATE_NOLINK"
+ },
+
+ {
"name" : "VULNERABILITY"
},
diff --git a/data/recipe_names_from_layer_index.txt b/data/recipe_names_from_layer_index.txt
new file mode 100755
index 00000000..75efd629
--- /dev/null
+++ b/data/recipe_names_from_layer_index.txt
@@ -0,0 +1,3844 @@
+a2jmidid
+abseil-cpp
+accountsservice
+ace
+ace-cloud-editor
+acl
+acpica
+acpid
+acpitool
+adcli
+adduser
+ade
+adwaita-icon-theme
+aer-inject
+agent-proxy
+aircrack-ng
+alsa-equal
+alsa-lib
+alsa-oss
+alsa-plugins
+alsa-state
+alsa-tools
+alsa-topology-conf
+alsa-ucm-conf
+alsa-utils
+alsa-utils-scripts
+anaconda-init
+android-tools
+android-tools-conf
+anspass
+anthy
+aoetools
+apache-websocket
+apache2
+apmd
+apparmor
+appstream-glib
+apr
+apr-util
+apt
+argp-standalone
+arno-iptables-firewall
+arptables
+arpwatch
+asciidoc
+asio
+aspell
+assimp
+at
+at-spi2-atk
+at-spi2-core
+atftp
+atk
+atkmm
+atop
+attr
+audiofile
+audit
+aufs-util
+augeas
+autoconf
+autoconf-2.13-native
+autoconf-archive
+autofs
+automake
+avahi
+avro-c
+aws-iot-device-sdk-cpp
+azure-c-shared-utility
+azure-iot-sdk-c
+azure-macro-utils-c
+azure-uamqp-c
+azure-uhttp-c
+azure-umqtt-c
+babeld
+babeltrace
+babeltrace2
+babl
+backport-iwlwifi
+base-files
+base-passwd
+bash
+bash
+bash-completion
+bastille
+bats
+bazel-native
+bc
+bc
+bcc
+bcm2835-bootfiles
+bdftopcf
+bdwgc
+bigbuckbunny-1080p
+bigbuckbunny-480p
+bigbuckbunny-720p
+bind
+binutils
+binutils-cross-canadian-i686
+binutils-cross-i686
+binutils-cross-testsuite
+binutils-crosssdk-x86_64-oesdk-linux
+biossums
+bison
+bison
+bjam-native
+blktool
+blktrace
+blueman
+bluez5
+bmap-tools
+boinc-client
+bonnie++
+boost
+boot-config
+bootchart
+bootchart2
+botan
+bpftool
+bpftrace
+breakpad
+bridge-utils
+broadcom-bt-firmware
+brotli
+bsd-headers
+btrfs-tools
+buck-security
+build-appliance-image
+build-compare
+build-sysroots
+builder
+buildtools-extended-tarball
+buildtools-tarball
+bundler
+busybox
+busybox-inittab
+byacc
+bzip2
+c-ares
+c3-app-container
+c3-systemd-container
+ca-certificates
+cairo
+cairomm
+can-isotp
+can-utils
+cannelloni
+cantarell-fonts
+canutils
+capnproto
+caps
+catch2
+catfish
+ccache
+ccid
+ccs-tools
+cdparanoia
+cdrkit
+cdrtools-native
+celt051
+celt051
+ceph
+ceres-solver
+cfengine
+cfengine-masterfiles
+cgdb
+cgl-unittest
+cgroup-lite
+checkpolicy
+checksec
+checksecurity
+chef
+chef-zero
+cherokee
+chkrootkit
+chrony
+chrpath
+cifs-utils
+cim-schema-docs
+cim-schema-exper
+cim-schema-final
+cinematicexperience
+cirros
+civetweb
+cjson
+cjson
+ckermit
+clamav
+clang
+clang-cross-canadian-i686
+clang-cross-i686
+clang-crosssdk-x86_64
+cli11
+clinfo
+cloc
+cloud-image-compute
+cloud-image-controller
+cloud-image-guest
+cloud-init
+cluster
+cluster-glue
+clutter-1.0
+clutter-gst-3.0
+clutter-gtk-1.0
+cma-test
+cmake
+cmake-native
+cmark
+cmpi-bindings
+cni
+cni
+cockpit
+coderay
+cogl-1.0
+collectd
+colord
+colord-gtk
+colord-native
+compiler-rt
+compose-file
+con2fbmap
+concurrencykit
+concurrent-ruby
+conmon
+connman
+connman-conf
+connman-gnome
+conntrack-tools
+consolekit
+consul
+consul-migrate
+container-base
+container-shutdown-notifier
+containerd-docker
+containerd-opencontainers
+cool.io
+core-image-anaconda
+core-image-anaconda-initramfs
+core-image-base
+core-image-cgl
+core-image-cgl-initramfs
+core-image-clutter
+core-image-full-cmdline
+core-image-kernel-dev
+core-image-mingw-sdktest
+core-image-minimal
+core-image-minimal-dev
+core-image-minimal-initramfs
+core-image-minimal-mtdutils
+core-image-minimal-xfce
+core-image-rt
+core-image-rt
+core-image-rt-extended
+core-image-rt-sdk
+core-image-rt-sdk
+core-image-sato
+core-image-sato-dev
+core-image-sato-ptest-fast
+core-image-sato-sdk
+core-image-sato-sdk-ptest
+core-image-selinux
+core-image-selinux-minimal
+core-image-testmaster
+core-image-testmaster-initramfs
+core-image-tiny
+core-image-tiny-initramfs
+core-image-weston
+core-image-x11
+coreutils
+coreutils
+corosync
+cpio
+cpio
+cpprest
+cppunit
+cppzmq
+cpuburn-arm
+cpufrequtils
+cpuid
+cpupower
+cracklib
+crash
+crda
+createrepo-c
+cri-o
+criu
+crmsh
+cronie
+cross-localedef-native
+crossguid
+crun
+cryptfs-tpm2
+cryptodev-linux
+cryptodev-module
+cryptodev-tests
+cryptsetup
+cscope
+ctags
+ctapi-common
+cube-builder
+cube-builder-initramfs
+cube-cmd-server
+cube-desktop
+cube-dom0
+cube-essential
+cube-graphical-builder
+cube-install
+cube-k8s-node
+cube-server
+cube-update
+cube-vrf
+cunit
+cups
+cups-filters
+curl
+curlpp
+cve-update-db-native
+cwautomacros
+cxxtest
+cyclictest
+cyrus-sasl
+czmq
+daemonize
+daemontools
+dante
+daq
+dash
+db
+dbench
+dbus
+dbus-broker
+dbus-daemon-proxy
+dbus-glib
+dbus-test
+dbus-wait
+dcadec
+dconf
+dconf-editor
+ddrescue
+debianutils
+debootstrap
+debsums
+dejagnu
+depmodwrapper-cross
+desktop-file-utils
+dev86
+devilspie2
+devmem2
+dfu-util
+dfu-util-native
+dhcp
+dhcpcd
+dhex
+dhrystone
+dialog
+dibbler
+dietsplash
+diff-lcs
+diffoscope
+diffstat
+diffutils
+diffutils
+digitemp
+ding-libs
+diod
+directfb
+directfb-examples
+distcc
+distcc-config
+distro-feed-configs
+dldt-inference-engine
+dldt-model-optimizer
+dleyna-connector-dbus
+dleyna-core
+dleyna-renderer
+dleyna-server
+dlm
+dlt-daemon
+dm-verity-image-initramfs
+dmalloc
+dmidecode
+dnf
+dnf-plugin-tui
+dnfdragora
+dnsmasq
+dnssec-conf
+docbook-xml-dtd4
+docbook-xsl-stylesheets
+docker
+docker-ce
+docker-distribution
+docker-moby
+docopt.cpp
+dom0-init
+dos2unix
+dosfstools
+dosfstools
+dotnet
+dovecot
+doxygen
+dpdk
+dpdk
+dpkg
+dracut
+drbd
+drbd-utils
+dropbear
+dstat
+dtach
+dtc
+dumb-init
+dvb-apps
+dwarfsrcfiles
+e2fsprogs
+ebtables
+ecryptfs-utils
+ed
+ed
+edac-utils
+efibootmgr
+efitools
+efitools-native
+efivar
+eject
+elfutils
+elfutils
+ell
+emlog
+enca
+enchant2
+encodings
+enscript
+epeg
+epiphany
+erlang
+erlang
+erlang-native
+erlang-native
+erubis
+esmtp
+espeak
+essential-init
+etcd
+ethtool
+eudev
+evince
+evolution-data-server
+evolution-data-server-native
+evtest
+example
+exfat-utils
+exiv2
+exo
+expat
+expect
+ez-ipupdate
+f2fs-tools
+faac
+faad2
+facter
+faenza-icon-theme
+fatcat
+fatresize
+fb-test
+fbgrab
+fbida
+fbset
+fbset-modes
+fcgi
+fdk-aac
+fetchmail
+ffmpeg
+fftw
+figlet
+file
+file-roller
+findutils
+findutils
+fio
+fipscheck
+firewalld
+flac
+flashrom
+flatbuffers
+flex
+fltk
+fltk-native
+fluentbit
+fluentd
+fluidsynth
+fluidsynth-native
+fmt
+font-adobe-100dpi
+font-adobe-utopia-100dpi
+font-alias
+font-bh-100dpi
+font-bh-lucidatypewriter-100dpi
+font-bitstream-100dpi
+font-cursor-misc
+font-misc-misc
+font-util
+fontconfig
+fontforge
+formfactor
+fping
+frame
+freediameter
+freeglut
+freeradius
+freerdp
+freetype
+fribidi
+fscryptctl
+ftgl
+fts
+function2
+funyahoo-plusplus
+fuse
+fuse-exfat
+fuse-overlayfs
+fuse3
+fvwm
+fwknop
+fwts
+gammu
+garcon
+gateone
+gattlib
+gawk
+gawk
+gcc
+gcc-cross-canadian-i686
+gcc-cross-i686
+gcc-crosssdk-x86_64-oesdk-linux
+gcc-runtime
+gcc-sanitizers
+gcc-source-10.1.0
+gconf
+gcr
+gd
+gdb
+gdb-cross-canadian-i686
+gdb-cross-i686
+gdbm
+gdbm
+gdk-pixbuf
+gdm
+geany
+geany-plugins
+gedit
+gegl
+geis
+gen-coredump
+gengetopt
+gensio
+geoclue
+geocode-glib
+geoip
+geoip-perl
+geoipupdate
+geos
+gerbera
+gettext
+gettext
+gettext-minimal-native
+gexiv2
+gflags
+ghex
+ghostscript
+giflib
+gigolo
+gimp
+git
+gjs
+glade
+glew
+glfw
+glib-2.0
+glib-networking
+glibc
+glibc-locale
+glibc-mtrace
+glibc-scripts
+glibc-testsuite
+glibmm
+glide
+glm
+glmark2
+glog
+glusterfs
+gma500-gfx-check
+gmime
+gmmlib
+gmp
+gmp
+gnome-autoar
+gnome-backgrounds
+gnome-bluetooth
+gnome-calculator
+gnome-common
+gnome-control-center
+gnome-desktop-testing
+gnome-desktop3
+gnome-doc-utils-stub
+gnome-flashback
+gnome-font-viewer
+gnome-keyring
+gnome-menus3
+gnome-online-accounts
+gnome-panel
+gnome-session
+gnome-settings-daemon
+gnome-shell
+gnome-shell-extensions
+gnome-system-monitor
+gnome-terminal
+gnome-themes-extra
+gnome-tweaks
+gnu-config
+gnu-efi
+gnulib
+gnupg
+gnupg
+gnuplot
+gnutls
+gnutls
+go
+go-build
+go-capability
+go-cli
+go-connections
+go-context
+go-cross-canadian-i686
+go-cross-core2-32
+go-crosssdk-x86_64-oesdk-linux
+go-dbus
+go-dep
+go-digest
+go-distribution
+go-errors
+go-fsnotify
+go-helloworld
+go-libtrust
+go-logrus
+go-md2man
+go-metalinter
+go-mux
+go-native
+go-patricia
+go-pty
+go-runtime
+go-systemd
+gobject-introspection
+google-authenticator-libpam
+google-cloud-sdk
+googletest
+gparted
+gperf
+gperf
+gperftools
+gpgme
+gphoto2
+gpm
+gpsd
+gpsd-machine-conf
+gptfdisk
+gradm
+grail
+graphviz
+grep
+grep
+grilo
+groff
+groff
+grpc
+grpc-go
+grub
+grub
+grub-bootconf
+grub-efi
+grubby
+grubby
+gsettings-desktop-schemas
+gsl
+gsoap
+gsound
+gspell
+gssdp
+gst-examples
+gst-instruments
+gst-shark
+gst-validate
+gstd
+gstreamer1.0
+gstreamer1.0-libav
+gstreamer1.0-meta-base
+gstreamer1.0-omx
+gstreamer1.0-plugins-bad
+gstreamer1.0-plugins-base
+gstreamer1.0-plugins-good
+gstreamer1.0-plugins-ugly
+gstreamer1.0-python
+gstreamer1.0-rtsp-server
+gstreamer1.0-vaapi
+gtk+
+gtk+3
+gtk-doc
+gtkmm
+gtkmm3
+gtkperf
+gtksourceview-classic-light
+gtksourceview3
+gtksourceview4
+gtkwave
+guider
+gunicorn
+gupnp
+gupnp-av
+gupnp-dlna
+gupnp-igd
+gupnp-tools
+gvfs
+gyp
+gyp-py2
+gzip
+gzip
+harfbuzz
+hashicorp-serf
+hashie
+haveged
+hdcp
+hddtemp
+hdf5
+hdparm
+heartbeat
+help2man-native
+hexedit
+hiawatha
+hicolor-icon-theme
+hidapi
+hiera
+highline
+hiredis
+hostapd
+hplip
+htop
+htpdate
+http-parser
+http-parser.rb
+hunspell
+hunspell-dictionaries
+hwdata
+hwlatdetect
+hwloc
+hyperstart
+i2c-tools
+ibm-iotf-embeddedc
+ibus
+ibus-native
+iceauth
+icecc-create-env
+icewm
+icon-slicer
+icu
+icyque
+id3lib
+ifenslave
+ifmetric
+ifplugd
+iftop
+ifupdown
+ifuse
+igmpproxy
+igt-gpu-tools
+iksemel
+ima-evm-utils
+ima-inspect
+ima-policy
+imagemagick
+imapfilter
+imsettings
+indent
+inetutils
+iniparser
+init-ifupdown
+init-system-helpers
+initramfs-boot
+initramfs-cgl-boot
+initramfs-cube-builder
+initramfs-debug
+initramfs-debug-image
+initramfs-dm-verity
+initramfs-framework
+initramfs-kexecboot-image
+initramfs-kexecboot-klibc-image
+initramfs-live-boot
+initramfs-live-boot-tiny
+initramfs-live-install
+initramfs-live-install-efi
+initramfs-live-install-efi-testfs
+initramfs-live-install-testfs
+initramfs-module-install
+initramfs-module-install-efi
+initramfs-module-resizefs
+initramfs-module-setup-live
+initramfs-ostree
+initramfs-ostree-image
+initramfs-tools
+initrdscripts-ima
+initrdscripts-secure-core
+initscripts
+inotify-tools
+intel-compute-runtime
+intel-graphics-compiler
+intel-media-driver
+intel-mediasdk
+intel-microcode
+intel-pcm
+intel-vaapi-driver
+intltool
+iotop
+iozone3
+ipaddress
+ipc-run
+ipcalc
+iperf2
+iperf3
+ipmitool
+ipmiutil
+ippool
+iproute2
+ipsec-test
+iptables
+iptraf-ng
+iputils
+ipvsadm
+ipxe
+irda-utils
+irqbalance
+irssi
+isa-l
+iscsi-initiator-utils
+isic
+iso-codes
+isomd5sum
+itstool
+itt
+iucode-tool
+iw
+iwd
+ixgbe
+ixgbevf
+jack
+jansson
+jasper
+jhi
+joe
+jpnevulator
+jq
+jquery
+json
+json-c
+json-glib
+json-spirit
+jsoncpp
+jsonrpc
+kata-agent
+kata-proxy
+kata-runtime
+kata-shim
+kbd
+kconfig-frontends
+kea
+keepalived
+kern-tools-native
+kernel-devsrc
+kernel-initramfs
+kernel-initramfs-image
+kernel-module-emlog
+kernel-module-mali
+kernel-selftest
+kexec-tools
+kexec-tools-klibc
+kexecboot
+kexecboot-cfg
+key-store
+keybinder
+keymaps
+keyutils
+klcc-cross
+klibc
+klibc-static-utils
+klibc-utils
+kmod
+kmod-native
+kmscube
+konkretcmpi
+kpatch
+krb5
+kronosnet
+kubernetes
+kubernetes
+kura
+kvm-image-minimal
+kvmtool
+l3afpad
+lame
+lapack
+latencytop
+lcdproc
+lcms
+lcov
+ldconfig-native
+ldns
+ledmon
+lemon
+leptonica
+less
+leveldb
+lftp
+lib-perl
+liba52
+libacpi
+libaio
+libalgorithm-diff-perl
+libao
+libarchive
+libass
+libassuan
+libatasmart
+libatomic-ops
+libauthen-radius-perl
+libauthen-sasl-perl
+libavc1394
+libblockdev
+libbsd
+libburn
+libbytesize
+libc-bench
+libcamera
+libcanberra
+libcap
+libcap-ng
+libcap-ng-python
+libcapture-tiny-perl
+libcdio
+libcdio-paranoia
+libcec
+libcereal
+libcgi-perl
+libcgroup
+libchamplain
+libcheck
+libclass-method-modifiers-perl
+libcomps
+libconfig
+libconfig-autoconf-perl
+libconfig-general-perl
+libconnman-qt5
+libconvert-asn1-perl
+libcroco
+libcrypt-openssl-guess-perl
+libcrypt-openssl-random-perl
+libcrypt-openssl-rsa-perl
+libcurses-perl
+libcxx
+libcyusbserial
+libdaemon
+libdata-hexdump-perl
+libdazzle
+libdbd-mysql-perl
+libdbd-sqlite-perl
+libdbi
+libdbi-perl
+libdbus-c++
+libdc1394
+libde265
+libdev-checklib-perl
+libdevel-globaldestruction-perl
+libdevmapper
+libdigest-hmac-perl
+libdigest-sha1-perl
+libdivecomputer
+libdmx
+libdnet
+libdnf
+libdrm
+libdvbcsa
+libdvbpsi
+libdvdcss
+libdvdnav
+libdvdread
+libebml
+libedit
+libee
+libeigen
+libencode-locale-perl
+libencode-perl
+libenv-perl
+libepoxy
+liberation-fonts
+liberror-perl
+libesmtp
+libestr
+libev
+libevdev
+libevent
+libexecinfo
+libexif
+libextutils-config-perl
+libextutils-cppguess-perl
+libextutils-helpers-perl
+libextutils-installpaths-perl
+libextutils-parsexs-perl
+libfakekey
+libfann
+libfastjson
+libffi
+libfile-fnmatch-perl
+libfile-slurp-perl
+libfile-slurper-perl
+libfm
+libfm-extra
+libfontenc
+libforms
+libftdi
+libgcc
+libgcc-initial
+libgcrypt
+libgdata
+libgee
+libgfortran
+libgit2
+libgloss
+libglu
+libgnomekbd
+libgpg-error
+libgphoto2
+libgpiod
+libgpiod
+libgsf
+libgssglue
+libgtkstylus
+libgtop
+libgudev
+libgusb
+libgweather
+libgxim
+libhandy
+libharu
+libhtml-parser-perl
+libhtml-tagset-perl
+libhtml-tree-perl
+libhtp
+libhugetlbfs
+libibverbs
+libical
+libice
+libiconv
+libiconv
+libid3tag
+libidn
+libidn
+libidn2
+libiec61883
+libiio
+libimobiledevice
+libimport-into-perl
+libinih
+libinput
+libio-pty-perl
+libio-socket-ssl-perl
+libio-stringy-perl
+libipc-signal-perl
+libipt
+libjitterentropy
+libjpeg-turbo
+libjs-jquery
+libjs-sizzle
+libjson-perl
+libkcapi
+libksba
+liblbxutil
+libldb
+liblightmodbus
+liblinebreak
+liblocale-gettext-perl
+liblockfile
+liblogging
+liblognorm
+libmad
+libmailtools-perl
+libmali-xlnx
+libmatchbox
+libmatroska
+libmbim
+libmcrypt
+libmediaart
+libmediaart-2.0
+libmemcached
+libmemcached
+libmhash
+libmicrohttpd
+libmikmod
+libmime-charset-perl
+libmime-types-perl
+libmimetic
+libmms
+libmng
+libmnl
+libmodbus
+libmodbus
+libmodplug
+libmodule-build-perl
+libmodule-build-tiny-perl
+libmodule-pluggable-perl
+libmodule-runtime-perl
+libmodulemd
+libmoo-perl
+libmpc
+libmpd
+libmpdclient
+libmspack
+libmtp
+libmusicbrainz
+libmxml
+libmypaint
+libndp
+libnet
+libnet-dns-perl
+libnet-dns-sec-perl
+libnet-ldap-perl
+libnet-libidn-perl
+libnet-ssleay-perl
+libnet-telnet-perl
+libnetfilter-acct
+libnetfilter-conntrack
+libnetfilter-cthelper
+libnetfilter-cttimeout
+libnetfilter-log
+libnetfilter-queue
+libnewt
+libnfc
+libnfnetlink
+libnftnl
+libnice
+libnl
+libnma
+libnotify
+libnsl2
+libnss-mdns
+libnss-nis
+libnss-nisplus
+liboauth
+libogg
+libol
+libomxil
+liboop
+libopenmpt
+libopus
+libotr
+libowfat
+libp11
+libpam
+libpcap
+libpciaccess
+libpcre
+libpcre2
+libpeas
+libperlio-gzip-perl
+libpfm4
+libpipeline
+libplist
+libpng
+libproc-waitstat-perl
+libproxy
+libpsl
+libpthread-stubs
+libpwquality
+libqb
+libqmi
+libqofono
+libraw1394
+librdmacm
+librealsense
+librelp
+librepo
+libreport
+librole-tiny-perl
+librsvg
+librsync
+libsamplerate0
+libsass
+libsdl
+libsdl-gfx
+libsdl-image
+libsdl-mixer
+libsdl-net
+libsdl-ttf
+libsdl2
+libsdl2-image
+libsdl2-mixer
+libsdl2-net
+libsdl2-ttf
+libseccomp
+libsecret
+libselinux
+libselinux-python
+libsemanage
+libsepol
+libserialport
+libsigc++-2.0
+libsigc++-3
+libsign
+libsigrok
+libsigrokdecode
+libsm
+libsmi
+libsndfile1
+libsoc
+libsocket6-perl
+libsocketcan
+libsodium
+libsolv
+libsombok3
+libsoup-2.4
+libspatialite
+libsquish
+libsrtp
+libssh
+libssh2
+libssp-nonshared
+libstatgrab
+libstemmer
+libstrictures-perl
+libsub-exporter-progressive-perl
+libsub-uplevel-perl
+libtalloc
+libtar
+libtasn1
+libtdb
+libteam
+libterm-readkey-perl
+libtest-deep-perl
+libtest-harness-perl
+libtest-needs-perl
+libtest-nowarnings-perl
+libtest-pod-perl
+libtest-warn-perl
+libtest-warnings-perl
+libtevent
+libtext-charwidth-perl
+libtext-diff-perl
+libtext-iconv-perl
+libtext-wrapi18n-perl
+libtheora
+libtimedate-perl
+libtimezonemap
+libtinyxml
+libtinyxml2
+libtirpc
+libtool
+libtool-cross
+libtool-native
+libtorrent
+libubootenv
+libubox
+libucontext
+libuio
+libunicode-linebreak-perl
+libunique
+libunistring
+libunix-statgrab
+libunwind
+libupnp
+liburcu
+liburi-perl
+libusb-compat
+libusb1
+libusbg
+libusbgx
+libusbgx-config
+libusbmuxd
+libuser
+libutempter
+libuv
+libva
+libva-initial
+libva-utils
+libvcard
+libvdpau
+libvirt
+libvmi
+libvncserver
+libvorbis
+libvpx
+libwacom
+libwebp
+libwebsockets
+libwhisker2-perl
+libwmf
+libwnck
+libwnck3
+libwpe
+libwww-perl
+libx11
+libx11-compose-data
+libx86-1
+libxau
+libxaw
+libxcam
+libxcb
+libxcomposite
+libxcrypt
+libxcrypt-compat
+libxcursor
+libxdamage
+libxdmcp
+libxext
+libxfce4ui
+libxfce4util
+libxfixes
+libxfont
+libxfont2
+libxft
+libxi
+libxinerama
+libxkbcommon
+libxkbfile
+libxkbui
+libxklavier
+libxml++
+libxml-filter-buffertext-perl
+libxml-libxml-perl
+libxml-namespacesupport-perl
+libxml-parser-perl
+libxml-perl
+libxml-sax-base-perl
+libxml-sax-perl
+libxml-sax-writer-perl
+libxml-simple-perl
+libxml2
+libxmu
+libxpm
+libxpresent
+libxrandr
+libxrender
+libxres
+libxscrnsaver
+libxshmfence
+libxslt
+libxt
+libxtst
+libxv
+libxvmc
+libxxf86vm
+libyami
+libyami-utils
+libyaml
+libyui
+libyui-ncurses
+libzip
+lighttpd
+links
+links-x11
+linpack
+linux-atm
+linux-dummy
+linux-firmware
+linux-intel
+linux-intel-dev
+linux-intel-rt
+linux-libc-headers
+linux-yocto
+linux-yocto-dev
+linux-yocto-rt
+linux-yocto-tiny
+linuxptp
+lio-utils
+lirc
+live555
+lksctp-tools
+lldpd
+llvm
+llvm-common
+llvm-project-source-10.0.1
+lmbench
+lms
+lmsensors
+lmsensors-config
+lockdev
+lockfile-progs
+log4c
+log4cplus
+log4cpp
+logcheck
+logfsprogs
+logrotate
+logwarn
+logwatch
+loudmouth
+lowpan-tools
+lprng
+lrzsz
+lsb-release
+lshw
+lsof
+lsscsi
+ltp
+ltrace
+lttng-modules
+lttng-modules
+lttng-tools
+lttng-ust
+lua
+luajit
+luaposix
+lvm2
+lxc
+lxcfs
+lxdm
+lynis
+lz4
+lzip
+lzo
+lzop
+m4
+m4
+m4-native
+macchanger
+mailcap
+mailx
+make
+make
+make-mod-scripts
+makedepend
+makedevs
+makedumpfile
+maliit-framework-qt5
+maliit-plugins-qt5
+man-db
+man-pages
+mariadb
+mariadb-native
+matchbox-config-gtk
+matchbox-desktop
+matchbox-keyboard
+matchbox-panel-2
+matchbox-session
+matchbox-session-sato
+matchbox-terminal
+matchbox-theme-sato
+matchbox-wm
+maven
+mbedtls
+mbuffer
+mc
+mc
+mce-inject
+mce-test
+mcelog
+mcpp
+mcstrans
+md5deep
+mdadm
+mdbus2
+mdns
+memcached
+memcached
+memstat
+memtester
+menu-cache
+menulibre
+mercurial
+mesa
+mesa-demos
+mesa-gl
+meson
+meta-environment-extsdk-qemux86
+meta-environment-qemux86
+meta-extsdk-toolchain
+meta-filesystems-image
+meta-filesystems-image-base
+meta-go-toolchain
+meta-ide-support
+meta-initramfs-image
+meta-multimedia-image
+meta-multimedia-image-base
+meta-networking-image
+meta-networking-image-base
+meta-oe-image
+meta-oe-image-base
+meta-oe-ptest-image
+meta-perl-base
+meta-perl-image
+meta-perl-ptest-image
+meta-python-image
+meta-python-image-base
+meta-python-ptest-image
+meta-python2-image
+meta-python2-image-base
+meta-python2-ptest-image
+meta-toolchain
+meta-toolchain-qt5
+meta-webserver-image
+meta-webserver-image-base
+meta-world-pkgdata
+metacity
+metee
+method-source
+metrics-discovery
+mg
+mime-construct
+mime-support
+mime-types
+mimic
+mingetty
+mini-iconv
+mini-x-session
+minicom
+minicoredumper
+minidlna
+minini
+miniupnpd
+mixlib-authentication
+mixlib-cli
+mixlib-config
+mixlib-log
+mixlib-shellout
+mkfontscale
+mklibs-native
+mksh
+mm-common
+mmap-smack-test
+mmc-utils
+mobile-broadband-provider-info
+mod-wsgi
+modemmanager
+modutils-initscripts
+mokutil
+mongodb
+monit
+monit
+monkey
+mosh
+mosquitto
+mousepad
+mozjs
+mpc
+mpd
+mpeg2dec
+mpfr
+mpg123
+mpich
+mpv
+mraa
+mscgen
+msgpack
+msgpack-c
+msmtp
+msmtp
+msr-tools
+mtd-utils
+mtdev
+mtools
+mtools
+mtr
+mtree
+multimedia-libcamera-image
+multipath-tools
+musl
+musl-obstack
+musl-utils
+mutt
+mutter
+mx-1.0
+mycroft
+mypaint-brushes-1.0
+mysql-python
+nagios-core
+nagios-nrpe
+nagios-nsca
+nagios-plugins
+nana
+nano
+nanoio
+nanomsg
+nanomsg
+nanopb
+nasm
+nativesdk-buildtools-perl-dummy
+nativesdk-clang-glue
+nativesdk-erlang
+nativesdk-icecc-toolchain
+nativesdk-libtool
+nativesdk-meson
+nativesdk-mingw-w64-headers
+nativesdk-mingw-w64-runtime
+nativesdk-mingw-w64-winpthreads
+nativesdk-packagegroup-qt5-toolchain-host
+nativesdk-packagegroup-sdk-host
+nativesdk-qemu-helper
+nativesdk-qtbase
+nativesdk-sdk-provides-dummy
+nativesdk-wic
+nautilus
+nbd
+nbdkit
+nbench-byte
+ncftp
+ncmpc
+ncp
+ncrack
+ncurses
+ndctl
+ndisc6
+ne10
+neard
+neon
+net-snmp
+net-ssh
+net-ssh-gateway
+net-ssh-multi
+net-tools
+netbase
+netcat
+netcat-openbsd
+netcf
+netdata
+netkit-ftp
+netkit-rpc
+netkit-rsh
+netkit-rusers
+netkit-rwho
+netkit-telnet
+netkit-tftp
+netns
+netperf
+netplan
+nettle
+nettle
+network-manager-applet
+networkd-dispatcher
+networkmanager
+networkmanager-openvpn
+newlib
+nfacct
+nfs-export-root
+nfs-utils
+nftables
+nghttp2
+nginx
+nginx
+ngraph
+nicstat
+nikto
+ninja
+nlohmann-fifo
+nlohmann-json
+nmap
+nmon
+nng
+node-iothub-explorer
+node-red
+node-red-contrib-azureiothubnode
+node-red-contrib-google-cloud
+node-red-contrib-ibm-watson-iot
+nodejs
+nopoll
+nostromo
+notary
+novnc
+npth
+nspr
+nss
+nss-myhostname
+nss-pam-ldapd
+ntfs-3g-ntfsprogs
+ntimed
+ntop
+ntp
+numactl
+numlockx
+nuttcp
+nvme-cli
+nvmetcli
+oath
+obex-data-server
+obexftp
+ocfs2-tools
+oci-image-spec
+oci-image-tools
+oci-runtime-spec
+oci-runtime-tools
+oci-systemd-hook
+octave
+oe-scap
+ofono
+ogl-runtime
+ohai
+oisp-cli
+onboard
+onednn
+onig
+open-iscsi-kernel
+open-iscsi-user
+open-isns
+open-model-zoo
+open-vm-tools
+openal-soft
+openbox
+opencl-clang
+opencl-clang
+opencl-headers
+opencl-icd-loader
+openconnect
+opencore-amr
+openct
+opencv
+openembedded-release
+openflow
+openflow
+openh264
+openhpi
+openipmi
+openjdk-8-native
+openjpeg
+openl2tp
+openldap
+openldap
+openlldp
+openlmi-tools
+openmp
+openobex
+openocd
+opensaf
+opensbi
+opensc
+openscap
+openscap
+openscap-daemon
+openssh
+openssl
+openssl
+openssl-fips
+openssl-fips-example
+openssl-tpm-engine
+openstack-image-aio
+openstack-image-compute
+openstack-image-controller
+openstack-image-network
+openvpn
+openvswitch
+openwsman
+openzone
+opkg
+opkg-arch-config
+opkg-keyrings
+opkg-utils
+oprofile
+opus-tools
+opusfile
+orage
+orc
+orrery
+os-release
+oscam
+ostree
+ostree
+ostree
+ostree-upgrade-mgr
+overc-conftools
+overc-installer
+overc-system-agent
+overc-utils
+ovmf
+ovmf-shell-image
+ovmf-shell-image-enrollkeys
+owfs
+p11-kit
+p7zip
+p8platform
+p910nd
+pacemaker
+package-index
+packagegroup-anaconda-support
+packagegroup-audio
+packagegroup-base
+packagegroup-basic
+packagegroup-boot
+packagegroup-builder
+packagegroup-busybox-replacement
+packagegroup-cgl
+packagegroup-cgl-applications
+packagegroup-cgl-kernel
+packagegroup-cgl-middleware
+packagegroup-cgl-swdevtools
+packagegroup-cloud-aws
+packagegroup-cloud-azure
+packagegroup-cloud-benchmarking
+packagegroup-cloud-compute
+packagegroup-cloud-controller
+packagegroup-cloud-debug
+packagegroup-cloud-extras
+packagegroup-cloud-google
+packagegroup-cloud-ibm
+packagegroup-cloud-network
+packagegroup-cloud-oisp
+packagegroup-container
+packagegroup-containers
+packagegroup-core-base-utils
+packagegroup-core-boot
+packagegroup-core-boot-wrs
+packagegroup-core-buildessential
+packagegroup-core-clutter
+packagegroup-core-device-devel
+packagegroup-core-eclipse-debug
+packagegroup-core-full-cmdline
+packagegroup-core-nfs
+packagegroup-core-sdk
+packagegroup-core-security
+packagegroup-core-security-ptest
+packagegroup-core-selinux
+packagegroup-core-ssh-dropbear
+packagegroup-core-ssh-openssh
+packagegroup-core-standalone-sdk-target
+packagegroup-core-tools-debug
+packagegroup-core-tools-profile
+packagegroup-core-tools-testapps
+packagegroup-core-x11
+packagegroup-core-x11-base
+packagegroup-core-x11-sato
+packagegroup-core-x11-xserver
+packagegroup-cross-canadian-qemux86
+packagegroup-dom0
+packagegroup-dummy-monitoring
+packagegroup-efi-secure-boot
+packagegroup-empty-monitoring
+packagegroup-essential
+packagegroup-fonts-truetype
+packagegroup-glusterfs
+packagegroup-gnome-apps
+packagegroup-gnome-desktop
+packagegroup-go-cross-canadian-qemux86
+packagegroup-go-sdk-target
+packagegroup-graphical-builder
+packagegroup-ids
+packagegroup-ima
+packagegroup-ima-initramfs
+packagegroup-installer-x11-anaconda
+packagegroup-k8s
+packagegroup-luks
+packagegroup-luks-initramfs
+packagegroup-meta-filesystems
+packagegroup-meta-initramfs
+packagegroup-meta-multimedia
+packagegroup-meta-networking
+packagegroup-meta-oe
+packagegroup-meta-perl
+packagegroup-meta-python
+packagegroup-meta-python2
+packagegroup-meta-webserver
+packagegroup-nagios-monitoring
+packagegroup-networkmanager
+packagegroup-ovp-criu
+packagegroup-ovp-debug
+packagegroup-ovp-default-monitoring
+packagegroup-ovp-docker
+packagegroup-ovp-lttng-toolchain
+packagegroup-ovp-trace-tools
+packagegroup-ovp-vm
+packagegroup-qt5-qtcreator-debug
+packagegroup-qt5-toolchain-target
+packagegroup-sdk-target
+packagegroup-self-hosted
+packagegroup-selinux-minimal
+packagegroup-selinux-policycoreutils
+packagegroup-service-discovery
+packagegroup-tools-bluetooth
+packagegroup-tpm
+packagegroup-tpm2
+packagegroup-tpm2-initramfs
+packagegroup-util-linux
+packagegroup-vm-sep
+packagegroup-wr-base
+packagegroup-wr-bsps
+packagegroup-wr-core-cgl
+packagegroup-wr-core-cut
+packagegroup-wr-core-db
+packagegroup-wr-core-dhcp
+packagegroup-wr-core-interactive
+packagegroup-wr-core-libs-extended
+packagegroup-wr-core-mail
+packagegroup-wr-core-net
+packagegroup-wr-core-networking
+packagegroup-wr-core-perl
+packagegroup-wr-core-python
+packagegroup-wr-core-security
+packagegroup-wr-core-sys-util
+packagegroup-wr-core-util
+packagegroup-xfce
+packagegroup-xfce-base
+packagegroup-xfce-desktop
+packagegroup-xfce-extended
+packagegroup-xfce-multimedia
+packagegroup-zabbix-monitoring
+paho-mqtt-c
+pam-plugin-ccreds
+pam-plugin-ldapdb
+pam-ssh-agent-auth
+pamela
+pango
+pangomm
+parole
+parson
+parted
+passwdqc
+patch
+patch
+patchelf
+pavucontrol
+pax-utils
+paxctl
+pbzip2
+pcimem
+pciutils
+pcmanfm
+pcmciautils
+pcr-extend
+pcsc-lite
+pegtl
+perf
+perl
+pflask
+phonet-utils
+phoronix-test-suite
+php
+phpmyadmin
+physfs
+phytool
+picocom
+pidgin
+pidgin-otr
+pidgin-sipe
+piglit
+pigz
+pimd
+pinentry
+pipewire
+pipewire-0.2
+pixman
+pkcs11-helper
+pkgconf
+pkgconfig
+ply
+plymouth
+pm-graph
+pm-qa
+pm-utils
+pmdk
+pmtools
+pngcheck
+po4a
+poco
+podman
+podman-compose
+pointercal
+pointercal-xinput
+policycoreutils
+polkit
+polkit-group-rule-datetime
+polkit-group-rule-network
+pong-clock
+poppler
+poppler-data
+popt
+portaudio-v19
+postfix
+postgresql
+powertop
+ppp
+ppp-dialin
+pps-tools
+pptp-linux
+prelink
+procmail
+procps
+proftpd
+proj
+projucer
+protobuf
+protobuf-c
+proxy-libintl
+pry
+pseudo
+psmisc
+psplash
+psqlodbc
+ptest-runner
+ptpd
+pty-forward-native
+pugixml
+pulseaudio
+pulseaudio-client-conf-sato
+puppet
+puppet-vswitch
+puppetlabs-stdlib
+pure-ftpd
+purple-skypeweb
+puzzles
+pv
+pxaregs
+pyrtm
+python
+python-aioeventlet
+python-alembic
+python-anyjson
+python-appdirs
+python-asn1crypto
+python-astroid
+python-atomicwrites
+python-attr
+python-attrs
+python-automat
+python-automaton
+python-aws-iot-device-sdk-python
+python-babel
+python-backports-abc
+python-backports-functools-lru-cache
+python-backports-init
+python-backports-ssl
+python-barbican
+python-barbicanclient
+python-bcrypt
+python-beautifulsoup4
+python-beautifulsoup4
+python-behave
+python-bitarray
+python-blinker
+python-booleanpy
+python-boto
+python-cachetools
+python-can
+python-castellan
+python-ceilometer
+python-ceilometerclient
+python-certifi
+python-cffi
+python-chardet
+python-cheetah
+python-cinder
+python-cinderclient
+python-click
+python-cmd2
+python-coloredlogs
+python-configargparse
+python-configparser
+python-constantly
+python-contextlib2
+python-cpuset
+python-crcmod
+python-cryptography
+python-cryptography-vectors
+python-cson
+python-cursive
+python-cython
+python-daemon
+python-daemonize
+python-dateutil
+python-dbus
+python-dbusmock
+python-decorator
+python-deprecated
+python-designateclient
+python-distutils-extra
+python-django
+python-django
+python-django-appconf
+python-django-babel
+python-django-compressor
+python-django-nose
+python-django-openstack-auth
+python-django-pyscss
+python-django-south
+python-djangorestframework
+python-dnspython
+python-docker
+python-docker-pycreds
+python-docutils
+python-dominate
+python-editor
+python-engineio
+python-enum
+python-enum-compat
+python-enum34
+python-epydoc
+python-evdev
+python-falcon
+python-feedformatter
+python-feedparser
+python-fixtures
+python-flake8
+python-flask
+python-flask-babel
+python-flask-bcrypt
+python-flask-bootstrap
+python-flask-login
+python-flask-mail
+python-flask-migrate
+python-flask-nav
+python-flask-navigation
+python-flask-pymongo
+python-flask-restful
+python-flask-script
+python-flask-sijax
+python-flask-socketio
+python-flask-sqlalchemy
+python-flask-uploads
+python-flask-user
+python-flask-wtf
+python-flask-xstatic
+python-funcsigs
+python-functools32
+python-functools32
+python-future
+python-futures
+python-futures
+python-futurist
+python-gdata
+python-gevent
+python-gevent-websocket
+python-glance
+python-glance-store
+python-glanceclient
+python-glancestore
+python-google-api-python-client
+python-greenlet
+python-grpcio
+python-grpcio-tools
+python-gsocketpool
+python-h2
+python-heat
+python-heat-cfntools
+python-heatclient
+python-horizon
+python-hp3parclient
+python-hpack
+python-html5lib
+python-humanfriendly
+python-humanize
+python-hyperframe
+python-hyperlink
+python-hypothesis
+python-idna
+python-imaging
+python-importlib-metadata
+python-incremental
+python-inflection
+python-intervals
+python-ipaddr
+python-ipaddress
+python-ipy
+python-iso8601
+python-isodate
+python-isort
+python-itsdangerous
+python-javaobj-py3
+python-jinja2
+python-jsmin
+python-jsonext
+python-jsonpatch
+python-jsonpath-rw
+python-jsonpath-rw-ext
+python-jsonpointer
+python-jsonref
+python-jsonschema
+python-kazoo
+python-keyring
+python-keystone
+python-keystone-hybrid-backend
+python-keystoneclient
+python-keystonemiddleware
+python-lazy-object-proxy
+python-ldap
+python-license-expression
+python-linecache2
+python-lockfile
+python-lockfile
+python-lrparsing
+python-lxml
+python-m2crypto
+python-magnumclient
+python-mako
+python-manilaclient
+python-markupsafe
+python-mccabe
+python-microversion-parse
+python-mimeparse
+python-mistralclient
+python-mock
+python-monotonic
+python-more-itertools
+python-mox
+python-mox3
+python-mprpc
+python-msgpack
+python-native
+python-ndg-httpsclient
+python-netaddr
+python-netifaces
+python-networkmanager
+python-networkx
+python-neutron
+python-neutron-lib
+python-neutronclient
+python-nose-exclude
+python-nova
+python-novaclient
+python-novnc
+python-numeric
+python-oauth2
+python-oauthlib
+python-openstack-nose
+python-openstackclient
+python-openstacksdk
+python-os-brick
+python-os-client-config
+python-os-traits
+python-os-vif
+python-os-win
+python-os-xenapi
+python-osc-lib
+python-oslo.cache
+python-oslo.concurrency
+python-oslo.config
+python-oslo.context
+python-oslo.db
+python-oslo.i18n
+python-oslo.log
+python-oslo.messaging
+python-oslo.middleware
+python-oslo.policy
+python-oslo.privsep
+python-oslo.reports
+python-oslo.rootwrap
+python-oslo.serialization
+python-oslo.service
+python-oslo.versionedobjects
+python-oslo.vmware
+python-oslotest
+python-osprofiler
+python-ovsdbapp
+python-packaging
+python-paho-mqtt
+python-pam
+python-pamela
+python-parse
+python-parse-type
+python-passlib
+python-paste
+python-pathlib2
+python-pbr
+python-pep8
+python-periphery
+python-pexpect
+python-pika
+python-pika-pool
+python-pint
+python-pip
+python-pluggy
+python-ply
+python-posix-ipc
+python-pretend
+python-prettytable
+python-priority
+python-progress
+python-prompt-toolkit
+python-protobuf
+python-psutil
+python-psycopg2
+python-ptyprocess
+python-py
+python-pyalsaaudio
+python-pyasn1
+python-pyasn1-modules
+python-pybind11
+python-pybluez
+python-pycadf
+python-pycodestyle
+python-pyconnman
+python-pycparser
+python-pycrypto
+python-pycryptodomex
+python-pycurl
+python-pydbus
+python-pyelftools
+python-pyephem
+python-pyexpect
+python-pyfirmata
+python-pyflakes
+python-pyflame
+python-pygobject
+python-pygpgme
+python-pyhamcrest
+python-pyiface
+python-pyinotify
+python-pyjks
+python-pyjwt
+python-pylint
+python-pymisp
+python-pymongo
+python-pymysql
+python-pynetlinux
+python-pyopenssl
+python-pyparsing
+python-pyparted
+python-pyperclip
+python-pyperf
+python-pypowervm
+python-pyrex
+python-pyrex-native
+python-pyro4
+python-pyroute2
+python-pyrsistent
+python-pyrtm
+python-pysaml2
+python-pyscss
+python-pyserial
+python-pysmi
+python-pysnmp
+python-pysocks
+python-pysqlite
+python-pysqlite
+python-pystache
+python-pytest
+python-pytest-helpers-namespace
+python-pytest-runner
+python-pytest-salt
+python-pytest-tempdir
+python-python-editor
+python-pytoml
+python-pytun
+python-pytz
+python-pyudev
+python-pyusb
+python-pywbem
+python-pyyaml
+python-pyzmq
+python-rally
+python-rdflib
+python-redis
+python-requests
+python-requests-oauthlib
+python-rfc3339-validator
+python-rfc3986-validator
+python-rfc3987
+python-robotframework
+python-robotframework-seriallibrary
+python-rtslib-fb
+python-ryu
+python-salttesting
+python-scandir
+python-scrypt
+python-sdnotify
+python-selectors34
+python-semantic-version
+python-semver
+python-serpent
+python-setuptools
+python-setuptools-git
+python-setuptools-scm
+python-sh
+python-sijax
+python-simplejson
+python-singledispatch
+python-six
+python-slip-dbus
+python-smbus
+python-snakefood
+python-snimpy
+python-socketio
+python-soupsieve
+python-sparts
+python-speaklater
+python-sqlalchemy
+python-sqlalchemy-migrate
+python-sqlparse
+python-statistics
+python-stevedore
+python-strict-rfc3339
+python-subprocess32
+python-subunit
+python-suds
+python-suds-jurko
+python-swift
+python-swiftclient
+python-systemd
+python-sysv-ipc
+python-taskflow
+python-tenacity
+python-termcolor
+python-thrift
+python-tinyrpc
+python-toml
+python-tooz
+python-tornado
+python-tornado-redis
+python-tqdm
+python-traceback2
+python-trollius
+python-trove
+python-troveclient
+python-twisted
+python-twitter
+python-twofish
+python-txws
+python-typing
+python-tzlocal
+python-ujson
+python-unicodecsv
+python-unidiff
+python-urllib3
+python-vcversioner
+python-versiontools
+python-visitor
+python-vobject
+python-waitress
+python-wcwidth
+python-webcolors
+python-webdav
+python-webencodings
+python-websocket-client
+python-websockify
+python-werkzeug
+python-which
+python-whoosh
+python-wrapt
+python-wtforms
+python-xattr
+python-xlrd
+python-xstatic
+python-xstatic-angular
+python-xstatic-angular-bootstrap
+python-xstatic-angular-cookies
+python-xstatic-angular-fileupload
+python-xstatic-angular-gettext
+python-xstatic-angular-irdragndrop
+python-xstatic-angular-lrdragndrop
+python-xstatic-angular-mock
+python-xstatic-angular-schema-form
+python-xstatic-bootstrap-datepicker
+python-xstatic-bootstrap-scss
+python-xstatic-bootswatch
+python-xstatic-d3
+python-xstatic-font-awesome
+python-xstatic-font-awesome
+python-xstatic-hogan
+python-xstatic-jquery
+python-xstatic-jquery-migrate
+python-xstatic-jquery-ui
+python-xstatic-jquery.quicksearch
+python-xstatic-jquery.tablesorter
+python-xstatic-jsencrypt
+python-xstatic-magic-search
+python-xstatic-mdi
+python-xstatic-objectpath
+python-xstatic-qunit
+python-xstatic-rickshaw
+python-xstatic-roboto-fontface
+python-xstatic-smart-table
+python-xstatic-spin
+python-xstatic-term.js
+python-xstatic-tv4
+python-yappi
+python-zake
+python-zaqarclient
+python-zipp
+python-zopeinterface
+python3
+python3-absl
+python3-aenum
+python3-aiofiles
+python3-aiohttp
+python3-aiohttp-jinja2
+python3-alembic
+python3-amqp
+python3-amqplib
+python3-anaconda
+python3-ansi2html
+python3-ansible
+python3-ansicolors
+python3-anyjson
+python3-appdirs
+python3-apply-defaults
+python3-argcomplete
+python3-argh
+python3-arpeggio
+python3-arrow
+python3-asn1crypto
+python3-astor
+python3-astroid
+python3-async
+python3-async-timeout
+python3-atomicwrites
+python3-attr
+python3-attrs
+python3-autobahn
+python3-automat
+python3-avahi
+python3-aws-iot-device-sdk-python
+python3-awscli
+python3-azure-iot-device
+python3-babel
+python3-backports-functools-lru-cache
+python3-bandit
+python3-bcrypt
+python3-beautifulsoup4
+python3-behave
+python3-bitarray
+python3-bitstring
+python3-blinker
+python3-blivet
+python3-blivetgui
+python3-booleanpy
+python3-boto3
+python3-boto3
+python3-botocore
+python3-bugsnag
+python3-cachecontrol
+python3-cached-property
+python3-cachetools
+python3-can
+python3-cassandra-driver
+python3-cbor
+python3-cbor2
+python3-cephclient
+python3-certifi
+python3-cffi
+python3-chardet
+python3-cheetah
+python3-cheroot
+python3-cherrypy
+python3-click
+python3-cliff
+python3-cmd2
+python3-colorama
+python3-colorama
+python3-colorama
+python3-coloredlogs
+python3-colorlog
+python3-configargparse
+python3-configparser
+python3-configshell-fb
+python3-constantly
+python3-contextlib2
+python3-coverage
+python3-coverage
+python3-crcmod
+python3-croniter
+python3-cryptography
+python3-cryptography-vectors
+python3-cson
+python3-cssselect
+python3-cycler
+python3-cython
+python3-dateutil
+python3-dbus
+python3-dbus-next
+python3-dbusmock
+python3-dbussy
+python3-debtcollector
+python3-decorator
+python3-defusedxml
+python3-deprecation
+python3-dicttoxml
+python3-dill
+python3-distro
+python3-distutils-extra
+python3-django
+python3-django-appconf
+python3-django-south
+python3-djangorestframework
+python3-dnspython
+python3-docker
+python3-docker-compose
+python3-docker-pycreds
+python3-dockerpty
+python3-docopt
+python3-docutils
+python3-dogpile.cache
+python3-dogpile.core
+python3-dominate
+python3-dt-schema
+python3-ecdsa
+python3-editor
+python3-engineio
+python3-entrypoints
+python3-enum-compat
+python3-et-xmlfile
+python3-evdev
+python3-eventlet
+python3-extras
+python3-extras
+python3-fail2ban
+python3-fann2
+python3-fasteners
+python3-fasteners
+python3-fastentrypoints
+python3-feedformatter
+python3-fire
+python3-flask
+python3-flask-babel
+python3-flask-bootstrap
+python3-flask-cors
+python3-flask-jsonpify
+python3-flask-jwt
+python3-flask-login
+python3-flask-mail
+python3-flask-migrate
+python3-flask-nav
+python3-flask-pymongo
+python3-flask-restful
+python3-flask-script
+python3-flask-sijax
+python3-flask-socketio
+python3-flask-sqlalchemy
+python3-flask-uploads
+python3-flask-user
+python3-flask-wtf
+python3-flask-xstatic
+python3-funcsigs
+python3-future
+python3-gast
+python3-geojson
+python3-gevent
+python3-gevent-websocket
+python3-gg-group-setup
+python3-git
+python3-gitdb
+python3-gmqtt
+python3-google-api-core
+python3-google-api-python-client
+python3-google-auth
+python3-google-cloud-core
+python3-google-cloud-pubsub
+python3-google-cloud-storage
+python3-google-resumable-media
+python3-googleapis-common-protos
+python3-graphviz
+python3-greenlet
+python3-grpc-google-iam-v1
+python3-grpcio
+python3-grpcio-tools
+python3-gsocketpool
+python3-gunicorn
+python3-h2
+python3-h5py
+python3-happybase
+python3-haversine
+python3-hgtools
+python3-hpack
+python3-html2text
+python3-html5lib
+python3-httplib2
+python3-httplib2
+python3-httplib2
+python3-httpretty
+python3-humanfriendly
+python3-humanize
+python3-hyperframe
+python3-hyperlink
+python3-i18n
+python3-ibmiotf
+python3-idna
+python3-idna
+python3-idna-ssl
+python3-imageio
+python3-importlib-metadata
+python3-incremental
+python3-inflection
+python3-iniparse
+python3-intervals
+python3-ipaddress
+python3-ipy
+python3-iso3166
+python3-iso8601
+python3-isodate
+python3-isort
+python3-itsdangerous
+python3-janus
+python3-jaraco-functools
+python3-javaobj-py3
+python3-jdcal
+python3-jeepney
+python3-jinja2
+python3-jmespath
+python3-jsmin
+python3-jsonpatch
+python3-jsonpath-rw
+python3-jsonpointer
+python3-jsonref
+python3-jsonrpcserver
+python3-jsonschema
+python3-kafka
+python3-kconfiglib
+python3-keras-applications
+python3-keras-preprocessing
+python3-keyring
+python3-keystoneauth1
+python3-kiwisolver
+python3-knack
+python3-kombu
+python3-langtable
+python3-lazy-object-proxy
+python3-ldap
+python3-lesscpy
+python3-libarchive-c
+python3-license-expression
+python3-lockfile
+python3-logutils
+python3-lrparsing
+python3-luma-core
+python3-luma-oled
+python3-lxml
+python3-lz4
+python3-m2crypto
+python3-magic
+python3-mako
+python3-mapbox
+python3-markdown
+python3-markupsafe
+python3-matplotlib
+python3-mccabe
+python3-meh
+python3-meld3
+python3-memcache
+python3-memcached
+python3-memcached
+python3-mock
+python3-monotonic
+python3-more-itertools
+python3-mpmath
+python3-mprpc
+python3-msgpack
+python3-msk
+python3-msm
+python3-multidict
+python3-nacl
+python3-ndg-httpsclient
+python3-netaddr
+python3-netifaces
+python3-networkmanager
+python3-networkx
+python3-newrelic
+python3-nmap
+python3-nose
+python3-ntplib
+python3-numpy
+python3-oauth2client
+python3-oauthlib
+python3-obd
+python3-oisp
+python3-openpyxl
+python3-ordered-set
+python3-ordereddict
+python3-os-client-config
+python3-osc-lib
+python3-oslo.i18n
+python3-oslo.utils
+python3-ovs
+python3-packaging
+python3-padaos
+python3-padatious
+python3-paho-mqtt
+python3-pako
+python3-pam
+python3-pandas
+python3-parallax
+python3-paramiko
+python3-paramiko
+python3-paramiko
+python3-parse
+python3-parse-type
+python3-passlib
+python3-paste
+python3-pastedeploy
+python3-pathlib
+python3-pathlib2
+python3-pathtools3
+python3-pbr
+python3-pecan
+python3-pep8
+python3-periphery
+python3-petact
+python3-pexpect
+python3-pid
+python3-pika
+python3-pika-pool
+python3-pillow
+python3-pint
+python3-pip
+python3-pkgconfig
+python3-pluggy
+python3-ply
+python3-pocketsphinx
+python3-polyline
+python3-portend
+python3-positional
+python3-posix-ipc
+python3-prctl
+python3-precise-runner
+python3-pretend
+python3-prettytable
+python3-priority
+python3-productmd
+python3-progress
+python3-prompt-toolkit
+python3-protobuf
+python3-psutil
+python3-ptyprocess
+python3-pulsectl
+python3-py
+python3-py-ubjson
+python3-pyalsaaudio
+python3-pyasn1
+python3-pyasn1-modules
+python3-pyatspi
+python3-pyaudio
+python3-pybind11
+python3-pybluez
+python3-pycairo
+python3-pychromecast
+python3-pycodestyle
+python3-pyconnman
+python3-pycparser
+python3-pycrypto
+python3-pycryptodome
+python3-pycryptodomex
+python3-pycurl
+python3-pydbus
+python3-pydocumentdb
+python3-pyelftools
+python3-pyephem
+python3-pyexpect
+python3-pyfirmata
+python3-pyflakes
+python3-pyflakes
+python3-pygments
+python3-pygobject
+python3-pyhamcrest
+python3-pyiface
+python3-pyinotify
+python3-pyjks
+python3-pyjwt
+python3-pykickstart
+python3-pykwalify
+python3-pylint
+python3-pylyrics
+python3-pymisp
+python3-pymongo
+python3-pymysql
+python3-pynetlinux
+python3-pyopenssl
+python3-pyparsing
+python3-pyparted
+python3-pyperclip
+python3-pyperf
+python3-pyqt5
+python3-pyqtchart
+python3-pyro4
+python3-pyroute2
+python3-pyrsistent
+python3-pyserial
+python3-pysnmp
+python3-pysocks
+python3-pystache
+python3-pystemd
+python3-pytest
+python3-pytest-asyncio
+python3-pytest-helpers-namespace
+python3-pytest-html
+python3-pytest-metadata
+python3-pytest-runner
+python3-pytest-salt
+python3-pytest-tempdir
+python3-pytest-timeout
+python3-python-editor
+python3-python-vlc
+python3-pytoml
+python3-pytun
+python3-pytz
+python3-pyudev
+python3-pyusb
+python3-pywbem
+python3-pyyaml
+python3-pyzmq
+python3-raven
+python3-rcssmin
+python3-rdflib
+python3-redis
+python3-regex
+python3-repoze-lru
+python3-repoze.lru
+python3-repoze.who
+python3-requests
+python3-requests-file
+python3-requests-ftp
+python3-requests-futures
+python3-requests-oauthlib
+python3-requests-toolbelt
+python3-requests-unixsocket
+python3-requestsexceptions
+python3-retrying
+python3-rfc3339-validator
+python3-rfc3986
+python3-rfc3986-validator
+python3-rfc3987
+python3-rjsmin
+python3-robotframework
+python3-robotframework-seriallibrary
+python3-routes
+python3-routes
+python3-rsa
+python3-rsa
+python3-rtslib-fb
+python3-ruamel-yaml
+python3-s3transfer
+python3-saharaclient
+python3-scandir
+python3-scapy
+python3-scapy
+python3-scons
+python3-scons-native
+python3-scp
+python3-scrypt
+python3-sdnotify
+python3-secretstorage
+python3-semantic-version
+python3-semver
+python3-sentry-sdk
+python3-serpent
+python3-setuptools
+python3-setuptools-git
+python3-setuptools-scm
+python3-setuptools-scm-git-archive
+python3-sh
+python3-sijax
+python3-simpleeval
+python3-simplegeneric
+python3-simplejson
+python3-simpleline
+python3-singledispatch
+python3-six
+python3-slip-dbus
+python3-smbus
+python3-smbus2
+python3-smmap
+python3-snappy
+python3-socketio
+python3-soupsieve
+python3-speaklater
+python3-speedtest-cli
+python3-sphinx
+python3-spidev
+python3-spidev
+python3-sqlalchemy
+python3-sqlparse
+python3-sshtunnel
+python3-statsd
+python3-stevedore
+python3-strict-rfc3339
+python3-subunit
+python3-suds-jurko
+python3-supervisor
+python3-suricata-update
+python3-sympy
+python3-systemd
+python3-sysv-ipc
+python3-tabulate
+python3-tempita
+python3-tempora
+python3-term
+python3-termcolor
+python3-test-generator
+python3-testrepository
+python3-testresources
+python3-testscenarios
+python3-testtools
+python3-testtools
+python3-texttable
+python3-thrift
+python3-thrift
+python3-tinyrecord
+python3-toml
+python3-tornado
+python3-tox
+python3-tqdm
+python3-trafaret
+python3-trafaret-config
+python3-transitions
+python3-twine
+python3-twisted
+python3-twitter
+python3-twofish
+python3-txaio
+python3-txws
+python3-typeguard
+python3-typing-extensions
+python3-tzlocal
+python3-u-msgpack-python
+python3-ujson
+python3-unidiff
+python3-uritemplate
+python3-uritemplate
+python3-urllib3
+python3-vcversioner
+python3-versiontools
+python3-vine
+python3-virtualenv
+python3-visitor
+python3-voluptuous
+python3-vsts-cd-manager
+python3-waitress
+python3-warlock
+python3-watchdog
+python3-wcwidth
+python3-weakrefmethod
+python3-webcolors
+python3-webencodings
+python3-webob
+python3-webob
+python3-webrtcvad
+python3-websocket-client
+python3-websocket-client
+python3-websockets
+python3-webtest
+python3-werkzeug
+python3-werkzeug
+python3-wheel
+python3-whoosh
+python3-wrapt
+python3-wsgiref
+python3-wsme
+python3-wtforms
+python3-xlrd
+python3-xmlrunner
+python3-xmltodict
+python3-xmltodict
+python3-xmodem
+python3-xstatic
+python3-xstatic-angular-bootstrap
+python3-xstatic-angular-cookies
+python3-xstatic-angular-fileupload
+python3-xstatic-angular-gettext
+python3-xstatic-angular-irdragndrop
+python3-xstatic-angular-schema-form
+python3-xstatic-bootstrap-datepicker
+python3-xstatic-bootstrap-scss
+python3-xstatic-bootswatch
+python3-xstatic-d3
+python3-xstatic-font-awesome
+python3-xstatic-font-awesome
+python3-xstatic-hogan
+python3-xstatic-jasmine
+python3-xstatic-jquery
+python3-xstatic-jquery-migrate
+python3-xstatic-roboto-fontface
+python3-xxhash
+python3-yappi
+python3-yarl
+python3-zc-lockfile
+python3-zipp
+python3-zopeinterface
+pyxdg
+qat16
+qat17
+qemu
+qemu-helper-native
+qemu-native
+qemu-system-native
+qemuwrapper-cross
+qmllive
+qpdf
+qpid
+qpid-python
+qrencode
+qsiv
+qt-kiosk-browser
+qt3d
+qt5-creator
+qt5-demo-extrafiles
+qt5-opengles2-test
+qt5-plugin-generic-vboxtouch
+qt5everywheredemo
+qt5ledscreen
+qt5nmapcarousedemo
+qt5nmapper
+qtbase
+qtbase-native
+qtcharts
+qtchooser
+qtcoap
+qtconnectivity
+qtdatavis3d
+qtdeclarative
+qtgamepad
+qtgraphicaleffects
+qtimageformats
+qtknx
+qtlocation
+qtlottie
+qtmqtt
+qtmultimedia
+qtnetworkauth
+qtopcua
+qtpurchasing
+qtquick3d
+qtquickcontrols
+qtquickcontrols2
+qtquicktimeline
+qtremoteobjects
+qtscript
+qtscxml
+qtsensors
+qtserialbus
+qtserialport
+qtsmarthome
+qtsvg
+qtsystems
+qttools
+qttranslations
+qtvirtualkeyboard
+qtwayland
+qtwebchannel
+qtwebengine
+qtwebglplugin
+qtwebkit
+qtwebsockets
+qtwebview
+qtx11extras
+qtxmlpatterns
+quagga
+quazip
+quilt
+quilt-native
+quitbattery
+quitindicators
+quota
+qwt-qt5
+rabbitmq-c
+rabbitmq-server
+rack
+racoon2
+radiusclient-ng
+radvd
+rapidjson
+raptor2
+rarpd
+rasdaemon
+rclone
+rdate
+rdfind
+rdist
+rdma-core
+re2c
+read-edid
+readline
+readline
+redhat-security
+redis
+refpolicy-mcs
+refpolicy-mcs
+refpolicy-minimum
+refpolicy-minimum
+refpolicy-mls
+refpolicy-mls
+refpolicy-standard
+refpolicy-standard
+refpolicy-targeted
+refpolicy-targeted
+relayd
+remmina
+renderdoc
+resolvconf
+resource-agents
+rest
+rest-client
+restic
+restorecond
+rfkill
+rgb
+riddler
+ristretto
+rndmac
+rng-tools
+rocksdb
+rodent-icon-theme
+rp-pppoe
+rpcbind
+rpcsvc-proto
+rpi-u-boot-scr
+rpm
+rrdtool
+rsnapshot
+rsync
+rsync
+rsyslog
+rt-app
+rt-tests
+rtl8723bs-bt
+rtmpdump
+rtorrent
+ruby
+ruby-shadow
+ruli
+run-container
+run-postinsts
+runc-docker
+runc-opencontainers
+runv
+runx
+rwmem
+rxvt-unicode
+rxvt-unicode
+rygel
+s-suite
+safec
+saftest
+salt
+samba
+samhain-client
+samhain-server
+samhain-standalone
+sanlock
+sassc
+sato-screenshot
+satyr
+sbc
+sblim-cmpi-devel
+sblim-sfc-common
+sblim-sfcb
+sblim-sfcc
+sbsigntool
+sbsigntool-native
+scap-security-guide
+scap-security-guide
+schedtool-dl
+schroedinger
+screen
+screen-getty
+scsirastools
+sdbus-c++
+sdbus-c++-libsystemd
+sdbus-c++-tools
+sdparm
+seabios
+secilc
+secure-core-image
+secure-core-image-initramfs
+secure-core-minimal-image
+secureboot-selftest-image-signed
+secureboot-selftest-image-unsigned
+security-build-image
+security-client-image
+security-server-image
+security-test-image
+sed
+sed
+sedutil
+selinux-autorelabel
+selinux-dbus
+selinux-gui
+selinux-init
+selinux-labeldev
+selinux-python
+selinux-sandbox
+seloader
+semodule-utils
+ser2net
+serf
+serial-forward
+serialcheck
+serverengine
+sessreg
+sethdlc
+setools
+setserial
+settings-daemon
+setxkbmap
+sg3-utils
+sgpio
+shadow
+shadow-securetty
+shadow-sysroot
+shared-mime-info
+shared-mime-info
+sharutils
+shim
+shutdown-desktop
+sigdump
+signing-keys
+sigrok-cli
+simple-mtpfs
+singularity
+sip3
+skopeo
+skopeo
+slang
+slirp4netns
+sloci-image-native
+slop
+smack
+smack-test
+smartmontools
+smbnetfs
+smcroute
+smem
+smstools3
+snappy
+snort
+socat
+soci
+softhsm
+sota-tools
+sound-theme-freedesktop
+source-han-sans-cn-fonts
+source-han-sans-jp-fonts
+source-han-sans-kr-fonts
+source-han-sans-tw-fonts
+sox
+span-lite
+spawn-fcgi
+spdlog
+speedtest-cli
+speex
+speexdsp
+spf13-cobra
+spf13-pflag
+spice
+spice
+spice-html5
+spice-protocol
+spidev-test
+spirv-shader-generator
+spirv-tools
+spitools
+sqlite
+sqlite-orm
+sqlite3
+squashfs-tools
+squid
+srecord
+sshfs-fuse
+ssiapi
+ssmping
+ssmtp
+sssd
+stalonetray
+startup-notification
+sthttpd
+stm32flash
+strace
+streamripper
+stress-ng
+stressapptest
+strongswan
+strptime
+stunnel
+subversion
+sudo
+surf
+suricata
+swig
+synergy
+sysbench
+sysdig
+sysfsutils
+sysklogd
+syslinux
+syslog-ng
+sysprof
+sysstat
+system-config-keyboard
+systemd
+systemd-boot
+systemd-bootchart
+systemd-bootconf
+systemd-compat-units
+systemd-conf
+systemd-machine-units
+systemd-serialgetty
+systemd-systemctl-native
+systemtap
+systemtap-native
+systemtap-uprobes
+systemu
+sysvinit
+sysvinit-inittab
+taglib
+takao-fonts
+tar
+tar
+target-sdk-provides-dummy
+tbb
+tcf-agent
+tcl
+tclap
+tcp-smack-test
+tcp-wrappers
+tcpdump
+tcpreplay
+tcpslice
+tcsh
+tearsofsteel-1080p
+telepathy-glib
+telepathy-idle
+telepathy-python
+telepathy-python3
+tempest
+tensorboard
+tensorflow
+tensorflow-estimator
+tensorflow-for-poets
+tensorflow-native
+terminus-font
+tesseract
+tesseract-lang
+testexport-tarball
+testfloat
+texinfo
+texinfo
+texinfo-dummy-native
+tftp-hpa
+tgt
+thermald
+thin-provisioning-tools
+thrift
+thunar
+thunar-archive-plugin
+thunar-media-tags-plugin
+thunar-shares-plugin
+thunar-volman
+tiff
+tigervnc
+time
+time
+tini
+tiny-init
+tiny-init
+tinyalsa
+tinymembench
+tinyproxy
+tiobench
+tipcutils
+tiptop
+tk
+tmon
+tmux
+tnftp
+tokyocabinet
+tomoyo-tools
+toscoterm
+toybox
+tpm-quote-tools
+tpm-tools
+tpm2-abrmd
+tpm2-abrmd
+tpm2-tools
+tpm2-tools
+tpm2-tss
+tpm2-tss
+tpm2simulator-native
+traceroute
+tracker
+tracker-miners
+transmission
+tree
+tremor
+triggerhappy
+tripwire
+trousers
+tslib
+tsocks
+tss-testsuite
+ttf-abyssinica
+ttf-arphic-uming
+ttf-bitstream-vera
+ttf-dejavu
+ttf-droid
+ttf-gentium
+ttf-hunkyfonts
+ttf-inconsolata
+ttf-liberation
+ttf-liberation-sans-narrow
+ttf-lklug
+ttf-lohit
+ttf-mplus
+ttf-noto-emoji
+ttf-pt-sans
+ttf-roboto
+ttf-sazanami
+ttf-tlwg
+ttf-ubuntu-font-family
+ttf-vlgothic
+ttf-wqy-zenhei
+tufao
+tumbler
+tunctl
+turbostat
+tvheadend
+twm
+tzcode-native
+tzdata
+tzinfo
+tzinfo-data
+u-boot
+u-boot-imx
+u-boot-tools
+u-boot-uenv
+ubi-utils-klibc
+ucarp
+udev-extraconf
+udevil
+udisks2
+udocker
+udp-smack-test
+ufs-utils
+uftp
+uftrace
+ufw
+uhubctl
+uim
+umip
+uml-utilities
+umoci
+umock-c
+unbound
+unclutter-xfixes
+unfs3
+unicode-ucd
+unifdef
+uninative-tarball
+unionfs-fuse
+unixodbc
+unzip
+update-rc.d
+upm
+upower
+uriparser
+usb-modeswitch
+usb-modeswitch-data
+usbinit
+usbmuxd
+usbredir
+usbutils
+usermode
+usleep
+usrsctp
+uthash
+util-linux
+util-macros
+utouch-evemu
+utouch-frame
+utouch-mtview
+uw-imap
+uwsgi
+uxen-guest-image-minimal
+uxen-guest-tools
+v4l-utils
+v86d
+vala
+valgrind
+valijson
+vblade
+vboxguestdrivers
+vdso-test
+vgabios
+vim
+vim-tiny
+virglrenderer
+vlan
+vlc
+vlock
+vo-aacenc
+vo-amrwbenc
+volatile-binds
+volume-key
+vorbis-tools
+vpnc
+vrf
+vrf-init
+vsftpd
+vte
+vte9
+vulkan-demos
+vulkan-headers
+vulkan-loader
+vulkan-tools
+waffle
+watchdog
+watchdog-config
+wavpack
+wayland
+wayland-protocols
+wbxml2
+wdt-tool
+webkitgtk
+webmin
+webrtc-audio-processing
+websocketpp
+weechat
+weston
+weston-init
+wget
+whetstone
+which
+which
+wic-tools
+wifi-test-suite
+wiggle
+windriver-logos
+wipe
+wireguard-module
+wireguard-tools
+wireless-regdb
+wireshark
+wolfssl
+wpa-supplicant
+wpan-tools
+wpebackend-fdo
+wr-app-container
+wr-common-packages-native
+wr-init
+wr-systemd-container
+wr-themes
+wrlinux-image-cgl
+wrlinux-image-core
+wrlinux-image-initramfs
+wrlinux-image-installer
+wrlinux-image-installer-initramfs
+wrlinux-image-minimal-initramfs
+wrlinux-image-ovp-guest
+wrlinux-image-ovp-kvm
+wrlinux-image-ovp-kvm-minimal
+wrlinux-image-small
+wrlinux-image-std
+wrlinux-image-std-sato
+wrlinux-image-tiny-initramfs
+wvdial
+wvstreams
+wxwidgets
+x11perf
+x11vnc
+x264
+x265
+xarchiver
+xauth
+xbitmaps
+xcb-proto
+xcb-util
+xcb-util-cursor
+xcb-util-image
+xcb-util-keysyms
+xcb-util-renderutil
+xcb-util-wm
+xclock
+xcursor-transparent-theme
+xcursorgen
+xdebug
+xdelta3
+xdg-user-dirs
+xdg-utils
+xdotool
+xdpyinfo
+xen
+xen-guest-image-minimal
+xen-image-minimal
+xen-python2
+xen-tools
+xerces-c
+xev
+xeyes
+xf86-input-evdev
+xf86-input-keyboard
+xf86-input-libinput
+xf86-input-mouse
+xf86-input-synaptics
+xf86-input-tslib
+xf86-input-vmmouse
+xf86-input-void
+xf86-video-armsoc
+xf86-video-armsoc
+xf86-video-ast
+xf86-video-ati
+xf86-video-cirrus
+xf86-video-fbdev
+xf86-video-intel
+xf86-video-mga
+xf86-video-nouveau
+xf86-video-vesa
+xf86-video-vmware
+xfce-dusk-gtk3
+xfce-polkit
+xfce4-appfinder
+xfce4-battery-plugin
+xfce4-calculator-plugin
+xfce4-clipman-plugin
+xfce4-closebutton-plugin
+xfce4-cpufreq-plugin
+xfce4-cpugraph-plugin
+xfce4-datetime-plugin
+xfce4-datetime-setter
+xfce4-dev-tools
+xfce4-diskperf-plugin
+xfce4-embed-plugin
+xfce4-equake-plugin
+xfce4-eyes-plugin
+xfce4-fsguard-plugin
+xfce4-genmon-plugin
+xfce4-hotcorner-plugin
+xfce4-mailwatch-plugin
+xfce4-mount-plugin
+xfce4-mpc-plugin
+xfce4-netload-plugin
+xfce4-notes-plugin
+xfce4-notifyd
+xfce4-panel
+xfce4-panel-profiles
+xfce4-places-plugin
+xfce4-power-manager
+xfce4-pulseaudio-plugin
+xfce4-screensaver
+xfce4-screenshooter
+xfce4-sensors-plugin
+xfce4-session
+xfce4-settings
+xfce4-smartbookmark-plugin
+xfce4-systemload-plugin
+xfce4-taskmanager
+xfce4-terminal
+xfce4-time-out-plugin
+xfce4-timer-plugin
+xfce4-vala
+xfce4-verve-plugin
+xfce4-wavelan-plugin
+xfce4-weather-plugin
+xfce4-whiskermenu-plugin
+xfce4-xkb-plugin
+xfconf
+xfdesktop
+xfmpc
+xfontsel
+xfsdump
+xfsprogs
+xfwm4
+xfwm4-themes
+xgamma
+xhost
+xinetd
+xinit
+xinput
+xinput-calibrator
+xkbcomp
+xkbevd
+xkbprint
+xkbutils
+xkeyboard-config
+xl2tpd
+xlsatoms
+xlsclients
+xlsfonts
+xmag
+xmessage
+xmlrpc-c
+xmlsec1
+xmlstarlet
+xmlto
+xmodmap
+xorg-fonts-100dpi
+xorg-minimal-fonts
+xorg-sgml-doctools
+xorgproto
+xorgxrdp
+xorriso
+xpext
+xprop
+xrandr
+xrdb
+xrdp
+xrefresh
+xrestop
+xscreensaver
+xserver-common
+xserver-nodm-init
+xserver-xf86-config
+xserver-xorg
+xserver-xorg-cvt-native
+xset
+xsetmode
+xsetroot
+xsp
+xstdcmap
+xterm
+xtrans
+xuser-account
+xvinfo
+xvisor
+xwd
+xwininfo
+xwud
+xxhash
+xz
+yad
+yaffs2-utils
+yajl
+yajl
+yajl-ruby
+yard
+yasm
+yavta
+yelp
+yelp-tools
+yelp-xsl
+yocto-compat-logos
+yp-tools
+yp-tools
+ypbind-mt
+zabbix
+zbar
+zchunk
+zenity
+zeroconf
+zeromq
+zile
+zip
+zlib
+zlib-intel
+zlib-qat
+zlib-qat
+zlog
+znc
+zram
+zsh
+zstd
diff --git a/lib/acme/reports.py b/lib/acme/reports.py
index 682852ad..92c5693e 100755
--- a/lib/acme/reports.py
+++ b/lib/acme/reports.py
@@ -34,7 +34,7 @@ from srtgui.reports import Report, ReportManager, ProductsReport
from django.db.models import Q, F
from django.db import Error
-from srtgui.templatetags.projecttags import filtered_filesizeformat
+from srtgui.templatetags.jobtags import filtered_filesizeformat
logger = logging.getLogger("srt")
diff --git a/lib/acme/tables.py b/lib/acme/tables.py
index ee14136b..0e39dc78 100755
--- a/lib/acme/tables.py
+++ b/lib/acme/tables.py
@@ -29,7 +29,7 @@ from orm.models import Notify, NotifyAccess, NotifyCategories
from users.models import SrtUser, UserSafe
from django.db.models import Q, Max, Sum, Count, When, Case, Value, IntegerField
-from django.conf.urls import url
+from django.urls import re_path as url
from django.urls import reverse, resolve
from django.http import HttpResponse
from django.views.generic import TemplateView
diff --git a/lib/acme/templates/acme_hello.html b/lib/acme/templates/acme_hello.html
index 679f45a2..dac5c66c 100755
--- a/lib/acme/templates/acme_hello.html
+++ b/lib/acme/templates/acme_hello.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} ACME {% endblock %}
diff --git a/lib/acme/templates/acme_product.html b/lib/acme/templates/acme_product.html
index f1fb1a8b..0e519acd 100755
--- a/lib/acme/templates/acme_product.html
+++ b/lib/acme/templates/acme_product.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - ACME Style {% endblock %}
diff --git a/lib/acme/templates/base.html b/lib/acme/templates/base.html
index 5e1f847f..978f73bd 100755
--- a/lib/acme/templates/base.html
+++ b/lib/acme/templates/base.html
@@ -1,7 +1,6 @@
<!DOCTYPE html>
{% load static %}
-{% load projecttags %}
-{% load project_url_tag %}
+{% load jobtags %}
<html lang="en">
<head>
<title>
diff --git a/lib/acme/urls.py b/lib/acme/urls.py
index be10ef3e..9ce5d43a 100755
--- a/lib/acme/urls.py
+++ b/lib/acme/urls.py
@@ -1,4 +1,4 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url,include
from . import views, tables
urlpatterns = [
diff --git a/lib/cve_checker/__init__.py b/lib/cve_checker/__init__.py
new file mode 100755
index 00000000..e69de29b
--- /dev/null
+++ b/lib/cve_checker/__init__.py
diff --git a/lib/cve_checker/admin.py b/lib/cve_checker/admin.py
new file mode 100755
index 00000000..8c38f3f3
--- /dev/null
+++ b/lib/cve_checker/admin.py
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/lib/cve_checker/apps.py b/lib/cve_checker/apps.py
new file mode 100755
index 00000000..0f8bc069
--- /dev/null
+++ b/lib/cve_checker/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class Cve_CheckerConfig(AppConfig):
+ name = 'cve_checker'
diff --git a/lib/cve_checker/migrations/0001_initial.py b/lib/cve_checker/migrations/0001_initial.py
new file mode 100644
index 00000000..29cf266c
--- /dev/null
+++ b/lib/cve_checker/migrations/0001_initial.py
@@ -0,0 +1,71 @@
+# Generated by Django 4.0 on 2023-11-15 08:56
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ('orm', '0014_alter_packagetocve_applicable'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Ck_Audit',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
+ ('orm_product', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='orm.product')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Layer',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Package',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ('version', models.CharField(max_length=80)),
+ ('unpatched_cnt', models.IntegerField(default=0)),
+ ('ignored_cnt', models.IntegerField(default=0)),
+ ('patched_cnt', models.IntegerField(default=0)),
+ ('ck_audit', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit')),
+ ('ck_layer', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_layer')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Product',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='CkPackage2Cve',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('ck_status', models.IntegerField(choices=[(0, 'Undefined'), (1, 'Unpatched'), (2, 'Ignored'), (3, 'Patched')], default=0)),
+ ('ck_audit', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit')),
+ ('ck_package', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issue2pk_package', to='cve_checker.ck_package')),
+ ('orm_cve', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='orm.cve')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='CkPackage2CkProduct',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cvesInRecord', models.BooleanField(default=True)),
+ ('ck_package', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_package')),
+ ('ck_product', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_product')),
+ ],
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py b/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py
new file mode 100644
index 00000000..9caf7520
--- /dev/null
+++ b/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.0 on 2023-11-12 18:32
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ckpackage2cve',
+ name='ck_audit',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit'),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py b/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py
new file mode 100644
index 00000000..3e6fa9c2
--- /dev/null
+++ b/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.0 on 2023-11-12 20:46
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0002_ckpackage2cve_ck_audit'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='ckpackage2cve',
+ name='ck_package',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issue2pk_package', to='cve_checker.ck_package'),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py b/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py
new file mode 100644
index 00000000..6f36579c
--- /dev/null
+++ b/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py
@@ -0,0 +1,28 @@
+# Generated by Django 4.0 on 2023-11-15 02:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0003_alter_ckpackage2cve_ck_package'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ck_package',
+ name='ignored_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='ck_package',
+ name='patched_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='ck_package',
+ name='unpatched_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0005_ckuploadmanager.py b/lib/cve_checker/migrations/0005_ckuploadmanager.py
new file mode 100644
index 00000000..bb211c58
--- /dev/null
+++ b/lib/cve_checker/migrations/0005_ckuploadmanager.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.0 on 2023-11-19 21:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='CkUploadManager',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('order', models.IntegerField(default=0)),
+ ('name', models.CharField(max_length=80)),
+ ('mode', models.CharField(max_length=20)),
+ ('path', models.TextField(blank=True)),
+ ('pem', models.TextField(blank=True)),
+ ('repo', models.TextField(blank=True)),
+ ('branch', models.TextField(blank=True)),
+ ('auto_refresh', models.BooleanField(default=True)),
+ ],
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py b/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py
new file mode 100644
index 00000000..46785880
--- /dev/null
+++ b/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-11-19 21:23
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0005_ckuploadmanager'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='ckuploadmanager',
+ old_name='mode',
+ new_name='import_mode',
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py b/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py
new file mode 100644
index 00000000..121dc9e6
--- /dev/null
+++ b/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.0 on 2023-11-20 07:19
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0006_rename_mode_ckuploadmanager_import_mode'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ckuploadmanager',
+ name='select_list',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AddField(
+ model_name='ckuploadmanager',
+ name='select_refresh',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/__init__.py b/lib/cve_checker/migrations/__init__.py
new file mode 100755
index 00000000..e69de29b
--- /dev/null
+++ b/lib/cve_checker/migrations/__init__.py
diff --git a/lib/cve_checker/models.py b/lib/cve_checker/models.py
new file mode 100755
index 00000000..8ed61a7a
--- /dev/null
+++ b/lib/cve_checker/models.py
@@ -0,0 +1,165 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from __future__ import unicode_literals
+
+import sys
+import os
+import re
+import itertools
+from signal import SIGUSR1
+from datetime import datetime
+
+from django.db import models, IntegrityError, DataError
+from django.db import transaction
+from django.core import validators
+from django.conf import settings
+import django.db.models.signals
+from django.db.models import F, Q, Sum, Count
+from django.contrib.auth.models import AbstractUser, Group, AnonymousUser
+
+from orm.models import Cve, Product
+from srtgui.api import execute_process, execute_process_close_fds
+
+import logging
+logger = logging.getLogger("srt")
+
+# quick development/debugging support
+from srtgui.api import _log
+
+#######################################################################
+# Models
+#
+
+# CVE Checker Audit
+class Ck_Audit(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+ orm_product = models.ForeignKey(default=None, to='orm.product', null=True, on_delete=models.CASCADE,)
+ create_time = models.DateTimeField(auto_now_add=True, null=True)
+ @property
+ def get_package_count(self):
+ return (Ck_Package.objects.filter(ck_audit=self).count())
+ @property
+ def get_issue_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).count())
+ @property
+ def get_unpatched_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.UNPATCHED).count())
+ @property
+ def get_ignored_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.IGNORED).count())
+ @property
+ def get_patched_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.PATCHED).count())
+ @property
+ def get_undefined_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.UNDEFINED).count())
+
+# Generated YP package
+class Ck_Package(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+ version = models.CharField(max_length=80)
+ ck_layer = models.ForeignKey(default=None, to='cve_checker.ck_layer', null=True, on_delete=models.CASCADE,)
+ ck_audit = models.ForeignKey(default=None, to='cve_checker.ck_audit', null=True, on_delete=models.CASCADE,)
+ # These values are here for filtering support, given limitations of Django's distinct() and table filters
+ unpatched_cnt = models.IntegerField(default=0)
+ ignored_cnt = models.IntegerField(default=0)
+ patched_cnt = models.IntegerField(default=0)
+ @property
+ def get_issue_count(self):
+ return (CkPackage2Cve.objects.filter(ck_package=self).count())
+ @property
+ def get_product_count(self):
+ return (CkPackage2CkProduct.objects.filter(ck_package=self).count())
+ @property
+ def get_product_names(self):
+ id_list = []
+ for pk2pr in CkPackage2CkProduct.objects.filter(ck_package=self):
+ id_list.append(f"{pk2pr.ck_product.name} ({pk2pr.cvesInRecord})")
+ return(','.join(id_list))
+
+# Representation of NVD "CPE"
+class Ck_Product(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+
+# YP Layer
+class Ck_Layer(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+
+# CVEs of a Package
+# Unpatched = "Not Fixed" and is (assumed) "Vulnerable"
+# Ignored = "Not Vulnerable" or "Won't Fix" or "Fixed"
+# Patched = "Fixed" or "Not Vulnerable"
+class CkPackage2Cve(models.Model):
+ search_allowed_fields = ['orm_cve__name', 'orm_cve__description']
+ # CveCheck Issue Status
+ UNDEFINED = 0
+ UNPATCHED = 1
+ IGNORED = 2
+ PATCHED = 3
+ CK_STATUS = (
+ (UNDEFINED , 'Undefined'),
+ (UNPATCHED, 'Unpatched'),
+ (IGNORED, 'Ignored'),
+ (PATCHED, 'Patched'),
+ )
+ ck_package = models.ForeignKey(default=None, to='cve_checker.ck_package', related_name="issue2pk_package", null=True, on_delete=models.CASCADE,)
+ orm_cve = models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)
+ ck_status = models.IntegerField(choices=CK_STATUS, default=UNDEFINED)
+ # Link to grandparent audit is included for instant counts in the GUI
+ ck_audit = models.ForeignKey(default=None, to='cve_checker.ck_audit', null=True, on_delete=models.CASCADE,)
+ @property
+ def get_status_text(self):
+ if (0 > self.ck_status) or (self.ck_status >= len(CkPackage2Cve.CK_STATUS)):
+ return 'Undefined'
+ return CkPackage2Cve.CK_STATUS[self.ck_status][1]
+
+# Products of a Package
+class CkPackage2CkProduct(models.Model):
+ ck_package = models.ForeignKey(default=None, to='cve_checker.ck_package', null=True, on_delete=models.CASCADE,)
+ ck_product = models.ForeignKey(default=None, to='cve_checker.ck_product', null=True, on_delete=models.CASCADE,)
+ cvesInRecord = models.BooleanField(default=True)
+
+# Upload/import source manager used when creating audits
+class CkUploadManager(models.Model):
+ order = models.IntegerField(default=0) # Display order
+ name = models.CharField(max_length=80) # Name of this import manager
+ import_mode = models.CharField(max_length=20) # Repo|SSL|File
+ path = models.TextField(blank=True) # Source path, path within repo
+ pem = models.TextField(blank=True) # PEM file for SSH
+ repo = models.TextField(blank=True) # Repository URL
+ branch = models.TextField(blank=True) # Branch in repo if any, for repo
+ auto_refresh = models.BooleanField(default=True) # if wild card, refresh when "Create Audit" is selected
+ select_refresh = models.DateTimeField(auto_now_add=True, null=True) # Last time select list was updated
+ select_list = models.TextField(blank=True) # List (if any) for pull down list, '|' delimited
+ @property
+ def is_select_list(self):
+ return (self.select_list and (0 < len(self.select_list)))
+ @property
+ def get_select_list(self):
+ return self.select_list.split('|')
+ @property
+ def get_path_filename(self):
+ return self.path.split('/')[-1]
diff --git a/lib/cve_checker/reports.py b/lib/cve_checker/reports.py
new file mode 100755
index 00000000..3735bcc3
--- /dev/null
+++ b/lib/cve_checker/reports.py
@@ -0,0 +1,511 @@
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+# Please run flake8 on this file before sending patches
+
+import os
+import re
+import logging
+from datetime import datetime, date
+import csv
+from openpyxl import Workbook
+from openpyxl import load_workbook
+from openpyxl.styles import Border, Side, PatternFill, Font, GradientFill, Alignment
+from openpyxl.utils import get_column_letter
+import shlex
+
+from srtgui.reports import Report, ReportManager, ProductsReport
+from cve_checker.models import Ck_Audit, Ck_Package, Ck_Product, Ck_Layer, CkPackage2CkProduct, CkPackage2Cve
+from srtgui.api import execute_process
+
+from django.db.models import Q, F
+from django.db import Error
+from srtgui.templatetags.jobtags import filtered_filesizeformat
+
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
+
+# quick development/debugging support
+from srtgui.api import _log
+
+###############################################################################
+# Helper Routines
+#
+
+def _log_args(msg, *args, **kwargs):
+ s = '%s:(' % msg
+ if args:
+ for a in args:
+ s += '%s,' % a
+ s += '),('
+ if kwargs:
+ for key, value in kwargs.items():
+ s += '(%s=%s),' % (key,value)
+ s += ')'
+ _log(s)
+
+def dict_get_value(dict,name,default):
+ return dict[name] if name in dict else default
+
+###############################################################################
+# Excel/openpyxl common look and feel formatting objects
+#
+
+#pyxl_border_all = Border(left=thin, right=thin, top=thin, bottom=thin) # , outline=True)
+pyxl_thin = Side(border_style="thin")
+pyxl_double = Side(border_style="double")
+pyxl_border_left = Border(left=pyxl_thin)
+pyxl_border_bottom = Border(bottom=pyxl_thin)
+pyxl_border_bottom_left = Border(bottom=pyxl_thin, left=pyxl_thin)
+pyxl_alignment_left = Alignment(horizontal='left')
+pyxl_alignment_right = Alignment(horizontal='right')
+pyxl_alignment_wrap = Alignment(wrap_text=True)
+pyxl_alignment_top_wrap = Alignment(vertical="top",wrap_text=True)
+pyxl_font_bold = Font(bold=True)
+pyxl_font_red = Font(color="A00000",bold=True,size = "13")
+pyxl_font_grn = Font(color="00A000",bold=True,size = "13")
+pyxl_font_blu = Font(color="0000A0",bold=True,size = "13")
+pyxl_font_orn = Font(color="FF6600",bold=True,size = "13")
+pyxl_fill_green = PatternFill(start_color="E0FFF0", end_color="E0FFF0", fill_type = "solid")
+# Warning: the form "PatternFill(bgColor="xxxxxx", fill_type = "solid")" returns black cells
+pyxl_backcolor_red = PatternFill(start_color='FCCDBA', end_color='FCCDBA', fill_type = "solid")
+pyxl_backcolor_orn = PatternFill(start_color='FBEAAB', end_color='FBEAAB', fill_type = "solid")
+pyxl_backcolor_yel = PatternFill(start_color='FCFDC7', end_color='FCFDC7', fill_type = "solid")
+pyxl_backcolor_blu = PatternFill(start_color='C5E2FF', end_color='C5E2FF', fill_type = "solid")
+pyxl_backcolor_grn = PatternFill(start_color='D6EDBD', end_color='D6EDBD', fill_type = "solid")
+pyxl_cve_fills = [pyxl_backcolor_red,pyxl_backcolor_orn,pyxl_backcolor_yel,pyxl_backcolor_blu,None,None,None]
+
+def pyxl_write_cell(ws,row_num,column_num,value,border=None,font=None,fill=None,alignment=None):
+ cell = ws.cell(row=row_num, column=column_num)
+ try:
+ cell.value = value
+ if fill:
+ cell.fill = fill
+ if alignment:
+ cell.alignment = alignment
+ if border:
+ cell.border = border
+ if font:
+ cell.font = font
+ except Exception as e:
+ print("ERROR:(%d,%d):%s" % (row_num,column_num,e))
+ # Optional next column return value
+ return(column_num+1)
+
+
+###############################################################################
+# Report Manage for cvecheckerRecord
+#
+
+def doCveCheckerAuditSummaryExcel(ck_audit,options):
+ _log_args("doCveCheckerAuditSummaryExcel", options)
+
+ report_page = dict_get_value(options,'report_page', '')
+ search = dict_get_value(options,'search', '')
+ filter = dict_get_value(options,'filter', '')
+ filter_value = dict_get_value(options,'filter_value', '')
+ orderby = dict_get_value(options,'orderby', '')
+ default_orderby = dict_get_value(options,'default_orderby', '')
+ audit_id = dict_get_value(options,'audit_id', 1)
+
+ do_local_job = False
+ job_local_cnt = 0
+
+ audit_name_fixed = ck_audit.name
+ for ch in (' ','/',':','<','>','$','(',')','\\'):
+ audit_name_fixed = audit_name_fixed.replace(ch,'_')
+
+ report_path = '.'
+ report_name = f"summary_report_{audit_name_fixed}.xlsx"
+ report_full_path = os.path.join(SRT_REPORT_DIR,report_path,report_name)
+ wb = Workbook()
+ primary_sheet_used = False
+
+ #
+ # audit-summary Critical High Medium Low P1 P2 P3 P4 Repos
+ #
+
+ if 'audit-summary' in options:
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'audit-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Audit Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Audit Summary")
+
+ ws.column_dimensions[get_column_letter(1)].width = 30
+ ws.column_dimensions[get_column_letter(2)].width = 40
+ row = 1
+
+ col = pyxl_write_cell(ws,row, 1,'CVE Checker Audit Report',font=pyxl_font_bold,border=pyxl_border_bottom)
+ row += 1
+
+ row += 1
+ col = pyxl_write_cell(ws,row, 1,'Audit name')
+ col = pyxl_write_cell(ws,row,col,ck_audit.name,font=pyxl_font_bold)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Release')
+ col = pyxl_write_cell(ws,row,col,ck_audit.orm_product.long_name)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Date')
+ col = pyxl_write_cell(ws,row,col,str(ck_audit.create_time))
+ row += 1
+
+ # Compute products and layers
+ product_count = 0
+ layers = {}
+ for ck_package in Ck_Package.objects.filter(ck_audit=ck_audit):
+ product_count += CkPackage2CkProduct.objects.filter(ck_package=ck_package).count()
+ layers[ck_package.ck_layer.name] = 1
+ layer_count = len(layers)
+
+ # Compute CVEs
+ severity_table = []
+ # Critical, High, Medium, Low, Unknown
+ severity_table.append([0,0,0,0,0]) # UNDEFINED
+ severity_table.append([0,0,0,0,0]) # UNPATCHED
+ severity_table.append([0,0,0,0,0]) # IGNORED
+ severity_table.append([0,0,0,0,0]) # PATCHED
+ s2i = {}
+ s2i['CRITICAL'] = 0
+ s2i['HIGH'] = 1
+ s2i['MEDIUM'] = 2
+ s2i['LOW'] = 3
+ s2i[''] = 4
+ unique_cves = {}
+ for issue in CkPackage2Cve.objects.filter(ck_audit=ck_audit):
+ unique_cves[issue.orm_cve.name] = 1
+ severity = issue.orm_cve.cvssV3_baseSeverity if issue.orm_cve.cvssV3_baseSeverity else issue.orm_cve.cvssV2_severity
+ severity = severity.upper()
+ try:
+ col = s2i[severity.upper()]
+ except:
+ col = 4
+ severity_table[issue.ck_status][col] += 1
+
+ row += 1
+ col = pyxl_write_cell(ws,row, 1,'Package Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_package_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Product Count')
+ col = pyxl_write_cell(ws,row,col, product_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Layer Count')
+ col = pyxl_write_cell(ws,row,col,layer_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Issue Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_issue_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Unique Issue Count')
+ col = pyxl_write_cell(ws,row,col,len(unique_cves))
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 4,'Critical',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'High',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Medium',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Low',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Undefined',border=pyxl_border_bottom)
+ row += 1
+
+ def append_severity(status_id,row):
+ col = pyxl_write_cell(ws,row, 4,severity_table[status_id][0],fill=pyxl_backcolor_red)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][1],fill=pyxl_backcolor_orn)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][2],fill=pyxl_backcolor_blu)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][3],fill=pyxl_backcolor_grn)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][4])
+
+ col = pyxl_write_cell(ws,row, 1,'Unpatched_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_unpatched_count,fill=pyxl_backcolor_red)
+ append_severity(CkPackage2Cve.UNPATCHED,row)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Ignored_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_ignored_count)
+ append_severity(CkPackage2Cve.IGNORED,row)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Patched_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_patched_count)
+ append_severity(CkPackage2Cve.PATCHED,row)
+ row += 1
+
+ #
+ # package-summary
+ #
+
+ if 'package-summary' in options:
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'package-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Package Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Package Summary")
+
+ ws.column_dimensions[get_column_letter(1)].width = 30
+ ws.column_dimensions[get_column_letter(5)].width = 30
+ ws.column_dimensions[get_column_letter(6)].width = 60
+
+ row = 1
+ first_row = 2
+
+ col = 1
+ for header in ('Package','Version','Layer','Issues','Unpatched CVE','Products (cvesInRecord)'):
+# border = pyxl_border_bottom_left if (col in (3,7,12)) else pyxl_border_bottom
+ border = pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ if (col >= 2) and (col <= 10):
+ ws.column_dimensions[get_column_letter(col)].width = 11
+ col += 1
+ row += 1
+
+ # Sort packages by severity,package.name
+ package_list = Ck_Package.objects.filter(ck_audit=ck_audit).order_by('name')
+ for package in package_list:
+ col = pyxl_write_cell(ws,row, 1,package.name)
+ col = pyxl_write_cell(ws,row,col,package.version)
+ col = pyxl_write_cell(ws,row,col,package.ck_layer.name)
+ col = pyxl_write_cell(ws,row,col,package.get_issue_count)
+ col = pyxl_write_cell(ws,row,col,package.unpatched_cnt)
+ col = pyxl_write_cell(ws,row,col,package.get_product_names)
+ row += 1
+
+ #
+ # unpatched-summary
+ #
+
+ if ('unpatched-summary' in options) or ('unpatched-summary-compare' in options):
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'unpatched-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Unpatched Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Unpatched Summary")
+
+ # Comparables
+ comparable_list = ['wr_trivy','ubuntu_trivy']
+
+ ws.column_dimensions[get_column_letter(1)].width = 20
+ ws.column_dimensions[get_column_letter(2)].width = 14
+ ws.column_dimensions[get_column_letter(7)].width = 14
+ ws.column_dimensions[get_column_letter(8)].width = 14
+ for i,comparable in enumerate(comparable_list):
+ ws.column_dimensions[get_column_letter(9+i)].width = 60
+
+ row = 1
+ first_row = 2
+ col = 1
+ header_list = ['Issue','Status','V3 Severity','V3 Score','V2 Severity','V2 Score','Published','Package']
+ if 'unpatched-summary-compare' in options:
+ header_list.extend(comparable_list)
+ for header in header_list:
+ border = pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ col += 1
+ row += 1
+
+ # Sort unpatched issues by CVE name
+ issues_list = CkPackage2Cve.objects.filter(ck_audit=ck_audit).filter(ck_status=CkPackage2Cve.UNPATCHED).order_by('orm_cve__name')
+
+ # Merge comparables?
+ comparibles = {}
+ if 'unpatched-summary-compare' in options:
+ issue_list = {}
+ for issue in issues_list:
+ issue_list[issue.orm_cve.name] = 1
+ filename = ('.cve_list.txt')
+ with open(filename, 'w') as outfile:
+ outfile.write('\n'.join(str(cve) for cve in issue_list))
+
+ for i,comparable in enumerate(comparable_list):
+ comparibles[comparable] = {}
+ exec_returncode,exec_stdout,exec_stderr = execute_process(f"bin/{comparable}/srtool_{comparable}.py",'--comparibles',filename)
+ for i,line in enumerate(exec_stdout.splitlines()):
+ cve,status = line.split('||')
+ comparibles[comparable][cve] = status.replace('[EOL]','\n')
+
+ # Generate output
+ for issue in issues_list:
+ col = pyxl_write_cell(ws,row, 1,issue.orm_cve.name,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.get_status_text,alignment=pyxl_alignment_top_wrap)
+ if (not issue.orm_cve.cvssV3_baseScore) or (0.1 > float(issue.orm_cve.cvssV3_baseScore)):
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ else:
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV3_baseSeverity,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV3_baseScore,alignment=pyxl_alignment_top_wrap)
+ if (not issue.orm_cve.cvssV2_baseScore) or (0.1 > float(issue.orm_cve.cvssV2_baseScore)):
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV2_severity,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV2_baseScore,alignment=pyxl_alignment_top_wrap)
+ else:
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.publishedDate,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.ck_package.name,alignment=pyxl_alignment_top_wrap)
+
+ if 'unpatched-summary-compare' in options:
+ # Extend the height of the row to show the comparable data
+ ws.row_dimensions[row].height = 70
+ for i,comparable in enumerate(comparable_list):
+ if issue.orm_cve.name in comparibles[comparable]:
+ col = pyxl_write_cell(ws,row,col,comparibles[comparable][issue.orm_cve.name],alignment=pyxl_alignment_top_wrap)
+ row += 1
+
+ wb.save(report_name)
+ return(report_name)
+
+###############################################################################
+#
+# Audit Difference Report
+#
+# db_audit_1 is older
+# db_audit_2 is newer
+
+#
+# TBD
+#
+
+def do_audit_cvechecker_diff_report(db_audit_1, db_audit_2, options):
+ _log_args("DO_AUDIT_DIFF_REPORT", db_audit_1.name, db_audit_1.id, db_audit_2.name, db_audit_2.id, options)
+ global audit_summary
+
+ records = dict_get_value(options,'records','')
+ format = dict_get_value(options,'format', '')
+ title = dict_get_value(options,'title', '')
+ report_type = dict_get_value(options,'report_type', '')
+ record_list = dict_get_value(options,'record_list', '')
+
+ audit_scope_criticals = ('0' == dict_get_value(options,'audit_scope', '0'))
+ delimiter = ','
+
+ #
+ # Audits load
+ #
+ db_table_1 = {}
+ for db_rec in cvecheckerRecord.objects.filter(cvecheckeraudit=db_audit_1):
+ key= f"{db_rec.plugin_id}"
+ db_table_1[key] = db_rec.id
+
+ db_table_2 = {}
+ for db_rec in cvecheckerRecord.objects.filter(cvecheckeraudit=db_audit_2):
+ key= f"{db_rec.plugin_id}"
+ db_table_2[key] = db_rec.id
+ _log(f"FOO:DB_TABLE_1:{len(db_table_1)}")
+ _log(f"FOO:DB_TABLE_2:{len(db_table_2)}")
+
+
+ # Audits compare
+ #
+ db_add = []
+ db_remove = []
+ for key in db_table_1: # Is in Older
+ if not key in db_table_2: # Not in Newer (removed)
+ db_remove.append(db_table_1[key])
+ for key in db_table_2: # Is in Newer
+ if not key in db_table_1: # Not in Older (added)
+ db_add.append(db_table_2[key])
+
+ def update_ws(ws,msg,audit,table):
+ row = 1
+ col = 1
+ for header in ('name', 'port','protocol','product'):
+ col = pyxl_write_cell(ws,row,col,header,border = pyxl_border_bottom)
+ row += 1
+ ws.column_dimensions[get_column_letter(1)].width = 40
+ ws.column_dimensions[get_column_letter(2)].width = 14
+ ws.column_dimensions[get_column_letter(3)].width = 40
+ ws.column_dimensions[get_column_letter(5)].width = 40
+
+ count = 0
+ cvechecker_obj=cvecheckerRecord.objects.filter(cvecheckeraudit=audit)
+ for db_rec in cvechecker_obj :
+ if db_rec.id in table:
+ count += 1
+ col = 1
+ col = pyxl_write_cell(ws,row,col,db_rec.name)
+ col = pyxl_write_cell(ws,row,col,db_rec.port)
+ col = pyxl_write_cell(ws,row,col,db_rec.protocol)
+ col = pyxl_write_cell(ws,row,col,db_rec.cvecheckeraudit.product)
+ row += 1
+ row -= 1
+ for i in range(1,5):
+ ws.cell(row=row,column=i).border=pyxl_border_bottom
+ row += 1
+ pyxl_write_cell(ws,row,1,msg)
+ pyxl_write_cell(ws,row,2,count)
+
+ row += 2
+ pyxl_write_cell(ws,row,1,f"cvechecker ({audit.id})")
+ pyxl_write_cell(ws,row,2,audit.name)
+ #pyxl_write_cell(ws,row,4,audit.audit_date)
+
+ report_name = f"cvecheckerbench_diff_report_{db_audit_1.id}_{db_audit_2.id}.xlsx"
+ report_path = '.'
+ report_full_path = os.path.join(SRT_REPORT_DIR,report_path,report_name)
+ wb = Workbook()
+ ws = wb.active
+ ws.title = 'Added'
+ update_ws(ws,'Added',db_audit_2,db_add)
+ ws = wb.create_sheet('Removed')
+ update_ws(ws,'Removed',db_audit_1,db_remove)
+
+ wb.save(report_full_path)
+ return(report_full_path)
+
+###############################################################################
+# Report Manage for cvechecker
+#
+
+class cvecheckerRecordReportManager():
+ @staticmethod
+ def get_report_class(parent_page, *args, **kwargs):
+ _log("CVECHECKERREPORTMANAGER:%s:" % parent_page)
+ if 'gitleaks' == parent_page:
+ # Extend the Products report
+ return cvecheckerRecordSummaryReport(parent_page, *args, **kwargs)
+ else:
+ # Return the default for all other reports
+ return ReportManager.get_report_class(parent_page, *args, **kwargs)
+
+ @staticmethod
+ def get_context_data(parent_page, *args, **kwargs):
+ _log_args("CVECHECKER_REPORTMANAGER_CONTEXT", *args, **kwargs)
+ reporter = cvecheckerRecordReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.get_context_data(*args, **kwargs)
+
+ @staticmethod
+ def exec_report(parent_page, *args, **kwargs):
+ _log_args("CVECHECKER_REPORTMANAGER_EXEC", *args, **kwargs)
+ reporter = cvecheckerRecordReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.exec_report(*args, **kwargs)
diff --git a/lib/cve_checker/tables.py b/lib/cve_checker/tables.py
new file mode 100755
index 00000000..252d109f
--- /dev/null
+++ b/lib/cve_checker/tables.py
@@ -0,0 +1,695 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# NOTICE: Important ToasterTable implementation concepts and limitations
+#
+# 1) The order of table method execution. This implies that data added
+# to the table object in "get_context_data" is NOT persistent.
+#
+# a) __init__
+# b) get_context_data
+# c) __init__ (second call reason unknown)
+# d) setup_queryset
+# e) setup_filters (if present)
+# f) setup_columns
+# g) apply_row_customization (if present)
+#
+# 2) Named URL path arguments from "urls.py" are accessible via kwargs
+#    WARNING: these values are NOT available in "__init__"
+#
+# Example:
+#      urls.py  : url(r'^foo/(?P<my_value>\d+)$',
+# tables.py: my_value = int(kwargs['my_value'])
+#
+# 3) Named URL query arguments the table's url are accessible via the request
+#
+# Example:
+# url : http://.../foo/bar/42605?my_value=25
+# tables.py: my_value = self.request.GET.get('my_value','0')
+#
+# 4) The context[] values are NOT present in the individual "setup_columns" context
+# They must be explicitly implemented into the individual column data without Django translation
+#
+# 5) The HTML page's templatetags are NOT present in the "setup_columns" context
+# They must be explicitly added into the template code
+#
+# Example:
+# static_data_template = '''
+# {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+# '''
+#
+# WARNING: because there is no context (#4), you cannot for example use dictionary lookup filters
+# use apply_row_customization() method instead, and set the self.dict_name in setup_columns()
+#
+
+import os
+import re
+import json
+from datetime import timedelta, datetime, date
+import pytz
+import traceback
+
+from django.db.models import Q, Max, Sum, Count, When, Case, Value, IntegerField
+from django.urls import re_path as url
+from django.urls import reverse, resolve
+from django.http import HttpResponse
+from django.views.generic import TemplateView
+
+from srtgui.widgets import ToasterTable
+from cve_checker.models import Ck_Audit, Ck_Package, Ck_Product, Ck_Layer, CkPackage2CkProduct, CkPackage2Cve, CkUploadManager
+from orm.models import Cve, Product
+from orm.models import Notify, NotifyAccess, NotifyCategories
+from orm.models import DataSource, SrtSetting, Job
+from users.models import SrtUser, UserSafe
+from srtgui.api import execute_process
+
+from srtgui.tablefilter import TableFilter
+from srtgui.tablefilter import TableFilterActionToggle
+from srtgui.tablefilter import TableFilterActionDateRange
+from srtgui.tablefilter import TableFilterActionDay
+
+# quick development/debugging support
+from srtgui.api import _log
+
+class CveCheckerAuditsTable(ToasterTable):
+    """Table of All CvecheckerRecord audits
+
+    Top-level list of Ck_Audit records, one row per audit, with per-audit
+    CVE status counts and (for contributors) inline edit/delete controls.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(CveCheckerAuditsTable, self).__init__(*args, **kwargs)
+        # Newest audits first by default.
+        self.default_orderby = "-id"
+
+    def get_context_data(self, **kwargs):
+        """Build the page context and bootstrap the import managers.
+
+        Side effects: seeds the CkUploadManager table with the two built-in
+        import sources ('Upload' and the Auto Builder repo scan) if missing,
+        then shells out to srtool_cvechecker.py to refresh the import select
+        lists.
+        """
+        create_time = datetime.now(pytz.utc)
+        context = super(CveCheckerAuditsTable, self).get_context_data(**kwargs)
+        context['orm_products'] = Product.objects.all().order_by('name')
+        # NOTE(review): this release list is hard-coded here and again in the
+        # select_list defaults below -- consider a single shared constant.
+        context['ab_sets'] = ("master","nanbield","mickledore","langdale","kirkstone","dunfell")
+        # Default name for a newly created audit, e.g. "audit_20230907".
+        context['new_audit_name'] = 'audit_%s' % (create_time.strftime('%Y%m%d'))
+        context['default_product'] = 'Yocto Project master'
+        context['srt_cvechecker_update'] = SrtSetting.get_setting('SRT_CVECHECKER_UPDATE','')
+        # Most-recent-jobs bar data.
+        context['mru'] = Job.get_recent()
+        context['mrj_type'] = 'all'
+        # Enforce at least the "Upload" import
+        ck_import_obj,created = CkUploadManager.objects.get_or_create(name='Upload')
+        if created:
+            ck_import_obj.order = 1
+            ck_import_obj.import_mode = 'Upload'
+            ck_import_obj.path = ''
+            ck_import_obj.pem = ''
+            ck_import_obj.repo = ''
+            ck_import_obj.branch = ''
+            ck_import_obj.auto_refresh = False
+            ck_import_obj.select_refresh = datetime.now(pytz.utc)
+            ck_import_obj.select_list = "master|nanbield|mickledore|langdale|kirkstone|dunfell"
+            ck_import_obj.save()
+        # Second built-in source: import audits from the Auto Builder scans.
+        ck_import_obj,created = CkUploadManager.objects.get_or_create(name='Import from Auto Builder scan')
+        if created:
+            ck_import_obj.order = 2
+            ck_import_obj.import_mode = 'Repo'
+            ck_import_obj.path = 'yocto-metrics/cve-check'
+            ck_import_obj.pem = ''
+            ck_import_obj.repo = 'git://git.yoctoproject.org/yocto-metrics'
+            ck_import_obj.branch = ''
+            ck_import_obj.auto_refresh = True
+            ck_import_obj.select_refresh = datetime.now(pytz.utc)
+            ck_import_obj.select_list = "master|nanbield|mickledore|langdale|kirkstone|dunfell"
+            ck_import_obj.save()
+        context['ckuploadmanager'] = CkUploadManager.objects.all().order_by('order')
+        # Update the Import select tables
+        # NOTE(review): this spawns a subprocess on every page load; if this
+        # page is visited often it may deserve caching or a background job.
+        cmnd = ["bin/cve_checker/srtool_cvechecker.py","--update-imports","-f"]
+        result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+        if 0 != result_returncode:
+            # Best-effort refresh: log the failure but still render the page.
+            _log(f"ERROR:{cmnd}: {result_stderr}:{result_stdout}:")
+        return context
+
+    def setup_queryset(self, *args, **kwargs):
+        # All audits, newest first.
+        self.queryset = Ck_Audit.objects.all()
+        self.queryset = self.queryset.order_by('-id')
+
+    def setup_filters(self, *args, **kwargs):
+        # No filters for this table.
+        pass
+
+    def setup_columns(self, *args, **kwargs):
+
+        self.add_column(title="Id",
+                        field_name="id",
+                        hideable=False,
+                        orderable=True,
+                        )
+
+        # Name is rendered twice: a display span with a link to the audit
+        # detail page, and a hidden inline-edit text input toggled by the
+        # Manage column's edit control.
+        name_template = '''
+        <span id="audit_name-disp-{{data.id}}"><td><a href="{% url 'cvechecker_audit' data.id %}">{{data.name}}</a></td></span>
+        <span id="audit_name-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_name-text-{{data.id}}" value="{{data.name}}" size="50">
+        </span>
+        '''
+        self.add_column(title="Name",
+                        orderable=True,
+                        static_data_name="name",
+                        static_data_template=name_template,
+                        )
+
+        self.add_column(title="Create Time",
+                        field_name="create_time",
+                        hideable=True,
+                        hidden=True,
+                        orderable=True,
+                        )
+
+        # Package count links through to the audit's package list.
+        ck_package_link_template = '''
+        <td><a href="{% url 'cvechecker_audit' data.id %}">{{data.get_package_count}}</a></td>
+        '''
+        self.add_column(title="Package Count",
+                        static_data_name="count",
+                        static_data_template=ck_package_link_template,
+                        )
+
+        # CVE status counts come from Ck_Audit helper methods.
+        self.add_column(title="Unpatched CVE",
+                        static_data_name="unpatched_count",
+                        static_data_template='<b><label style="color:DarkRed">{{data.get_unpatched_count}}</label></b>',
+                        )
+        self.add_column(title="Ignored CVE",
+                        static_data_name="ignored_count",
+                        static_data_template='<label style="color:green">{{data.get_ignored_count}}</label>',
+                        )
+        self.add_column(title="Patched CVE",
+                        static_data_name="patched_count",
+                        static_data_template='<label style="color:green">{{data.get_patched_count}}</label>',
+                        )
+        self.add_column(title="Undefined CVE",
+                        static_data_name="undefined_count",
+                        static_data_template='<label style="color:DarkRed">{{data.get_undefined_count}}</label>',
+                        hideable=True,
+                        hidden=True,
+                        )
+
+        self.add_column(title="YP Release",
+                        static_data_name="orm_product__profile",
+                        static_data_template='{{data.orm_product.long_name}}',
+                        orderable=True,
+                        )
+
+        # Manage controls (edit/save/cancel/delete) only for contributors.
+        # NOTE(review): the repeated "glyphicon glyphicon glyphicon" classes
+        # in the template below look like an accidental duplication.
+        if UserSafe.is_contributor(self.request.user):
+            manage_link_template = '''
+            <span class="glyphicon glyphicon-edit edit-ck-entry" id="edit-entry-{{data.id}}" x-data="{{data.id}}"></span>
+            <span class="glyphicon glyphicon glyphicon glyphicon-ok save-ck-entry" id="save-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Chartreuse;"></span>
+            &nbsp;&nbsp;&nbsp;&nbsp;
+            <span class="glyphicon glyphicon glyphicon glyphicon-remove cancel-ck-entry" id="cancel-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Crimson;"></span>
+            <span class="glyphicon glyphicon-trash trash-audit" x-data="{{data.create_time}}|{{data.id}}"></span>
+            '''
+            self.add_column(title="Manage",
+                            hideable=True,
+                            static_data_name="manage",
+                            static_data_template=manage_link_template,
+                            )
+
+class CveCheckerAuditTable(ToasterTable):
+ """Table of All entries in CvecheckerRecord"""
+
+ def __init__(self, *args, **kwargs):
+ super(CveCheckerAuditTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "name"
+
+ def get_context_data(self, **kwargs):
+ context = super(CveCheckerAuditTable, self).get_context_data(**kwargs)
+ audit_id = int(kwargs['audit_id'])
+ context['Ck_Audit'] = Ck_Audit.objects.get(id=audit_id)
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ audit_id = int(kwargs['audit_id'])
+ self.queryset = Ck_Package.objects.filter(ck_audit_id=audit_id)
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ # Status filter
+ is_status = TableFilter(name="is_status", title="Status")
+ audit_id = int(kwargs['audit_id'])
+ status_filter = TableFilterActionToggle(
+ "unpatched",
+ "Unpatched",
+ Q(unpatched_cnt__gt=0))
+ is_status.add_action(status_filter)
+ status_filter = TableFilterActionToggle(
+ "patched/ignored",
+ "Patched/Ignored",
+ Q(unpatched_cnt=0))
+ is_status.add_action(status_filter)
+ self.add_filter(is_status)
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Id",
+ field_name="id",
+ hideable=True,
+ hidden=True,
+ )
+
+ self.add_column(title="Name",
+ field_name="name",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="Version",
+ field_name="version",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="Layer",
+ field_name="ck_layer",
+ hideable=False,
+ orderable=True,
+ static_data_name="ck_layer",
+ static_data_template="{{data.ck_layer.name}}",
+ )
+
+ issue_link_template = '''
+ <a href="{% url 'cvechecker_issue' data.id %}">{{data.get_issue_count}}</a>
+ '''
+ self.add_column(title="Issues",
+ static_data_name="issue_count",
+ static_data_template=issue_link_template,
+ )
+
+ unpatched_link_template = '''
+ <label style="color:{% if data.unpatched_cnt %}DarkRed{% else %}green{% endif %}">{{data.unpatched_cnt}}</label>
+ '''
+ self.add_column(title="Unpatched CVE",
+ filter_name="is_status",
+ static_data_name="unpatched_count",
+ static_data_template=unpatched_link_template,
+ )
+
+ product_link_template = '''
+ <td><a href="{% url 'cvechecker_product' data.id %}">{{data.get_product_names}}</a></td>
+ '''
+ self.add_column(title="Products (cvesInRecord)",
+ static_data_name="product_count",
+ static_data_template=product_link_template,
+ )
+
+
+class CveCheckerAuditCveTable(ToasterTable):
+ """Table of All entries in CvecheckerRecord"""
+
+ def __init__(self, *args, **kwargs):
+ super(CveCheckerAuditCveTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "orm_cve__name"
+
+ def get_context_data(self, **kwargs):
+ context = super(CveCheckerAuditCveTable, self).get_context_data(**kwargs)
+ audit_id = int(kwargs['audit_id'])
+ context['Ck_Audit'] = Ck_Audit.objects.get(id=audit_id)
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ audit_id = int(kwargs['audit_id'])
+ self.queryset = CkPackage2Cve.objects.filter(ck_audit_id=audit_id)
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ # Status filter
+ is_status = TableFilter(name="is_status", title="Status")
+ for status_id in range(CkPackage2Cve.UNPATCHED,CkPackage2Cve.PATCHED+1):
+ status_filter = TableFilterActionToggle(
+ CkPackage2Cve.CK_STATUS[status_id][1],
+ CkPackage2Cve.CK_STATUS[status_id][1],
+ Q(ck_status=status_id))
+ is_status.add_action(status_filter)
+ self.add_filter(is_status)
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Id",
+ field_name="id",
+ hideable=True,
+ hidden=True,
+ )
+
+ cve_link_template = '''
+ <a href="{% url 'cve' data.orm_cve.name %}" target="_blank">{{data.orm_cve.name}}</a>
+ '''
+ self.add_column(title="Name",
+ static_data_name="orm_cve__name",
+ static_data_template=cve_link_template,
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="Status",
+ filter_name="is_status",
+ static_data_name="status",
+ static_data_template="{{data.get_status_text}}",
+ )
+
+ self.add_column(title="V3 Severity",
+ orderable=True,
+ static_data_name="orm_cve__cvssV3_baseSeverity",
+ static_data_template="{{data.orm_cve.cvssV3_baseSeverity}}",
+ )
+
+ self.add_column(title="V3 Score",
+ orderable=True,
+ static_data_name="orm_cve__cvssV3_baseScore",
+ static_data_template="{{data.orm_cve.cvssV3_baseScore}}",
+ )
+
+ self.add_column(title="V2 Severity",
+ orderable=True,
+ static_data_name="data.orm_cve__cvssV2_severity",
+ static_data_template="{{data.orm_cve.cvssV2_severity}}",
+ )
+
+ self.add_column(title="V2 Score",
+ orderable=True,
+ static_data_name="data.orm_cve__cvssV2_baseScore",
+ static_data_template="{{data.orm_cve.cvssV2_baseScore}}",
+ )
+
+ self.add_column(title="Published",
+ static_data_name="data.orm_cve__publishedDate",
+ static_data_template="{{data.orm_cve.publishedDate}}",
+ )
+
+ self.add_column(title="Package",
+ orderable=True,
+ static_data_name="ck_package__name",
+ static_data_template="{{data.ck_package.name}}",
+ )
+
+
+class CveCheckerIssueTable(ToasterTable):
+ """Table of Issues in CvecheckerRecord"""
+
+ def __init__(self, *args, **kwargs):
+ super(CveCheckerIssueTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "orm_cve__name"
+
+ def get_context_data(self, **kwargs):
+ context = super(CveCheckerIssueTable, self).get_context_data(**kwargs)
+ package_id = int(kwargs['package_id'])
+ context['Ck_Package'] = Ck_Package.objects.get(id=package_id)
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ package_id = int(kwargs['package_id'])
+ self.queryset = CkPackage2Cve.objects.filter(ck_package_id=package_id)
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ # Status filter
+ is_status = TableFilter(name="is_status", title="Status")
+ for status_id in range(CkPackage2Cve.UNPATCHED,CkPackage2Cve.PATCHED+1):
+ status_filter = TableFilterActionToggle(
+ CkPackage2Cve.CK_STATUS[status_id][1],
+ CkPackage2Cve.CK_STATUS[status_id][1],
+ Q(ck_status=status_id))
+ is_status.add_action(status_filter)
+ self.add_filter(is_status)
+
+ def setup_columns(self, *args, **kwargs):
+
+ cve_link_template = '''
+ <a href="{% url 'cve' data.orm_cve.name %}" target="_blank">{{data.orm_cve.name}}</a>
+ '''
+ self.add_column(title="Issue",
+ static_data_name="orm_cve__name",
+ static_data_template=cve_link_template,
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CK Status",
+ filter_name="is_status",
+ static_data_name="ck_status",
+ static_data_template="{{data.get_status_text}}",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="description",
+ static_data_name="orm_cve__description",
+ static_data_template="{{data.orm_cve.description}}",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="V3 Score",
+ static_data_name="orm_cve__cvssV3_baseScore",
+ static_data_template="{{data.orm_cve.cvssV3_baseScore}}",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="V3 Severity",
+ static_data_name="orm_cve__cvssV3_baseSeverity",
+ static_data_template="{{data.orm_cve.cvssV3_baseSeverity}}",
+ hideable=False,
+ )
+
+ self.add_column(title="V2 Score",
+ static_data_name="orm_cve__cvssV2_baseScore",
+ static_data_template="{{data.orm_cve.cvssV2_baseScore}}",
+ hideable=True,
+ )
+
+ self.add_column(title="V2 Severity",
+ static_data_name="orm_cve__cvssV2_severity",
+ static_data_template="{{data.orm_cve.cvssV2_severity}}",
+ hideable=True,
+ )
+
+ self.add_column(title="Publish Date",
+ static_data_name="orm_cve__publishedDate",
+ static_data_template="{{data.orm_cve.publishedDate}}",
+ hideable=True,
+ )
+
+ self.add_column(title="Last Modified Date",
+ static_data_name="orm_cve__lastModifiedDate",
+ static_data_template="{{data.orm_cve.lastModifiedDate}}",
+ hideable=True,
+ )
+
+
+class CveCheckerProductTable(ToasterTable):
+ """Table of All entries in CvecheckerRecord"""
+
+ def __init__(self, *args, **kwargs):
+ super(CveCheckerProductTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "ck_product__name"
+
+ def get_context_data(self, **kwargs):
+ context = super(CveCheckerProductTable, self).get_context_data(**kwargs)
+ package_id = int(kwargs['package_id'])
+ context['Ck_Package'] = Ck_Package.objects.get(id=package_id)
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ package_id = int(kwargs['package_id'])
+ self.queryset = CkPackage2CkProduct.objects.filter(ck_package_id=package_id)
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ pass
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Product",
+ static_data_name="ck_product__name",
+ static_data_template="{{data.ck_product.name}}",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CvesInRecord",
+ static_data_name="cvesInRecord",
+ static_data_template="{{data.cvesInRecord}}",
+ hideable=False,
+ orderable=True,
+ )
+
+
+class CveCheckerImportManagementTable(ToasterTable):
+    """Table of Audit import meta-management
+
+    Lists CkUploadManager rows (audit import sources).  Each editable
+    field is rendered as a display span plus a hidden inline-edit input
+    toggled by the Manage column's controls; the built-in "Upload" row
+    is protected from editing and deletion.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(CveCheckerImportManagementTable, self).__init__(*args, **kwargs)
+        # Rows are shown in their explicit 'order' field sequence.
+        self.default_orderby = "order"
+
+    def get_context_data(self, **kwargs):
+        context = super(CveCheckerImportManagementTable, self).get_context_data(**kwargs)
+        return context
+
+    def setup_queryset(self, *args, **kwargs):
+        self.queryset = CkUploadManager.objects.all()
+        self.queryset = self.queryset.order_by(self.default_orderby)
+
+    def setup_filters(self, *args, **kwargs):
+        # No filters for this table.
+        pass
+
+    def setup_columns(self, *args, **kwargs):
+
+        # Raw database id, admins only.
+        if UserSafe.is_admin(self.request.user):
+            self.add_column(title="ID",
+                            field_name="id",
+                            hideable=True,
+                            hidden = True,
+                            )
+
+        order_template = '''
+        <span id="audit_order-disp-{{data.id}}"><td><a href="{% url 'cvechecker_audit' data.id %}">{{data.order}}</a></td></span>
+        <span id="audit_order-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_order-text-{{data.id}}" value="{{data.order}}" size="10">
+        </span>
+        '''
+        self.add_column(title="Order",
+                        static_data_name="order",
+                        static_data_template=order_template,
+                        orderable=True,
+                        )
+
+        name_template = '''
+        <span id="audit_name-disp-{{data.id}}">{{data.name}}</span>
+        <span id="audit_name-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_name-text-{{data.id}}" value="{{data.name}}" size="20">
+        </span>
+        '''
+        self.add_column(title="Title",
+                        static_data_name="name",
+                        static_data_template=name_template,
+                        )
+
+        # Import mode is a fixed choice list when edited inline.
+        # NOTE(review): the choices here (Repo/SSL/File) do not include the
+        # 'Upload' mode used by the built-in row -- confirm intentional.
+        mode_template = '''
+        <span id="audit_mode-disp-{{data.id}}">{{data.import_mode}}</span>
+        <span id="audit_mode-edit-{{data.id}}" style="display:none;">
+        <select id="audit_mode-text-{{data.id}}" name="audit_mode-text-{{data.id}}">
+        <option value="Repo" {% if "Repo" == data.import_mode %}selected{% endif %} >Repo</option>
+        <option value="SSL" {% if "SSL" == data.import_mode %}selected{% endif %} >SSL</option>
+        <option value="File" {% if "File" == data.import_mode %}selected{% endif %} >File</option>
+        </select>
+        </span>
+        '''
+        self.add_column(title="Mode",
+                        static_data_name="import_mode",
+                        static_data_template=mode_template,
+                        )
+
+        repo_template = '''
+        <span id="audit_repo-disp-{{data.id}}">{{data.repo}}</span>
+        <span id="audit_repo-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_repo-text-{{data.id}}" value="{{data.repo}}" size="30">
+        </span>
+        '''
+        self.add_column(title="Repo URL",
+                        static_data_name="repo",
+                        static_data_template=repo_template,
+                        )
+
+        path_template = '''
+        <span id="audit_path-disp-{{data.id}}">{{data.path}}</span>
+        <span id="audit_path-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_path-text-{{data.id}}" value="{{data.path}}" size="30">
+        </span>
+        '''
+        self.add_column(title="Path",
+                        static_data_name="path",
+                        static_data_template=path_template,
+                        )
+
+        pem_template = '''
+        <span id="audit_pem-disp-{{data.id}}">{{data.pem}}</span>
+        <span id="audit_pem-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_pem-text-{{data.id}}" value="{{data.pem}}" size="20">
+        </span>
+        '''
+        self.add_column(title="Pem File",
+                        static_data_name="pem_file",
+                        static_data_template=pem_template,
+                        )
+
+        branch_template = '''
+        <span id="audit_branch-disp-{{data.id}}">{{data.branch}}</span>
+        <span id="audit_branch-edit-{{data.id}}" style="display:none;">
+        <input type="text" id="audit_branch-text-{{data.id}}" value="{{data.branch}}" size="10">
+        </span>
+        '''
+        self.add_column(title="Branch",
+                        static_data_name="branch",
+                        static_data_template=branch_template,
+                        )
+
+        # Deliberately disabled "Auto Refresh" column, kept for a future
+        # re-enable.  NOTE(review): if this stays dead, consider deleting it.
+        if False:
+            refresh_template = '''
+            {% if "Upload" == data.name %}{% else %}
+            <span id="audit_refresh-disp-{{data.id}}">{{data.auto_refresh}}</span>
+            <span id="audit_refresh-edit-{{data.id}}" style="display:none;">
+            <select id="audit_refresh-text-{{data.id}}" name="audit_mode-text-{{data.id}}">
+            <option value="False" {% if False == data.auto_refresh %}selected{% endif %} >Absolute choice</option>
+            <option value="True" {% if True == data.auto_refresh %}selected{% endif %} >Automatic refresh choices</option>
+            </select>
+
+            </span>
+            {% endif %}
+            '''
+            self.add_column(title="Auto Refresh",
+                            static_data_name="auto_refresh",
+                            static_data_template=refresh_template,
+                            )
+
+        self.add_column(title="Select Refresh",
+                        field_name="select_refresh",
+                        hideable=True,
+                        hidden = True,
+                        )
+        self.add_column(title="Select List",
+                        field_name="select_list",
+                        hideable=True,
+                        hidden = True,
+                        )
+
+        # Manage controls for contributors; the built-in "Upload" row shows
+        # a static "Built-in" marker instead of edit/delete controls.
+        if UserSafe.is_contributor(self.request.user):
+            manage_link_template = '''
+            {% if "Upload" == data.name %}Built-in{% else %}
+            <span class="glyphicon glyphicon-edit edit-ck-entry" id="edit-entry-{{data.id}}" x-data="{{data.id}}"></span>
+            <span class="glyphicon glyphicon glyphicon glyphicon-ok save-ck-entry" id="save-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Chartreuse;"></span>
+            &nbsp;&nbsp;&nbsp;&nbsp;
+            <span class="glyphicon glyphicon glyphicon glyphicon-remove cancel-ck-entry" id="cancel-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Crimson;"></span>
+            &nbsp;&nbsp;&nbsp;&nbsp;
+            <span class="glyphicon glyphicon-trash trash-import" x-data="{{data.name}}|{{data.id}}"></span>
+            {% endif %}
+            '''
+            self.add_column(title="Manage",
+                            hideable=True,
+                            static_data_name="manage",
+                            static_data_template=manage_link_template,
+                            )
diff --git a/lib/cve_checker/templates/ck-audit-toastertable.html b/lib/cve_checker/templates/ck-audit-toastertable.html
new file mode 100755
index 00000000..a9d4d227
--- /dev/null
+++ b/lib/cve_checker/templates/ck-audit-toastertable.html
@@ -0,0 +1,223 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 40%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} CVE Check Packages {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li>Packages for "{{Ck_Audit.name}}"</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Audit Name: <b>"{{Ck_Audit.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> YP Release: <b>"{{Ck_Audit.orm_product.long_name}}"</b> </h2>
+ </div>
+</div>
+
+<!-- <p><b><big>Actions: </big></b> -->
+
+<p><b><big>Actions: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" >Summary report</a>
+ <a class="btn btn-default navbar-btn " id="vex-report" disabled>VEX</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default btn-info" disabled>Package View</a>
+ <a class="btn btn-default " href="{% url 'cvechecker_audit_cve' Ck_Audit.id %}">CVE View</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_checker/gen_download_cvechecker_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="audit_id" value="{{Ck_Audit.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the summary report</button>
+</form>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="audit-summary" name="audit-summary" value="base-severity" checked>
+ <label for="audit-summary">Audit summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary" checked>
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="unpatched-summary" name="unpatched-summary" value="unpatched-summary" checked>
+ <label for="unpatched-summary">Unpatched summary</label><br>
+ <input type="checkbox" id="unpatched-summary-compare" name="unpatched-summary-compare" value="unpatched-summary-compare">
+ <label for="unpatched-summary-compare">Unpatched summary with comparables</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Audit packages (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No packages found";
+ }
+ else if (total > 0) {
+ title = total + " Package" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ $('.remove-repo-audit').click(function() {
+ var result = confirm("Are you sure you want to remove artifact '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'remove-artifact-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Audit_id"] = {{ Ck_Audit.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ document.getElementById("summary-report").innerText = "Summary Report";
+ document.getElementById('vex-report').style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'none';
+ $("#summary-report-options").slideUp();
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('audit-summary').checked) {
+ options = options + "audit-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('unpatched-summary').checked) {
+ options = options + "unpatched-summary,";
+ }
+ if (document.getElementById('unpatched-summary-compare').checked) {
+ options = options + "unpatched-summary-compare,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('vex-report').style.display = 'none';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ document.getElementById("summary-report").innerText = "Summary Report";
+ document.getElementById('vex-report').style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'none';
+ $("#summary-report-options").slideUp();
+ selected_summary=false;
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-auditcve-toastertable.html b/lib/cve_checker/templates/ck-auditcve-toastertable.html
new file mode 100755
index 00000000..a7648c8b
--- /dev/null
+++ b/lib/cve_checker/templates/ck-auditcve-toastertable.html
@@ -0,0 +1,431 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 40%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} CVE Check CVEs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li>Packages for "{{Ck_Audit.name}}"</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Audit Name: <b>"{{Ck_Audit.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> YP Release: <b>"{{Ck_Audit.orm_product.long_name}}"</b> </h2>
+ </div>
+</div>
+
+<!-- <p><b><big>Actions: </big></b> -->
+
+<p><b><big>Actions: </big></b>
+ <!-- <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a> -->
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default" href="{% url 'cvechecker_audit' Ck_Audit.id %}">Package View</a>
+ <a class="btn btn-default btn-info" disabled>CVE View</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="audit_id" value="{{Ck_Audit.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the diff report</button>
+</form>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Audit CVEs (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " CVE" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ $('.change-repo-type').change(function() {
+ var result = confirm("Are you sure you want to change to type '" + $(this).val().split('|')[1] + "'?");
+ postCommitAjaxRequest({
+ "action" : 'update-artifact-type',
+ "is_update": result,
+ "value" : $(this).val(),
+ });
+ });
+
+ $('.remove-repo-audit').click(function() {
+ var result = confirm("Are you sure you want to remove artifact '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'remove-artifact-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+            var data_value=data.split('|')[1].replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Audit_id"] = {{ Ck_Audit.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#analyze_artifacts').click(function(){
+        var result = confirm("This will analyze every CVE in this audit and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-analyze-artifacts',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+ $('#load_artifacts').click(function(){
+        var result = confirm("This will load all CVEs of registered artifacts, and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-load-artifacts',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+ $('#load_backfill').click(function(){
+ var result = confirm("Backfill missing vulnerabilities using selected audit?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-backfill-vulnerabilities',
+ "backfill_id" : $("#backfill_vulnerabilities").val(),
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+
+ function setDefaultDisplay(is_default) {
+ var default_style = 'none';
+ if (is_default) {
+ default_style = 'inline';
+ {% if request.user.is_creator %}
+ document.getElementById("edit-settings").innerText = "Settings";
+ {% endif %}
+ document.getElementById("summary-report").innerText = "Summary Report";
+ selected_editsettings=false;
+ selected_summary=false;
+ } else {
+ document.getElementById('show-settings').style.display = 'none';
+ };
+ document.getElementById('browse-content').style.display = default_style;
+ {% if request.user.is_creator %}
+ document.getElementById("edit-settings").style.display = default_style;
+ {% endif %}
+ document.getElementById('load_artifacts').style.display = default_style;
+ document.getElementById('audit-import-tern').style.display = default_style;
+ document.getElementById('summary-report').style.display = default_style;
+ document.getElementById('cve-summary-report').style.display = default_style;
+ document.getElementById('prisma-merge-report').style.display = default_style;
+ document.getElementById('audit-package-versions').style.display = default_style;
+ document.getElementById('audit-artifacts').style.display = default_style;
+ /* Always pre-hide the pop-ups */
+ document.getElementById('summary-report-options').style.display = 'none';
+ {% if request.user.is_creator %}
+ document.getElementById('show-edit-settings').style.display = 'none';
+ {% endif %}
+ document.getElementById('edit-cancel').style.display = 'none';
+ if (is_default) {
+ $("#show-settings").slideDown();
+ };
+ };
+
+ {% if request.user.is_creator %}
+ $('#edit-settings').click(function() {
+ if (selected_editsettings) {
+ setDefaultDisplay(true);
+ postCommitAjaxRequest({
+ "action" : 'submit-editaudit',
+ "product_id" : $("#audit_product_id").val(),
+ "name" : $("#audit-name").val(),
+ "content" : $("#audit-content").val(),
+ "date" : $("#audit-date").val(),
+ "description" : $("#audit-desc").val(),
+ "tree_lock" : $("#tree_lock").val(),
+ "save_lock" : $("#save_lock").val(),
+ });
+ } else {
+ setDefaultDisplay(false);
+ document.getElementById("edit-settings").innerText = "Save Settings";
+ document.getElementById("edit-settings").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#show-edit-settings").slideDown();
+ selected_editsettings=true;
+ };
+ });
+ {% endif %}
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ // Turn on the default controls
+ setDefaultDisplay(true);
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-audits-toastertable.html b/lib/cve_checker/templates/ck-audits-toastertable.html
new file mode 100755
index 00000000..f9a5c0e4
--- /dev/null
+++ b/lib/cve_checker/templates/ck-audits-toastertable.html
@@ -0,0 +1,425 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}"></script>
+ <script>
+ // Update product (per import's select string)
+ function reset_product(description) {
+ var version;
+ var product_elem_default;
+ var product_elem_matched = "0";
+ // This selector is used to see the x-data and y-data attributes
+ var products = document.querySelectorAll('[id^=product_id]');
+ for (var i in products) {
+ // Only include integer iterated indexes (hack due to selector extra data)
+ if (!isNaN(i)) {
+ product = products[i];
+ if ("default" == product.getAttribute('z-data')) {
+ product_elem_default = product.getAttribute('value');
+ }
+ version = product.getAttribute('y-data');
+ if (description.toUpperCase().includes(version.toUpperCase())) {
+ //alert("MATCH:"+ version + ":" + description + ":")
+ product_elem_matched = product.getAttribute('value');
+ }
+ }
+ }
+ // Update product select
+ const $select = document.querySelector('#audit_product_id');
+ if (product_elem_matched != "0") {
+ $select.value = product_elem_matched;
+ } else {
+ $select.value = product_elem_default;
+ }
+ }
+
+ // Use the select as the audit name extension
+ function new_import_set() {
+ var import_value = document.querySelector('input[name="content"]:checked').value;
+ var import_select = $("#"+import_value+"_list").val();
+ var audit_name = '';
+ if ("import_upload" == import_value) {
+ audit_name = 'download';
+ } else {
+ audit_name = import_select;
+ };
+ $("#audit-name").val("{{new_audit_name}}_" + audit_name);
+ reset_product(audit_name);
+ }
+ </script>
+{% endblock %}
+
+{% block title %} CVE Checker Audits {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+            <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Audits</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="container-fluid">
+ <h3><span>
+ Actions:
+ {% if request.user.is_creator %}
+ <a class="btn btn-default navbar-btn " id="new-audit-add" >Add an audit</a>
+ {% endif %}
+ <a class="btn btn-default navbar-btn " id="audit-diff" disabled>Audit Diff</a>
+ <a class="btn btn-default navbar-btn " id="new-audit-cancel" style="display:none" >Cancel</a>
+ {% if request.user.is_admin %}
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <!--<button id="purge-selected" class="btn btn-default" type="button">Purge selected</button> -->
+ {% endif %}
+ </span>
+ </h3>
+</div>
+
+<div id="show-new-audit" style="display:none;padding-left:25px;color:DarkCyan;">
+ <div style="margin-top: 10px;">
+ <label>Name:</label>
+ <input type="text" placeholder="name of audit" id="audit-name" size="50" value="{{new_audit_name}}_master">
+ </div>
+
+ <div>
+ <label style="margin-top: 10px;">Product:</label>
+ <select id="audit_product_id" name="audit_product_id">
+ {% for product in orm_products %}
+ {% with product.id as product_id %}
+ <option id="product_id_{{forloop.counter}}" value="{{product.id}}" x-data="{{product.name}}" y-data="{{product.version}}" z-data="{% if product.long_name == default_product %}default{% endif %}" {% if product.long_name == default_product %}selected{% endif %} >{{product.long_name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select>
+ <br>
+ </div>
+
+ <div style="margin-top: 10px;">
+ <label>Import:</label>
+ <br>
+ {% for import in ckuploadmanager %}
+ <div style="padding-left: 25px;">
+ {% if "Upload" == import.name %}
+ <input type="radio" name="content" value="import_upload" onclick="new_import_set()" checked>
+ <label for="import_{{import.id}}">&nbsp;&nbsp;{{import.name}}</label>
+
+ <form id="uploadbanner" enctype="multipart/form-data" method="post" action="{% url 'gen_upload_cvechecker' %}">{% csrf_token %}
+ <input id="fileUpload" name="fileUpload" type="file" />
+ <input type="hidden" id="action" name="action" value="upload" >
+ <input type="hidden" id="upload_product_id" name="orm_product_id" value="" >
+ <input type="hidden" id="upload_audit_name" name="audit_name" value="" >
+ <br>
+ <input type="submit" value="submit file" id="submit-upload-ck" />
+ </form>
+
+ {% else %}
+ <input type="radio" name="content" value="import_{{import.id}}" onclick="new_import_set()" ><label for="import_{{import.id}}">&nbsp;&nbsp;{{import.name}}</label>
+ {% if import.is_select_list %}
+                <select id="import_{{import.id}}_list" name="import_{{import.id}}_list" onchange="new_import_set()">
+ {% for item in import.get_select_list %}
+ <option value="{{item}}">{{item}}</option>
+ {% endfor %}
+ </select>
+ {% elif import.path %}
+ ({{import.get_path_filename}})
+ {% endif %}
+ {% endif %}
+ </div>
+ {% endfor %}
+
+ </div>
+</div>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="audit-diff-form" action="/wr_studio/gen_download_audit_diff/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download">
+ <input type="hidden" id="form_audit_1_id" name="audit_1_id" value="0">
+ <input type="hidden" id="form_audit_2_id" name="audit_2_id" value="0">
+ <input type="hidden" id="form_audit_scope_id" name="audit_scope" value="0">
+ <input type="hidden" id="form_audit_sort_id" name="audit_sort" value="0">
+ <button type="submit" form="audit-diff-form" value="Submit" style="display:none" id="download-audit-diff">Generate the diff report</button>
+</form>
+
+<div id="show-diff-audit" style="display:none;padding-left:25px;">
+ <span id="inherit-audit" style="color:DarkCyan;">Audit #1:
+ <span class="glyphicon glyphicon-question-sign get-help" title="Audit #1 for difference"></span>
+ <br>
+ <select id="audit_1_id" >
+ {% for audit in audits %}
+ {% with audit.id as audit_id %}
+ <option value="{{audit.id}}" {%if forloop.counter == 2%}selected{% endif %}>{{audit.id}}: {{audit.name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select></span>
+ <br>
+ <span id="inherit-audit" style="color:DarkCyan;">Audit #2:</span>
+ <span class="glyphicon glyphicon-question-sign get-help" title="Audit #2 for difference"></span>
+ <br>
+ <select id="audit_2_id" >
+ {% for audit in audits %}
+ {% with audit.id as audit_id %}
+ <option value="{{audit.id}}">{{audit.id}}: {{audit.name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select>
+ <br>
+ <br>Report Scope:&nbsp;&nbsp;
+ <select id="audit_diff_scope">
+ <option value="0" >Criticals Diff Report</option>
+ <option value="1" >Full Diff Report</option>
+ <option value="2" >Cross-product Diff Report</option>
+ </select>
+ <br><br>
+ <input type="checkbox" id="audit-diff-order" name="audit-diff-order" value="1" checked></input>
+ <label for="audit-diff-order">Auto-sort audit order</label>
+ <hr>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script type="text/javascript">
+ selected_addaudit=false;
+ selected_diffaudit=false;
+
+ $(document).ready(function () {
+
+ // Hide the upload submit button and use our own
+ document.getElementById('submit-upload-ck').style.visibility = 'hidden';
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "All CVE Check Audits (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No CVE Check Audits found";
+ }
+ else if (total > 0) {
+ title = total + " CVE Check Audit" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ $('.edit-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'none';
+ document.getElementById('save-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'inline';
+ });
+ $('.save-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+
+ postCommitAjaxRequest({
+ "action" : 'submit-update-ck',
+ "ck_id" : ck_id,
+ "audit_name" : $("#audit_name-text-"+ck_id).val(),
+ });
+ });
+
+ $('.cancel-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+ });
+
+ /* Add handler into the Toaster Table context */
+ $('.trash-audit').click(function() {
+ var result = confirm("Are you sure you want to remove '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trash-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error == "refresh_new") {
+ window.location.replace("{% url 'cvechecker_audits' %}");
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url="") {
+ if ("" == url) {
+ url = "{% url 'xhr_cvechecker_commit' %}";
+ };
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url: url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#new-audit-add').click(function(e) {
+ if (selected_addaudit) {
+ document.getElementById("new-audit-add").innerText = "Add an audit";
+ $("#show-new-audit").slideUp();
+ selected_addaudit=false;
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ document.getElementById('audit-diff').style.display = 'inline';
+
+                audit_name = $("#audit-name").val().trim();
+ if ( "" == audit_name) {
+ alert("Error: an audit name is required");
+ return;
+ }
+
+ import_value = document.querySelector('input[name="content"]:checked').value;
+ import_select = $("#"+import_value+"_list").val();
+ if ("import_upload" == import_value) {
+ var $select = document.querySelector('#upload_product_id');
+ $select.value = $("#audit_product_id").val();
+ $select = document.querySelector('#upload_audit_name');
+ $select.value = $("#audit-name").val();
+ // Click the submit for the upload form
+ document.getElementById("submit-upload-ck").click();
+ } else {
+ postCommitAjaxRequest({
+ "action" : 'submit-createaudit',
+ "name" : $("#audit-name").val(),
+ "product_id" : $("#audit_product_id").val(),
+ "import_id" : import_value.replace("import_",""),
+ "import_select" : import_select,
+ "is-shift" : e.shiftKey,
+ });
+ }
+ } else {
+ document.getElementById("new-audit-add").innerText = "Create this Audit";
+ document.getElementById('audit-diff').style.display = 'none';
+ document.getElementById('new-audit-cancel').style.display = 'inline';
+ $("#show-new-audit").slideDown();
+ selected_addaudit=true;
+ }
+ });
+
+ $('#new-audit-cancel').click(function() {
+            document.getElementById("new-audit-add").innerText = "Add an audit";
+ document.getElementById('audit-diff').style.display = 'inline';
+ document.getElementById('new-audit-add').style.display = 'inline';
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ $("#show-new-audit").slideUp();
+ $("#show-diff-audit").slideUp();
+ selected_addaudit=false;
+ selected_diffaudit=false;
+ });
+
+ $('#audit-diff').click(function() {
+ if (selected_diffaudit) {
+ selected_diffaudit=false;
+ document.getElementById("audit-diff").innerText = "Audit Diff";
+ document.getElementById('new-audit-add').style.display = 'inline';
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ $("#show-diff-audit").slideUp();
+ /* Trigger the computation and auto download */
+ audit_1_id = $("#audit_1_id").val();
+ audit_2_id = $("#audit_2_id").val();
+ if (audit_1_id == audit_2_id) {
+ alert("You have selected the same two audits for the difference.");
+ return
+ };
+ $("#form_audit_1_id").val(audit_1_id);
+ $("#form_audit_2_id").val(audit_2_id);
+ $("#form_audit_scope_id").val($("#audit_diff_scope").val());
+ if (document.getElementById('audit-diff-order').checked) {
+ $("#form_audit_sort_id").val('1');
+ } else {
+ $("#form_audit_sort_id").val('0');
+ }
+ document.getElementById("download-audit-diff").click();
+ } else {
+ document.getElementById("audit-diff").innerText = "Generate diff report";
+ document.getElementById('new-audit-cancel').style.display = 'inline';
+ document.getElementById('new-audit-add').style.display = 'none';
+ $("#show-diff-audit").slideDown();
+ selected_diffaudit=true;
+ }
+ });
+
+ $('#purge-selected').click(function(){
+ var audit_list=[];
+ $('#harborauditstable input').each(function(){
+ if ($(this).is(':checked')) {
+ audit_list.push($(this).prop('id'));
+ }
+ });
+ if (0 == audit_list.length) {
+ alert("No Audits were selected");
+ return;
+ }
+ var result = confirm("Are you sure you want to purge these " + audit_list.length + " audits (~9 secs/audit)?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-purge-audits',
+ "audit_list" : audit_list.join(","),
+ });
+ }
+ });
+
+ });
+
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-import_manager-toastertable.html b/lib/cve_checker/templates/ck-import_manager-toastertable.html
new file mode 100755
index 00000000..5661e732
--- /dev/null
+++ b/lib/cve_checker/templates/ck-import_manager-toastertable.html
@@ -0,0 +1,266 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Check Import Manager {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li>Import Manager</li>
+ </ul>
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+ <a class="btn btn-default navbar-btn " id="new-import" >New Import</a>
+ <a class="btn btn-default navbar-btn " id="refresh-imports" >Refresh select lists</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default navbar-btn " id="show-help" >Help</a>
+
+<div id="import_help" class="well" style="padding-left:25px;display:none;">
+ <h4>Using import management:</h4>
+ This page is used to drive the table of import sources for CVE Checker audits.<br>
+ <ol>
+ <li> If the import points to a file, it will be used. </li>
+ <li> If the import points to a directory of files, they will be offered in a select list</li>
+ <li> If the import points to a directory of directories, they will be offered in a select list, and the child file(s) will be imported</li>
+ </ol>
+ Fields:<br>
+ <div style="padding-left: 30px;">
+ <b>Title:</b> Displayed title for the import channel<br>
+ <b>Mode:</b> "Repo" is for repositories, "SSL" is for scp, "File" is for direct local or NFS<br>
+ <b>Repo URL:</b> The URL to use to clone git repositories<br>
+ <b>Path:</b><br>
+ </div>
+ <div style="padding-left: 50px;">
+ <b>Repo:</b> Relative path to the target directory or file within the repo tree<br>
+ <b>SSL:</b> The "user@ip:/path" to the remote target directory/file<br>
+ <b>File:</b> Absolute path to the local target directory/file<br>
+ </div>
+ <div style="padding-left: 30px;">
+ <b>Pem file:</b> Permissions file for SSH/scp access to the target directory/file<br>
+ <b>Branch:</b> optional branch for the git repo<br>
+ <b>Select List:</b> Extracted directories/files for the respective import's select list<br>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+ var selected_showhelp=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Import Manager (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Import Managers found";
+ }
+ else if (total > 0) {
+ title = total + " Import Manager" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ $('.edit-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_order-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_order-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_mode-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_mode-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_path-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_path-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_pem-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_pem-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_repo-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_repo-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_branch-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_branch-edit-'+ck_id).style.display = 'inline';
+ //document.getElementById('audit_refresh-disp-'+ck_id).style.display = 'none';
+ //document.getElementById('audit_refresh-edit-'+ck_id).style.display = 'inline';
+
+ document.getElementById('edit-entry-'+ck_id).style.display = 'none';
+ document.getElementById('save-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'inline';
+ });
+
+ function close_ck_edit(ck_id) {
+ document.getElementById('audit_order-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_order-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_mode-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_mode-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_path-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_path-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_pem-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_pem-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_repo-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_repo-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_branch-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_branch-edit-'+ck_id).style.display = 'none';
+ //document.getElementById('audit_refresh-disp-'+ck_id).style.display = 'inline';
+ //document.getElementById('audit_refresh-edit-'+ck_id).style.display = 'none';
+
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+ };
+
+ $('.cancel-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ close_ck_edit(ck_id);
+ });
+
+ $('.save-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ close_ck_edit(ck_id);
+ postCommitAjaxRequest({
+ "action" : 'submit-update-import-ck',
+ "ck_id" : ck_id,
+ "audit_order" : $("#audit_order-text-"+ck_id).val(),
+ "audit_name" : $("#audit_name-text-"+ck_id).val(),
+ "audit_mode" : $("#audit_mode-text-"+ck_id).val(),
+ "audit_path" : $("#audit_path-text-"+ck_id).val(),
+ "audit_pem" : $("#audit_pem-text-"+ck_id).val(),
+ "audit_repo" : $("#audit_repo-text-"+ck_id).val(),
+ "audit_branch" : $("#audit_branch-text-"+ck_id).val(),
+ });
+ });
+
+ $('.trash-import').click(function() {
+ var result = confirm("Are you sure you want to remove import '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-remove-import-ck',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#new-import').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-new-import-ck',
+ });
+ });
+
+ $('#refresh-imports').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-refresh',
+ });
+ });
+
+ $('#show-help').click(function() {
+ if (selected_showhelp) {
+ document.getElementById("show-help").innerText = "Help";
+ $("#import_help").slideUp();
+ selected_showhelp = false;
+ } else {
+ document.getElementById("show-help").innerText = "Close Help";
+ $("#import_help").slideDown();
+ selected_showhelp = true;
+ }
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-issue-toastertable.html b/lib/cve_checker/templates/ck-issue-toastertable.html
new file mode 100755
index 00000000..4768d21e
--- /dev/null
+++ b/lib/cve_checker/templates/ck-issue-toastertable.html
@@ -0,0 +1,347 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Checker Package Issues {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audit' Ck_Package.ck_audit.id %}">Audit packages</a></li><span class="divider">&rarr;</span>
+ <li>Package Issues for {{Ck_Package.name}} from audit {{Ck_Package.ck_audit.name}}</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Package Name: <b>"{{Ck_Package.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> Audit Name: <b>"{{Ck_Package.ck_audit.name}}"</b> </h2>
+ </div>
+</div>
+
+<p><b><big>Reports: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="package_id" value="{{Ck_Package.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the diff report</button>
+</form>
+
+<div class="row" style="display:none;margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h4> Package: <b>{{Ck_Package.name}}</b> </h4>
+ <h4> Audit: <b>{{Ck_Package.ck_audit.name}}</b> </h4>
+ </div>
+</div>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Package Issues (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Package Issues found";
+ }
+ else if (total > 0) {
+ title = total + " Package Issue" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Package_id"] = {{ Ck_Package.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#analyze_artifacts').click(function(){
+ var result = confirm("The will analyze every CVE in this audit and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-analyze-artifacts',
+ },"");
+ }
+ });
+
+ $('#load_artifacts').click(function(){
+ var result = confirm("The will load all CVEs of registered artifacts, and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-load-artifacts',
+ },"");
+ }
+ });
+
+ $('#load_backfill').click(function(){
+ var result = confirm("Backfill missing vulnerabilities using selected audit?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-backfill-vulnerabilities',
+ "backfill_id" : $("#backfill_vulnerabilities").val(),
+ },"");
+ }
+ });
+
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ // Turn on the default controls
+ setDefaultDisplay(true);
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-product-toastertable.html b/lib/cve_checker/templates/ck-product-toastertable.html
new file mode 100755
index 00000000..bdf1509f
--- /dev/null
+++ b/lib/cve_checker/templates/ck-product-toastertable.html
@@ -0,0 +1,309 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Check Product Issues {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audit' Ck_Package.ck_audit.id %}">Audit packages</a></li><span class="divider">&rarr;</span>
+ <li>Package products for {{Ck_Package.name}} from audit {{Ck_Package.ck_audit.name}}</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+
+<p><b><big>Reports: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="package_id" value="{{Ck_Package.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the diff report</button>
+</form>
+
+<div class="row" style="display:none;margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h4> Package: <b>{{Ck_Package.name}}</b> </h4>
+ <h4> Audit: <b>{{Ck_Package.ck_audit.name}}</b> </h4>
+ </div>
+</div>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Products (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No products found";
+ }
+ else if (total > 0) {
+ title = total + " Product" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Package_id"] = {{ Ck_Package.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/tests.py b/lib/cve_checker/tests.py
new file mode 100755
index 00000000..7ce503c2
--- /dev/null
+++ b/lib/cve_checker/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/lib/cve_checker/urls.py b/lib/cve_checker/urls.py
new file mode 100755
index 00000000..396c4fda
--- /dev/null
+++ b/lib/cve_checker/urls.py
@@ -0,0 +1,47 @@
+from django.urls import re_path as url,include
+from django.views.generic import RedirectView
+from . import views, tables
+
+urlpatterns = [
+ #
+ # Main pages
+ #
+
+ url(r'^cvechecker_audits/$',
+ tables.CveCheckerAuditsTable.as_view(template_name="ck-audits-toastertable.html"),
+ name='cvechecker_audits'),
+
+ url(r'^cvechecker/(?P<audit_id>\d+)$',
+ tables.CveCheckerAuditTable.as_view(template_name="ck-audit-toastertable.html"),
+ name='cvechecker_audit'),
+
+ url(r'^cvechecker_audit_cve/(?P<audit_id>\d+)$',
+ tables.CveCheckerAuditCveTable.as_view(template_name="ck-auditcve-toastertable.html"),
+ name='cvechecker_audit_cve'),
+
+ url(r'^cvechecker_issue/(?P<package_id>\d+)$',
+ tables.CveCheckerIssueTable.as_view(template_name="ck-issue-toastertable.html"),
+ name='cvechecker_issue'),
+
+ url(r'^cvechecker_product/(?P<package_id>\d+)$',
+ tables.CveCheckerProductTable.as_view(template_name="ck-product-toastertable.html"),
+ name='cvechecker_product'),
+
+ url(r'^cvechecker_import_manager/$',
+ tables.CveCheckerImportManagementTable.as_view(template_name="ck-import_manager-toastertable.html"),
+ name='cvechecker_import_manager'),
+
+ url(r'^gen_download_cvechecker_summary/$', views.gen_download_cvechecker_summary, name='gen_download_cvechecker_summary'),
+ url(r'^gen_download_cvechecker_audit_diff/$', views.gen_download_cvechecker_audit_diff, name='gen_download_cvechecker_audit_diff'),
+ url(r'^gen_upload_cvechecker/$', views.gen_upload_cvechecker, name='gen_upload_cvechecker'),
+ url(r'^cvechecker_clear_jobs/$', views.cvechecker_clear_jobs, name='cvechecker_clear_jobs'),
+
+ url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
+
+ #
+ # Ajax
+ #
+
+ url(r'^xhr_cvechecker_commit/$', views.xhr_cvechecker_commit,
+ name='xhr_cvechecker_commit'),
+]
diff --git a/lib/cve_checker/views.py b/lib/cve_checker/views.py
new file mode 100755
index 00000000..333cda97
--- /dev/null
+++ b/lib/cve_checker/views.py
@@ -0,0 +1,325 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import re
+from datetime import datetime, date
+import json
+import traceback
+
+from django.urls import reverse_lazy
+from django.views import generic
+from django.http import HttpResponse, HttpResponseNotFound, JsonResponse, HttpResponseRedirect
+
+from django.contrib.auth.forms import UserCreationForm, UserChangeForm, PasswordChangeForm
+from django.contrib import messages
+from django.contrib.auth import update_session_auth_hash
+from django.contrib.auth.models import Group
+from django.shortcuts import render, redirect
+
+from orm.models import SrtSetting, Product
+from orm.models import Job, ErrorLog
+from users.models import SrtUser, UserSafe
+from srtgui.api import execute_process
+from cve_checker.models import Ck_Audit, Ck_Package, CkUploadManager
+from cve_checker.reports import doCveCheckerAuditSummaryExcel, do_audit_cvechecker_diff_report, GitcvecheckerReportManager
+
+from srtgui.views import MimeTypeFinder
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
+
+# quick development/debugging support
+from srtgui.api import _log
+
+#
+# Main pages
+#
+
+# determine which mode we are running in, and redirect appropriately
+def landing(request):
+
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "yp"):
+        return redirect(f"/{SRT_MAIN_APP}/landing/")
+
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'srtgui',
+ }
+ return render(request, 'landing.html', context)
+
+def report(request,page_name):
+ if request.method == "GET":
+ context = GitcvecheckerReportManager.get_context_data(page_name,request=request)
+ record_list = request.GET.get('record_list', '')
+ _log("EXPORT_GET!:%s|%s|" % (request,record_list))
+ context['record_list'] = record_list
+ return render(request, 'report.html', context)
+ elif request.method == "POST":
+ _log("EXPORT_POST!:%s|%s" % (request,request.FILES))
+ parent_page = request.POST.get('parent_page', '')
+ file_name,response_file_name = GitcvecheckerReportManager.exec_report(parent_page,request=request)
+ if file_name and response_file_name:
+ fsock = open(file_name, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_name)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = "attachment; filename=" + response_file_name
+ response["Content-Disposition"] = disposition
+ _log("EXPORT_POST_Q{%s|" % (response))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", {})
+ return redirect('/')
+ raise Exception("Invalid HTTP method for this page")
+
+# Standalone download URL, for ToasterTable pages
+#
+# TBD
+#
+def gen_download_cvechecker_audit_diff(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log("GEN_DOWNLOAD_AUDIT_DIFF(%s)" % request.POST)
+ if request.POST["action"] == "download":
+ audit_1_id = int(request.POST.get('audit_1_id',0))
+ audit_2_id = int(request.POST.get('audit_2_id',0))
+            audit_1 = Ck_Audit.objects.get(id=audit_1_id)
+            audit_2 = Ck_Audit.objects.get(id=audit_2_id)
+ audit_scope = request.POST.get('audit_scope','0')
+ audit_sort = request.POST.get('audit_sort','1')
+ # Enforce older to newer
+ if ('1' == audit_sort) and (audit_1.id > audit_2.id):
+ audit_1,audit_2 = audit_2,audit_1
+ file_path = do_audit_cvechecker_diff_report(audit_1, audit_2, {'format':'xlsx','audit_scope':audit_scope,})
+ if os.path.isfile(file_path):
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(os.path.basename(file_path))
+ return response
+ else:
+ _log("ERROR:could not download '%s'" % file_path)
+ return render(request, "unavailable_artifact.html", context={})
+
+#
+# Upload pages
+#
+
+# Standalone upload URL, for ToasterTable pages
+def gen_upload_cvechecker(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log(f"GEN_UPLOAD_CVECHECKER({request.POST})")
+
+ ck_upload_dir = os.path.join(SRT_BASE_DIR,'data/cve_checker/upload')
+ ck_upload_manager_id = CkUploadManager.objects.get(import_mode='Upload',path='').id
+
+ if request.POST["action"] == "upload":
+ audit_name = request.POST.get('audit_name',0)
+ orm_product_id = int(request.POST.get('orm_product_id',0))
+ orm_product = Product.objects.get(id=orm_product_id)
+
+ if not os.path.isdir(ck_upload_dir):
+ os.makedirs(ck_upload_dir)
+ try:
+ file = request.FILES['fileUpload']
+ except Exception as e:
+ _log("EXPORT_POST:'fileupload:' does not exist: %s" % e)
+ try:
+ ### TODO Error if not JSON file
+ pass
+ # Upload the file
+ local_file_path = os.path.join(ck_upload_dir,file.name)
+ _log("FOO:%s" % local_file_path)
+ if os.path.isfile(local_file_path):
+ os.remove(local_file_path)
+ with open(local_file_path, 'xb+') as destination:
+ for line in file:
+ destination.write(line)
+ # Create an audit from the imported file
+            # NOTE: removed duplicate command (missing "--audit-name") that was immediately overwritten by the line below
+ cmnd = ['./bin/cve_checker/srtool_cvechecker.py','--import-cvechk', f"{ck_upload_manager_id},{orm_product.key},{local_file_path}", "--audit-name",audit_name,"--progress"]
+ Job.start('Audit from upload','Audit from upload',' '.join(cmnd),'')
+
+ except Exception as e:
+ _log("EXPORT_POST:'fileupload:var-1': %s" % e)
+ return redirect('cvechecker_audits')
+
+#
+# Download pages
+#
+
+# Standalone download URL, for ToasterTable pages
+def gen_download_cvechecker_summary(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log("GEN_DOWNLOAD_CVECHECK_SUMMARY(%s)" % request.POST)
+ if request.POST["action"] == "download":
+ audit_id = int(request.POST.get('audit_id',0))
+ ck_audit = Ck_Audit.objects.get(id=audit_id)
+ queryString = request.POST.get('queryString','')
+ options = request.POST.get('options','')
+ options_dict = {'format':'xlsx','audit_id':audit_id}
+ for option in options.split(','):
+ if option: options_dict[option] = 1
+ # orderby=package&filter=is_severity:critical_not_base&search=CVE-2021-44228&default_orderby=name&filter_value=on&
+ for option in queryString.split('&'):
+ if option:
+                    name,value = option.split('=',1)  # maxsplit=1: keep any '=' inside the value
+ options_dict[name] = value
+ file_path = doCveCheckerAuditSummaryExcel(ck_audit,options_dict)
+ if os.path.isfile(file_path):
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(os.path.basename(file_path))
+ return response
+ else:
+ _log("ERROR:could not download '%s'" % file_path)
+ return render(request, "unavailable_artifact.html", context={})
+
+ return render(request, "unavailable_artifact.html", context={})
+
+#
+# XHR pages
+#
+
+def xhr_cvechecker_commit(request):
+ _log("XHR_CVECHECK_COMMIT(%s)" % request.POST)
+ if not 'action' in request.POST:
+ _log("xhr_cvechecker_commit:NO_ACTION")
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+
+ try:
+ error_message = "ok"
+
+ # Fetch cvechecker data from backend
+ if request.POST["action"] == "submit-createaudit":
+ # action': ['submit-createaudit'], 'product_id': ['6'], 'ab_set': ['mickledore'], 'project_name': [''], 'name': ['audit_20231114_mickledore'], 'is-shift': ['false']}>)|
+ audit_name = request.POST.get('name', 'audit_name')
+
+ product_id = int(request.POST.get('product_id', '0'))
+ product = Product.objects.get(id=product_id)
+
+ import_id = request.POST.get('import_id', '0')
+ import_select = request.POST.get('import_select', '_none_')
+
+ # bin/cve_checker/srtool_cvechecker.py --import-cvechk import_id,master,master --progress
+ cmnd = ['./bin/cve_checker/srtool_cvechecker.py','--import-cvechk',f"{import_id},{product.key},{import_select}","--audit-name",audit_name,"--progress"]
+ _log(f"FETCH_cvechecker:JOB:{cmnd}")
+ Job.start('Fetch CveChecker','Fetch CveChecker',' '.join(cmnd),'')
+ # Set update time
+ now = datetime.today().strftime('%Y/%m/%d %H:%M:%S')
+ SrtSetting.set_setting('SRT_CVECHECK_UPDATE',now)
+
+ # Delete a cvechecker
+ elif request.POST["action"] == "submit-trash-audit":
+ cvechecker_id = int(request.POST.get('record_id', '0'))
+ cvechecker_obj = Ck_Audit.objects.get(pk=cvechecker_id)
+ cvechecker_obj.delete()
+
+ # Update management cvechecker settings
+ elif request.POST["action"] == "submit-cvechecker-settings":
+ SrtSetting.set_setting('SRT_cvechecker_PATH',request.POST.get('cvechecker_path', ''))
+
+ # Update cvechecker status
+ elif request.POST["action"] == "submit-update-ck":
+ ck_id = int(request.POST.get('ck_id', '0'))
+ cvechecker_obj = Ck_Audit.objects.get(pk=ck_id)
+ cvechecker_obj.name = request.POST.get('audit_name', '0')
+ cvechecker_obj.save()
+
+ # Add cvechecker import blank
+ elif request.POST["action"] == "submit-new-import-ck":
+ ck_import_obj,created = CkUploadManager.objects.get_or_create(name='new')
+ ck_import_obj.order = 0
+ ck_import_obj.import_mode = 'File'
+ ck_import_obj.path = ''
+ ck_import_obj.pem = ''
+ ck_import_obj.repo = ''
+ ck_import_obj.branch = ''
+ ck_import_obj.auto_refresh = False
+ ck_import_obj.save()
+
+ # Update cvechecker import
+ elif request.POST["action"] == "submit-update-import-ck":
+ ck_id = int(request.POST.get('ck_id', '0'))
+ try:
+ order = int(request.POST.get('audit_order', '0').strip())
+ ck_import_obj = CkUploadManager.objects.get(id=ck_id)
+ ck_import_obj.order = order
+ ck_import_obj.name = request.POST.get('audit_name', 'new').strip()
+ ck_import_obj.import_mode = request.POST.get('audit_mode', 'File').strip()
+ ck_import_obj.path = request.POST.get('audit_path', '').strip()
+ ck_import_obj.pem = request.POST.get('audit_pem', '').strip()
+ ck_import_obj.repo = request.POST.get('audit_repo', '').strip()
+ ck_import_obj.branch = request.POST.get('audit_branch', '').strip()
+ ck_import_obj.auto_refresh = ('True' == request.POST.get('audit_refresh', 'False').strip())
+ ck_import_obj.save()
+ except:
+ error_message = "Error: order must be an integer"
+
+ # Update the Import select tables
+ elif request.POST["action"] == "submit-import-refresh":
+ cmnd = ["bin/cve_checker/srtool_cvechecker.py","--update-imports","-f"]
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+ if 0 != result_returncode:
+ error_message = f"ERROR:{cmnd}: {result_stderr}:{result_stdout}:"
+
+ # Delete an import
+ elif request.POST["action"] == "submit-remove-import-ck":
+ ck_id = int(request.POST.get('record_id', '0'))
+ ck_import_obj = CkUploadManager.objects.get(pk=ck_id)
+ ck_import_obj.delete()
+
+ # Clear the dead jobs
+ elif request.POST["action"] == "submit-clearjobs":
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+
+ # Undefined action
+ else:
+ error_message ="ERROR:unknown action '%s'" % request.POST["action"]
+
+ _log("XHR_CVECHECK_COMMIT:DONE:%s" % error_message)
+ return HttpResponse(json.dumps( {"error": error_message,} ), content_type = "application/json")
+
+ except Exception as e:
+ _log("XHR_CVECHECK_COMMIT:no(%s)(%s)" % (e,traceback.format_exc()))
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+
+# Delete jobs
+def cvechecker_clear_jobs(request):
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+ return redirect('manage')
diff --git a/lib/orm/management/commands/checksettings.py b/lib/orm/management/commands/checksettings.py
index f5e4df02..5701c0aa 100644
--- a/lib/orm/management/commands/checksettings.py
+++ b/lib/orm/management/commands/checksettings.py
@@ -36,7 +36,7 @@ class Command(BaseCommand):
# to allow embedding comments in the JSON files
def _load_datasource(self,dir):
for ds in glob.glob(os.path.join(dir,'datasource*.json')):
- _log("Load_Datasource:%s" % ds)
+ # _log("Load_Datasource:%s" % ds)
with open(ds) as json_data:
dct = json.load(json_data)
if 'srtsetting' in dct:
@@ -49,7 +49,17 @@ class Command(BaseCommand):
if 'datasource' in dct:
for datasource in dct['datasource']:
#print(" LOAD_DATASOURCE:%s:%s" % (datasource['key'],datasource['description']))
- ds,create = DataSource.objects.get_or_create(key=datasource['key'])
+ ds,created = DataSource.objects.get_or_create(key=datasource['key'])
+ if not created:
+ # Special handling for attributes, persistent enablement
+ new_attributes = datasource['attributes'] if 'attributes' in datasource else ''
+                        # An explicit "ENABLE" overrides any default "DISABLE"
+ if 'ENABLE ' in ds.attributes:
+ new_attributes = 'ENABLE ' + new_attributes.replace('DISABLE ','').replace('ENABLE ','')
+                        # An explicit "DISABLE" overrides the default enable
+ if 'DISABLE ' in ds.attributes:
+ new_attributes = 'DISABLE ' + new_attributes.replace('DISABLE ','').replace('ENABLE ','')
+ datasource['attributes'] = new_attributes
for key in datasource.keys():
if key.startswith("_comment"):
continue
diff --git a/lib/orm/management/commands/lsupdates.py b/lib/orm/management/commands/lsupdates.py
index 2a89a811..1805142f 100644
--- a/lib/orm/management/commands/lsupdates.py
+++ b/lib/orm/management/commands/lsupdates.py
@@ -5,7 +5,7 @@
# Security Response Tool Implementation
#
# Copyright (C) 2013-2015 Intel Corp.
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -103,7 +103,7 @@ class Command(BaseCommand):
(what,
pec))
sys.stdout.flush()
- if int(pec) is 100:
+ if int(pec) == 100:
sys.stdout.write("\n")
sys.stdout.flush()
@@ -322,23 +322,34 @@ class Command(BaseCommand):
logger.info("***LS UPDATES***")
+ # Disable the background updates until these are all processed
+ SrtSetting.set_setting('SRT_DISABLE_UPDATES','yes')
+
+ # First process the pre-init data sources in strict pk order to insure dependencies
+ data_sources=DataSource.objects.filter(update_frequency=DataSource.PREINIT).order_by('key')
+ for source in data_sources:
+ if source.init:
+ print("Fetching pre-init datasource '%s:%s'" % (source.source,source.description))
+ self.execute_script(source.init)
+
# Process the data sources in strict pk order to insure dependencies
data_sources=DataSource.objects.all().order_by('key')
for source in data_sources:
if source.loaded and not (source.update_frequency == DataSource.ONSTARTUP):
- logger.info("Skipping source data from %s",source.description)
print("Skipping datasource %s (already loaded)" % (source.description))
- _log("Skipping datasource %s (already loaded)" % (source.description))
continue
elif not source.init:
# No Init action?
print("Skipping datasource %s (no init action)" % (source.description))
continue
+ elif 'DISABLE ' in source.attributes:
+ # Data source disabled
+ print("Disabled datasource %s (%s)" % (source.description,source.attributes))
+ continue
else:
logger.info("Fetching datasource %s:%s" % (source.source,source.description))
print("Fetching datasource '%s:%s'" % (source.source,source.description))
- _log("Fetching datasource '%s:%s'" % (source.source,source.description))
# Development/testing shortcut
if ('cve' == source.data) and ('yes' == SrtSetting.objects.get(name='SRTDBG_SKIP_CVE_IMPORT').value):
@@ -398,10 +409,12 @@ class Command(BaseCommand):
logger.error("Unknown data source type for (%s,%s,%s) " % (source.data,source.source,source.name))
_log("Unknown data source type for %s,%s,%s) " % (source.data,source.source,source.name))
+    # Re-enable the background updates now that these have all been processed
+ SrtSetting.set_setting('SRT_DISABLE_UPDATES','no')
+
os.system('setterm -cursor on')
def handle(self, *args, **options):
-
# testing shortcuts
if 'yes' == SrtSetting.objects.get(name='SRTDBG_MINIMAL_DB').value:
print("TEST: MINIMAL DATABASE LOADING")
@@ -410,5 +423,4 @@ class Command(BaseCommand):
Command.status_sustaining_limit = 10
Command.debug_defect_limit = 10
Command.cpe_limit = 10
-
self.update()
diff --git a/lib/orm/migrations/0001_initial.py b/lib/orm/migrations/0001_initial.py
index 0914d2bc..69ff00a3 100644
--- a/lib/orm/migrations/0001_initial.py
+++ b/lib/orm/migrations/0001_initial.py
@@ -99,10 +99,11 @@ class Migration(migrations.Migration):
('cvssV2_baseScore',models.CharField(max_length=50, blank=True)),
('cvssV2_severity', models.CharField(max_length=50, blank=True)),
- ('packages', models.TextField(blank=True, null=True)),
+ ('packages', models.TextField(blank=True)),
('score_date', models.DateTimeField(null=True, blank=True)),
('srt_updated', models.DateTimeField(auto_now=True)),
+## ('srt_created', models.DateTimeField(auto_now_add=True)),
],
),
@@ -158,18 +159,18 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CveSource',
fields=[
- ('cve', models.ForeignKey(related_name='source2cve', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cve', models.ForeignKey(default=None, related_name='source2cve', to='orm.cve', null=True,on_delete=models.CASCADE,)),
('datasource', models.ForeignKey(default=None, to='orm.datasource',null=True,on_delete=models.CASCADE,)),
],
),
-
migrations.CreateModel(
name='CveToCwe',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(related_name='cve2cwe', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
- ('cwe', models.ForeignKey(default=None, to='orm.cwetable', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='cve2cwe', to='orm.cve', on_delete=models.CASCADE,)),
+ ('cwe', models.ForeignKey(to='orm.cwetable', on_delete=models.CASCADE,)),
],
),
@@ -177,10 +178,11 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Package',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mode', models.IntegerField(default=0)),
('name', models.CharField(max_length=50, blank=True)),
('realname', models.CharField(max_length=50, blank=True)),
- ('invalidname', models.TextField(blank=True, null=True)),
+ ('invalidname', models.TextField(blank=True)),
('weight', models.IntegerField(default=0)),
('cve_count', models.IntegerField(default=0)),
('vulnerability_count', models.IntegerField(default=0)),
@@ -192,8 +194,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PackageToCve',
fields=[
- ('package', models.ForeignKey(related_name='package2cve', default=None, to='orm.package', null=True,on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(related_name='cve2package', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('package', models.ForeignKey(related_name='package2cve', to='orm.package', on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='cve2package', to='orm.cve', on_delete=models.CASCADE,)),
('applicable', models.NullBooleanField(default=True, null=True)),
],
),
@@ -202,13 +205,13 @@ class Migration(migrations.Migration):
name='CveReference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(related_name='references', default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(related_name='references', to='orm.cve', on_delete=models.CASCADE,)),
('hyperlink', models.CharField(max_length=100, null=True)),
('resource', models.CharField(max_length=100, null=True)),
('type', models.CharField(max_length=100, null=True)),
('source', models.CharField(max_length=100, null=True)),
('name', models.CharField(max_length=100, null=True)),
- ('datasource', models.ForeignKey(related_name='source_references', default=None, to='orm.datasource', null=True,on_delete=models.CASCADE,)),
+ ('datasource', models.ForeignKey(related_name='source_references', to='orm.datasource', default=None, null=True,on_delete=models.CASCADE,)),
],
),
@@ -216,7 +219,7 @@ class Migration(migrations.Migration):
name='CveHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('cve', models.ForeignKey(default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(default=None, null=True, to='orm.cve', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -258,8 +261,8 @@ class Migration(migrations.Migration):
name='CveToVulnerablility',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(default=None, to='orm.cve', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(to='orm.cve', on_delete=models.CASCADE,)),
],
),
@@ -267,7 +270,7 @@ class Migration(migrations.Migration):
name='VulnerabilityComments',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -278,7 +281,7 @@ class Migration(migrations.Migration):
name='VulnerabilityHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -289,7 +292,7 @@ class Migration(migrations.Migration):
name='VulnerabilityUploads',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability',on_delete=models.CASCADE,)),
('description', models.TextField(blank=True)),
('path', models.TextField(blank=True)),
('size', models.IntegerField(default=0)),
@@ -310,7 +313,7 @@ class Migration(migrations.Migration):
('resolution', models.IntegerField(default=0)),
('publish', models.TextField(blank=True)),
('release_version', models.CharField(max_length=50)),
- ('product', models.ForeignKey(default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(to='orm.product', on_delete=models.CASCADE,)),
('date_created', models.CharField(max_length=50)),
('date_updated', models.CharField(max_length=50)),
('srt_updated', models.DateTimeField(auto_now=True)),
@@ -323,8 +326,8 @@ class Migration(migrations.Migration):
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
- ('vulnerability', models.ForeignKey(related_name='vulnerability_investigation',default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('product', models.ForeignKey(related_name='references', default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('vulnerability', models.ForeignKey(related_name='vulnerability_investigation',to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(related_name='references', to='orm.product',on_delete=models.CASCADE,)),
('public', models.BooleanField(default=True)),
('comments', models.TextField(blank=True)),
@@ -340,9 +343,9 @@ class Migration(migrations.Migration):
name='InvestigationToDefect',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('defect', models.ForeignKey(default=None, to='orm.defect', null=True,on_delete=models.CASCADE,)),
- ('product', models.ForeignKey(default=None, to='orm.product', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('defect', models.ForeignKey(to='orm.defect', on_delete=models.CASCADE,)),
+ ('product', models.ForeignKey(to='orm.product', on_delete=models.CASCADE,)),
],
),
@@ -350,7 +353,7 @@ class Migration(migrations.Migration):
name='InvestigationComments',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -361,7 +364,7 @@ class Migration(migrations.Migration):
name='InvestigationHistory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('comment', models.TextField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('author', models.TextField(blank=True)),
@@ -372,7 +375,7 @@ class Migration(migrations.Migration):
name='InvestigationUploads',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
('description', models.TextField(blank=True)),
('path', models.TextField(blank=True)),
('size', models.IntegerField(default=0)),
@@ -394,38 +397,43 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='VulnerabilityAccess',
fields=[
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='InvestigationAccess',
fields=[
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='VulnerabilityNotification',
fields=[
- ('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('vulnerability', models.ForeignKey(to='orm.vulnerability', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='InvestigationNotification',
fields=[
- ('investigation', models.ForeignKey(default=None, to='orm.investigation', null=True,on_delete=models.CASCADE,)),
- ('user', models.ForeignKey(default=None, to='users.srtuser', null=True,on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('investigation', models.ForeignKey(to='orm.investigation', on_delete=models.CASCADE,)),
+ ('user', models.ForeignKey(to='users.srtuser', on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='CpeTable',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vulnerable', models.BooleanField(default='False')),
('cpeMatchString', models.TextField(blank=True)),
('cpe23Uri', models.TextField(blank=True)),
@@ -436,14 +444,16 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='CpeToCve',
fields=[
- ('cpe', models.ForeignKey(default=None, to='orm.cpetable',on_delete=models.CASCADE,)),
- ('cve', models.ForeignKey(default=None, to='orm.cve',on_delete=models.CASCADE,)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cpe', models.ForeignKey(to='orm.cpetable',on_delete=models.CASCADE,)),
+ ('cve', models.ForeignKey(to='orm.cve',on_delete=models.CASCADE,)),
],
),
migrations.CreateModel(
name='CpeFilter',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key_prime', models.CharField(max_length=50)),
('key_sub', models.CharField(max_length=50)),
('status', models.IntegerField(default=CpeFilter.UNDECIDED)),
@@ -456,6 +466,7 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PublishPending',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cve', models.ForeignKey(default=None, to='orm.cve',blank=True,null=True,on_delete=models.CASCADE,)),
('vulnerability', models.ForeignKey(default=None, to='orm.vulnerability',blank=True,null=True,on_delete=models.CASCADE,)),
('investigation', models.ForeignKey(default=None, to='orm.investigation',blank=True,null=True,on_delete=models.CASCADE,)),
@@ -468,19 +479,21 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='Notify',
fields=[
- ('category', models.CharField(max_length=50, null=True)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('category', models.CharField(max_length=50)),
('description', models.TextField()),
('url', models.TextField()),
('priority', models.IntegerField(default=0)),
('author', models.TextField()),
- ('srt_updated', models.DateTimeField(auto_now_add=True)),
- ('srt_created', models.DateTimeField(auto_now=True)),
+## ('srt_updated', models.DateTimeField(auto_now=True)),
+## ('srt_created', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='NotifyAccess',
fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notify', models.ForeignKey(default=None, to='orm.notify',blank=True,null=True,on_delete=models.CASCADE,)),
('user', models.ForeignKey(default=None, to='users.srtuser',blank=True,null=True,on_delete=models.CASCADE,)),
],
@@ -489,7 +502,8 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='NotifyCategories',
fields=[
- ('category', models.CharField(max_length=50, null=True)),
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('category', models.CharField(max_length=50)),
],
),
diff --git a/lib/orm/migrations/0003_modified.py b/lib/orm/migrations/0003_modified.py
new file mode 100755
index 00000000..e8752007
--- /dev/null
+++ b/lib/orm/migrations/0003_modified.py
@@ -0,0 +1,61 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0002_updates'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='cve',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='cve',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='cve',
+ name='acknowledge_date',
+ field=models.DateTimeField(null=True),
+ ),
+
+ migrations.AddField(
+ model_name='investigation',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='investigation',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='investigation',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+
+ migrations.AddField(
+ model_name='vulnerability',
+ name='tags',
+ field=models.TextField(blank=True, default='', null=True),
+ ),
+ migrations.AddField(
+ model_name='vulnerability',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True),
+ ),
+ migrations.AddField(
+ model_name='vulnerability',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True),
+ ),
+ ]
diff --git a/lib/orm/migrations/0004_defect_status.py b/lib/orm/migrations/0004_defect_status.py
new file mode 100755
index 00000000..4e5b2f8d
--- /dev/null
+++ b/lib/orm/migrations/0004_defect_status.py
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0003_modified'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='defect',
+ name='srt_status',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='srt_outcome',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='srt_priority',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='duplicate_of',
+ field=models.CharField(max_length=50, blank=True, default=''),
+ ),
+
+ ]
diff --git a/lib/orm/migrations/0005_publish_report.py b/lib/orm/migrations/0005_publish_report.py
new file mode 100755
index 00000000..6a0c34ee
--- /dev/null
+++ b/lib/orm/migrations/0005_publish_report.py
@@ -0,0 +1,34 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0004_defect_status'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='PublishSet',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('cve', models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)),
+ ('state', models.IntegerField(default=0)),
+ ('reason', models.TextField(blank=True)),
+ ],
+ ),
+
+ migrations.CreateModel(
+ name='DefectHistory',
+ fields=[
+ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+ ('defect', models.ForeignKey(to='orm.defect', on_delete=models.CASCADE,)),
+ ('comment', models.TextField(blank=True)),
+ ('date', models.DateField(null=True, blank=True)),
+ ('author', models.TextField(blank=True)),
+ ],
+ ),
+
+ ]
diff --git a/lib/orm/migrations/0006_reconcile.py b/lib/orm/migrations/0006_reconcile.py
new file mode 100755
index 00000000..e7ad54bf
--- /dev/null
+++ b/lib/orm/migrations/0006_reconcile.py
@@ -0,0 +1,410 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.5 on 2020-01-12 06:21
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0005_publish_report'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notify',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AddField(
+ model_name='notify',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='automatic',
+ field=models.BooleanField(default='False'),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='key_prime',
+ field=models.CharField(max_length=40),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='key_sub',
+ field=models.CharField(max_length=40),
+ ),
+ migrations.AlterField(
+ model_name='cpefilter',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Undecided'), (1, 'Include'), (2, 'Exclude'), (3, 'Manual')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='cpetocve',
+ name='cpe',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cpe2cve', to='orm.CpeTable'),
+ ),
+ migrations.AlterField(
+ model_name='cpetocve',
+ name='cve',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve2cpe', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='publish_state',
+ field=models.IntegerField(choices=[(0, 'Unpublished'), (1, 'Not to be Published'), (2, 'Published'), (3, 'Publish Request (New)'), (4, 'Publish Request (Update)'), (5, 'Publish Submitted')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='score_date',
+ field=models.DateField(blank=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='cve',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='cvehistory',
+ name='cve',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_history', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvelocal',
+ name='cvssV3_attackVector',
+ field=models.CharField(blank=True, max_length=50),
+ ),
+ migrations.AlterField(
+ model_name='cvereference',
+ name='datasource',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='source_references', to='orm.DataSource'),
+ ),
+ migrations.AlterField(
+ model_name='cvesource',
+ name='cve',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_parent', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvesource',
+ name='datasource',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cve_datasource', to='orm.DataSource'),
+ ),
+ migrations.AlterField(
+ model_name='cvetocwe',
+ name='cwe',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cwe2cve', to='orm.CweTable'),
+ ),
+ migrations.AlterField(
+ model_name='cvetovulnerablility',
+ name='cve',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_to_vulnerability', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='cvetovulnerablility',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_to_cve', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='datasource',
+ name='update_frequency',
+ field=models.IntegerField(choices=[(0, 'Minute'), (1, 'Hourly'), (2, 'Daily'), (3, 'Weekly'), (4, 'Monthly'), (5, 'OnDemand'), (6, 'OnStartup')], default=2),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_defect', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='resolution',
+ field=models.IntegerField(choices=[(0, 'Unresolved'), (1, 'Resolved'), (2, 'Fixed'), (3, "Won't Fix"), (4, 'Withdrawn'), (5, 'Rejected'), (6, 'Duplicate'), (7, 'Not Applicable'), (8, 'Replaced By Requirement'), (9, 'Cannot Reproduce'), (10, 'Done')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='srt_status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=3),
+ ),
+ migrations.AlterField(
+ model_name='defect',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'In progress'), (2, 'On Hold'), (3, 'Checked In'), (4, 'Resolved'), (5, 'Closed')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='defecthistory',
+ name='defect',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_history', to='orm.Defect'),
+ ),
+ migrations.AlterField(
+ model_name='helptext',
+ name='area',
+ field=models.IntegerField(choices=[(0, 'variable')]),
+ ),
+ migrations.AlterField(
+ model_name='helptext',
+ name='text',
+ field=models.TextField(),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=3),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_investigation', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='investigation',
+ name='tags',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='investigationaccess',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_users', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationaccess',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_user', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='investigationcomments',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_comments', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationhistory',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_history', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationnotification',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_notification', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationnotification',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_notify', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='defect',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_to_investigation', to='orm.Defect'),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_to_defect', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='investigationtodefect',
+ name='product',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='defect_to_product', to='orm.Product'),
+ ),
+ migrations.AlterField(
+ model_name='investigationuploads',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation_uploads', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='author',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='description',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notify',
+ name='url',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='notifyaccess',
+ name='notify',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='todo2user', to='orm.Notify'),
+ ),
+ migrations.AlterField(
+ model_name='notifyaccess',
+ name='user',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user2todo', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='package',
+ name='mode',
+ field=models.IntegerField(choices=[(0, 'For'), (1, 'Against')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='cve',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_cves', to='orm.Cve'),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='investigation',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_investigations', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='note',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AlterField(
+ model_name='publishpending',
+ name='vulnerability',
+ field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='publish_pending_vulnerabilities', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='publishset',
+ name='state',
+ field=models.IntegerField(choices=[(0, 'Skip'), (1, 'New'), (2, 'Modified'), (3, 'New_User'), (4, 'Modified_User'), (5, 'PUBLISH_SET_ERROR')], default=0),
+ ),
+ migrations.AlterField(
+ model_name='srtsetting',
+ name='helptext',
+ field=models.TextField(),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='outcome',
+ field=models.IntegerField(choices=[(0, 'Open'), (1, 'Closed (Not Vulnerable)'), (2, 'Closed (Fixed)'), (3, "Closed (Won't Fix)")], default=0),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='priority',
+ field=models.IntegerField(choices=[(0, 'Undefined'), (1, 'Low'), (2, 'Medium'), (3, 'High'), (4, 'Critical'), (5, 'PRIORITY_ERROR')], default=1),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='public',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='srt_created',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='srt_updated',
+ field=models.DateTimeField(auto_now=True, null=True),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='status',
+ field=models.IntegerField(choices=[(0, 'Historical'), (1, 'New'), (2, 'New-Reserved'), (3, 'Investigate'), (4, 'Vulnerable'), (5, 'Not Vulnerable')], default=3),
+ ),
+ migrations.AlterField(
+ model_name='vulnerability',
+ name='tags',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityaccess',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_user', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityaccess',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_users', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitycomments',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_comments', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityhistory',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_history', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitynotification',
+ name='user',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_notify', to=settings.AUTH_USER_MODEL),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitynotification',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_notification', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitytoinvestigation',
+ name='investigation',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='investigation2vulnerability', to='orm.Investigation'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilitytoinvestigation',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability2investigation', to='orm.Vulnerability'),
+ ),
+ migrations.AlterField(
+ model_name='vulnerabilityuploads',
+ name='vulnerability',
+ field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vulnerability_uploads', to='orm.Vulnerability'),
+ ),
+ migrations.AlterUniqueTogether(
+ name='cpefilter',
+ unique_together=set([('key_prime', 'key_sub')]),
+ ),
+ migrations.AlterUniqueTogether(
+ name='product',
+ unique_together=set([('name', 'version', 'profile')]),
+ ),
+ ]
diff --git a/lib/orm/migrations/0007_components_errorlog.py b/lib/orm/migrations/0007_components_errorlog.py
new file mode 100755
index 00000000..88a02ee1
--- /dev/null
+++ b/lib/orm/migrations/0007_components_errorlog.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.5 on 2020-02-01 03:09
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0006_reconcile'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='ErrorLog',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('severity', models.IntegerField(default=0)),
+ ('description', models.TextField(blank=True)),
+ ('srt_created', models.DateTimeField(auto_now_add=True, null=True)),
+ ],
+ ),
+ migrations.AddField(
+ model_name='defect',
+ name='packages',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AddField(
+ model_name='investigation',
+ name='packages',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AddField(
+ model_name='vulnerability',
+ name='packages',
+ field=models.TextField(blank=True),
+ ),
+ ]
diff --git a/lib/orm/migrations/0008_cveaccess.py b/lib/orm/migrations/0008_cveaccess.py
new file mode 100644
index 00000000..c12ac9ed
--- /dev/null
+++ b/lib/orm/migrations/0008_cveaccess.py
@@ -0,0 +1,24 @@
+# Generated by Django 2.2.11 on 2020-10-23 08:03
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('orm', '0007_components_errorlog'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='CveAccess',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cve', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_users', to='orm.Cve')),
+ ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_user', to=settings.AUTH_USER_MODEL)),
+ ],
+ ),
+ ]
diff --git a/lib/orm/migrations/0009_recipetable.py b/lib/orm/migrations/0009_recipetable.py
new file mode 100644
index 00000000..4f3621f1
--- /dev/null
+++ b/lib/orm/migrations/0009_recipetable.py
@@ -0,0 +1,20 @@
+# Generated by Django 2.2.11 on 2020-11-13 21:48
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0008_cveaccess'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='RecipeTable',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('recipe_name', models.CharField(max_length=50)),
+ ],
+ ),
+ ]
diff --git a/lib/orm/migrations/0010_job.py b/lib/orm/migrations/0010_job.py
new file mode 100644
index 00000000..4b837379
--- /dev/null
+++ b/lib/orm/migrations/0010_job.py
@@ -0,0 +1,35 @@
+# Generated by Django 2.2.11 on 2020-11-14 23:59
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0009_recipetable'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Job',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(default='', max_length=50)),
+ ('description', models.TextField(blank=True)),
+ ('command', models.TextField(blank=True)),
+ ('log_file', models.TextField(blank=True)),
+ ('options', models.TextField(blank=True)),
+ ('status', models.IntegerField(choices=[(0, 'NotStarted'), (1, 'InProgress'), (2, 'Success'), (3, 'Errors'), (4, 'Cancelling'), (5, 'Cancelled')], default=0)),
+ ('parent_name', models.CharField(default='', max_length=50)),
+ ('pid', models.IntegerField(default=0)),
+ ('count', models.IntegerField(default=0)),
+ ('max', models.IntegerField(default=0)),
+ ('errors', models.IntegerField(default=0)),
+ ('warnings', models.IntegerField(default=0)),
+ ('refresh', models.IntegerField(default=0)),
+ ('message', models.CharField(default='', max_length=50)),
+ ('started_on', models.DateTimeField(null=True)),
+ ('completed_on', models.DateTimeField(null=True)),
+ ],
+ ),
+ ]
diff --git a/lib/orm/migrations/0011_extend_field_sizes.py b/lib/orm/migrations/0011_extend_field_sizes.py
new file mode 100644
index 00000000..830a2de3
--- /dev/null
+++ b/lib/orm/migrations/0011_extend_field_sizes.py
@@ -0,0 +1,33 @@
+# Generated by Django 2.2.17 on 2021-02-04 23:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0010_job'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='package',
+ name='name',
+ field=models.CharField(blank=True, max_length=80),
+ ),
+ migrations.AlterField(
+ model_name='package',
+ name='realname',
+ field=models.CharField(blank=True, max_length=80),
+ ),
+ migrations.AlterField(
+ model_name='product',
+ name='cpe',
+ field=models.CharField(max_length=255),
+ ),
+ migrations.AlterField(
+ model_name='datasource',
+ name='key',
+ field=models.CharField(max_length=80),
+ ),
+ ]
diff --git a/lib/orm/migrations/0012_job_user.py b/lib/orm/migrations/0012_job_user.py
new file mode 100755
index 00000000..09af561f
--- /dev/null
+++ b/lib/orm/migrations/0012_job_user.py
@@ -0,0 +1,21 @@
+# Generated by Django 2.2.11 on 2021-10-06 18:26
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('orm', '0011_extend_field_sizes'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='job',
+ name='user',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
+ ),
+ ]
diff --git a/lib/orm/migrations/0013_update_preinit.py b/lib/orm/migrations/0013_update_preinit.py
new file mode 100755
index 00000000..6711be9a
--- /dev/null
+++ b/lib/orm/migrations/0013_update_preinit.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2021-12-06 03:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0012_job_user'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='datasource',
+ name='update_frequency',
+ field=models.IntegerField(choices=[(0, 'Minute'), (1, 'Hourly'), (2, 'Daily'), (3, 'Weekly'), (4, 'Monthly'), (5, 'OnDemand'), (6, 'OnStartup'), (7, 'PreInit')], default=2),
+ ),
+ ]
diff --git a/lib/orm/migrations/0014_alter_packagetocve_applicable.py b/lib/orm/migrations/0014_alter_packagetocve_applicable.py
new file mode 100644
index 00000000..0a7e2cc0
--- /dev/null
+++ b/lib/orm/migrations/0014_alter_packagetocve_applicable.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-01-30 18:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('orm', '0013_update_preinit'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='packagetocve',
+ name='applicable',
+ field=models.BooleanField(null=True),
+ ),
+ ]
diff --git a/lib/orm/models.py b/lib/orm/models.py
index 0f6cfb17..f5016b7d 100644
--- a/lib/orm/models.py
+++ b/lib/orm/models.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -27,21 +27,29 @@ from django.db import transaction
from django.core import validators
from django.conf import settings
import django.db.models.signals
+from django.db.models import F, Q, Sum, Count
+from django.contrib.auth.models import AbstractUser, Group, AnonymousUser
+from srtgui.api import execute_process, execute_process_close_fds
from users.models import SrtUser
import sys
import os
import re
+import itertools
from signal import SIGUSR1
from datetime import datetime
import json
+import subprocess
+import time
+import signal
+import pytz
import logging
logger = logging.getLogger("srt")
# quick development/debugging support
-from srtgui.api import _log
+from srtgui.api import _log, parameter_join
# Sqlite support
@@ -74,7 +82,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
return _base_insert(self, *args, **kwargs)
QuerySet._insert = _insert
- from django.utils import six
def _create_object_from_params(self, lookup, params):
"""
Tries to create an object using passed params.
@@ -89,7 +96,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
return self.get(**lookup), False
except self.model.DoesNotExist:
pass
- six.reraise(*exc_info)
QuerySet._create_object_from_params = _create_object_from_params
@@ -114,6 +120,268 @@ def GitURLField(**kwargs):
# Core Classes
+# Helper class to common mappings
+class SRTool():
+
+ # Global date format
+ DATE_FORMAT = '%Y-%m-%d'
+ DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
+
+ # SRTool Priority
+ UNDEFINED = 0
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 99
+ SRT_PRIORITY = (
+ (UNDEFINED, 'Undefined'),
+ (LOW, 'Low'),
+ (MEDIUM, 'Medium'),
+ (HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ )
+ @staticmethod
+ def priority_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_PRIORITY)):
+ return 'PRIORITY_ERROR'
+ return SRTool.SRT_PRIORITY[index][1]
+ @staticmethod
+ def priority_index(value):
+ for item in SRTool.SRT_PRIORITY:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PRIORITY_ERROR
+
+ # SRTool Severity (same integer values as prority)
+ SRT_SEVERITY = (
+ (UNDEFINED, 'UNDEFINED'),
+ (LOW, 'LOW'),
+ (MEDIUM, 'MEDIUM'),
+ (HIGH, 'HIGH'),
+ (CRITICAL, 'CRITICAL'),
+ )
+ @staticmethod
+ def severity_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_SEVERITY)):
+ return 'SEVERITY_ERROR'
+ return SRTool.SRT_SEVERITY[index][1]
+ @staticmethod
+ def severity_index(value):
+ for item in SRTool.SRT_SEVERITY:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PRIORITY_ERROR
+
+ # SRTool Status
+ HISTORICAL = 0
+ NEW = 1
+ NEW_RESERVED = 2
+ INVESTIGATE = 3
+ VULNERABLE = 4
+ NOT_VULNERABLE = 5
+ NEW_INACTIVE = 6
+ INVESTIGATE_INACTIVE = 7
+ VULNERABLE_INACTIVE = 8
+ NOT_VULNERABLE_INACTIVE = 9
+ STATUS_ERROR = 99
+ SRT_STATUS = (
+ (HISTORICAL, 'Historical'),
+ (NEW, 'New'),
+ (NEW_RESERVED, 'New-Reserved'),
+ (INVESTIGATE, 'Investigate'),
+ (VULNERABLE, 'Vulnerable'),
+ (NOT_VULNERABLE, 'Not Vulnerable'),
+ (NEW_INACTIVE, '(New)'),
+ (INVESTIGATE_INACTIVE, '(Investigate)'),
+ (VULNERABLE_INACTIVE, '(Vulnerable)'),
+ (NOT_VULNERABLE_INACTIVE, '(Not Vulnerable)'),
+ )
+ @staticmethod
+ def status_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_STATUS)):
+ return 'STATUS_ERROR'
+ return SRTool.SRT_STATUS[index][1]
+ @staticmethod
+ def status_index(value):
+ for item in SRTool.SRT_STATUS:
+ if value == item[1]:
+ return item[0]
+ return SRTool.STATUS_ERROR
+ @staticmethod
+ def status_to_inactive(value):
+ if SRTool.NEW == value:
+ return SRTool.NEW_INACTIVE
+ elif SRTool.INVESTIGATE == value:
+ return SRTool.INVESTIGATE_INACTIVE
+ elif SRTool.VULNERABLE == value:
+ return SRTool.VULNERABLE_INACTIVE
+ elif SRTool.NOT_VULNERABLE == value:
+ return SRTool.NOT_VULNERABLE_INACTIVE
+ else:
+ return value
+ @staticmethod
+ def status_to_active(value):
+ if SRTool.NEW_INACTIVE == value:
+ return SRTool.NEW
+ elif SRTool.INVESTIGATE_INACTIVE == value:
+ return SRTool.INVESTIGATE
+ elif SRTool.VULNERABLE_INACTIVE == value:
+ return SRTool.VULNERABLE
+ elif SRTool.NOT_VULNERABLE_INACTIVE == value:
+ return SRTool.NOT_VULNERABLE
+ else:
+ return value
+
+ OPEN = 0
+ CLOSED = 1
+ FIXED = 2
+ NOT_FIX = 3
+ OUTCOME_ERROR = 4
+ SRT_OUTCOME = (
+ (OPEN, 'Open'),
+ (CLOSED, 'Closed (Not Vulnerable)'),
+ (FIXED, 'Closed (Fixed)'),
+ (NOT_FIX, "Closed (Won't Fix)"),
+ )
+ @staticmethod
+ def outcome_text(index):
+ if (0 > index) or (index >= len(SRTool.SRT_OUTCOME)):
+ return "OUTCOME_ERROR"
+ return SRTool.SRT_OUTCOME[index][1]
+ @staticmethod
+ def outcome_index(value):
+ for item in SRTool.SRT_OUTCOME:
+ if value == item[1]:
+ return item[0]
+ return SRTool.OUTCOME_ERROR
+
+ # Publish state
+ PUBLISH_UNPUBLISHED = 0
+ PUBLISH_NOPUBLISH = 1
+ PUBLISH_PUBLISHED = 2
+ PUBLISH_REQUEST = 3
+ PUBLISH_UPDATE = 4
+ PUBLISH_SUBMITTED = 5
+ PUBLISH_ERROR = 99
+ SRT_PUBLISH_STATE = (
+ (PUBLISH_UNPUBLISHED, 'Unpublished'),
+ (PUBLISH_NOPUBLISH, 'Not to be Published'),
+ (PUBLISH_PUBLISHED, 'Published'),
+ (PUBLISH_REQUEST, 'Publish Request (New)'),
+ (PUBLISH_UPDATE, 'Publish Request (Update)'),
+ (PUBLISH_SUBMITTED, 'Publish Submitted'),
+ )
+    @staticmethod
+    def publish_text(index):
+        # Map a publish-state index to its display text; out-of-range
+        # (e.g. PUBLISH_ERROR=99, which is NOT a valid tuple index) yields
+        # the error marker, matching priority_text()/status_text() above.
+        if (0 > index) or (index >= len(SRTool.SRT_PUBLISH_STATE)):
+            return 'PUBLISH_ERROR'
+        return SRTool.SRT_PUBLISH_STATE[index][1]
+ @staticmethod
+ def publish_index(value):
+ for item in SRTool.SRT_PUBLISH_STATE:
+ if value == item[1]:
+ return item[0]
+ return SRTool.PUBLISH_ERROR
+
+ # Normalize displayed dates
+ @staticmethod
+ def date_ymd_text(value):
+ if isinstance(value,datetime):
+ return(value.strftime("%Y-%m-%d"))
+ return(value)
+
+    # Extract a tag's value from a JSON-encoded dictionary string,
+    # returning 'default' when the tag is absent.
+    @staticmethod
+    def get_dict_tag(tag,dict_str,default=None):
+        tag_dict = json.loads(dict_str)
+        if tag in tag_dict:
+            return tag_dict[tag]
+        return default
+
+
+# Helper class to format and track updates
+# Enforce strict formatting and content to enable reporting, change filtering, pretty printing
+class Update():
+ # General history prefix format (type,source,semicolon-joined changes):
+ # UPDATE(User):Priority(%s,%s);Tag();Status(%s,%s) {helpful text}
+ # CREATE(Defect): {Created from defect ABCD-1234}
+ # Update report check strings: 'UPDATE(','Priority(','Status('
+
+ # General update label
+ UPDATE_STR = "UPDATE(%s):"
+ CREATE_STR = "CREATE(%s):"
+ UPDATE_PREFIX_STR = "UPDATE("
+ CREATE_PREFIX_STR = "CREATE("
+
+ # Update sources
+ SOURCE_USER = "User"
+ SOURCE_TRIAGE = "Triage"
+ SOURCE_CVE = "CVE"
+ SOURCE_DEFECT = "Defect"
+
+ # Update labels (no string overlaps allowed)
+ NEW_NAME = "New_Name(%s,%s)"
+ PRIORITY = "Priority(%s,%s)"
+ STATUS = "Status(%s,%s)"
+ SEVERITY_V3 = "Severity_V3(%s,%s)"
+ SEVERITY_V2 = "Severity_V2(%s,%s)"
+ OUTCOME = "Outcome(%s,%s)"
+ RELEASE = "Release(%s,%s)"
+ DESCRIPTION = "Description()"
+ LASTMODIFIEDDATE = "LastModifiedDate(%s,%s)"
+ NOTE = "User_Note()"
+ PRIVATE_NOTE = "Private_Note()"
+ TAG = "Tag()"
+ PUBLISH_STATE = "Publish_State(%s,%s)"
+ PUBLISH_DATE = "Publish_Date(%s,%s)"
+ AFFECTED_COMPONENT = "Affected_Component(%s,%s)"
+ ACKNOWLEDGE_DATE = "AcknowledgeDate(%s,%s)"
+ PUBLIC = "Public(%s,%s)"
+ ATTACH_CVE = "Attach_CVE(%s)"
+ DETACH_CVE = "Detach_CVE(%s)"
+ MERGE_CVE = "Merge_CVE(%s)"
+ ATTACH_VUL = "Attach_Vulnerability(%s)"
+ DETACH_VUL = "Detach_Vulnerability(%s)"
+ ATTACH_INV = "Attach_Investigration(%s)"
+ DETACH_INV = "Detach_Investigration(%s)"
+ ATTACH_DEV = "Attach_Defect(%s)"
+ DETACH_DEV = "Detach_Defect(%s)"
+ ATTACH_DOC = "Attach_Document(%s)"
+ DETACH_DOC = "Detach_Document(%s)"
+ ATTACH_USER_NOTIFY = "Attach_User_Notify(%s)"
+ DETACH_USER_NOTIFY = "Detach_User_Notify(%s)"
+ ATTACH_ACCESS = "Attach_Access(%s)"
+ DETACH_ACCESS = "Detach_Access(%s)"
+ ATTACH_PRODUCT = "Attach_Product(%s)"
+ DETACH_PRODUCT = "Detach_Product(%s)"
+ MARK_NEW = "Mark_New(%s)"
+ MARK_UPDATED = "Mark_Updated(%s)"
+ MARK_PREFIX = "Mark_"
+ MARK_NEW_PREFIX = "Mark_New"
+ MARK_UPDATED_PREFIX = "Mark_Updated"
+ MARK_UNMARK = "Mark_Unmark()"
+
+ # Update Report list
+ UPDATE_CHECK_LIST = (
+ PRIORITY,
+ STATUS,
+ SEVERITY_V3,
+ SEVERITY_V2,
+ RELEASE,
+ MARK_NEW,
+ MARK_UPDATED,
+ )
+
+    # Any matching string for the period indicates a reportable change.
+    # Truncate each template like "Priority(%s,%s)" down to "Priority("
+    # so reports can substring-match history entries.
+    @staticmethod
+    def get_check_list():
+        check_list = []
+        for check in Update.UPDATE_CHECK_LIST:
+            simple_check = re.sub(r'\(.*', '(', check)
+            check_list.append(simple_check)
+        return(check_list)
+
class SrtSetting(models.Model):
name = models.CharField(max_length=63)
helptext = models.TextField()
@@ -128,6 +396,11 @@ class SrtSetting(models.Model):
return(SrtSetting.objects.get(name=key).value)
except:
return(default)
+ @staticmethod
+ def set_setting(key,value):
+ obj,created = SrtSetting.objects.get_or_create(name=key)
+ obj.value = value
+ obj.save()
class HelpText(models.Model):
@@ -139,9 +412,11 @@ class HelpText(models.Model):
text = models.TextField()
-#UPDATE_FREQUENCY: 0 = every minute, 1 = every hour, 2 = every day, 3 = every week, 4 = every month, 5 = every year
+#UPDATE_FREQUENCY: 0 = every n minutes, 1 = every hour, 2 = every day, 3 = every week, 4 = every month, 5 = on demand
class DataSource(models.Model):
- #UPDATE FREQUENCT
+ search_allowed_fields = ['key', 'name', 'description', 'init', 'update', 'lookup']
+
+ #UPDATE FREQUENCY
MINUTELY = 0
HOURLY = 1
DAILY = 2
@@ -149,6 +424,7 @@ class DataSource(models.Model):
MONTHLY = 4
ONDEMAND = 5
ONSTARTUP = 6
+ PREINIT = 7
FREQUENCY = (
(MINUTELY, 'Minute'),
(HOURLY, 'Hourly'),
@@ -157,13 +433,18 @@ class DataSource(models.Model):
(MONTHLY, 'Monthly'),
(ONDEMAND, 'OnDemand'),
(ONSTARTUP, 'OnStartup'),
+ (PREINIT, 'PreInit'),
)
# Global date format
DATE_FORMAT = '%Y-%m-%d'
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
- key = models.CharField(max_length=20)
+ # Metadata
+ LOOKUP_MISSING = 'LOOKUP-MISSING'
+ PREVIEW_SOURCE = 'PREVIEW-SOURCE'
+
+ key = models.CharField(max_length=80)
data = models.CharField(max_length=20)
source = models.CharField(max_length=20)
name = models.CharField(max_length=20)
@@ -194,20 +475,22 @@ class CweTable(models.Model):
class Cve(models.Model):
search_allowed_fields = ['name', 'description', 'publishedDate',
- 'lastModifiedDate', 'comments', 'comments_private']
+ 'lastModifiedDate', 'comments', 'comments_private', 'tags', 'packages']
# SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
# WR Status
@@ -256,6 +539,7 @@ class Cve(models.Model):
status = models.IntegerField(choices=STATUS, default=NEW)
comments = models.TextField(blank=True)
comments_private = models.TextField(blank=True)
+ tags = models.TextField(blank=True, default='', null=True)
cve_data_type = models.CharField(max_length=100, blank=True)
cve_data_format = models.CharField(max_length=50, blank=True)
@@ -264,6 +548,7 @@ class Cve(models.Model):
public = models.BooleanField(default=True)
publish_state = models.IntegerField(choices=PUBLISH_STATE, default=PUBLISH_UNPUBLISHED)
publish_date = models.CharField(max_length=50, blank=True)
+ acknowledge_date = models.DateTimeField(null=True)
description = models.TextField(blank=True)
publishedDate = models.CharField(max_length=50, blank=True)
@@ -278,20 +563,25 @@ class Cve(models.Model):
cvssV2_baseScore = models.CharField(max_length=50, blank=True)
cvssV2_severity = models.CharField(max_length=50, blank=True)
+ # AKA Affected Components
packages = models.TextField(blank=True)
score_date = models.DateField(null=True, blank=True)
- srt_updated = models.DateTimeField(auto_now=True)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
@property
def get_priority_text(self):
- return Cve.PRIORITY[int(self.priority)][1]
+ return SRTool.priority_text(self.priority)
+ @property
+ def get_status_text(self):
+ return SRTool.status_text(self.status)
@property
def get_publish_text(self):
return Cve.PUBLISH_STATE[int(self.publish_state)][1]
@property
- def get_status_text(self):
- return Cve.STATUS[int(self.status)][1]
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
@property
def is_local(self):
try:
@@ -299,6 +589,62 @@ class Cve(models.Model):
return True
except:
return False
+ @property
+ def get_publishset_state(self):
+ try:
+ obj = PublishSet.objects.get(cve=self)
+ return obj.state_text
+ except:
+ return PublishSet.PUBLISH_SET_STATE[PublishSet.PUBLISH_SET_NONE][1]
+ @property
+ def get_public_comments(self):
+ the_comments = self.comments.strip()
+ the_packages = self.packages.strip()
+ if not the_comments or not the_packages:
+ return '%s%s' % (the_comments,the_packages)
+ if the_comments == the_packages:
+ return the_comments
+ return '%s' % (the_comments)
+ def propagate_private(self):
+ # Gather allowed users
+ user_id_list = []
+ for cveaccess in CveAccess.objects.filter(cve=self):
+ user_id_list.append(cveaccess.user_id)
+ _log("BOO1:user_id=%s" % cveaccess.user_id)
+
+ # Decend the object tree
+ for c2v in CveToVulnerablility.objects.filter(cve=self):
+ vulnerability = Vulnerability.objects.get(id=c2v.vulnerability_id)
+ _log("BOO2:v=%s,%s" % (vulnerability.name,self.public))
+ vulnerability.public = self.public
+ vulnerability.save()
+ if not self.public:
+ # Remove existing users
+ for va in VulnerabilityAccess.objects.filter(vulnerability=vulnerability):
+ _log("BOO3:DEL:v=%s,%s" % (vulnerability.name,va.id))
+ va.delete()
+ # Add valid user list
+ for user_id in user_id_list:
+ va,create = VulnerabilityAccess.objects.get_or_create(vulnerability=vulnerability,user_id=user_id)
+ _log("BOO4:ADD:v=%s,%s,%s" % (vulnerability.name,va.id,user_id))
+ va.save()
+
+ for v2i in VulnerabilityToInvestigation.objects.filter(vulnerability = vulnerability):
+ investigation = Investigation.objects.get(id=v2i.investigation_id)
+ _log("BOO5:i=%s,%s" % (investigation.name,self.public))
+ investigation.public = self.public
+ investigation.save()
+ if not self.public:
+ # Remove existing users
+ for ia in InvestigationAccess.objects.filter(investigation=investigation):
+ _log("BOO6:DEL:v=%s,%s" % (investigation.name,ia.id))
+ ia.delete()
+ # Add valid user list
+ for user_id in user_id_list:
+ ia,create = InvestigationAccess.objects.get_or_create(investigation=investigation,user_id=user_id)
+ _log("BOO7:ADD:i=%s,%s,%s" % (investigation.name,ia.id,user_id))
+ ia.save()
+
class CveDetail():
@@ -317,6 +663,7 @@ class CveDetail():
description = ''
publishedDate = ''
+ acknowledge_date = ''
lastModifiedDate = ''
url_title = ''
url = ''
@@ -431,11 +778,16 @@ class CveLocal(models.Model):
# Map of all sources for the given CVE
class CveSource(models.Model):
- cve = models.ForeignKey(Cve,related_name="cve_parent",on_delete=models.CASCADE,)
+ cve = models.ForeignKey(Cve,related_name="cve_parent",blank=True, null=True,on_delete=models.CASCADE,)
datasource = models.ForeignKey(DataSource,related_name="cve_datasource", blank=True, null=True,on_delete=models.CASCADE,)
+class CveAccess(models.Model):
+ cve = models.ForeignKey(Cve,related_name="cve_users",on_delete=models.CASCADE,)
+ user = models.ForeignKey(SrtUser,related_name="cve_user",on_delete=models.CASCADE,)
+
class CveHistory(models.Model):
- cve = models.ForeignKey(Cve,related_name="cve_history",on_delete=models.CASCADE,)
+ search_allowed_fields = ['cve__name', 'comment', 'date', 'author']
+ cve = models.ForeignKey(Cve,related_name="cve_history",default=None, null=True, on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
author = models.TextField(blank=True)
@@ -470,8 +822,8 @@ class Package(models.Model):
)
mode = models.IntegerField(choices=MODE, default=FOR)
- name = models.CharField(max_length=50, blank=True)
- realname = models.CharField(max_length=50, blank=True)
+ name = models.CharField(max_length=80, blank=True)
+ realname = models.CharField(max_length=80, blank=True)
invalidname = models.TextField(blank=True)
weight = models.IntegerField(default=0)
# computed count data
@@ -485,12 +837,12 @@ class Package(models.Model):
@staticmethod
def update_computed_counts(package_name=None):
# A 'None' indicates all packages
- _log("update_computed_counts0:%s" % package_name)
+# _log("update_computed_counts0:%s" % package_name)
if package_name:
package_list = Package.objects.filter(name=package_name)
else:
package_list = Package.objects.all()
- _log("update_computed_counts:p:%s" % len(package_list))
+# _log("update_computed_counts:p:%s" % len(package_list))
for package in package_list:
try:
state = "p"
@@ -498,7 +850,7 @@ class Package(models.Model):
package.vulnerability_count = 0
package.investigation_count = 0
package.defect_count = 0
- _log("update_computed_counts2:c:%s" % len(package.package2cve.all()))
+# _log("update_computed_counts2:c:%s" % len(package.package2cve.all()))
for pc in package.package2cve.all():
cve = pc.cve
package.cve_count += 1
@@ -518,7 +870,7 @@ class Package(models.Model):
class PackageToCve(models.Model):
package = models.ForeignKey(Package,related_name="package2cve",on_delete=models.CASCADE,)
cve = models.ForeignKey(Cve,related_name="cve2package",on_delete=models.CASCADE,)
- applicable = models.NullBooleanField(default=True, null=True)
+ applicable = models.BooleanField(null=True)
# CPE Filtering
@@ -559,13 +911,18 @@ class CveToCwe(models.Model):
class CveReference(models.Model):
cve = models.ForeignKey(Cve,related_name="references",on_delete=models.CASCADE,)
- hyperlink = models.CharField(max_length=100)
+ hyperlink = models.CharField(max_length=100, null=True)
resource = models.CharField(max_length=100, null=True)
type = models.CharField(max_length=100, null=True)
source = models.CharField(max_length=100, null=True)
name = models.CharField(max_length=100, null=True)
datasource = models.ForeignKey(DataSource,related_name="source_references", blank=True, null=True,on_delete=models.CASCADE,)
+class RecipeTable(models.Model):
+ search_allowed_fields = ['recipe_name']
+ recipe_name = models.CharField(max_length=50)
+
+
# PRODUCT
class Product(models.Model):
@@ -576,7 +933,7 @@ class Product(models.Model):
name = models.CharField(max_length=40)
version = models.CharField(max_length=40)
profile = models.CharField(max_length=40)
- cpe = models.CharField(max_length=40)
+ cpe = models.CharField(max_length=255)
defect_tags = models.TextField(blank=True, default='')
product_tags = models.TextField(blank=True, default='')
@@ -586,26 +943,20 @@ class Product(models.Model):
def long_name(self):
long_name = '%s %s %s' % (self.name,self.version,self.profile)
return long_name.strip()
- def get_defect_tag(self,tag):
- dict = json.loads(self.defect_tags)
- try:
- return dict[tag]
- except:
- _log("ERROR:get_defect_tag:%s[%s]" % (dict,tag))
- return ''
- def get_product_tag(self,tag):
- dict = json.loads(self.product_tags)
- try:
- return dict[tag]
- except:
- _log("ERROR:get_product_tags:%s[%s]" % (dict,tag))
- return ''
+ def get_defect_tag(self,tag,default=None):
+ return SRTool.get_dict_tag(tag,self.defect_tags,default)
+ def get_product_tag(self,tag,default=None):
+ return SRTool.get_dict_tag(tag,self.product_tags,default)
+ def get_defect_str(self):
+ return self.defect_tags.replace('"','')
+ def get_product_str(self):
+ return self.product_tags.replace('"','')
# VULNERABILITY
# Company-level Vulnerablility Record
class Vulnerability(models.Model):
- search_allowed_fields = ['name', 'comments', 'comments_private']
+ search_allowed_fields = ['name', 'comments', 'comments_private', 'tags']
HISTORICAL = 0
NEW = 1
@@ -632,18 +983,21 @@ class Vulnerability(models.Model):
(FIXED, 'Closed (Fixed)'),
(NOT_FIX, "Closed (Won't Fix)"),
)
- # SRTool Severity, matched with Cve/Defect Priority with placeholder for 'minor'
+
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
name = models.CharField(max_length=50)
@@ -653,25 +1007,36 @@ class Vulnerability(models.Model):
public = models.BooleanField(default=True)
comments = models.TextField(blank=True, default='')
comments_private = models.TextField(blank=True, default='')
+ tags = models.TextField(blank=True, default='')
status = models.IntegerField(choices=STATUS, default=INVESTIGATE)
outcome = models.IntegerField(choices=OUTCOME, default=OPEN)
priority = models.IntegerField(choices=PRIORITY, default=LOW)
+ # AKA Affected Components
+ packages = models.TextField(blank=True)
+
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
+
+ @property
+ def get_priority_text(self):
+ return SRTool.priority_text(self.priority)
@property
def get_status_text(self):
- return Vulnerability.STATUS[int(self.status)][1]
+ return SRTool.status_text(self.status)
@property
def get_outcome_text(self):
+ return SRTool.outcome_text(self.outcome)
return Vulnerability.OUTCOME[int(self.outcome)][1]
@property
- def get_priority_text(self):
- return Vulnerability.PRIORITY[int(self.priority)][1]
- @property
def get_long_name(self):
if self.cve_primary_name:
return "%s (%s)" % (self.name,self.cve_primary_name)
return "%s" % (self.name)
+ @property
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
@staticmethod
def new_vulnerability_name():
# get next vulnerability name atomically
@@ -698,6 +1063,9 @@ class Vulnerability(models.Model):
print("Error in new_vulnerability_name")
raise
return "VUL-%05d" % index
+ @property
+ def investigation_list(self):
+ return VulnerabilityToInvestigation.objects.filter(vulnerability_id=self.id).order_by('investigation__product__order')
class VulnerabilityComments(models.Model):
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_comments",on_delete=models.CASCADE,)
@@ -706,6 +1074,7 @@ class VulnerabilityComments(models.Model):
author = models.TextField(blank=True)
class VulnerabilityHistory(models.Model):
+ search_allowed_fields = ['vulnerability__name', 'comment', 'date', 'author']
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_history",on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
@@ -733,58 +1102,62 @@ class Defect(models.Model):
#Issue Type,Key,Summary,Priority,Status,Resolution,Publish To OLS,Fix Version
#Bug,LIN10-2031,Security Advisory - libvorbis - CVE-2017-14633,P3,Closed,Fixed,Reviewed - Publish,10.17.41.3
- NONE = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
- Priority = (
- (NONE, 'None'),
- (MINOR, 'P4'),
- (LOW, 'P3'),
- (MEDIUM, 'P2'),
- (HIGH, 'P1'),
+ # Defect/SRTool Priority
+ DEFECT_UNDEFINED = 0
+ DEFECT_LOW = 1
+ DEFECT_MEDIUM = 2
+ DEFECT_HIGH = 3
+ DEFECT_CRITICAL = 4
+ DEFECT_PRIORITY_ERROR = 5
+ DEFECT_PRIORITY = (
+ (DEFECT_UNDEFINED, 'Undefined'),
+ (DEFECT_LOW, 'Low'),
+ (DEFECT_MEDIUM, 'Medium'),
+ (DEFECT_HIGH, 'High'),
+ (DEFECT_CRITICAL, 'Critical'),
+ (DEFECT_PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
- OPEN = 0
- IN_PROGRESS = 1
- ON_HOLD = 2
- CHECKED_IN = 3
- RESOLVED = 4
- CLOSED = 5
- Status = (
- (OPEN, 'Open'),
- (IN_PROGRESS, 'In progress'),
- (ON_HOLD, 'On Hold'),
- (CHECKED_IN, 'Checked In'),
- (RESOLVED, 'Resolved'),
- (CLOSED, 'Closed'),
+ DEFECT_STATUS_OPEN = 0
+ DEFECT_STATUS_IN_PROGRESS = 1
+ DEFECT_STATUS_ON_HOLD = 2
+ DEFECT_STATUS_CHECKED_IN = 3
+ DEFECT_STATUS_RESOLVED = 4
+ DEFECT_STATUS_CLOSED = 5
+ DEFECT_STATUS = (
+ (DEFECT_STATUS_OPEN, 'Open'),
+ (DEFECT_STATUS_IN_PROGRESS, 'In progress'),
+ (DEFECT_STATUS_ON_HOLD, 'On Hold'),
+ (DEFECT_STATUS_CHECKED_IN, 'Checked In'),
+ (DEFECT_STATUS_RESOLVED, 'Resolved'),
+ (DEFECT_STATUS_CLOSED, 'Closed'),
)
- UNRESOLVED = 0
- RESOLVED = 1
- FIXED = 2
- WILL_NOT_FIX = 3
- WITHDRAWN = 4
- REJECTED = 5
- DUPLICATE = 6
- NOT_APPLICABLE = 7
- REPLACED_BY_REQUIREMENT = 8
- CANNOT_REPRODUCE = 9
- DONE = 10
- Resolution = (
- (UNRESOLVED, 'Unresolved'),
- (RESOLVED, 'Resolved'),
- (FIXED, 'Fixed'),
- (WILL_NOT_FIX, 'Won\'t Fix'),
- (WITHDRAWN, 'Withdrawn'),
- (REJECTED, 'Rejected'),
- (DUPLICATE, 'Duplicate'),
- (NOT_APPLICABLE, 'Not Applicable'),
- (REPLACED_BY_REQUIREMENT, 'Replaced By Requirement'),
- (CANNOT_REPRODUCE, 'Cannot Reproduce'),
- (DONE, 'Done'),
+ DEFECT_UNRESOLVED = 0
+ DEFECT_RESOLVED = 1
+ DEFECT_FIXED = 2
+ DEFECT_WILL_NOT_FIX = 3
+ DEFECT_WITHDRAWN = 4
+ DEFECT_REJECTED = 5
+ DEFECT_DUPLICATE = 6
+ DEFECT_NOT_APPLICABLE = 7
+ DEFECT_REPLACED_BY_REQUIREMENT = 8
+ DEFECT_CANNOT_REPRODUCE = 9
+ DEFECT_DONE = 10
+ DEFECT_RESOLUTION = (
+ (DEFECT_UNRESOLVED, 'Unresolved'),
+ (DEFECT_RESOLVED, 'Resolved'),
+ (DEFECT_FIXED, 'Fixed'),
+ (DEFECT_WILL_NOT_FIX, 'Won\'t Fix'),
+ (DEFECT_WITHDRAWN, 'Withdrawn'),
+ (DEFECT_REJECTED, 'Rejected'),
+ (DEFECT_DUPLICATE, 'Duplicate'),
+ (DEFECT_NOT_APPLICABLE, 'Not Applicable'),
+ (DEFECT_REPLACED_BY_REQUIREMENT, 'Replaced By Requirement'),
+ (DEFECT_CANNOT_REPRODUCE, 'Cannot Reproduce'),
+ (DEFECT_DONE, 'Done'),
)
+
Components = (
'BSP',
'Kernel',
@@ -796,41 +1169,146 @@ class Defect(models.Model):
'Test',
)
+ HISTORICAL = 0
+ NEW = 1
+ NEW_RESERVED = 2
+ INVESTIGATE = 3
+ VULNERABLE = 4
+ NOT_VULNERABLE = 5
+ SRT_STATUS = (
+ (HISTORICAL, 'Historical'),
+ (NEW, 'New'),
+ (NEW_RESERVED, 'New-Reserved'),
+ (INVESTIGATE, 'Investigate'),
+ (VULNERABLE, 'Vulnerable'),
+ (NOT_VULNERABLE, 'Not Vulnerable'),
+ )
+
+ OPEN = 0
+ CLOSED = 1
+ FIXED = 2
+ NOT_FIX = 3
+ SRT_OUTCOME = (
+ (OPEN, 'Open'),
+ (CLOSED, 'Closed (Not Vulnerable)'),
+ (FIXED, 'Closed (Fixed)'),
+ (NOT_FIX, "Closed (Won't Fix)"),
+ )
+
+ # SRTool Priority
+ UNDEFINED = 0
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
+ SRT_PRIORITY = (
+ (UNDEFINED, 'Undefined'),
+ (LOW, 'Low'),
+ (MEDIUM, 'Medium'),
+ (HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
+ )
+
name = models.CharField(max_length=50)
summary = models.TextField(blank=True)
url = models.TextField(blank=True)
- priority = models.IntegerField(choices=Priority, default=MINOR)
- status = models.IntegerField(choices=Status, default=OPEN)
- resolution = models.IntegerField(choices=Resolution, default=UNRESOLVED)
+ duplicate_of = models.CharField(max_length=50, blank=True, default='')
+
+ # External defect specific values
+ priority = models.IntegerField(choices=DEFECT_PRIORITY, default=DEFECT_LOW)
+ status = models.IntegerField(choices=DEFECT_STATUS, default=DEFECT_STATUS_OPEN)
+ resolution = models.IntegerField(choices=DEFECT_RESOLUTION, default=DEFECT_UNRESOLVED)
+ # SRTool compatible values
+ srt_priority = models.IntegerField(choices=SRT_PRIORITY, default=LOW)
+ srt_status = models.IntegerField(choices=SRT_STATUS, default=INVESTIGATE)
+ srt_outcome = models.IntegerField(choices=SRT_OUTCOME, default=OPEN)
+
publish = models.TextField(blank=True)
release_version = models.CharField(max_length=50)
product = models.ForeignKey(Product,related_name="product_defect",on_delete=models.CASCADE,)
date_created = models.CharField(max_length=50)
date_updated = models.CharField(max_length=50)
+ # AKA Affected Components
+ packages = models.TextField(blank=True)
+
srt_updated = models.DateTimeField(auto_now=True)
# Methods
@property
+ def get_defect_priority_text(self):
+ return Defect.DEFECT_PRIORITY[int(self.priority)][1]
+ @property
+ def get_defect_status_text(self):
+ return Defect.DEFECT_STATUS[int(self.status)][1]
+ @property
+ def get_defect_resolution_text(self):
+ return Defect.DEFECT_RESOLUTION[int(self.resolution)][1]
+ @property
def get_priority_text(self):
- return Defect.Priority[int(self.priority)][1]
+ return SRTool.priority_text(self.srt_priority)
@property
def get_status_text(self):
- return Defect.Status[int(self.status)][1]
+ return SRTool.status_text(self.srt_status)
+ @property
+ def get_outcome_text(self):
+ return SRTool.outcome_text(self.srt_outcome)
+ @property
+ def get_date_created_text(self):
+ return re.sub(r"T.*", "", self.date_created)
+ @property
+ def get_date_updated_text(self):
+ return re.sub(r"T.*", "", self.date_updated)
@property
- def get_resolution_text(self):
- return Defect.Resolution[int(self.resolution)][1]
def get_long_name(self):
if self.release_version:
return "%s (%s)" % (self.name,self.release_version)
return "%s" % (self.name)
+ @property
+ def get_cve_names(self):
+ cve_list = []
+ for di in InvestigationToDefect.objects.filter(defect = self):
+ for i2v in VulnerabilityToInvestigation.objects.filter(investigation = di.investigation):
+ for v2c in CveToVulnerablility.objects.filter(vulnerability = i2v.vulnerability):
+ cve_list.append(v2c.cve.name)
+ return ','.join(cve_list)
+ @property
+ def get_cve_ids(self):
+ cve_list = []
+ for di in InvestigationToDefect.objects.filter(defect = self):
+ for i2v in VulnerabilityToInvestigation.objects.filter(investigation = di.investigation):
+ for v2c in CveToVulnerablility.objects.filter(vulnerability = i2v.vulnerability):
+ cve_list.append(str(v2c.cve.id))
+ return ','.join(cve_list)
+ @property
+ def get_publishset_state(self):
+ pub_list = []
+ cve_list = self.get_cve_names
+ if not cve_list:
+ return PublishSet.PUBLISH_SET_STATE[PublishSet.PUBLISH_SET_NONE][1]
+ for cve_name in cve_list.split(','):
+ try:
+ cve = Cve.objects.get(name = cve_name)
+ pub_list.append(cve.get_publishset_state)
+ except Exception as e:
+ pass
+ return ','.join(pub_list)
+
+class DefectHistory(models.Model):
+ search_allowed_fields = ['defect__name', 'comment', 'date', 'author']
+ defect = models.ForeignKey(Defect,related_name="defect_history",on_delete=models.CASCADE,)
+ comment = models.TextField(blank=True)
+ date = models.DateField(null=True, blank=True)
+ author = models.TextField(blank=True)
# INVESTIGATION
# Product-level Vulnerablility Investigation Record
class Investigation(models.Model):
- search_allowed_fields = ['name', 'comments', 'comments_private']
+ search_allowed_fields = ['name', 'comments', 'comments_private', 'tags']
HISTORICAL = 0
NEW = 1
@@ -858,18 +1336,22 @@ class Investigation(models.Model):
(NOT_FIX, "Closed (Won't Fix)"),
)
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
+
name = models.CharField(max_length=50)
vulnerability = models.ForeignKey(Vulnerability,related_name="vulnerability_investigation",on_delete=models.CASCADE,)
product = models.ForeignKey(Product,related_name="product_investigation",on_delete=models.CASCADE,)
@@ -877,26 +1359,36 @@ class Investigation(models.Model):
public = models.BooleanField(default=True)
comments = models.TextField(blank=True)
comments_private = models.TextField(blank=True)
+ tags = models.TextField(blank=True, default='')
status = models.IntegerField(choices=STATUS, default=OPEN)
outcome = models.IntegerField(choices=OUTCOME, default=INVESTIGATE)
priority = models.IntegerField(choices=PRIORITY, default=LOW)
+ # AKA Affected Components
+ packages = models.TextField(blank=True)
+
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
+
# Methods
@property
+ def get_priority_text(self):
+ return SRTool.priority_text(self.priority)
+ @property
def get_status_text(self):
- return Investigation.STATUS[int(self.status)][1]
+ return SRTool.status_text(self.status)
@property
def get_outcome_text(self):
- return Investigation.OUTCOME[int(self.outcome)][1]
- @property
- def get_priority_text(self):
- return Investigation.PRIORITY[int(self.priority)][1]
+ return SRTool.outcome_text(self.outcome)
@property
def get_long_name(self):
if self.vulnerability and self.vulnerability.cve_primary_name:
return "%s (%s)" % (self.name,self.vulnerability.cve_primary_name.name)
return "%s" % (self.name)
+ @property
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
@staticmethod
def new_investigation_name():
current_investigation_index,create = SrtSetting.objects.get_or_create(name='current_investigation_index')
@@ -920,6 +1412,7 @@ class InvestigationComments(models.Model):
author = models.TextField(blank=True)
class InvestigationHistory(models.Model):
+ search_allowed_fields = ['investigation__name', 'comment', 'date', 'author']
investigation = models.ForeignKey(Investigation,related_name="investigation_history",on_delete=models.CASCADE,)
comment = models.TextField(blank=True)
date = models.DateField(null=True, blank=True)
@@ -979,17 +1472,21 @@ def _log_args(msg, *args, **kwargs):
# Action items waiting
class Notify(models.Model):
search_allowed_fields = ['category','description','url']
+
+ # SRTool Priority
UNDEFINED = 0
- MINOR = 1
- LOW = 2
- MEDIUM = 3
- HIGH = 4
+ LOW = 1
+ MEDIUM = 2
+ HIGH = 3
+ CRITICAL = 4
+ PRIORITY_ERROR = 5
PRIORITY = (
(UNDEFINED, 'Undefined'),
- (MINOR, 'Minor'),
(LOW, 'Low'),
(MEDIUM, 'Medium'),
(HIGH, 'High'),
+ (CRITICAL, 'Critical'),
+ (PRIORITY_ERROR, 'PRIORITY_ERROR'),
)
category = models.CharField(max_length=50)
@@ -997,8 +1494,8 @@ class Notify(models.Model):
priority = models.IntegerField(default=0)
url = models.TextField(blank=True)
author = models.TextField(blank=True)
- srt_updated = models.DateTimeField(auto_now_add=True)
- srt_created = models.DateTimeField(auto_now=True)
+ srt_updated = models.DateTimeField(auto_now=True, null=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
@property
def get_priority_text(self):
@@ -1013,6 +1510,261 @@ class NotifyAccess(models.Model):
class NotifyCategories(models.Model):
category = models.CharField(max_length=50)
+class PublishSet(models.Model):
+ search_allowed_fields = ['cve__name','cve__description','cve__status','cve__publishedDate','cve__lastModifiedDate']
+
+ # Publish state
+ PUBLISH_SET_NONE = 0
+ PUBLISH_SET_NEW = 1
+ PUBLISH_SET_MODIFIED = 2
+ PUBLISH_SET_NEW_USER = 3
+ PUBLISH_SET_MODIFIED_USER = 4
+ PUBLISH_SET_ERROR = 5
+ PUBLISH_SET_STATE = (
+ (PUBLISH_SET_NONE, 'Skip'),
+ (PUBLISH_SET_NEW, 'New'),
+ (PUBLISH_SET_MODIFIED, 'Modified'),
+ (PUBLISH_SET_NEW_USER, 'New_User'),
+ (PUBLISH_SET_MODIFIED_USER, 'Modified_User'),
+ (PUBLISH_SET_ERROR, 'PUBLISH_SET_ERROR'),
+ )
+
+ cve = models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)
+ state = models.IntegerField(choices=PUBLISH_SET_STATE, default=PUBLISH_SET_NONE)
+ reason = models.TextField(blank=True)
+
+ @property
+ def state_text(self):
+ if (0 > self.state) or (self.state >= len(self.PUBLISH_SET_STATE)):
+ return self.PUBLISH_SET_STATE[self.PUBLISH_SET_ERROR][1]
+ return self.PUBLISH_SET_STATE[self.state][1]
+
+# Error Log
+class ErrorLog(models.Model):
+ search_allowed_fields = ['description']
+
+ # Severity
+ INFO = 0
+ WARNING = 1
+ ERROR = 2
+ SEVERITY = (
+ (INFO, 'Info'),
+ (WARNING, 'Warning'),
+ (ERROR, 'Error'),
+ )
+
+ severity = models.IntegerField(default=0)
+ description = models.TextField(blank=True)
+ srt_created = models.DateTimeField(auto_now_add=True, null=True)
+
+ @property
+ def get_severity_text(self):
+ return ErrorLog.SEVERITY[int(self.severity)][1]
+
+class Job(models.Model):
+ search_allowed_fields = ['name', 'title', 'description', 'status']
+ # Job Status
+ NOTSTARTED = 0
+ INPROGRESS = 1
+ SUCCESS = 2
+ ERRORS = 3
+ CANCELLING = 4
+ CANCELLED = 5
+ STATUS = (
+ (NOTSTARTED, 'NotStarted'),
+ (INPROGRESS, 'InProgress'),
+ (SUCCESS, 'Success'),
+ (ERRORS, 'Errors'),
+ (CANCELLING, 'Cancelling'),
+ (CANCELLED, 'Cancelled'),
+ )
+
+ # Required
+ name = models.CharField(max_length=50,default='')
+ description = models.TextField(blank=True)
+ command = models.TextField(blank=True)
+ log_file = models.TextField(blank=True)
+ # Optional
+ parent_name = models.CharField(max_length=50,default='')
+ options = models.TextField(blank=True)
+ user = models.ForeignKey(SrtUser,default=None,null=True,on_delete=models.CASCADE,)
+ # Managed
+ status = models.IntegerField(choices=STATUS, default=NOTSTARTED)
+ pid = models.IntegerField(default=0)
+ count = models.IntegerField(default=0)
+ max = models.IntegerField(default=0)
+ errors = models.IntegerField(default=0)
+ warnings = models.IntegerField(default=0)
+ refresh = models.IntegerField(default=0)
+ message = models.CharField(max_length=50,default='')
+ started_on = models.DateTimeField(null=True)
+ completed_on = models.DateTimeField(null=True)
+
+ @property
+ def get_status_text(self):
+ for s_val,s_name in Job.STATUS:
+ if s_val == self.status:
+ return s_name
+ return "?STATUS?"
+
+ @staticmethod
+ def get_recent(user=None):
+ """
+ Return recent jobs as a list; if a user is given, only return
+ jobs belonging to that user
+ """
+
+ if user and not isinstance(user,AnonymousUser):
+ jobs = Job.objects.filter(user=user)
+ else:
+ jobs = Job.objects.all()
+
+ finished_criteria = \
+ Q(status=Job.SUCCESS) | \
+ Q(status=Job.ERRORS) | \
+ Q(status=Job.CANCELLED)
+
+ recent_jobs = list(itertools.chain(
+ jobs.filter(status=Job.INPROGRESS).order_by("-started_on"),
+ jobs.filter(finished_criteria).order_by("-completed_on")[:3]
+ ))
+
+ # add percentage done property to each returned job; this is used
+ # to show job progress in mrj_section.html
+ for job in recent_jobs:
+ job.percentDone = job.completeper()
+ job.outcomeText = job.get_status_text
+
+ return recent_jobs
+
+ def completeper(self):
+ if self.max > 0:
+ completeper = (self.count * 100) // self.max
+ else:
+ completeper = 0
+ return completeper
+
+ def eta(self):
+ eta = datetime.now()
+ completeper = self.completeper()
+ if completeper > 0:
+ eta += ((eta - self.started_on)*(100-completeper))/completeper
+ return eta
+
+ @staticmethod
+ def start(name,description,command,options='',log_file='logs/run_job.log',job_id=1):
+ # The audit_job.py will set the pid and time values so that there is no db race condition
+ command = ['bin/common/srtool_job.py','--name',name,'--description',description,'--command',command,'--options',options,'--log',log_file]
+ if job_id:
+ command.extend(['--job-id',str(job_id)])
+ _log("JOB_START:%s" % parameter_join(command))
+# subprocess.Popen(command,close_fds=True)
+# result_returncode,result_stdout,result_stderr = execute_process(command)
+ execute_process_close_fds(command)
+
+ def cancel(self):
+ if self.status == Job.INPROGRESS:
+ try:
+ if self.pid:
+ os.kill(self.pid, signal.SIGTERM) #or signal.SIGKILL
+ except Exception as e:
+ _log("ERROR_JOB:Cancel:%s" % (e))
+ try:
+ self.status = Job.CANCELLING
+ self.completed_on = datetime.now()
+ self.pid = 0
+ self.save()
+ except Exception as e:
+ _log("ERROR_JOB:Cancelled:%s" % (e))
+
+ def done(self):
+ if not self.pid:
+ return
+ if self.status == Job.INPROGRESS:
+ self.pid = 0
+ self.completed_on = datetime.now()
+ self.status = Job.SUCCESS
+ ### TODO COUNT ERRORS AND WARNINGS
+ self.save()
+ elif self.status == Job.CANCELLING:
+ self.pid = 0
+ self.completed_on = datetime.now()
+ self.status = Job.CANCELLED
+ self.errors = 1
+ self.save()
+
+ @staticmethod
+ def preclear_jobs(user=None,user_id=0,user_none=False):
+ # NOTE: preclear completed jobs so that this page comes up clean
+ # without completed progress bars hanging around
+ if (not user_id) and (not user) and (not user_none):
+ return
+ if user_none:
+ user_id = None
+ elif not user_id:
+ user_id = user.id
+ for job in Job.objects.filter(user_id=user_id):
+ if job.status in (Job.SUCCESS,Job.ERRORS):
+ job.delete()
+
+# Wrapper class to run internal 'jobs' with the progress bar
+class Job_Local():
+ job = None
+ log_file_fd = None
+ INTERNAL_COMMAND = '<internal>'
+ DEFAULT = -1
+ DEFAULT_LOG = '.job_log.txt'
+
+ def __init__(self, name, description='', options='', log_file=DEFAULT_LOG, user=None):
+ self.job = Job(name=name, description=description, options=options, log_file=log_file, user=user)
+ self.job.command = self.INTERNAL_COMMAND
+ self.job.started_on = datetime.now(pytz.utc)
+ self.job.completed_on = None
+ if log_file:
+ self.log_file_fd = open(self.job.log_file, 'w')
+ self.log_file_fd.write(f"JOB_START: {name},{description} @{self.job.started_on}\n" )
+ self.job.status = Job.INPROGRESS
+ self.job.save()
+
+ # If cnt == DEFAULT, increment existing cnt value
+ # If max == DEFAULT, use existing max value
+ def update(self,message,count=DEFAULT,max=DEFAULT):
+ if count == self.DEFAULT:
+ self.job.count += 1
+ else:
+ self.job.count = count
+ if max != self.DEFAULT:
+ self.job.max = max
+ if self.job.count > self.job.max:
+ self.job.count = self.job.max
+ self.job.message = message
+ if True and self.log_file_fd:
+ self.log_file_fd.write(f"JOB_UPDATE({self.job.message},{self.job.count},{self.job.max})\n")
+ self.log_file_fd.flush()
+ self.job.save()
+ def add_warning(self,msg):
+ self.job.warnings += 1
+ self.job.save()
+ if self.log_file_fd:
+ self.log_file_fd.write("WARNING: " + msg + "\n" )
+ def add_error(self,msg):
+ self.job.errors += 1
+ self.job.save()
+ if self.log_file_fd:
+ self.log_file_fd.write("ERROR: " + msg + "\n" )
+ def done(self,sleep_time=4):
+ if sleep_time:
+ time.sleep(sleep_time)
+ self.update('Done',self.job.max,self.job.max)
+ self.job.completed_on = datetime.now(pytz.utc)
+ self.job.status = Job.ERRORS if self.job.errors else Job.SUCCESS
+ self.job.save()
+ if self.log_file_fd:
+ self.log_file_fd.write(f"JOB_STOP: W={self.job.warnings},E={self.job.errors} @{self.job.completed_on}\n" )
+ self.log_file_fd.flush()
+ self.log_file_fd.close()
+ self.log_file_fd = None
+
#
# Database Cache Support
#
diff --git a/lib/srtgui/api.py b/lib/srtgui/api.py
index 16ff88b0..2478fb9e 100644
--- a/lib/srtgui/api.py
+++ b/lib/srtgui/api.py
@@ -2,6 +2,7 @@
# BitBake Toaster Implementation
#
# Copyright (C) 2016-2018 Intel Corporation
+# Copyright (C) 2018-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -22,8 +23,13 @@ import os
import sys
import logging
import subprocess
+from datetime import datetime, date
+import traceback
+import re
+import json
from django.http import JsonResponse
+from django.views.generic import View
logger = logging.getLogger("srt")
@@ -43,31 +49,97 @@ def _log(msg):
f1.write("|" + msg + "|\n" )
f1.close()
+def error_log(severity,description):
+ from orm.models import ErrorLog
+ if (severity < ErrorLog.INFO) or (severity > ErrorLog.ERROR):
+ severity = ErrorLog.ERROR
+ error = ErrorLog.objects.create(severity=severity,description=description,)
+ error.save()
+
+# Quote parameters if spaces
+def parameter_join(a):
+ str = []
+ for s in a:
+ if (' ' in s) or (0 == len(s)):
+ str.append('"%s"' % s)
+ else:
+ str.append(s)
+ return ' '.join(str)
+
+
+#
# Sub Process calls
+#
+# Enforce that all scripts run from the SRT_BASE_DIR context
+#
+
def execute_process(*args):
+ # Only string-type parameters allowed
cmd_list = []
for arg in args:
+ if not arg: continue
if isinstance(arg, (list, tuple)):
# Flatten all the way down
for a in arg:
- cmd_list.append(a)
+ if not a: continue
+ cmd_list.append(str(a))
else:
- cmd_list.append(arg)
+ cmd_list.append(str(arg))
+
+ srt_base_dir = os.environ.get('SRT_BASE_DIR')
+ if srt_base_dir and (srt_base_dir != os.getcwd()):
+ os.chdir(srt_base_dir)
+ _log(f"EXECUTE_PROCESS:CHDIR:{srt_base_dir}")
+ if cmd_list[0].startswith('bin/') or cmd_list[0].startswith('./bin'):
+ cmd_list[0] = os.path.join(srt_base_dir,cmd_list[0])
+ _log(f"EXECUTE_PROCESS:CMD:{cmd_list[0]}:{os.getcwd()}")
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+# For Jobs, with captured output
+def execute_process_close_fds(cmnd):
+ srt_base_dir = os.environ.get('SRT_BASE_DIR')
+ if srt_base_dir and (srt_base_dir != os.getcwd()):
+ os.chdir(srt_base_dir)
+ if cmnd[0].startswith('bin/') or cmnd[0].startswith('./bin'):
+ cmnd[0] = os.path.join(srt_base_dir,cmnd[0])
+ subprocess.Popen(cmnd,close_fds=True)
+
+# For Jobs, with captured output
+def execute_system(cmnd):
+ srt_base_dir = os.environ.get('SRT_BASE_DIR')
+ if srt_base_dir and (srt_base_dir != os.getcwd()):
+ os.chdir(srt_base_dir)
+ if cmnd.startswith('bin/') or cmnd.startswith('./bin'):
+ cmnd = srt_base_dir + '/' + cmnd
+ return os.system(cmnd)
- # Python < 3.5 compatible
- if sys.version_info < (3,5):
- process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- try:
- stdout, stderr = process.communicate(input)
- except:
- process.kill()
- process.wait()
- raise
- retcode = process.poll()
- return retcode, stdout, stderr
- else:
- result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- return result.returncode,result.stdout,result.stderr
+#
+# Update CVE datasource list: (a) fetch alt sources, (b) refresh preview sources
+#
+
+# #### TODO: incomplete — references undefined 'cve_object'; needs a cve_object parameter before it can be used
+def update_cve_datasources(source_filter='',force_update_source=True):
+ # Attach all matching CVE sources
+ _log("Alternate1:%s" % (cve_object.name))
+ query_set = DataSource.objects.filter(data="cve")
+ if source_filter:
+ query_set =query_set.filter(source=source_filter)
+ for ds in query_set:
+ _log("Alternate2:%s" % (ds.key))
+ if ds.cve_filter and cve_object.name.startswith(ds.cve_filter):
+ cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=ds)
+ _log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
+
+ # Force update the CVE summary data from sources
+ if force_update_source:
+ result_returncode,result_stdout,result_stderr = execute_process(
+ os.path.join(os.environ.get('SRT_BASE_DIR'),'bin/nist/srtool_nist.py'),
+ '--update-cve-list',
+ cve_object.name,
+ '--force'
+ )
#
# Extract Upstream CVE record details
@@ -82,18 +154,24 @@ def readCveDetails_Upstream(cve, cve_datasource):
# Get the object
lookup_command = cve_datasource.lookup
+ lookup_attributes = ''
if not lookup_command:
v.description = "ERROR(%s):missing lookup command" % (cve_datasource.description)
return v
lookup_command = lookup_command.replace('%command%','--cve-detail=%s' % cve.name)
- result_returncode,result_stdout,result_stderr = execute_process(lookup_command.split(' '))
+ lookup_commands = lookup_command.split(' ')
+ # Convert local SRT bin calls to absolute path calls
+ if not lookup_commands[0].startswith('/'):
+ lookup_commands[0] = os.path.join(os.environ.get('SRT_BASE_DIR', './'),lookup_commands[0])
+ # Execute the call
+ result_returncode,result_stdout,result_stderr = execute_process(*lookup_commands)
#_log("SRT_%s=%s|%s|%s" % (cve_datasource.key,result_returncode,result_stdout,result_stderr))
if 0 != result_returncode:
result_stdout = str(result_stdout)
v.description = "ERROR(%s):%s" % (result_returncode,result_stderr)
return v
- for line in result_stdout.decode("utf-8").splitlines():
+ for line in result_stdout.splitlines():
try:
name = line[:line.index('=')]
value = line[line.index('=')+1:].replace("[EOL]","\n")
@@ -135,6 +213,16 @@ def readCveDetails_Upstream(cve, cve_datasource):
#_log("cpe_list:%s:%s:" % (cve.name,value))
elif name == 'ref_list':
v.ref_list = value
+ elif name == 'ATTRIBUTES':
+ # Returned metadata
+ lookup_attributes = value
+ #_log("NOTE:readCveDetails_Upstream:%s:%s:%s:%s:" % (v.name,v.cvssV2_severity,cve_datasource.description,v.description[:20]))
+
+ # Check for metadata special cases
+ if cve_datasource.LOOKUP_MISSING in lookup_attributes:
+ pass
+
+
return v
#
@@ -298,7 +386,7 @@ def summaryCveDetails(cve,cve_sources):
# No data sources
if not cve_main:
- return cve_detail,cve_html
+ return readCveDetails_None(cve),cve_html
# Merge the data into summary record
summaryMerge(cve_detail,cve_main,cve_local,cve_html,'description')
@@ -338,3 +426,398 @@ def summaryCveDetails(cve,cve_sources):
cve_detail.ref_list = cve_main.ref_list
return cve_detail,cve_html
+
+#
+# Publish Support
+#
+
+
+# Accumulate the history status changes over the date range
+# CVE rec
+# cve[name][key][first,last]
+ # Severity_V3(8.0 HIGH,5.4 MEDIUM)
+ # Severity_V2(8.5 HIGH,4.3 MEDIUM)
+ # Priority(UNDEFINED,Medium)
+ # Status(Historical,Vulnerable)
+# CVE product/defect
+# cve[name][product][defect][key][first,last]
+ # Release(,8.0.0.30)
+ # Status(Historical,Vulnerable)
+
+
+# Calculate the publishable CVEs for a given period
+# Accumulate the CVE history status changes over the date range
+def publishCalculate(date_start,date_stop):
+ from orm.models import SrtSetting, PublishSet, Cve, CveHistory, DefectHistory, Update, SRTool, InvestigationToDefect, Product
+
+ # Precompile the filter for efficiency
+ update_regex = re.compile(r"([^\(]*)\(([^,]*),([^\)]*)\)")
+
+ # Accumulate the CVE history status changes
+ # Severity_V3(8.0 HIGH,5.4 MEDIUM)
+ # Severity_V2(8.5 HIGH,4.3 MEDIUM)
+ # Priority(UNDEFINED,Medium)
+ # Status(Historical,Vulnerable)
+ cve_updates = {}
+ # cve_updates[cve_id_str][key][first,last]
+ def cve_update(cve_id_str,change):
+ m = update_regex.search(change)
+ if m:
+ field = m.group(1)
+ value_old = m.group(2)
+ value_new = m.group(3)
+ else:
+ field = re.sub(r"\(.*", "", change)
+ value_old = ''
+ value_new = ''
+
+ if not field in ('Severity_V3','Severity_V2'):
+ return
+
+ # Fix-up
+ if ('Severity_V3' == field) or ('Severity_V2' == field):
+ score_old,severity_old = value_old.split(' ')
+ score_new,severity_new = value_new.split(' ')
+ if score_old.rstrip('0').rstrip('.') == score_new.rstrip('0').rstrip('.'):
+ return
+ if severity_old == severity_new:
+ return
+ value_old = severity_old
+ value_new = severity_new
+
+ if not cve_id_str in cve_updates:
+ cve_updates[cve_id_str] = {}
+ if not field in cve_updates[cve_id_str]:
+ # Preset the old value and accumulate the new value
+ cve_updates[cve_id_str][field] = [value_old,value_new]
+ else:
+ # Only accumulate the new value
+ cve_updates[cve_id_str][field] = [cve_updates[cve_id_str][field][0],value_new]
+
+ # Accumulate the CVE Defect history status changes
+ # Status(Historical,Vulnerable)
+ # Priority(UNDEFINED,Medium)
+ # Release(,8.0.0.30)
+ defect_updates = {}
+ # defect_updates[cve_id_str][product][defect][key][first,last]
+ def defect_update(cve_id_str,product_key,defect_name,change):
+ m = update_regex.search(change)
+ if m:
+ field = m.group(1)
+ value_old = m.group(2)
+ value_new = m.group(3)
+ else:
+ field = re.sub(r"\(.*", "", change)
+ value_old = ''
+ value_new = ''
+
+ if not cve_id_str in defect_updates:
+ defect_updates[cve_id_str] = {}
+ if not product_key in defect_updates[cve_id_str]:
+ defect_updates[cve_id_str][product_key] = {}
+ if not defect_name in defect_updates[cve_id_str][product_key]:
+ defect_updates[cve_id_str][product_key][defect_name] = {}
+ if not field in defect_updates[cve_id_str][product_key][defect_name]:
+ # Preset the old value and accumulate the new value
+ defect_updates[cve_id_str][product_key][defect_name][field] = [value_old,value_new]
+ else:
+ # Only accumulate the new value
+ defect_updates[cve_id_str][product_key][defect_name][field] = [defect_updates[cve_id_str][product_key][defect_name][field][0],value_new]
+
+ try:
+ PublishSet.objects.all().delete()
+
+ # Convert dates to CVE-type dates
+ date_start_text = date_start.strftime('%Y-%m-%d')
+ date_stop_text = date_stop.strftime('%Y-%m-%d')
+
+ # Find all candidate new CVEs
+ queryset = \
+ Cve.objects.filter(acknowledge_date__gte=date_start_text,acknowledge_date__lte=date_stop_text) | \
+ Cve.objects.filter(srt_created__gte=date_start,srt_created__lte=date_stop)
+ exclude_list = [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED]
+ queryset = queryset.exclude(status__in=exclude_list)
+
+ # Gather only CVE histories from currently supported products
+ # This assumes that the defect names have the format "<DEFECT_KEY>-*"
+ # Example entry: "CREATE(Defect): {Created from defect <DEFECT_KEY>-7058}"
+ # Gather the supported product keys
+ product_filter = []
+ product_query = Product.objects.filter()
+ for product in product_query:
+ if "support" == product.get_product_tag('mode').order_by('-order'):
+ product_filter.append(product.get_defect_tag('key'))
+ # Scan the CVE histories
+ new_cves = {}
+ create_filter = Update.CREATE_STR % Update.SOURCE_DEFECT
+ for cve in queryset:
+ try:
+ history_query = CveHistory.objects.filter(cve=cve,comment__startswith=create_filter)
+ if history_query:
+ supported = False
+ _keys = []
+ for history in history_query:
+ _keys.append(history.comment)
+ for key in product_filter:
+ # CREATE(Defect): {Created from defect <DEFECT_KEY>}
+ if 0 < history.comment.find(' %s-' % key):
+ supported = True
+ break
+ if not supported:
+ continue
+ except:
+ # No matches to test
+ pass
+
+ p = PublishSet(cve=cve, state=PublishSet.PUBLISH_SET_NEW, reason='LastModifiedDate(,%s)' % cve.lastModifiedDate)
+ p.save()
+ new_cves[str(cve.id)] = True
+
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+
+ # Find all candidate updated CVEs, made by user or imported from CVE integration tools
+ # UPDATE(CVE):Severity_V3(8.0 HIGH,5.4 MEDIUM);Severity_V2(8.5 HIGH,4.3 MEDIUM);LastModifiedDate(2017-08-12,2019-03-19)
+ for ch in CveHistory.objects.filter(date__gte=date_start_text,date__lte=date_stop_text,comment__startswith=Update.UPDATE_PREFIX_STR).order_by('date'):
+ # Already new
+ if str(ch.cve.id) in new_cves:
+ continue
+ # Ignore CVEs with non-applicable
+ if ch.cve.status in [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED]:
+ continue
+ change_str = re.sub(r"^.*:", "", ch.comment)
+ change_str = re.sub(r"{.*", "", change_str)
+ for change in change_str.split(';'):
+ cve_update(str(ch.cve.id),change)
+
+ # Find all candidate updated Defects, made by user or imported from defect integration tools
+ # UPDATE(Defect):Priority(UNDEFINED,Medium);Status(Historical,Investigate);Release(,8.0.0.30) {Update from defect LIN8-8669}
+ for dh in DefectHistory.objects.filter(date__gte=date_start_text,date__lte=date_stop_text,comment__startswith=Update.UPDATE_PREFIX_STR).order_by('date'):
+ # Get the product key
+ for i2d in InvestigationToDefect.objects.filter(defect_id=dh.defect.id):
+ # get first product key
+ product_key = i2d.product.key
+ break
+ else:
+ # no investigation for this orphaned defect
+ continue
+ change_str = re.sub(r"^.*:", "", dh.comment)
+ change_str = re.sub(r"{.*", "", change_str)
+ for change in change_str.split(';'):
+ cve_id_strs = dh.defect.get_cve_ids
+ for cve_id_str in cve_id_strs.split(','):
+ # Already new
+ if cve_id_str in new_cves:
+ continue
+ defect_update(cve_id_str,product_key,dh.defect.name,change)
+
+
+ # Merge manual Marks to table
+ queryset = CveHistory.objects.filter(
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ if cvehistory.comment.startswith(Update.MARK_NEW_PREFIX):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NEW_USER
+ publish_object.reason= "CC " + cvehistory.comment
+ publish_object.save()
+ elif cvehistory.comment.startswith(Update.MARK_UPDATED_PREFIX):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED_USER
+ publish_object.reason= "DD " + cvehistory.comment
+ publish_object.save()
+ elif cvehistory.comment.startswith(Update.MARK_UNMARK):
+ publish_object,created = PublishSet.objects.get_or_create(cve=cvehistory.cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NONE
+ publish_object.reason= "EE " + cvehistory.comment
+ _log("PUBLISH_SET_NONE(%d):%s:%s" % (cvehistory.id,cvehistory.cve.name,cvehistory.comment))
+ publish_object.save()
+
+ #
+ # for all cves, merge data, create publish records
+ # cve_change_tree[cve_id_str][dict]
+ #
+
+ cve_change_tree = {}
+ # cve_updates[cve_id_str][key][first,last]
+ for cve_id_str in cve_updates:
+ if not cve_id_str in cve_change_tree:
+ cve_change_tree[cve_id_str] = {}
+ for key in cve_updates[cve_id_str]:
+ cve_change_tree[cve_id_str][key] = cve_updates[cve_id_str][key]
+
+ # defect_updates[cve_id_str][product][defect][key][first,last]
+ for cve_id_str in defect_updates:
+ if not cve_id_str in cve_change_tree:
+ cve_change_tree[cve_id_str] = {}
+ for product in defect_updates[cve_id_str]:
+ product_updates = []
+ for defect in defect_updates[cve_id_str][product]:
+ defect_changes = []
+ for key in defect_updates[cve_id_str][product][defect].keys():
+ defect_changes.append('%s(%s,%s)' % (key,defect_updates[cve_id_str][product][defect][key][0],defect_updates[cve_id_str][product][defect][key][1]))
+ product_updates.append('%s[%s]' % (defect,','.join(defect_changes)))
+ cve_change_tree[cve_id_str][product] = '|'.join(product_updates)
+
+ # Create publish records
+ for cve_id_str in cve_change_tree:
+ publish_object,created = PublishSet.objects.get_or_create(cve_id=int(cve_id_str))
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED
+ publish_object.reason = json.dumps(cve_change_tree[cve_id_str])
+ publish_object.save()
+
+ # Update last calculation date
+ SrtSetting.set_setting('publish_last_calc',datetime.today().strftime('%m/%d/%Y %H:%M'))
+ except Exception as e:
+ _log("ERROR:publishCalculate:%s,%s." % (e,traceback.print_stack()))
+
+
+# Reset: for each CVE History:
+# (a) Remove any MARK_NEW or MARK_UPDATED in the period
+#
+def publishReset(date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ #bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+
+ # Deleted manual Marks from table
+ queryset = CveHistory.objects.filter(
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ if cvehistory.comment.startswith(Update.MARK_PREFIX):
+ cvehistory.delete()
+
+# MarkNew: for each CVE:
+# (a) Remove any previous MARK_UPDATED in the period (there can be many periods)
+# (a) Remove any previous MARK_NEW (there can only be one)
+# (b) Insert MARK_NEW at period's middle date
+#
+def publishMarkNew(cve_list,reason_map,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+ mid_date = date_start + (date_stop - date_start)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ # Remove marks in period
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ # Remove all mark news
+ queryset = CveHistory.objects.filter(cve = cve,comment__startswith = Update.MARK_NEW_PREFIX)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_NEW % reason_map[cve_name], date=mid_date, author='SRTool')
+ cvehistory.save()
+
+# MarkModified: for each CVE:
+# (a) Remove any previous MARK_UPDATED in the period (there can be many periods)
+# (b) Insert MARK_UPDATED at period's middle date
+#
+def publishMarkModified(cve_list,reason_map,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ if date_start < bootstrap_date:
+ date_start = bootstrap_date
+ mid_date = date_start + (date_stop - date_start)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ # Remove mark in period
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_UPDATED % reason_map[cve_name], date=mid_date, author='SRTool')
+ cvehistory.save()
+
+# MarkNone: for each CVE:
+# (a) Remove any MARK_NEW or MARK_UPDATED in the period
+#
+def publishMarkNone(cve_list,date_start,date_stop):
+ from orm.models import Cve, CveHistory, Update
+ # Fixup
+ bootstrap_date = datetime.strptime('2019-03-10',"%Y-%m-%d")
+ date_start_max = max(date_start,bootstrap_date)
+ mid_date = date_start_max + (date_stop - date_start_max)/2
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name = cve_name)
+ queryset = CveHistory.objects.filter(
+ cve = cve,
+ comment__startswith = Update.MARK_PREFIX,
+ date__gte=date_start,
+ date__lte=date_stop)
+ for cvehistory in queryset:
+ cvehistory.delete()
+ cvehistory = CveHistory(cve=cve, comment=Update.MARK_UNMARK, date=mid_date, author='SRTool')
+ cvehistory.save()
+
+
+class XhrJobRequest(View):
+# from orm.models import Job
+
+ def get(self, request, *args, **kwargs):
+ return HttpResponse()
+
+ def post(self, request, *args, **kwargs):
+ """
+ Job control
+
+ Entry point: /xhr_jobrequest/<project_id>
+ Method: POST
+
+ Args:
+ id: id of job to change
+ jobCancel = job_request_id ...
+ jobDelete = id ...
+
+ Returns:
+ {"error": "ok"}
+ or
+ {"error": <error message>}
+ """
+
+# project = Project.objects.get(pk=kwargs['pid'])
+
+ if 'jobCancel' in request.POST:
+ for i in request.POST['jobCancel'].strip().split(" "):
+ try:
+ job = Job.objects.get(pk=i)
+ job.cancel()
+ except Job.DoesNotExist:
+ return error_response('No such job request id %s' % i)
+
+ return error_response('ok')
+
+ if 'jobDelete' in request.POST:
+ for i in request.POST['jobDelete'].strip().split(" "):
+ try:
+ Job.objects.select_for_update().get(
+ # NOTE(review): removed undefined 'sprint' filter (Toaster leftover)
+ pk=i,
+ status__lte=Job.INPROGRESS).delete()
+
+ except Job.DoesNotExist:
+ pass
+ return error_response("ok")
+
+ response = HttpResponse()
+ response.status_code = 500
+ return response
+
+
+
+
diff --git a/lib/srtgui/reports.py b/lib/srtgui/reports.py
index 297b885a..3a7414c6 100644
--- a/lib/srtgui/reports.py
+++ b/lib/srtgui/reports.py
@@ -20,18 +20,25 @@
import os
import logging
-from datetime import datetime
+from datetime import datetime, timedelta
import csv
+from openpyxl import Workbook
+from openpyxl import load_workbook
+from openpyxl.styles import Border, Side, PatternFill, Font, GradientFill, Alignment
+from openpyxl.utils import get_column_letter
from orm.models import Cve, CveSource, Vulnerability, Investigation, Defect, Product
from orm.models import Package
+from orm.models import SRTool, SrtSetting
+from orm.models import PublishSet, DefectHistory
+from orm.models import Notify, ErrorLog
from srtgui.api import readCveDetails, summaryCveDetails
from django.db.models import Q
logger = logging.getLogger("srt")
-SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
# quick development/debugging support
@@ -49,6 +56,54 @@ def _log_args(msg, *args, **kwargs):
s += ')'
_log(s)
+###############################################################################
+# Excel/openpyxl common look and feel formatting objects
+#
+
+#pyxl_border_all = Border(left=thin, right=thin, top=thin, bottom=thin) # , outline=True)
+pyxl_thin = Side(border_style="thin")
+pyxl_double = Side(border_style="double")
+pyxl_border_left = Border(left=pyxl_thin)
+pyxl_border_bottom = Border(bottom=pyxl_thin)
+pyxl_border_bottom_left = Border(bottom=pyxl_thin, left=pyxl_thin)
+pyxl_alignment_left = Alignment(horizontal='left')
+pyxl_alignment_right = Alignment(horizontal='right')
+pyxl_alignment_wrap = Alignment(wrap_text=True)
+pyxl_font_bold = Font(bold=True)
+pyxl_font_red = Font(color="A00000",bold=True,size = "13")
+pyxl_font_grn = Font(color="00A000",bold=True,size = "13")
+pyxl_font_blu = Font(color="0000A0",bold=True,size = "13")
+pyxl_font_orn = Font(color="FF6600",bold=True,size = "13")
+pyxl_fill_green = PatternFill(start_color="E0FFF0", end_color="E0FFF0", fill_type = "solid")
+# Warning: the form "PatternFill(bgColor="xxxxxx", fill_type = "solid")" returns black cells
+pyxl_backcolor_red = PatternFill(start_color='FCCDBA', end_color='FCCDBA', fill_type = "solid")
+pyxl_backcolor_orn = PatternFill(start_color='FBEAAB', end_color='FBEAAB', fill_type = "solid")
+pyxl_backcolor_yel = PatternFill(start_color='FCFDC7', end_color='FCFDC7', fill_type = "solid")
+pyxl_backcolor_blu = PatternFill(start_color='C5E2FF', end_color='C5E2FF', fill_type = "solid")
+pyxl_backcolor_grn = PatternFill(start_color='D6EDBD', end_color='D6EDBD', fill_type = "solid")
+pyxl_cve_fills = [pyxl_backcolor_red,pyxl_backcolor_orn,pyxl_backcolor_yel,pyxl_backcolor_blu,None,None,None]
+
+def pyxl_write_cell(ws,row_num,column_num,value,border=None,font=None,fill=None,alignment=None):
+ cell = ws.cell(row=row_num, column=column_num)
+ try:
+ cell.value = value
+ if fill:
+ cell.fill = fill
+ if alignment:
+ cell.alignment = alignment
+ if border:
+ cell.border = border
+ if font:
+ cell.font = font
+ except Exception as e:
+ print("ERROR:(%d,%d):%s" % (row_num,column_num,e))
+ # Optional next column return value
+ return(column_num+1)
+
+###############################################################################
+# Core report support
+#
+
class Report():
def __init__(self, parent_page, *args, **kwargs):
self.parent_page = parent_page
@@ -93,7 +148,14 @@ class ManagementReport(Report):
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
- <input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
return context
@@ -107,12 +169,15 @@ class ManagementReport(Report):
format = request_POST.get('format', '')
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/management_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/management_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
- tab = "\t"
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
else:
tab = " = "
@@ -125,11 +190,13 @@ class ManagementReport(Report):
file.write("%s%s%s\n" % ('cve_open',tab,Cve.objects.filter( Q(status=Cve.INVESTIGATE) & Q(status=Cve.VULNERABLE) ).count()))
file.write("%s%s%s\n" % ('vulnerability_total',tab,Vulnerability.objects.all().count()))
file.write("%s%s%s\n" % ('vulnerability_open',tab,Vulnerability.objects.filter(outcome=Vulnerability.OPEN).count()))
+ file.write("%s%s%s\n" % ('vulnerability_critical',tab,Vulnerability.objects.filter(priority=Vulnerability.CRITICAL).count()))
file.write("%s%s%s\n" % ('vulnerability_high',tab,Vulnerability.objects.filter(priority=Vulnerability.HIGH).count()))
file.write("%s%s%s\n" % ('vulnerability_medium',tab,Vulnerability.objects.filter(priority=Vulnerability.MEDIUM).count()))
- file.write("%s%s%s\n" % ('vulnerability_low',tab,Vulnerability.objects.filter(priority=Vulnerability.HIGH).count()))
+ file.write("%s%s%s\n" % ('vulnerability_low',tab,Vulnerability.objects.filter(priority=Vulnerability.LOW).count()))
file.write("%s%s%s\n" % ('investigation_total',tab,Investigation.objects.all().count()))
file.write("%s%s%s\n" % ('investigation_open',tab,Investigation.objects.filter(outcome=Investigation.OPEN).count()))
+ file.write("%s%s%s\n" % ('investigation_critical',tab,Investigation.objects.filter(priority=Investigation.CRITICAL).count()))
file.write("%s%s%s\n" % ('investigation_high',tab,Investigation.objects.filter(priority=Investigation.HIGH).count()))
file.write("%s%s%s\n" % ('investigation_medium',tab,Investigation.objects.filter(priority=Investigation.MEDIUM).count()))
file.write("%s%s%s\n" % ('investigation_low',tab,Investigation.objects.filter(priority=Investigation.LOW).count()))
@@ -254,7 +321,7 @@ class CveReport(Report):
cve = Cve.objects.get(id=record_list)
- report_name = '%s/cve_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -417,9 +484,9 @@ class VulnerabilityReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- v = Vulnerability.objects.get(id=record_list)
+ vulnerability = Vulnerability.objects.get(id=record_list)
- report_name = '%s/vulnerability_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/vulnerability_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -429,24 +496,26 @@ class VulnerabilityReport(Report):
if ('summary' == report_type) or ('audit' == report_type):
if 'txt' == format:
- file.write("Report : Vulnerability %s - Summary\n" % v.name)
+ file.write("Report : Vulnerability %s - Summary\n" % vulnerability.name)
file.write("\n")
- file.write("Vulnerability: %s\n" % v.name)
- file.write(" Status: %s\n" % v.get_status_text)
- file.write(" Outcome: %s\n" % v.get_outcome_text)
- file.write(" Priority: %s\n" % v.get_priority_text)
- file.write(" Comments: %s\n" % v.comments)
+ file.write("Vulnerability: %s\n" % vulnerability.name)
+ file.write(" Status: %s\n" % vulnerability.get_status_text)
+ file.write(" Outcome: %s\n" % vulnerability.get_outcome_text)
+ file.write(" Priority: %s\n" % vulnerability.get_priority_text)
+ file.write(" Comments: %s\n" % vulnerability.comments)
file.write("\n")
- file.write("Affected Products:\n")
+ file.write("Products:\n")
found_p = False
- for i,p in enumerate(v.get_affected_list):
- found_p = True
- file.write("%2d) Product: %s\n" % (i,p.product.long_name))
- found_i = False
- for investigation in Investigation.objects.filter(vulnerability=v,product=p.product):
+ for i,product in enumerate(Product.objects.all().order_by('order')):
+ product_header = False
+ for investigation in Investigation.objects.filter(vulnerability=vulnerability,product=product):
+ found_p = True
found_i = True
+ if not product_header:
+ file.write("%2d) Product: %s\n" % (i+1,investigation.product.long_name))
+ product_header = True
file.write(" Investigation: %s\n" % investigation.name)
file.write(" Status: %s\n" % investigation.get_status_text)
file.write(" Outcome: %s\n" % investigation.get_outcome_text)
@@ -457,24 +526,13 @@ class VulnerabilityReport(Report):
file.write(",")
file.write("%s (%s)" % (id.defect.name,id.defect.get_status_text))
file.write("\n")
- if not found_i:
- file.write(" No investigations found\n")
- if not found_p:
- file.write(" No affected products found\n")
-
- file.write("\n")
- file.write("Related Products:\n")
- found_p = False
- for i,p in enumerate(v.get_related_list):
- found_p = True
- file.write("%2d) Product: %s\n" % (i,p.product.long_name))
if not found_p:
- file.write(" No related products found\n")
+ file.write(" No products found\n")
file.write("\n")
file.write("Comments:\n")
found_c = False
- for i,vc in enumerate(v.vulnerability_comments.all()):
+ for i,vc in enumerate(vulnerability.vulnerability_comments.all()):
found_c = True
file.write(" %2d) %s (%s): %s\n" % (i,vc.date,vc.author,vc.comment))
if not found_c:
@@ -483,7 +541,7 @@ class VulnerabilityReport(Report):
if 'audit' == report_type:
file.write("\n")
file.write("Audit Trail:\n")
- for i,vh in enumerate(v.vulnerability_history.all()):
+ for i,vh in enumerate(vulnerability.vulnerability_history.all()):
file.write(" %2d) %s (%s): %s\n" % (i,vh.date,vh.author,vh.comment))
file.write("\n")
@@ -529,7 +587,7 @@ class InvestigationReport(Report):
investigation = Investigation.objects.get(id=record_list)
- report_name = '%s/investigation_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/investigation_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -593,7 +651,14 @@ class DefectReport(Report):
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
- <input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
return context
@@ -608,29 +673,36 @@ class DefectReport(Report):
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/defect_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/defect_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
- tab = "\t"
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
else:
tab = ","
if ('summary' == report_type):
if 'csv' == format:
- file.write("Name\tSummary\tPriority\tStatus\tResolution\tReleased Version\tURL\tInvestigations\tProduct\n")
+ file.write("Name\tSummary\tPriority\tStatus\tResolution\tSRT Priority\tSRT Status\tSRT Outcome\tReleased Version\tURL\tInvestigations\tProduct\n")
if 'txt' == format:
file.write("Report : Defects Table\n")
file.write("\n")
- file.write("Name,Summary,Priority,Status,Resolution,Released Version,URL,Investigations,Product\n")
+ file.write("Name,Summary,Priority,Status,Resolution,SRT Priority,SRT Status,SRT Outcome,Released Version,URL,Investigations,Product\n")
defect = Defect.objects.get(id=record_list)
file.write("%s%s" % (defect.name,tab))
file.write("%s%s" % (defect.summary,tab))
+
+ file.write("%s%s" % (defect.get_defect_priority_text,tab))
+ file.write("%s%s" % (defect.get_defect_status_text,tab))
+ file.write("%s%s" % (defect.get_defect_resolution_text,tab))
file.write("%s%s" % (defect.get_priority_text,tab))
file.write("%s%s" % (defect.get_status_text,tab))
- file.write("%s%s" % (defect.get_resolution_text,tab))
+ file.write("%s%s" % (defect.get_outcome_text,tab))
file.write("%s%s" % (defect.release_version,tab))
file.write("%s%s" % (defect.publish,tab))
file.write("%s%s" % (defect.url,tab))
@@ -659,6 +731,8 @@ class CvesReport(Report):
context['report_type_list'] = '\
<option value="summary">CVEs Table</option> \
+ <option value="year_pub_summary">CVE by Year Prefix Summary</option> \
+ <option value="year_summary">CVE by Publish Date Summary</option> \
<option value="cve_defects">CVE to Defects Table</option> \
'
context['report_get_title'] = ''
@@ -672,7 +746,14 @@ class CvesReport(Report):
'
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+                    <option value="comma" selected>Comma</option> \
+                    <option value="semi">Semi-colon</option> \
+                    <option value="tab">Tab</option> \
+                    </select>) \
+                    <br> \
'
context['report_custom_list'] = '\
CVE name filter = <input type="text" placeholder="e.g. CVE-2018" name="name_filter" size="40"> <br>\
@@ -824,7 +905,7 @@ class CvesReport(Report):
'%s %s' % (cve.cvssV3_baseScore,cve.cvssV3_baseSeverity),
cve.get_publish_text,
vulnerability.name if vulnerability else '<no_vulnerability>',
- investigation.name if investigation else '',
+ investigation.name if investigation else '<no_investigation>',
investigation.product.long_name if investigation and investigation.product else '<no_product>',
investigation.get_priority_text if investigation else '',
investigation.get_status_text if investigation else '',
@@ -832,28 +913,23 @@ class CvesReport(Report):
defect.name if defect else '<no_defect>',
defect.get_priority_text if defect else '',
defect.get_status_text if defect else '',
- defect.get_resolution_text if defect else '',
+ defect.get_defect_resolution_text if defect else '',
])
else:
writer.writerow([
- cve.name,
cve.get_status_text,
- '%s %s' % (cve.recommend,cve.recommend_list),
- cve.cve_data_type,
- cve.cve_data_format,
- cve.cve_data_version,
'%s %s' % (cve.cvssV3_baseScore,cve.cvssV3_baseSeverity),
- '%s %s' % (cve.cvssV2_baseScore,cve.cvssV2_severity),
cve.get_publish_text,
- vulnerability.name if vulnerability else '',
- investigation.name if investigation else '',
+ vulnerability.name if vulnerability else '<no_vulnerability>',
+ investigation.name if investigation else '<no_investigation>',
+ investigation.product.long_name if investigation and investigation.product else '<no_product>',
investigation.get_priority_text if investigation else '',
investigation.get_status_text if investigation else '',
investigation.get_outcome_text if investigation else '',
- defect.name if defect else '',
+ defect.name if defect else '<no_defect>',
defect.get_priority_text if defect else '',
defect.get_status_text if defect else '',
- defect.get_outcome_text if defect else '',
+ defect.get_defect_resolution_text if defect else '',
])
def exec_report(self, *args, **kwargs):
@@ -862,17 +938,20 @@ class CvesReport(Report):
request_POST = self.request.POST
- range = request_POST.get('range', '')
+ range_rec = request_POST.get('range', '')
columns = request_POST.get('columns', '')
format = request_POST.get('format', '')
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
delimiter = ','
@@ -881,14 +960,14 @@ class CvesReport(Report):
quotechar='"', quoting=csv.QUOTE_MINIMAL)
if ('summary' == report_type):
self.print_row_summary(writer,True,"all" == columns,None)
- if 'displayed' == range:
+ if 'displayed' == range_rec:
for id in record_list.split(','):
if not id:
continue
cve = Cve.objects.get(id=id)
if not name_filter or (name_filter in cve.name):
self.print_row_summary(writer,False,"all" == columns,cve)
- elif 'all' == range:
+ elif 'all' == range_rec:
if name_filter:
query = Cve.objects.filter(name__contains=name_filter).order_by('name')
else:
@@ -898,14 +977,14 @@ class CvesReport(Report):
if ('cve_defects' == report_type):
self.print_row_cve_defects(writer,'header',"all" == columns,None,None,None,None)
- if 'displayed' == range:
+ if 'displayed' == range_rec:
for id in record_list.split(','):
if not id:
continue
cve = Cve.objects.get(id=id)
if not name_filter or (name_filter in cve.name):
self.print_row_cve_defects(writer,'cve',"all" == columns,cve,None,None,None)
- elif 'all' == range:
+ elif 'all' == range_rec:
if name_filter:
query = Cve.objects.filter(name__contains=name_filter).order_by('name')
else:
@@ -913,6 +992,114 @@ class CvesReport(Report):
for cve in query:
self.print_row_cve_defects(writer,'line',"all" == columns,cve,None,None,None)
+ if report_type in ['year_summary','year_pub_summary']:
+ columns = ["Year", "CVE_Total", "CVE_HIST", "CVE_NEW", "CVE_RES", "CVE_INV", "CVE_VUL", "CVE_NVUL", "Defect_Total", "DEFECT_HIST", "DEFECT_NEW", "DEFECT_RES", "DEFECT_INV", "DEFECT_VUL", "DEFECT_NVUL","BY_PUBLISH"]
+ for i,column in enumerate(columns):
+ csvfile.write("%s%s" % (columns[i],delimiter))
+ csvfile.write("\n")
+
+ summary = {}
+ YEAR_START = 1999
+ YEAR_STOP = 2020
+ for the_year in range(YEAR_START,YEAR_STOP+1):
+ summary[the_year] = {
+ 'CVE_TOTAL':0,
+ 'CVE_HISTORICAL':0,
+ 'CVE_NEW':0,
+ 'CVE_NEW_RESERVED':0,
+ 'CVE_INVESTIGATE':0,
+ 'CVE_VULNERABLE':0,
+ 'CVE_NOT_VULNERABLE':0,
+ 'DEFECT_TOTAL':0,
+ 'DEFECT_HISTORICAL':0,
+ 'DEFECT_NEW':0,
+ 'DEFECT_NEW_RESERVED':0,
+ 'DEFECT_INVESTIGATE':0,
+ 'DEFECT_VULNERABLE':0,
+ 'DEFECT_NOT_VULNERABLE':0,
+ 'PUBLISH_DATE':0,
+ }
+
+ # Gather historgram on CVE status
+ error_count = 0
+ for cve in Cve.objects.all():
+ # Extract the year created
+ if (report_type == 'year_pub_summary') and (not cve.status in [SRTool.HISTORICAL]) and cve.publishedDate:
+ the_year = cve.publishedDate.split('-')[0]
+ summary[the_year]['PUBLISH_DATE'] += 1
+ else:
+ the_year = cve.name.split('-')[1]
+
+ if (not the_year[0].isdigit()) or (the_year < '1999') or (the_year > '2020'):
+ if 10 > error_count:
+ _log('FOO_CVE_YEARLY:%s,%s' % (cve.name, cve.publishedDate))
+ error_count += 1
+ continue
+ the_year = int(the_year)
+
+ # Register the CVE status
+ summary[the_year]['CVE_TOTAL'] += 1
+ if cve.status in [SRTool.HISTORICAL]:
+ summary[the_year]['CVE_HISTORICAL'] += 1
+ if cve.status in [SRTool.NEW,SRTool.NEW_INACTIVE]:
+ summary[the_year]['CVE_NEW'] += 1
+ if cve.status in [SRTool.NEW_RESERVED]:
+ summary[the_year]['CVE_NEW_RESERVED'] += 1
+ if cve.status in [SRTool.INVESTIGATE,SRTool.INVESTIGATE_INACTIVE]:
+ summary[the_year]['CVE_INVESTIGATE'] += 1
+ if cve.status in [SRTool.VULNERABLE,SRTool.VULNERABLE_INACTIVE]:
+ summary[the_year]['CVE_VULNERABLE'] += 1
+ if cve.status in [SRTool.NOT_VULNERABLE,SRTool.NOT_VULNERABLE_INACTIVE]:
+ summary[the_year]['CVE_NOT_VULNERABLE'] += 1
+
+ # Register the releated defects status
+ for cv in cve.cve_to_vulnerability.all():
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+ for id in investigation.investigation_to_defect.all():
+
+ # Only check defects for current and previously active products
+ if not id.product.get_product_tag('mode') in ['support','develop','eol']:
+ continue
+
+ # Register the defect status
+ summary[the_year]['DEFECT_TOTAL'] += 1
+ if id.defect.srt_status in [SRTool.HISTORICAL]:
+ summary[the_year]['DEFECT_HISTORICAL'] += 1
+ if id.defect.srt_status in [SRTool.NEW,SRTool.NEW_INACTIVE]:
+ summary[the_year]['DEFECT_NEW'] += 1
+ if id.defect.srt_status in [SRTool.NEW_RESERVED]:
+ summary[the_year]['DEFECT_NEW_RESERVED'] += 1
+ if id.defect.srt_status in [SRTool.INVESTIGATE,SRTool.INVESTIGATE_INACTIVE]:
+ summary[the_year]['DEFECT_INVESTIGATE'] += 1
+ if id.defect.srt_status in [SRTool.VULNERABLE,SRTool.VULNERABLE_INACTIVE]:
+ summary[the_year]['DEFECT_VULNERABLE'] += 1
+ if id.defect.srt_status in [SRTool.NOT_VULNERABLE,SRTool.NOT_VULNERABLE_INACTIVE]:
+ summary[the_year]['DEFECT_NOT_VULNERABLE'] += 1
+
+ # Print historgram
+ for the_year in range(YEAR_START,YEAR_STOP+1):
+ csvfile.write("%s%s" % (the_year,delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['CVE_TOTAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_HISTORICAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NEW'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NEW_RESERVED'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_INVESTIGATE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_VULNERABLE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NOT_VULNERABLE'],delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_TOTAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_HISTORICAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NEW'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NEW_RESERVED'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_INVESTIGATE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_VULNERABLE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NOT_VULNERABLE'],delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['PUBLISH_DATE'],delimiter))
+ csvfile.write("\n")
+
+
return report_name,os.path.basename(report_name)
class SelectCvesReport(Report):
@@ -952,7 +1139,7 @@ class SelectCvesReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/select_cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/select_cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
if 'csv' == format:
@@ -987,6 +1174,8 @@ class SelectCvesReport(Report):
file.write("%s%s" % (cve.description,tab))
file.write("\n")
+
+
return report_name,os.path.basename(report_name)
class VulnerabilitiesReport(Report):
@@ -1110,7 +1299,7 @@ class VulnerabilitiesReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/vulnerabilities_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/vulnerabilities_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
delimiter = '\t'
else:
@@ -1158,11 +1347,18 @@ class InvestigationsReport(Report):
<input type="radio" name="columns" value="all" > All<br> \
'
context['report_format_list'] = '\
- <input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
context['report_custom_list'] = '\
- Product defect prefix filter = <input type="text" placeholder="e.g. LIN9" name="name_filter" size="40"> (method to filter by product)<br>\
+ Product defect prefix filter = <input type="text" placeholder="" name="name_filter" size="40"> (method to filter by product)<br>\
'
return context
@@ -1180,6 +1376,7 @@ class InvestigationsReport(Report):
'Comments Private',
'Vulnerability',
'Product',
+ 'Updated',
])
else:
writer.writerow([
@@ -1193,6 +1390,7 @@ class InvestigationsReport(Report):
'Comments Private',
'Vulnerability',
'Product',
+ 'Updated',
])
else:
investigation_defects = ''
@@ -1217,6 +1415,7 @@ class InvestigationsReport(Report):
investigation.comments_private,
investigation.vulnerability.get_long_name,
investigation.product.long_name,
+ investigation.srt_updated.strftime('%m-%d-%Y'),
])
else:
writer.writerow([
@@ -1230,6 +1429,7 @@ class InvestigationsReport(Report):
investigation.comments_private,
investigation.vulnerability.get_long_name,
investigation.product.long_name,
+ investigation.srt_updated.strftime('%m-%d-%Y'),
])
def exec_report(self, *args, **kwargs):
@@ -1245,12 +1445,16 @@ class InvestigationsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ report_name = '%s/investigations_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
- report_name = '%s/investigations_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
- delimiter = ','
+ delimiter = ","
with open(report_name, 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=delimiter,
@@ -1298,11 +1502,19 @@ class DefectsReport(Report):
'
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
'
+
context['report_custom_list'] = '\
- Defect name filter = <input type="text" placeholder="e.g. LIN9" name="name_filter" size="40"> <br>\
- '
+ Defect name filter = <input type="text" placeholder="e.g. %s" name="name_filter" size="40"> <br>\
+ ' % SrtSetting.get_setting('SRTOOL_DEFECT_SAMPLENAME',"DEFECT-XYZ")
return context
def print_row(self,writer,is_header,is_full,defect):
@@ -1314,6 +1526,9 @@ class DefectsReport(Report):
'Priority',
'Status',
'Resolution',
+ 'SRT Priority',
+ 'SRT Status',
+ 'SRT Outcome',
'Release Version',
'Publish',
'Investigations',
@@ -1326,6 +1541,9 @@ class DefectsReport(Report):
'Priority',
'Status',
'Resolution',
+ 'SRT Priority',
+ 'SRT Status',
+ 'SRT Outcome',
'Release Version',
'Publish',
'URL',
@@ -1342,9 +1560,12 @@ class DefectsReport(Report):
writer.writerow([
defect.name,
defect.summary,
+ defect.get_defect_priority_text,
+ defect.get_defect_status_text,
+ defect.get_defect_resolution_text,
defect.get_priority_text,
defect.get_status_text,
- defect.get_resolution_text,
+ defect.get_outcome_text,
defect.release_version,
defect.publish,
defect_investigations,
@@ -1354,9 +1575,12 @@ class DefectsReport(Report):
writer.writerow([
defect.name,
defect.summary,
+ defect.get_defect_priority_text,
+ defect.get_defect_status_text,
+ defect.get_defect_resolution_text,
defect.get_priority_text,
defect.get_status_text,
- defect.get_resolution_text,
+ defect.get_outcome_text,
defect.release_version,
defect.publish,
defect.url,
@@ -1377,10 +1601,13 @@ class DefectsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
- report_name = '%s/defects_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/defects_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
delimiter = ','
with open(report_name, 'w', newline='') as csvfile:
@@ -1405,8 +1632,72 @@ class DefectsReport(Report):
return report_name,os.path.basename(report_name)
+#
+# Products Reports
+#
+
product_summary = {}


def scan_product_jira(product):
    """Tally the defects registered to *product* into product_summary.

    Every defect attached to the product is counted three ways: by SRT
    priority, by defect-tracker (Jira) priority, and by resolution.  The
    totals are stored under ``product.long_name`` as a fixed-order list:

        [critical, high, medium, low,            # SRT priority
         p1, p2, p3, p4, px,                     # Jira priority
         unresolved, resolved, fixed,            # resolution
         wontfix, withdrawn, rejected]

    NOTE(review): the 'px' slot (index 8) is never incremented here, so it
    is always 0 — presumably a placeholder; confirm against consumers.
    """
    global product_summary

    # One slot per column of the summary row described above.
    counts = [0] * 15

    for defect in product.product_defect.all():
        # SRT priority (slots 0-3); a value matching none is simply skipped.
        srt = defect.srt_priority
        if srt == Defect.CRITICAL:
            counts[0] += 1
        elif srt == Defect.HIGH:
            counts[1] += 1
        elif srt == Defect.MEDIUM:
            counts[2] += 1
        elif srt == Defect.LOW:
            counts[3] += 1

        # Jira priority (slots 4-7; slot 8 'px' intentionally untouched).
        pri = defect.priority
        if pri == Defect.DEFECT_CRITICAL:
            counts[4] += 1
        elif pri == Defect.DEFECT_HIGH:
            counts[5] += 1
        elif pri == Defect.DEFECT_MEDIUM:
            counts[6] += 1
        elif pri == Defect.DEFECT_LOW:
            counts[7] += 1

        # Resolution (slots 9-14).
        res = defect.resolution
        if res == Defect.DEFECT_UNRESOLVED:
            counts[9] += 1
        elif res == Defect.DEFECT_RESOLVED:
            counts[10] += 1
        elif res == Defect.DEFECT_FIXED:
            counts[11] += 1
        elif res == Defect.DEFECT_WILL_NOT_FIX:
            counts[12] += 1
        elif res == Defect.DEFECT_WITHDRAWN:
            counts[13] += 1
        elif res == Defect.DEFECT_REJECTED:
            counts[14] += 1

    # Register (or overwrite) this product's summary row.
    product_summary[product.long_name] = counts
+
class ProductsReport(Report):
"""Report for the Products Page"""
+ global product_summary
def __init__(self, parent_page, *args, **kwargs):
_log_args("REPORT_PRODUCTS_INIT(%s)" % parent_page, *args, **kwargs)
@@ -1418,6 +1709,7 @@ class ProductsReport(Report):
context['report_type_list'] = '\
<option value="summary">Products Table</option> \
+ <option value="status_jira">Product Jira Status</option> \
'
context['report_get_title'] = '1'
context['report_recordrange_list'] = '\
@@ -1427,6 +1719,7 @@ class ProductsReport(Report):
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
<input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="excel" value="excel"> Excel<br> \
'
return context
@@ -1442,44 +1735,131 @@ class ProductsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
- with open(report_name, 'w') as file:
+ if 'summary' == report_type:
+ report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
- if 'csv' == format:
- tab = "\t"
- else:
- tab = ","
-
- if ('summary' == report_type):
if 'csv' == format:
- file.write("Name\tVersion\tProfile\tCPE\tSRT SPE\tInvestigations\tDefects\n")
- if 'txt' == format:
- file.write("Report : Products Table\n")
- file.write("\n")
- file.write("Name,Version,Profile,CPE,SRT SPE,Investigations,Defects\n")
-
- for product in Product.objects.all():
- file.write("%s%s" % (product.name,tab))
- file.write("%s%s" % (product.version,tab))
- file.write("%s%s" % (product.profile,tab))
- file.write("%s%s" % (product.cpe,tab))
- file.write("%s%s" % (product.defect_tags,tab))
- file.write("%s%s" % (product.product_tags,tab))
+ tab = "\t"
+ else:
+ tab = ","
- for i,pi in enumerate(product.product_investigation.all()):
- if i > 0:
- file.write(" ")
- file.write("%s" % (pi.name))
- file.write("%s" % tab)
- for i,pd in enumerate(product.product_defect.all()):
- if i > 0:
- file.write(" ")
- file.write("%s" % (pd.name))
- #file.write("%s" % tab)
- file.write("\n")
+ if ('summary' == report_type):
+ if 'csv' == format:
+ file.write("Name\tVersion\tProfile\tCPE\tSRT SPE\tInvestigations\tDefects\n")
+ if 'txt' == format:
+ file.write("Report : Products Table\n")
+ file.write("\n")
+ file.write("Name,Version,Profile,CPE,SRT SPE,Investigations,Defects\n")
+
+ for product in Product.objects.all():
+ file.write("%s%s" % (product.name,tab))
+ file.write("%s%s" % (product.version,tab))
+ file.write("%s%s" % (product.profile,tab))
+ file.write("%s%s" % (product.cpe,tab))
+ file.write("%s%s" % (product.defect_tags,tab))
+ file.write("%s%s" % (product.product_tags,tab))
+
+ if False:
+ for i,pi in enumerate(product.product_investigation.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pi.name))
+ file.write("%s" % tab)
+ for i,pd in enumerate(product.product_defect.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pd.name))
+ #file.write("%s" % tab)
+ file.write("\n")
+ elif 'status_jira' == report_type:
+ def resolution_color(i):
+ if 0 == i: fill = pyxl_backcolor_orn
+ elif 1 == i: fill = pyxl_backcolor_grn
+ elif 2 == i: fill = pyxl_backcolor_grn
+ elif 3 == i: fill = pyxl_backcolor_yel
+ elif 4 == i: fill = pyxl_backcolor_blu
+ elif 5 == i: fill = pyxl_backcolor_blu
+ else: fill = None
+ return(fill)
+
+ for product in Product.objects.all():
+ scan_product_jira(product)
+
+ format = "xlsx"
+ report_name = '%s/products_jira_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ wb = Workbook()
+ ws = wb.active
+ ws.title = "Product Jira Summary"
+ ws.column_dimensions[get_column_letter(1)].width = 30
+
+ row = 1
+ first_row = 2
+
+ col = 1
+ for header in ('Product','Critical','High','Medium','Low','P1','P2','P3','P4','Unresolved','Resolved','Fixed',"Won't Fix",'Withdrawn','Rejected'):
+ border = pyxl_border_bottom_left if (col in (2,6,10)) else pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ col += 1
+ row += 1
+
+ for product in Product.objects.order_by("order"):
+ key = product.long_name
+ scan_product_jira(product)
+ pyxl_write_cell(ws,row,1,key)
+ # CVE Severity
+ col_excel = 2
+ col_summary = 1
+ for i in range(0,4):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=pyxl_cve_fills[i] if value else None)
+
+ # Jira Priority
+ col_excel = 6
+ col_summary = 5
+ for i in range(0,4):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=pyxl_cve_fills[i] if value else None)
+ # Jira Resolution
+ col_excel = 10
+ col_summary = 9
+ for i in range(0,6):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=resolution_color(i) if value else None)
+ row += 1
+
+ # Sums
+ row -= 1
+ for i in range(1,16):
+ border = pyxl_border_bottom_left if (i in (2,6,10)) else pyxl_border_bottom
+ ws.cell(row=row,column=i).border=border
+ row += 1
+ letters = (' ','A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q')
+ for col_excel in range(2,16):
+ # CVE Severity
+ col_excel = 2
+ for i in range(0,4):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=pyxl_cve_fills[i])
+ # Jira Priority
+ col_excel = 6
+ for i in range(0,4):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=pyxl_cve_fills[i])
+ # Jira Resolution
+ col_excel = 10
+ for i in range(0,6):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=resolution_color(i))
+
+ wb.save(report_name)
return report_name,os.path.basename(report_name)
+#
+# CVE Reports
+#
+
class PublishCveReport(Report):
"""Report for the Publish Cve Page"""
@@ -1509,7 +1889,6 @@ class PublishCveReport(Report):
_log_args("REPORT_PUBLISHCVE_EXEC", *args, **kwargs)
super(PublishCveReport, self).exec_report(*args, **kwargs)
- _log("FOO1")
request_POST = self.request.POST
records = request_POST.get('records', '')
@@ -1518,11 +1897,9 @@ class PublishCveReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- _log("FOO2 (%s,%s,%s" % (record_list,format,report_type))
- report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
- _log("FOO3")
if 'csv' == format:
tab = "\t"
else:
@@ -1536,9 +1913,7 @@ class PublishCveReport(Report):
file.write("\n")
file.write("Name,Status,Type,Format,Version,Vulnerabilities,Description\n")
- _log("FOO4")
for id in record_list.split(','):
- _log("FOO5:%s" % id)
if not id:
continue
try:
@@ -1558,9 +1933,8 @@ class PublishCveReport(Report):
file.write("%s" % (cve.description))
file.write("\n")
except Exception as e:
- _log("FOOX:%s" % e)
+ _log("EXCEPTION:%s" % e)
- _log("FOO9:%s" % (report_name))
return report_name,os.path.basename(report_name)
class PublishPendingCveReport(Report):
@@ -1591,7 +1965,6 @@ class PublishPendingCveReport(Report):
_log_args("REPORT_PUBLISHPENDINGCVE_EXEC", *args, **kwargs)
super(PublishPendingCveReport, self).exec_report(*args, **kwargs)
- _log("FOO1")
request_POST = self.request.POST
records = request_POST.get('records', '')
@@ -1600,11 +1973,9 @@ class PublishPendingCveReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- _log("FOO2 (%s,%s,%s" % (record_list,format,report_type))
- report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cve_publish_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
with open(report_name, 'w') as file:
- _log("FOO3")
if 'csv' == format:
tab = "\t"
else:
@@ -1618,11 +1989,9 @@ class PublishPendingCveReport(Report):
file.write("\n")
file.write("Name,Status,Type,Format,Version,Vulnerabilities,Description\n")
- _log("FOO4")
for id in record_list.split(','):
if not id:
continue
- _log("FOO5:%s" % id)
try:
cve = Cve.objects.get(id=id)
file.write("%s%s" % (cve.name,tab))
@@ -1640,9 +2009,188 @@ class PublishPendingCveReport(Report):
file.write("%s" % (cve.description))
file.write("\n")
except Exception as e:
- _log("FOOX:%s" % e)
+ _log("EXCEPTION:%s" % e)
+
+ return report_name,os.path.basename(report_name)
+
+class PublishListReport(Report):
+ """Report for the Publish Cve Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_INIT(%s)" % parent_page, *args, **kwargs)
+ super(PublishListReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_CONTEXT", *args, **kwargs)
+ context = super(PublishListReport, self).get_context_data(*args, **kwargs)
+ context['report_type_list'] = '\
+ <option value="preview">Preview CVE Publish List</option> \
+ <option value="report">Publish Report </option> \
+ '
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHLIST_EXEC", *args, **kwargs)
+ super(PublishListReport, self).exec_report(*args, **kwargs)
+
+ request_POST = self.request.POST
+ format = request_POST.get('format', '')
+ report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ report_name = '%s/publish_list_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ tab = ';'
+ if csv_separator == 'comma': tab = ','
+ if csv_separator == 'tab': tab = '\t'
+ else:
+ tab = ","
+
+ if ('preview' == report_type):
+ if 'csv' == format:
+ file.write("State\tCve_Name\tCve_Published\tCve_Modified\tCve_Status\tCve_Acknowledge\tReason\tCVE_Description\n".replace('\t',tab))
+ if 'txt' == format:
+ file.write("Report : CVEs Table\n")
+ file.write("\n")
+ file.write('%-7s %-18s %11s %11s %16s %11s %-35s %s\n' % ('State','Cve_Name','Published','Modified','Cve_Status','Acknowledge','CVE_Description','Reason'))
+
+ for publishset in PublishSet.objects.all():
+ if 'csv' == format:
+ file.write("%s%s" % (publishset.state_text,tab))
+ file.write("%s%s" % (publishset.cve.name,tab))
+ file.write("%s%s" % (publishset.cve.publishedDate,tab))
+ file.write("%s%s" % (publishset.cve.lastModifiedDate,tab))
+ file.write("%s%s" % (publishset.cve.get_status_text,tab))
+ file.write("%s%s" % (publishset.cve.acknowledge_date,tab))
+ file.write("%s%s" % (publishset.reason,tab))
+ file.write("%s%s" % (publishset.cve.description,tab))
+ file.write("\n")
+ if 'txt' == format:
+ try:
+ acknowledge_date = publishset.cve.acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ if publishset.cve.description:
+ description = publishset.cve.description[:30] + '...'
+ else:
+ description = ''
+ file.write("%-7s," % publishset.state_text)
+ file.write("%-18s," % publishset.cve.name)
+ file.write("%11s," % publishset.cve.publishedDate)
+ file.write("%11s," % publishset.cve.lastModifiedDate)
+ file.write("%16s," % publishset.cve.get_status_text)
+ file.write("%11s," % acknowledge_date)
+ file.write("%-35s," % description)
+ file.write("%s," % publishset.reason)
+ file.write("\n")
+
+ if ('report' == report_type):
+ product_list = Product.objects.all()
+
+ def get_product_status_matrix(product_list,cve):
+ # Preset the default product status labels
+ status_table = {}
+ product_top_order = 99
+ product_top_defect = []
+ for product in product_list:
+ status_table[product.key] = cve.get_status_text  # use the cve parameter, not the caller's loop variable
+ # Set the specific status for the child investigations
+ for cv in cve.cve_to_vulnerability.all():
+ #status_text = cv.vulnerability.get_status_text
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+ product_key = investigation.product.key
+ release_version_list = []
+ for id in investigation.investigation_to_defect.all():
+ # Find defect(s) for highest ordered product
+ if product_top_order > investigation.product.order:
+ product_top_order = investigation.product.order
+ product_top_defect = []
+ if product_top_order == investigation.product.order:
+ product_top_defect.append(id.defect.name)
+ # Gather the status or release version
+ if id.defect.release_version:
+ release_version_list.append(id.defect.release_version)
+ release_version = '/'.join(release_version_list)
+ if release_version:
+ status_table[product_key] = release_version
+ elif investigation.status in (SRTool.NOT_VULNERABLE,SRTool.VULNERABLE):
+ status_table[product_key] = investigation.get_status_text
+ else:
+ status_table[product_key] = ''
+ return status_table
+
+ if 'csv' == format:
+ file.write("State\tCve_Name\tCve_Published\tCve_Modified\tCve_Status\tCve_Acknowledge\tCVE_Description")
+ for product in product_list:
+ file.write("\t%s" % product.long_name)
+ file.write("\n")
+
+ if 'txt' == format:
+ file.write("Report : CVEs Table\n")
+ file.write("\n")
+ file.write('%-7s,%-18s,%11s,%11s,%16s,%11s,%-35s,' % ('State','Cve_Name','Published','Modified','Cve_Status','Acknowledge','CVE_Description'))
+ for product in product_list:
+ min_len = max(16,len(product.long_name)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ file.write(str_format % product.long_name)
+ file.write("\n")
+ for publishset in PublishSet.objects.all():
+ if 'csv' == format:
+ # Print common status
+ file.write("%s%s" % (publishset.state_text,tab))
+ file.write("%s%s" % (publishset.cve.name,tab))
+ file.write("%s%s" % (publishset.cve.publishedDate,tab))
+ file.write("%s%s" % (publishset.cve.lastModifiedDate,tab))
+ file.write("%s%s" % (publishset.cve.get_status_text,tab))
+ file.write("%s%s" % (publishset.cve.acknowledge_date,tab))
+ file.write("%s%s" % (publishset.reason,tab))
+ file.write("%s%s" % (publishset.cve.description,tab))
+ # Compute the product columns
+ status_table = get_product_status_matrix(product_list,publishset.cve)
+ # Print the product columns
+ for product in Product.objects.all():
+ file.write("%s%s" % (status_table[product.key],tab))
+ file.write("\n")
+ if 'txt' == format:
+ try:
+ acknowledge_date = publishset.cve.acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ if publishset.cve.description:
+ description = publishset.cve.description[:30] + '...'
+ else:
+ description = ''
+ # Print common status
+ file.write("%-7s," % publishset.state_text)
+ file.write("%-18s," % publishset.cve.name)
+ file.write("%11s," % publishset.cve.publishedDate)
+ file.write("%11s," % publishset.cve.lastModifiedDate)
+ file.write("%16s," % publishset.cve.get_status_text)
+ file.write("%11s," % acknowledge_date)
+ file.write("%-35s," % description)
+ # Compute the product columns
+ status_table = get_product_status_matrix(product_list,publishset.cve)
+ # Print the product columns
+ for product in Product.objects.all():
+ min_len = max(16,len(product.long_name)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ file.write(str_format % status_table[product.key])
+ file.write("\n")
- _log("FOO9:%s" % (report_name))
return report_name,os.path.basename(report_name)
class PackageFiltersReport(Report):
@@ -1664,7 +2212,7 @@ class PackageFiltersReport(Report):
'
context['report_columnrange_list'] = ''
context['report_format_list'] = '\
- <input type="radio" name="format" value="csv" checked> CSV<br> \
+ <input type="radio" name="format" value="csv" checked> CSV<br> \
'
return context
@@ -1699,6 +2247,198 @@ class PackageFiltersReport(Report):
return report_name,os.path.basename(report_name)
+###############################################################################
+#
+# PublishSummaryReport: Publish CVE status summary across products
+#
+
+class PublishSummaryReport(PublishListReport):
+ """Report for the Publish Cve Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("REPORT_PUBLISHSUMMARY_INIT(%s)" % parent_page, *args, **kwargs)
+ super(PublishSummaryReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHSUMMARY_CONTEXT", *args, **kwargs)
+ context = super(PublishSummaryReport, self).get_context_data(*args, **kwargs)
+
+ # Add a custom extension report type
+ context['report_type_list'] = '\
+ <option value="publish_summary">CVE Summary Report</option> \
+ '
+
+ context['report_custom_list'] = ''
+ # Add scope
+ context['report_custom_list'] += '\
+ <input type="checkbox" id="new" name="new" checked>&nbsp;New CVEs</input> <br>\
+ <input type="checkbox" id="investigate" name="investigate" checked>&nbsp;Investigate CVEs</input> <br>\
+ <input type="checkbox" id="vulnerable" name="vulnerable" checked>&nbsp;Vulnerable CVEs</input> <br>\
+ <input type="checkbox" id="not-vulnerable" name="not-vulnerable" checked>&nbsp;Not Vulnerable CVEs</input> <br>\
+ <input type="checkbox" id="new-reserved" name="new-reserved" >&nbsp;New-Reserved CVEs</input> <br>\
+ <input type="checkbox" id="historical" name="historical" >&nbsp;Historical CVEs</input> <br>\
+ '
+ # Add extra
+ context['report_custom_list'] += '<br>'
+ context['report_custom_list'] += '\
+ <input type="checkbox" id="truncate" name="truncate" checked>&nbsp;Truncate fields (for simple text reports)</input> <BR>\
+ '
+
+ return context
+
+ def get_product_status_matrix(self,product_list,cve):
+ # Preset the default product status labels
+ status_table = {}
+ product_top_order = 99
+ product_top_defect = []
+ # Default all product status to the CVE's status
+ for product in product_list:
+ status_table[product.key] = ''
+ # Set the specific status for the child investigations
+ for cv in cve.cve_to_vulnerability.all():
+ #status_text = cv.vulnerability.get_status_text
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+# product_key = investigation.product.key
+ release_version_list = []
+ # Gather release versions, find the highest product's respective defect
+ for id in investigation.investigation_to_defect.all():
+ # Find defect(s) for highest ordered product
+ if product_top_order > investigation.product.order:
+ product_top_order = investigation.product.order
+ product_top_defect = []
+ if product_top_order == investigation.product.order:
+ product_top_defect.append(id.defect.name)
+ # Gather the status or release version
+ if id.defect.release_version:
+ release_version_list.append(id.defect.release_version)
+ release_version = '/'.join(release_version_list)
+ # Set investigation status, unless there are release versions
+ status_table[investigation.product.key] = investigation.get_status_text
+ if release_version:
+ status_table[investigation.product.key] = release_version
+ return status_table,product_top_defect
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("REPORT_PUBLISHSUMMARY_EXEC", *args, **kwargs)
+ super(PublishSummaryReport, self).exec_report(*args, **kwargs)
+
+ request_POST = self.request.POST
+ format = request_POST.get('format', '')
+ report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+ truncate = ('on' == request_POST.get('truncate', 'off'))
+ status_list = []
+ if ('on' == request_POST.get('new', 'off')): status_list.append(Cve.NEW)
+ if ('on' == request_POST.get('investigate', 'off')): status_list.append(Cve.INVESTIGATE)
+ if ('on' == request_POST.get('vulnerable', 'off')): status_list.append(Cve.VULNERABLE)
+ if ('on' == request_POST.get('not-vulnerable', 'off')): status_list.append(Cve.NOT_VULNERABLE)
+ if ('on' == request_POST.get('new-reserved', 'off')): status_list.append(Cve.NEW_RESERVED)
+ if ('on' == request_POST.get('historical', 'off')): status_list.append(Cve.HISTORICAL)
+
+ # Default to the regular report output if not our custom extension
+ if report_type not in ('publish_summary',):  # tuple, not a string substring test
+ return(super(PublishSummaryReport, self).exec_report(*args, **kwargs))
+
+ if 'csv' == format:
+ separator = ';'
+ if csv_separator == 'comma': separator = ','
+ if csv_separator == 'tab': separator = '\t'
+ report_name = '%s/cve-svns-srtool-%s.csv' % (SRT_REPORT_DIR,datetime.today().strftime('%Y_%m_%d'))
+ else:
+ separator = ","
+ report_name = '%s/cve-svns-srtool-%s.txt' % (SRT_REPORT_DIR,datetime.today().strftime('%Y_%m_%d'))
+
+ # Get the desired product list
+ product_list = Product.objects.order_by('-order')
+
+ if 'publish_summary' == report_type:
+ with open(report_name, 'w', newline='') as csvfile:
+ writer = None
+
+ # Assemble the header
+ text_format = '%-18s,%16s,%-11s,%-8s,%-11s,%-8s,%-30s,%-25s,%15s,%15s,%11s,'
+ header = [
+ 'CVE Number',
+ 'Status',
+ 'V2_Severity',
+ 'V2_Score',
+ 'V3_Severity',
+ 'V3_Score',
+ 'CVE Description',
+ 'YP Comments',
+ 'Created Date',
+ 'Modified Date',
+ 'YP Ack Date',
+ ]
+ # Assemble the product column names
+ for product in product_list:
+ product_title = product.key
+ header.append(product_title)
+ min_len = max(16,len(product_title)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ text_format += str_format
+# # Add Top Defect
+# header.append('Top Defect')
+# text_format += '%s'
+
+ # Print the header
+ if 'csv' == format:
+ writer = csv.writer(csvfile, delimiter=separator, quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ writer.writerow(header)
+ else:
+ writer = csvfile
+ print(text_format % tuple(header), file=csvfile)
+
+ for i,cve in enumerate(Cve.objects.filter(status__in=status_list).order_by('name_sort')):
+ # Compute the product columns
+ status_table,product_top_defect = self.get_product_status_matrix(product_list,cve)
+ # Assemble the row data
+ if cve.description:
+ if truncate:
+ description = cve.description[:26] + '...'
+ else:
+ description = cve.description
+ else:
+ description = ''
+
+ # Use publish date if acknowledge date not available
+ try:
+ acknowledge_date = cve.acknowledge_date
+ if not acknowledge_date:
+ acknowledge_date = datetime.strptime(cve.publishedDate, '%Y-%m-%d')
+ acknowledge_date = acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ _log("NO ACK:%s,%s" % (cve.acknowledge_date,cve.publishedDate))
+
+ row = [
+ cve.name,
+ cve.get_status_text,
+ cve.cvssV2_severity,
+ cve.cvssV2_baseScore,
+ cve.cvssV3_baseSeverity,
+ cve.cvssV3_baseScore,
+ description,
+ cve.get_public_comments[:20] if truncate else cve.get_public_comments,
+ cve.srt_created.strftime('%Y/%m/%d') if cve.srt_created else '',
+ cve.srt_updated.strftime('%Y/%m/%d') if cve.srt_updated else '',
+ acknowledge_date,
+ ]
+ # Append the product columns
+ for product in product_list:
+ # Show inactive status as normal status
+ row.append(status_table[product.key].replace('(','').replace(')',''))
+# row.append('/'.join(product_top_defect))
+ # Print the row
+ if 'csv' == format:
+ writer.writerow(row)
+ else:
+ print(text_format % tuple(row), file=writer)
+
+
+ return report_name,os.path.basename(report_name)
+
+
class CpesSrtoolReport(Report):
"""Report for the Publish Cve Page"""
@@ -1736,7 +2476,7 @@ class CpesSrtoolReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/cpes_srtool_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d%H%M'),format)
+ report_name = '%s/cpes_srtool_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
reportfile = open(report_name, 'w', newline='')
if 'csv' == format:
@@ -1781,6 +2521,328 @@ class CpesSrtoolReport(Report):
return report_name,os.path.basename(report_name)
+###############################################################################
+#
+# History reports
+#
+
+class HistoryDefectReport(Report):
+ """Report for the History Defect Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_INIT(%s)" % parent_page, *args, **kwargs)
+ super(HistoryDefectReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_CONTEXT", *args, **kwargs)
+ context = super(HistoryDefectReport, self).get_context_data(*args, **kwargs)
+
+ context['report_type_list'] = '\
+ <option value="history">Defect History</option> \
+ '
+
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+
+ context['report_recordrange_list'] = '\
+ <input type="radio" name="records" value="selected" checked> Selected<br> \
+ <input type="radio" name="records" value="all"> All<br> \
+ '
+
+ # Add a date range
+ date_start = datetime.strptime('2019-2-15', '%Y-%m-%d')
+ date_stop = datetime.strptime('2019-3-15', '%Y-%m-%d')
+ context['report_date_list'] = '\
+ Start: <input type="text" name="date_start" value="%s"><br> \
+ Stop: <input type="text" name="date_stop" value="%s"> \
+ ' % (date_start.strftime('%m/%d/%Y'),date_stop.strftime('%m/%d/%Y'))
+
+ # Done!
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("WR_HISTORY_DEFECT_EXEC", *args, **kwargs)
+
+ request_POST = self.request.POST
+
+ records = request_POST.get('records', '')
+ format = request_POST.get('format', '')
+# title = request_POST.get('title', '')
+ report_type = request_POST.get('report_type', '')
+ record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request_POST.get('date_start', '')
+ date_start = datetime.strptime(request_POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request_POST.get('date_stop', '')
+ date_stop = datetime.strptime(request_POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+ return 'Error:stop date is before start date',''
+ except Exception as e:
+ return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+
+ report_name = '%s/defect_history_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ separator = ";"
+ if csv_separator == 'comma': separator = ","
+ if csv_separator == 'tab': separator = "\t"
+ writer = csv.writer(file, delimiter=separator,
+ quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ else:
+ separator = ","
+
+ if ('history' == report_type):
+ if 'csv' == format:
+ writer.writerow(['Index','Defect','Date','Author','Comment'])
+ if 'txt' == format:
+ file.write("Report : Defect History\n")
+ file.write("\n")
+ text_format='%02d) %-14s %-10s %-10s %s\n'
+ file.write(text_format % (0,'Defect','Date','Author','Comment'))
+
+ for i,dh in enumerate(DefectHistory.objects.filter(date__gte=date_start,date__lte=date_stop).order_by('defect__name')):
+ if 'csv' == format:
+ writer.writerow([i+1,dh.defect.name,dh.date.strftime('%Y-%m-%d'),dh.author,dh.comment])
+ if 'txt' == format:
+ file.write(text_format % (i+1,dh.defect.name,dh.date.strftime('%Y-%m-%d'),dh.author,dh.comment))
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+# Notifications reports
+#
+
+class NotificationsReport(Report):
+ """Report for the Notifications Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("WR_NOTIFICATION_INIT(%s)" % parent_page, *args, **kwargs)
+ super(NotificationsReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("WR_NOTIFICATION_CONTEXT", *args, **kwargs)
+ context = super(NotificationsReport, self).get_context_data(*args, **kwargs)
+
+ context['report_type_list'] = '\
+ <option value="summary">Notification List</option> \
+ '
+
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+
+ context['report_recordrange_list'] = '\
+ <input type="radio" name="records" value="all"> All<br> \
+ '
+
+ # Add a date range
+ date_start = datetime.today() - timedelta(days=30)
+ date_stop = datetime.today()
+ context['report_date_list'] = '\
+ Start: <input type="text" name="date_start" value="%s"><br> \
+ Stop: <input type="text" name="date_stop" value="%s"> \
+ ' % (date_start.strftime('%m/%d/%Y'),date_stop.strftime('%m/%d/%Y'))
+
+ # Done!
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("WR_NOTIFICATION_EXEC", *args, **kwargs)
+
+ request_POST = self.request.POST
+
+ records = request_POST.get('records', '')
+ format = request_POST.get('format', '')
+# title = request_POST.get('title', '')
+ report_type = request_POST.get('report_type', '')
+ record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request_POST.get('date_start', '')
+ date_start = datetime.strptime(request_POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request_POST.get('date_stop', '')
+ date_stop = datetime.strptime(request_POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+ return 'Error:stop date is before start date',''
+ except Exception as e:
+ return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+
+ date_start = date_start.strftime('%Y-%m-%d')
+ date_stop = date_stop.strftime('%Y-%m-%d')
+
+ report_name = '%s/notifications_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ separator = ";"
+ if csv_separator == 'comma': separator = ","
+ if csv_separator == 'tab': separator = "\t"
+ writer = csv.writer(file, delimiter=separator,
+ quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ else:
+ separator = ","
+
+ if ('summary' == report_type):
+ if 'csv' == format:
+ writer.writerow(['Index','Date','Category','Priority','Description','URL','Author'])
+ if 'txt' == format:
+ file.write("Report : Notifications\n")
+ file.write("\n")
+ text_format='%02d) %-10s %-25s %-10s "%s",%s,%s\n'
+ file.write(text_format % (0,'Date','Category','Priority','Description','URL','Author'))
+
+# for i,notify in enumerate(Notify.objects.filter(srt_updated__gte=date_start,srt_updated__lte=date_stop).order_by('-srt_updated')):
+ for i,notify in enumerate(Notify.objects.all().order_by('-srt_updated')):
+ srt_updated = notify.srt_updated.strftime('%Y-%m-%d')
+ if (date_start > srt_updated) or (date_stop < srt_updated):
+ continue
+
+ if 'csv' == format:
+ writer.writerow([i+1,srt_updated,notify.category,notify.get_priority_text,notify.description,notify.url,notify.author])
+ if 'txt' == format:
+ file.write(text_format % (i+1,srt_updated,notify.category,notify.get_priority_text,notify.description,notify.url,notify.author))
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+# ErrorLogs reports
+#
+
+class ErrorLogsReport(Report):
+ """Report for the Error Logs Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("WR_ERRORLOGS_INIT(%s)" % parent_page, *args, **kwargs)
+ super(ErrorLogsReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("WR_ERRORLOGS_CONTEXT", *args, **kwargs)
+ context = super(ErrorLogsReport, self).get_context_data(*args, **kwargs)
+
+ context['report_type_list'] = '\
+ <option value="summary">Error Log List</option> \
+ '
+
+ context['report_columnrange_list'] = ''
+ context['report_format_list'] = '\
+ <input type="radio" name="format" value="txt" checked> Text<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="semi">Semi-colon</option> \
+ <option value="comma">Comma</option> \
+ <option value="tab">Tab</option> \
+ </select>) \
+ <br> \
+ '
+
+ context['report_recordrange_list'] = '\
+ <input type="radio" name="records" value="all"> All<br> \
+ '
+
+ # Add a date range
+ date_start = datetime.today() - timedelta(days=30)
+ date_stop = datetime.today()
+ context['report_date_list'] = '\
+ Start: <input type="text" name="date_start" value="%s"><br> \
+ Stop: <input type="text" name="date_stop" value="%s"> \
+ ' % (date_start.strftime('%m/%d/%Y'),date_stop.strftime('%m/%d/%Y'))
+
+ # Done!
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("WR_ERRORLOGS_EXEC", *args, **kwargs)
+
+ request_POST = self.request.POST
+
+ records = request_POST.get('records', '')
+ format = request_POST.get('format', '')
+# title = request_POST.get('title', '')
+ report_type = request_POST.get('report_type', '')
+ record_list = request_POST.get('record_list', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request_POST.get('date_start', '')
+ date_start = datetime.strptime(request_POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request_POST.get('date_stop', '')
+ date_stop = datetime.strptime(request_POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+ return 'Error:stop date is before start date',''
+ except Exception as e:
+ return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+
+ date_start = date_start.strftime('%Y-%m-%d')
+ date_stop = date_stop.strftime('%Y-%m-%d')
+
+ report_name = '%s/errorlogs_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ separator = ";"
+ if csv_separator == 'comma': separator = ","
+ if csv_separator == 'tab': separator = "\t"
+ writer = csv.writer(file, delimiter=separator,
+ quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ else:
+ separator = ","
+
+ if ('summary' == report_type):
+ if 'csv' == format:
+ writer.writerow(['Index','Date','Severity','Description'])
+ if 'txt' == format:
+ file.write("Report : Error Logs\n")
+ file.write("Start=%s,Stop=%s\n" % (date_start,date_stop))
+ text_format='%02d) %-10s %-10s "%s"\n'
+ file.write(text_format % (0,'Date','Severity','Description'))
+
+# for i,notify in enumerate(ErrorLog.objects.filter(srt_created__gte=date_start,srt_created__lte=date_stop).order_by('-srt_created')):
+ for i,notify in enumerate(ErrorLog.objects.all().order_by('-srt_created')):
+ srt_created = notify.srt_created.strftime('%Y-%m-%d')
+ if (date_start > srt_created) or (date_stop < srt_created):
+ continue
+ if 'csv' == format:
+ writer.writerow([i+1,srt_created,notify.severity,notify.description])
+ if 'txt' == format:
+ file.write(text_format % (i+1,srt_created,notify.get_severity_text,notify.description))
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+
class DefaultReport(Report):
"""Report for the Default Page"""
@@ -1848,11 +2910,26 @@ class ReportManager():
elif 'update-published' == parent_page:
return PublishPendingCveReport(parent_page, *args, **kwargs)
+ elif 'publish' == parent_page:
+ return PublishListReport(parent_page, *args, **kwargs)
+ elif 'publish-list' == parent_page:
+ return PublishListReport(parent_page, *args, **kwargs)
+ elif 'publish-summary' == parent_page:
+ return PublishSummaryReport(parent_page, *args, **kwargs)
+
elif 'package-filters' == parent_page:
return PackageFiltersReport(parent_page, *args, **kwargs)
elif 'cpes_srtool' == parent_page:
return CpesSrtoolReport(parent_page, *args, **kwargs)
+ elif 'manage_notifications' == parent_page:
+ return NotificationsReport(parent_page, *args, **kwargs)
+ elif 'error_logs' == parent_page:
+ return ErrorLogsReport(parent_page, *args, **kwargs)
+
+ elif 'history_defect' == parent_page:
+ return HistoryDefectReport(parent_page, *args, **kwargs)
+
else:
return DefaultReport(parent_page, *args, **kwargs)
diff --git a/lib/srtgui/static/js/libtoaster.js b/lib/srtgui/static/js/libtoaster.js
index 6f9b5d0f..b09511a1 100644
--- a/lib/srtgui/static/js/libtoaster.js
+++ b/lib/srtgui/static/js/libtoaster.js
@@ -81,57 +81,20 @@ var libtoaster = (function () {
});
}
- /* startABuild:
- * url: xhr_buildrequest or null for current project
- * targets: an array or space separated list of targets to build
+ /* cancelAJob:
+ * url: xhr_jobrequest url or null for current scrum
+ * jobRequestIds: space separated list of build request ids
* onsuccess: callback for successful execution
* onfail: callback for failed execution
*/
- function _startABuild (url, targets, onsuccess, onfail) {
-
+ function _cancelAJob(url, jobRequestIds, onsuccess, onfail){
if (!url)
- url = libtoaster.ctx.xhrBuildRequestUrl;
-
- /* Flatten the array of targets into a space spearated list */
- if (targets instanceof Array){
- targets = targets.reduce(function(prevV, nextV){
- return prev + ' ' + next;
- });
- }
+ url = libtoaster.ctx.xhrJobRequestUrl;
$.ajax( {
type: "POST",
url: url,
- data: { 'targets' : targets },
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (_data) {
- if (_data.error !== "ok") {
- console.warn(_data.error);
- } else {
- if (onsuccess !== undefined) onsuccess(_data);
- }
- },
- error: function (_data) {
- console.warn("Call failed");
- console.warn(_data);
- if (onfail) onfail(data);
- } });
- }
-
- /* cancelABuild:
- * url: xhr_buildrequest url or null for current project
- * buildRequestIds: space separated list of build request ids
- * onsuccess: callback for successful execution
- * onfail: callback for failed execution
- */
- function _cancelABuild(url, buildRequestIds, onsuccess, onfail){
- if (!url)
- url = libtoaster.ctx.xhrBuildRequestUrl;
-
- $.ajax( {
- type: "POST",
- url: url,
- data: { 'buildCancel': buildRequestIds },
+ data: { 'jobCancel': jobRequestIds },
headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
success: function (_data) {
if (_data.error !== "ok") {
@@ -148,7 +111,7 @@ var libtoaster = (function () {
});
}
- function _getMostRecentBuilds(url, onsuccess, onfail) {
+ function _getMostRecentJobs(url, onsuccess, onfail) {
$.ajax({
url: url,
type: 'GET',
@@ -163,80 +126,6 @@ var libtoaster = (function () {
});
}
- /* Get a project's configuration info */
- function _getProjectInfo(url, onsuccess, onfail){
- $.ajax({
- type: "GET",
- url: url,
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (_data) {
- if (_data.error !== "ok") {
- console.warn(_data.error);
- } else {
- if (onsuccess !== undefined) onsuccess(_data);
- }
- },
- error: function (_data) {
- console.warn(_data);
- if (onfail) onfail(_data);
- }
- });
- }
-
- /* Properties for data can be:
- * layerDel (csv)
- * layerAdd (csv)
- * projectName
- * projectVersion
- * machineName
- */
- function _editCurrentProject(data, onSuccess, onFail){
- $.ajax({
- type: "POST",
- url: libtoaster.ctx.xhrProjectUrl,
- data: data,
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (data) {
- if (data.error != "ok") {
- console.log(data.error);
- if (onFail !== undefined)
- onFail(data);
- } else {
- if (onSuccess !== undefined)
- onSuccess(data);
- }
- },
- error: function (data) {
- console.log("Call failed");
- console.log(data);
- }
- });
- }
-
- function _getLayerDepsForProject(url, onSuccess, onFail){
- /* Check for dependencies not in the current project */
- $.getJSON(url,
- { format: 'json' },
- function(data) {
- if (data.error != "ok") {
- console.log(data.error);
- if (onFail !== undefined)
- onFail(data);
- } else {
- var deps = {};
- /* Filter out layer dep ids which are in the
- * project already.
- */
- deps.list = data.layerdeps.list.filter(function(layerObj){
- return (data.projectlayers.lastIndexOf(layerObj.id) < 0);
- });
-
- onSuccess(deps);
- }
- }, function() {
- console.log("E: Failed to make request");
- });
- }
/* parses the query string of the current window.location to an object */
function _parseUrlParams() {
@@ -469,13 +358,9 @@ var libtoaster = (function () {
enableAjaxLoadingTimer: _enableAjaxLoadingTimer,
disableAjaxLoadingTimer: _disableAjaxLoadingTimer,
reload_params : reload_params,
- startABuild : _startABuild,
- cancelABuild : _cancelABuild,
- getMostRecentBuilds: _getMostRecentBuilds,
+ cancelAJob : _cancelAJob,
+ getMostRecentJobs: _getMostRecentJobs,
makeTypeahead : _makeTypeahead,
- getProjectInfo: _getProjectInfo,
- getLayerDepsForProject : _getLayerDepsForProject,
- editCurrentProject : _editCurrentProject,
debug: false,
parseUrlParams : _parseUrlParams,
dumpsUrlParams : _dumpsUrlParams,
diff --git a/lib/srtgui/static/js/mrjsection.js b/lib/srtgui/static/js/mrjsection.js
new file mode 100755
index 00000000..800f0e6f
--- /dev/null
+++ b/lib/srtgui/static/js/mrjsection.js
@@ -0,0 +1,131 @@
+
+function mrjSectionInit(ctx){
+ $('#latest-jobs').on('click', '.cancel-job-btn', function(e){
+ e.stopImmediatePropagation();
+ e.preventDefault();
+
+ var url = $(this).data('request-url');
+ var jobReqIds = $(this).data('jobrequest-id');
+
+ libtoaster.cancelAJob(url, jobReqIds, function () {
+ alert("CANCEL JOB");
+ window.location.reload();
+ }, null);
+ });
+
+ // cached version of jobData, so we can determine whether a job has
+ // changed since it was last fetched, and update the DOM appropriately
+ var jobData = {};
+
+ // returns the cached version of this job, or {} if there isn't a cached one
+ function getCached(job) {
+ return jobData[job.id] || {};
+ }
+
+ // returns true if a job's state changed to "Success", "Errors"
+ // or "Cancelled" from some other value
+ function jobFinished(job) {
+ var cached = getCached(job);
+ return cached.state &&
+ cached.state !== job.state &&
+ (job.state == 'Success' || job.state == 'Errors' ||
+ job.state == 'Cancelled');
+ }
+
+ // returns true if the state changed
+ function stateChanged(job) {
+ var cached = getCached(job);
+ return (cached.state !== job.state);
+ }
+
+ // returns true if the tasks_complete_percentage changed
+ function tasksProgressChanged(job) {
+ var cached = getCached(job);
+ var a = cached.tasks_complete_percentage;
+ var b = job.tasks_complete_percentage;
+ var c = cached.tasks_complete_percentage !== job.tasks_complete_percentage;
+ return (cached.tasks_complete_percentage !== job.tasks_complete_percentage);
+ }
+
+ // Auto-refresh 1500 ms AFTER its last successful refresh, to avoid refresh race conditions
+ function refreshMostRecentJobs(){
+ libtoaster.getMostRecentJobs(
+ libtoaster.ctx.mostRecentJobsUrl,
+
+ // success callback
+ function (data) {
+ var job;
+ var tmpl;
+ var container;
+ var selector;
+ var colourClass;
+ var elements;
+
+ for (var i = 0; i < data.length; i++) {
+ job = data[i];
+
+ var jobEle = document.getElementById("job-instance-"+job.id);
+ if (null == jobEle) {
+ // Job's display instance does not exist, so force refresh of page's Job MRU
+ // DISABLE THESE LINES TO Avoid a race condition loop
+// alert("NO JOB");
+ setTimeout(() => { console.log("NO_JOB_YET_DELAY!"); }, 2000);
+ window.location.reload();
+ return;
+ }
+ else if (jobFinished(job)) {
+ // a job finished: reload the whole page so that the job
+ // shows up in the jobs table
+// alert("DONE JOB");
+ window.location.reload();
+ return;
+ }
+ else if (stateChanged(job)) {
+ // update the whole template
+ job.warnings_pluralise = (job.warnings !== 1 ? 's' : '');
+ job.errors_pluralise = (job.errors !== 1 ? 's' : '');
+
+ tmpl = $.templates("#job-template");
+
+ html = $(tmpl.render(job));
+
+ selector = '[data-latest-job-result="' + job.id + '"] ' +
+ '[data-role="job-status-container"]';
+ container = $(selector);
+
+ // initialize bootstrap tooltips in the new HTML
+ html.find('span.glyphicon-question-sign').tooltip();
+
+ container.html(html);
+ }
+ else if (tasksProgressChanged(job)) {
+ // update the task progress text
+ selector = '#job-pc-done-' + job.id;
+ $(selector).html(job.tasks_complete_percentage);
+ selector = '#job-message-done-' + job.id;
+ $(selector).html(job.targets);
+
+ // update the task progress bar
+ selector = '#job-pc-done-bar-' + job.id;
+ $(selector).width(job.tasks_complete_percentage + '%');
+ }
+
+ jobData[job.id] = job;
+ }
+ },
+
+ // fail callback
+ function (data) {
+ console.error(data);
+ }
+ );
+ window.setTimeout(refreshMostRecentJobs, 1500);
+ var msg = "REFRESH:"+Date.now();
+ console.log(msg);
+ }
+
+ // window.setInterval(refreshMostRecentJobs, 1500);
+
+ // Self refresh every 1500 ms
+ refreshMostRecentJobs();
+}
diff --git a/lib/srtgui/static/js/table.js b/lib/srtgui/static/js/table.js
index fd241aa6..9d3030d7 100644
--- a/lib/srtgui/static/js/table.js
+++ b/lib/srtgui/static/js/table.js
@@ -1,11 +1,14 @@
'use strict';
-function tableInit(ctx){
+function tableInit(ctx, SelectedFilterVal = ""){
if (ctx.url.length === 0) {
throw "No url supplied for retreiving data";
}
+ var clearallfilterBtn = $("#clear-all-filter");
+ var lstRemoveCurrent = []
+ let result = ""
var tableChromeDone = false;
var tableTotal = 0;
@@ -32,6 +35,28 @@ function tableInit(ctx){
tableParams.limit = Number(tableParams.limit);
tableParams.page = Number(tableParams.page);
+ if (tableParams.filter != null && SelectedFilterVal != ""){
+ lstFilterval.splice(lstFilterval.indexOf(SelectedFilterVal),1)
+
+ lstRemoveCurrent = tableParams.filter.replace(/%20/g, " ").split(",");
+ lstRemoveCurrent.splice(lstRemoveCurrent.indexOf(SelectedFilterVal),1)
+ if (lstRemoveCurrent.length > 1){
+ tableParams.filter = lstRemoveCurrent.join(",")
+ }
+ else{
+ tableParams.filter = lstRemoveCurrent[0]
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
+ }
+
+ }
+ else if(tableParams.filter != null && SelectedFilterVal == ""){
+ tableParams.filter = null;
+ lstFilterval = [];
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
+ }
+
loadData(tableParams);
// clicking on this set of elements removes the search
@@ -263,10 +288,9 @@ function tableInit(ctx){
filterBtn.prop('id', col.filter_name);
filterBtn.click(filterOpenClicked);
- /* If we're currently being filtered setup the visial indicator */
+ /* If we're currently being filtered setup the visual indicator */
if (tableParams.filter &&
- tableParams.filter.match('^'+col.filter_name)) {
-
+ tableParams.filter.includes(col.filter_name)) {
filterBtnActive(filterBtn, true);
}
header.append(filterBtn);
@@ -310,24 +334,36 @@ function tableInit(ctx){
}
/* Toggles the active state of the filter button */
- function filterBtnActive(filterBtn, active){
+ function filterBtnActive(filterBtn, active, ActiveButton = ""){
+// var clearallfilterBtn = $("#clear-all-filter");
if (active) {
filterBtn.removeClass("btn-link");
filterBtn.addClass("btn-primary");
+ if(lstFilterval.length > 1){
+ clearallfilterBtn.addClass("btn-primary");
+ clearallfilterBtn.tooltip(
+ {
+ html: true,
+ title: '<button class="btn btn-sm btn-primary" onClick=\'$("#clear-filter-btn-'+ ctx.tableName +'").click();\'>Clear filter</button>',
+ placement: 'bottom',
+ delay: {
+ hide: 1500,
+ show: 400,
+ },
+ }
+ );
+ };
+
filterBtn.tooltip({
html: true,
- title: '<button class="btn btn-sm btn-primary" onClick=\'$("#clear-filter-btn-'+ ctx.tableName +'").click();\'>Clear filter</button>',
+ title: '<button class="btn btn-sm btn-primary" onClick=\'ClearFilter("'+ ActiveButton +'").click();\'>Clear filter</button>',
placement: 'bottom',
delay: {
hide: 1500,
show: 400,
},
});
- } else {
- filterBtn.removeClass("btn-primary");
- filterBtn.addClass("btn-link");
- filterBtn.tooltip('destroy');
}
}
@@ -627,16 +663,29 @@ function tableInit(ctx){
return action;
}
+ function table_objToString (obj) {
+ let str = '';
+ for (const [p, val] of Object.entries(obj)) {
+ str += `${p}=${val},`;
+ }
+ return str;
+ }
+
function filterOpenClicked(){
var filterName = $(this).data('filter-name');
/* We need to pass in the current search so that the filter counts take
- * into account the current search term
+ * into account the current search term.
+ *
+ * Also, pass all of the URL params via the tableParams object, in case
+ * the user's table needs custom params for processing.
*/
+
var params = {
'name' : filterName,
'search': tableParams.search,
'cmd': 'filterinfo',
+ 'tableParams': table_objToString(tableParams),
};
$.ajax({
@@ -816,11 +865,20 @@ function tableInit(ctx){
$("#clear-filter-btn-"+ctx.tableName).click(function(e){
e.preventDefault();
- var filterBtn = $("#" + tableParams.filter.split(":")[0]);
- filterBtnActive(filterBtn, false);
-
+ // var filterBtn = $("#" + tableParams.filter.split(":")[0]);
+ //filterBtnActive(filterBtn, false);
+ for(var i = 0, size = lstFilterval.length; i < size ; i++){
+ var item = lstFilterval[i];
+ var filterBtn = $("#" + item.split(":")[0]);
+ filterBtn.tooltip('destroy');
+ filterBtn.removeClass("btn-primary");
+ }
tableParams.filter = null;
+ lstFilterval = [];
loadData(tableParams);
+
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
});
$("#filter-modal-form-"+ctx.tableName).submit(function(e){
@@ -834,20 +892,33 @@ function tableInit(ctx){
// checked radio button
var checkedFilter = $(this).find("input[name='filter']:checked");
- tableParams.filter = checkedFilter.val();
+// # True? vvvv FOO
+// tableParams.filter = checkedFilter.val();
// hidden field holding the value for the checked filter
var checkedFilterValue = $(this).find("input[data-value-for='" +
tableParams.filter + "']");
tableParams.filter_value = checkedFilterValue.val();
+ if (lstFilterval.indexOf(checkedFilter.val()) == -1){
+ lstFilterval.push(checkedFilter.val());
+ tableParams.filter = lstFilterval.join(",")
+ }
+ else{
+ tableParams.filter =lstFilterval.join(",")
+ }
+ //tableParams.filter = checkedFilter.val() //lstFilterval
+ // hidden field holding the value for the checked filter
+ // tableParams.filter_value = checkedFilterValue.val();
+ var currentFilterValue = String(lstFilterval.slice(-1))
+
/* All === remove filter */
- if (tableParams.filter.match(":all$")) {
+ if (currentFilterValue.match(":all$")) {
tableParams.filter = null;
tableParams.filter_value = null;
} else {
- var filterBtn = $("#" + tableParams.filter.split(":")[0]);
- filterBtnActive(filterBtn, true);
+ var filterBtn = $("#" + currentFilterValue.split(":")[0]);
+ filterBtnActive(filterBtn, true,currentFilterValue);
}
loadData(tableParams);
diff --git a/lib/srtgui/static/js/typeahead_affected_components.js b/lib/srtgui/static/js/typeahead_affected_components.js
new file mode 100755
index 00000000..d8f5d25e
--- /dev/null
+++ b/lib/srtgui/static/js/typeahead_affected_components.js
@@ -0,0 +1,9 @@
+'use strict';
+
+function autocompInit() {
+ var newComponentInput = $("#input-isvulnerable-components");
+
+ libtoaster.makeTypeahead(newComponentInput,
+ libtoaster.ctx.recipeTypeAheadUrl, {}, function (item) {});
+
+} \ No newline at end of file
diff --git a/lib/srtgui/tables.py b/lib/srtgui/tables.py
index 0bfef4e5..dfb0571b 100644
--- a/lib/srtgui/tables.py
+++ b/lib/srtgui/tables.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -19,21 +19,75 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# NOTICE: Important ToasterTable implementation concepts and limitations
+#
+# 1) The order of table method execution:
+#
+# a) __init__
+# b) get_context_data
+# c) __init__ (second call reason unknown)
+# d) setup_queryset
+# e) setup_filters (if present)
+# f) setup_columns
+# g) apply_row_customization (if present)
+#
+# 2) Named URL path arguments from "urls.py" are accessible via kwargs
+# WARNING: these values are NOT available in "__init__"
+#
+# Example:
+# urls.py : url(r'^foo/(?P<my_value>\d+)$',
+# tables.py: my_value = int(kwargs['my_value'])
+#
+# 3) Named URL query arguments on the table's URL are accessible via the request
+#
+# Example:
+# url : http://.../foo/bar/42605?my_value=25
+# tables.py: my_value = self.request.GET.get('my_value','0')
+#
+# 4) The context[] values are NOT present in the "setup_columns" context
+# They must be explicitly implemented into the column data
+#
+# 5) The HTML page's templatetags are NOT present in the "setup_columns" context
+# They must be explicitly added into the template code
+#
+# Example:
+# static_data_template = '''
+# {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+# '''
+#
+# WARNING: because there is no context (#4), you cannot for example use dictionary lookup filters
+# use apply_row_customization() method instead, and set the self.dict_name in setup_columns()
+#
+
+import os
+import re
+import json
+from datetime import timedelta, datetime
+import traceback
+
from srtgui.widgets import ToasterTable
+from srtgui.api import execute_process
+from orm.models import SRTool
from orm.models import Cve, Vulnerability, Investigation, CweTable, Product
+from orm.models import CveAccess
from orm.models import Package
-from orm.models import CpeTable, CpeFilter, Defect, DataSource
+from orm.models import CpeTable, CpeFilter, Defect, DataSource, SrtSetting
from orm.models import PublishPending
from orm.models import Notify, NotifyCategories
+from orm.models import CveHistory, VulnerabilityHistory, InvestigationHistory, DefectHistory
+from orm.models import PublishSet
+from orm.models import ErrorLog
+from orm.models import Job
from users.models import UserSafe
+from django.contrib.auth.models import AnonymousUser
+
from django.db.models import Q
from srtgui.tablefilter import TableFilter
from srtgui.tablefilter import TableFilterActionToggle
-import re
-
# quick development/debugging support
from srtgui.api import _log
@@ -100,7 +154,23 @@ class CvesTable(ToasterTable):
is_recommend.add_action(exec_p3)
self.add_filter(is_recommend)
- def setup_queryset(self, *args, **kwargs):
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter CVEs by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
+
+
+ def orig_setup_queryset(self, *args, **kwargs):
self.queryset = \
Cve.objects.all()
@@ -110,6 +180,27 @@ class CvesTable(ToasterTable):
self.queryset = self.queryset.order_by(self.default_orderby)
+ def setup_queryset(self, *args, **kwargs):
+ _log("FOO_PRIVATE0:%s:" % (self.request.user))
+ if UserSafe.is_admin(self.request.user):
+ self.queryset = Cve.objects.all()
+ else:
+ # Add all public records
+ self.queryset = Cve.objects.filter(public = True)
+ if not isinstance(self.request.user,AnonymousUser):
+ # Add all user accessible private records
+ for cve_private_access in CveAccess.objects.filter(user=self.request.user):
+ cve = cve_private_access.cve
+ _log("FOO_PRIVATE1:%s:%s" % (self.request.user.username,cve.name))
+ private_queryset = Cve.objects.filter(name=cve.name)
+ _log("FOO_PRIVATE2:%s:%s" % (self.request.user.username,cve.name))
+ self.queryset |= private_queryset
+ _log("FOO_PRIVATE3:%s:%s" % (self.request.user.username,cve.name))
+
+ _log("FOO_PRIVATE4")
+ self.queryset = self.queryset.order_by(self.default_orderby)
+ _log("FOO_PRIVATE5")
+
def setup_columns(self, *args, **kwargs):
@@ -140,6 +231,15 @@ class CvesTable(ToasterTable):
static_data_template="{{data.get_status_text}}"
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
score_link_template = '''
{% if 0 == data.recommend %}0{% else %}{{data.recommend}}{% endif %}
'''
@@ -159,24 +259,6 @@ class CvesTable(ToasterTable):
hidden=True,
)
- self.add_column(title="Data Type",
- field_name="cve_data_type",
- hideable=True,
- hidden=True,
- )
-
- self.add_column(title="Data Format",
- field_name="cve_data_format",
- hideable=True,
- hidden=True,
- )
-
- self.add_column(title="Data Version",
- field_name="cve_data_version",
- hideable=True,
- hidden=True,
- )
-
self.add_column(title="Description",
field_name="description",
hideable=False,
@@ -205,23 +287,23 @@ class CvesTable(ToasterTable):
static_data_template=priority_v2_template,
)
- self.add_column(title="Packages",
+ self.add_column(title="Affected Components",
field_name="packages",
- hideable=True,
- hidden=True,
+ hideable=False,
+ hidden=False,
)
self.add_column(title="Published",
help_text="Initial publish date of the CVE",
hideable=False,
- #orderable=True,
+ orderable=True,
field_name="publishedDate",
)
self.add_column(title="Modified",
help_text="Last modification date of the CVE",
hideable=True,
- #orderable=True,
+ orderable=True,
field_name="lastModifiedDate",
)
@@ -237,6 +319,12 @@ class CvesTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
self.add_column(title="Publish Request",
help_text="SRT Publish Request State",
hideable=True,
@@ -289,7 +377,7 @@ class CvesTable(ToasterTable):
orderable=True,
field_name="srt_updated",
static_data_name="srt_updated",
- static_data_template='{{data.srt_updated | date:"m/d/y H:i"}}'
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
)
source_count_template = '''
@@ -315,9 +403,12 @@ class SelectCveTable(ToasterTable):
context = super(SelectCveTable, self).get_context_data(**kwargs)
context['products'] = Product.objects.all()
context['components'] = Defect.Components
+ context['doesnotimpact_text'] = SrtSetting.get_setting('SRTOOL_DEFECT_DOESNOTIMPACT',"It doesn't impact the product")
+ context['components'] = Defect.Components
+
return context
- def apply_row_customization(self, row):
+ def apply_row_customization(self, row, **kwargs):
data = super(SelectCveTable, self).apply_row_customization(row)
# data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
@@ -421,11 +512,18 @@ class SelectCveTable(ToasterTable):
def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Id (creation order)",
+ field_name="id",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ )
+
self.add_column(title="Select",
field_name="Select",
hideable=False,
static_data_name="select",
- static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.id}}" name="{{data.name}}" />',
)
self.add_column(title="Status",
@@ -447,7 +545,7 @@ class SelectCveTable(ToasterTable):
)
recommend_link_template = '''
- {% load projecttags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+ {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
'''
self.add_column(title="Recommendation",
hideable=False,
@@ -501,20 +599,20 @@ class SelectCveTable(ToasterTable):
)
self.add_column(title="publishedDate",
- field_name="publisheddate",
+ field_name="publishedDate",
hideable=True,
hidden=True,
orderable=True,
- static_data_name="publisheddate",
+ static_data_name="publishedDate",
static_data_template='<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.publishedDate}}</span>',
)
self.add_column(title="lastModifiedDate",
- field_name="lastmodifieddate",
+ field_name="lastModifiedDate",
hideable=True,
hidden=True,
orderable=True,
- static_data_name="lastmodifieddate",
+ static_data_name="lastModifiedDate",
static_data_template='<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.lastModifiedDate}}</span>',
)
@@ -562,39 +660,76 @@ class DefectsTable(ToasterTable):
def setup_filters(self, *args, **kwargs):
- # Priority filter
- is_priority = TableFilter(name="is_priority",
+ # Defect Priority filter
+ is_defect_priority = TableFilter(name="is_defect_priority",
title="Filter defects by 'Priority'")
- for priority in range(len(Defect.Priority)):
- is_priority.add_action(TableFilterActionToggle(
- Defect.Priority[priority][1].lower().replace(' ','_'),
- Defect.Priority[priority][1],
- Q(priority=Defect.Priority[priority][0]))
+ for priority in range(len(Defect.DEFECT_PRIORITY)):
+ if Defect.DEFECT_PRIORITY_ERROR == Defect.DEFECT_PRIORITY[priority][0]:
+ continue
+ is_defect_priority.add_action(TableFilterActionToggle(
+ Defect.DEFECT_PRIORITY[priority][1].lower().replace(' ','_'),
+ Defect.DEFECT_PRIORITY[priority][1],
+ Q(priority=Defect.DEFECT_PRIORITY[priority][0]))
)
- self.add_filter(is_priority)
-
- # Status filter
- is_status = TableFilter(name="is_status",
- title="Filter defects by 'Status'")
- for status in range(len(Defect.Status)):
- is_status.add_action(TableFilterActionToggle(
- Defect.Status[status][1].lower().replace(' ','_'),
- Defect.Status[status][1],
- Q(status=Defect.Status[status][0]))
+ self.add_filter(is_defect_priority)
+
+ # Defect Status filter
+ is_defect_status = TableFilter(name="is_defect_status",
+ title="Filter defects by defect 'Status'")
+ for status in range(len(Defect.DEFECT_STATUS)):
+ is_defect_status.add_action(TableFilterActionToggle(
+ Defect.DEFECT_STATUS[status][1].lower().replace(' ','_'),
+ Defect.DEFECT_STATUS[status][1],
+ Q(status=Defect.DEFECT_STATUS[status][0]))
)
- self.add_filter(is_status)
+ self.add_filter(is_defect_status)
# Resolution filter
is_resolution = TableFilter(name="is_resolution",
title="Filter defects by 'Resolution'")
- for resolution in range(len(Defect.Resolution)):
+ for resolution in range(len(Defect.DEFECT_RESOLUTION)):
is_resolution.add_action(TableFilterActionToggle(
- Defect.Resolution[resolution][1].lower().replace(' ','_'),
- Defect.Resolution[resolution][1],
- Q(resolution=Defect.Resolution[resolution][0]))
+ Defect.DEFECT_RESOLUTION[resolution][1].lower().replace(' ','_'),
+ Defect.DEFECT_RESOLUTION[resolution][1],
+ Q(resolution=Defect.DEFECT_RESOLUTION[resolution][0]))
)
self.add_filter(is_resolution)
+ # SRT Priority filter
+ is_srt_priority = TableFilter(name="is_srt_priority",
+ title="Filter defects by 'SRT Priority'")
+ for priority in range(len(SRTool.SRT_PRIORITY)):
+ if SRTool.PRIORITY_ERROR == SRTool.SRT_PRIORITY[priority][0]:
+ continue
+ is_srt_priority.add_action(TableFilterActionToggle(
+ SRTool.SRT_PRIORITY[priority][1].lower().replace(' ','_'),
+ SRTool.SRT_PRIORITY[priority][1],
+ Q(srt_priority=SRTool.SRT_PRIORITY[priority][0]))
+ )
+ self.add_filter(is_srt_priority)
+
+ # SRTool Status filter
+ is_srt_status = TableFilter(name="is_srt_status",
+ title="Filter defects by 'SRT Status'")
+ for status in range(len(SRTool.SRT_STATUS)):
+ is_srt_status.add_action(TableFilterActionToggle(
+ SRTool.SRT_STATUS[status][1].lower().replace(' ','_'),
+ SRTool.SRT_STATUS[status][1],
+ Q(srt_status=SRTool.SRT_STATUS[status][0]))
+ )
+ self.add_filter(is_srt_status)
+
+ # SRTool Outcome filter
+ is_srt_outcome = TableFilter(name="is_srt_outcome",
+ title="Filter defects by 'SRT Outcome'")
+ for status in range(len(Defect.SRT_OUTCOME)):
+ is_srt_outcome.add_action(TableFilterActionToggle(
+ Defect.SRT_OUTCOME[status][1].lower().replace(' ','_'),
+ Defect.SRT_OUTCOME[status][1],
+ Q(srt_outcome=Defect.SRT_OUTCOME[status][0]))
+ )
+ self.add_filter(is_srt_outcome)
+
# Product filter
#(name="Wind River Linux",version="LTS-17")
is_product = TableFilter(name="is_product",
@@ -631,31 +766,58 @@ class DefectsTable(ToasterTable):
field_name="summary",
)
- self.add_column(title="Priority",
+ self.add_column(title="Defect Priority",
hideable=False,
- field_name="priority",
orderable=True,
- filter_name="is_priority",
- static_data_name="priority",
- static_data_template='{{data.get_priority_text}}',
+ filter_name="is_defect_priority",
+ static_data_name="defect_priority",
+ static_data_template='{{data.get_defect_priority_text}}',
)
- self.add_column(title="Status",
+ self.add_column(title="Defect Status",
hideable=False,
- field_name="status",
orderable=True,
- filter_name="is_status",
- static_data_name="status",
- static_data_template='{{data.get_status_text}}',
+ filter_name="is_defect_status",
+ static_data_name="defect_status",
+ static_data_template='{{data.get_defect_status_text}}',
)
- self.add_column(title="Resolution",
+ self.add_column(title="Defect Resolution",
hideable=False,
- field_name="resolution",
orderable=True,
filter_name="is_resolution",
- static_data_name="resolution",
- static_data_template='{{data.get_resolution_text}}',
+ static_data_name="defect_resolution",
+ static_data_template='{{data.get_defect_resolution_text}}',
+ )
+
+ self.add_column(title="SRT Priority",
+ hideable=False,
+ orderable=True,
+ filter_name="is_srt_priority",
+ static_data_name="srt_priority",
+ static_data_template='{{data.get_priority_text}}',
+ )
+
+ self.add_column(title="SRT Status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_srt_status",
+ static_data_name="srt_status",
+ static_data_template='{{data.get_status_text}}',
+ )
+
+ self.add_column(title="Duplicate Of",
+ hideable=True,
+ hidden=True,
+ field_name="duplicate_of",
+ )
+
+ self.add_column(title="Outcome",
+ hideable=True,
+ hidden=True,
+ filter_name="is_srt_outcome",
+ static_data_name="srt_outcome",
+ static_data_template='{{data.get_outcome_text}}',
)
self.add_column(title="Release Version",
@@ -712,6 +874,23 @@ class DefectsTable(ToasterTable):
static_data_template=product_link_template,
)
+ self.add_column(title="Defect Created",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="date_created",
+ static_data_name="date_created",
+ static_data_template='{{data.date_created}}'
+ )
+ self.add_column(title="Defect Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="date_updated",
+ static_data_name="date_updated",
+ static_data_template='{{data.date_updated}}'
+ )
+
self.add_column(title="SRT Update",
hideable=True,
hidden=True,
@@ -726,7 +905,7 @@ class CwesTable(ToasterTable):
def __init__(self, *args, **kwargs):
super(CwesTable, self).__init__(*args, **kwargs)
- self.default_orderby = "name_sort"
+ self.default_orderby = "name"
def get_context_data(self, **kwargs):
context = super(CwesTable, self).get_context_data(**kwargs)
@@ -1162,13 +1341,17 @@ class ProductsTable(ToasterTable):
)
self.add_column(title="Defect Tags",
- field_name="defect_tags",
- hideable=False,
+ hideable=True,
+ hidden=True,
+ static_data_name="defect_tags",
+ static_data_template='{{data.get_defect_str}}',
)
self.add_column(title="Product Tags",
- field_name="product_tags",
- hideable=False,
+ hideable=True,
+ hidden=True,
+ static_data_name="product_tags",
+ static_data_template='{{data.get_product_str}}',
)
@@ -1262,6 +1445,10 @@ class VulnerabilitiesTable(ToasterTable):
# Priority filter
is_priority = TableFilter(name="is_priority",
title="Filter Vulnerabilities by 'Priority'")
+ exec_is_undefined = TableFilterActionToggle(
+ "undefined",
+ "Undefined",
+ Q(priority=Vulnerability.UNDEFINED))
exec_is_low = TableFilterActionToggle(
"low",
"Low",
@@ -1274,11 +1461,33 @@ class VulnerabilitiesTable(ToasterTable):
"high",
"High",
Q(priority=Vulnerability.HIGH))
+ exec_is_critical = TableFilterActionToggle(
+ "critical",
+ "Critical",
+ Q(priority=Vulnerability.CRITICAL))
+ is_priority.add_action(exec_is_undefined)
is_priority.add_action(exec_is_low)
is_priority.add_action(exec_is_medium)
is_priority.add_action(exec_is_high)
+ is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter Vulnerabilities by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
+
+
def setup_queryset(self, *args, **kwargs):
self.queryset = \
Vulnerability.objects.all()
@@ -1302,6 +1511,15 @@ class VulnerabilitiesTable(ToasterTable):
static_data_template=id_link_template,
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
# !!! HACK: 'vc.cve.name' is returning '%s' when it is supposed to be null !!!
cve_link_template = '''
{% for vc in data.vulnerability_to_cve.all %}
@@ -1356,6 +1574,12 @@ class VulnerabilitiesTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
investigate_link_template = '''
{% for investigation in data.vulnerability_investigation.all %}
{% if not forloop.first %} {% endif %}<a href="{% url 'investigation' investigation.name %}" target="_blank">{{investigation.name}}</a>
@@ -1397,6 +1621,16 @@ class VulnerabilitiesTable(ToasterTable):
hidden=False,
)
+ self.add_column(title="SRT Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="srt_updated",
+ static_data_name="srt_updated",
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
+ )
+
+
class InvestigationsTable(ToasterTable):
"""Table of All Investigations in SRTool"""
@@ -1458,29 +1692,54 @@ class InvestigationsTable(ToasterTable):
# Priority filter
is_priority = TableFilter(name="is_priority",
title="Filter Investigations by 'Priority'")
+ exec_is_undefined = TableFilterActionToggle(
+ "undefined",
+ "Undefined",
+ Q(priority=Vulnerability.UNDEFINED))
exec_is_low = TableFilterActionToggle(
"low",
"Low",
- Q(priority=Investigation.LOW))
+ Q(priority=Vulnerability.LOW))
exec_is_medium = TableFilterActionToggle(
"medium",
"Medium",
- Q(priority=Investigation.MEDIUM))
+ Q(priority=Vulnerability.MEDIUM))
exec_is_high = TableFilterActionToggle(
"high",
"High",
- Q(priority=Investigation.HIGH))
+ Q(priority=Vulnerability.HIGH))
+ exec_is_critical = TableFilterActionToggle(
+ "critical",
+ "Critical",
+ Q(priority=Vulnerability.CRITICAL))
+ is_priority.add_action(exec_is_undefined)
is_priority.add_action(exec_is_low)
is_priority.add_action(exec_is_medium)
is_priority.add_action(exec_is_high)
+ is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter Investigations by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
+
# Product filter
is_product = TableFilter(name="is_product",
title="Filter Investigations by 'Product'")
for p in Product.objects.all():
is_product.add_action( TableFilterActionToggle(
- p.pk,
+ p.key,
p.long_name,
Q(product=p)) )
self.add_filter(is_product)
@@ -1509,6 +1768,16 @@ class InvestigationsTable(ToasterTable):
static_data_template=id_link_template,
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
defect_link_template = '''
{% for ij in data.investigation_to_defect.all %}
{% if not forloop.first %} {% endif %}<a href="{% url 'defect_name' ij.defect.name %}">{{ij.defect.name}} </a>
@@ -1572,6 +1841,12 @@ class InvestigationsTable(ToasterTable):
hideable=True,
)
+ self.add_column(title="Tags",
+ field_name="tags",
+ hideable=True,
+ hidden=True,
+ )
+
self.add_column(title="Vulnerability",
hidden=False,
orderable=False,
@@ -1587,6 +1862,16 @@ class InvestigationsTable(ToasterTable):
static_data_template="<a href=\"{% url 'product' data.product.id %}\">{{data.product.long_name}}</a>",
)
+ self.add_column(title="SRT Update",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="srt_updated",
+ static_data_name="srt_updated",
+ static_data_template='{{data.srt_updated | date:"Y/m/d"}}'
+ )
+
+
class SourcesTable(ToasterTable):
"""Table of All Data Sources in SRTool"""
@@ -1597,6 +1882,7 @@ class SourcesTable(ToasterTable):
def get_context_data(self, **kwargs):
context = super(SourcesTable, self).get_context_data(**kwargs)
+ context['mru'] = Job.get_recent()
return context
def setup_queryset(self, *args, **kwargs):
@@ -1606,6 +1892,27 @@ class SourcesTable(ToasterTable):
self.queryset = self.queryset.order_by(self.default_orderby)
def setup_columns(self, *args, **kwargs):
+ # Get the 'next update' values
+ source_update = {}
+ cmnds = [os.path.join(os.environ.get('SRT_BASE_DIR'),'./bin/common/srtool_update.py'),'--fetch-updates-dhm']
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnds)
+ if 0 != result_returncode:
+ _log("ERROR:FETCH-UPDATES-DHM:%s" % result_stderr)
+ for line in result_stdout.splitlines():
+ try:
+ name = line[:line.index(',')]
+ value = line[line.index(',')+1:].strip()
+ source_update[name] = value
+ except:
+ continue
+ self.source_update = source_update
+
+ self.add_column(title="ID",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ field_name="id",
+ )
self.add_column(title="Key",
hideable=False,
@@ -1638,6 +1945,8 @@ class SourcesTable(ToasterTable):
hidden=True,
orderable=False,
field_name="attributes",
+ static_data_name="attributes",
+ static_data_template='''<span id="attr_{{data.id}}">{{data.attributes}}</span>''',
)
self.add_column(title="Description",
@@ -1667,11 +1976,14 @@ class SourcesTable(ToasterTable):
field_name="lookup",
)
+ last_modified_date_template = '{% load jobtags %}'
+ last_modified_date_template += '{{ data.lastModifiedDate|shift_timezone:"%d" }}' % self.request.user.get_timezone_offset
self.add_column(title="Data Modified",
help_text="Last upstream date",
hideable=False,
orderable=True,
- field_name="lastModifiedDate",
+ static_data_name="str_lastModifiedDate",
+ static_data_template=last_modified_date_template,
)
updated_template = '''
@@ -1690,7 +2002,8 @@ class SourcesTable(ToasterTable):
{% if data.update %}{{data.get_frequency_text}}{% else %}({{data.get_frequency_text}}){% endif %}
'''
self.add_column(title="Update Freq.",
- hideable=False,
+ hideable=True,
+ hidden=True,
orderable=True,
field_name="update_frequency",
static_data_name="update_frequency",
@@ -1714,6 +2027,54 @@ class SourcesTable(ToasterTable):
field_name="cve_filter",
)
+# update_now_template = '''
+# {% load jobtags %}<span id="next_{{data.id}}">{{source_update|get_dict_value:data.id}}{% if data.update %}<button class="execute run-update-job" style="float:right;" x-data="{{data.id}}">Now</button>{% endif %}</span>
+# '''
+ update_now_template = '''{{data.id}}|{{data.attributes}}'''
+ self.add_column(title="Update Next (D|H:M:S)",
+ hideable=True,
+ hidden=False,
+ static_data_name="update_now",
+ static_data_template=update_now_template,
+ )
+
+ source_enabled_template='''
+ <input type="checkbox" class="source-enabled" name="source_enabled" x-data="{{data.id}}" {% if "DISABLE " in data.attributes %}checked{% endif %}>
+ <label for="audit_top_artifact"> Disabled</label><br>
+ '''
+ self.add_column(title="Disable",
+ hideable=True,
+ hidden=True,
+ static_data_name="source_enabled",
+ static_data_template=source_enabled_template,
+ )
+
+ def apply_row_customization(self, row):
+ data = super(SourcesTable, self).apply_row_customization(row)
+ def get_key(key,dict):
+ if key in dict:
+ return(dict[key])
+ return ''
+ # {'Severity_V2': '["", "MEDIUM"]', 'Severity_V3': '["", "MEDIUM"]'}
+ for i in range(len(data['rows'])):
+ source_id,attributes = data['rows'][i]['update_now'].split('|')
+ try:
+ update_now_str = self.source_update[source_id]
+ disabled = ("DISABLE " in attributes)
+ hidden_on = 'style="display:none;"' if disabled else ''
+ hidden_off = 'style="display:none;"' if not disabled else ''
+# <span id="next_{{data.id}}">{{source_update|get_dict_value:data.id}}{% if data.update %}<button class="execute run-update-job" style="float:right;" x-data="{{data.id}}">Now</button>{% endif %}</span>
+ update_now = '<span id="next_on_%s" %s>%s' % (source_id,hidden_on,update_now_str)
+ if update_now_str and ('(' != update_now_str[0]):
+ update_now += '<button class="execute run-update-job" style="float:right;" x-data="%s">Now</button>' % source_id
+ update_now += '</span>'
+ update_now += '<span id="next_off_%s" %s>(Disabled)</span>' % (source_id,hidden_off)
+ data['rows'][i]['update_now'] = update_now
+ except Exception as e:
+ _log("ERROR_APPLY_ROW_CUSTOMIZATION:%s" % e)
+ continue
+ return data
+
class SelectPublishTable(ToasterTable):
"""Table of Publishable CVE's in SRTool"""
@@ -1778,8 +2139,7 @@ class SelectPublishTable(ToasterTable):
field_name="Select",
hideable=False,
static_data_name="select",
- static_data_template='<input type="checkbox" name="{{data.name}}" />',
- )
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.id}}" name="{{data.name}}" />', )
self.add_column(title="Status",
field_name="status",
@@ -1918,7 +2278,7 @@ class NotificationsTable(ToasterTable):
orderable=True,
field_name="srt_created",
static_data_name="srt_created",
- static_data_template='{{data.srt_updated | date:"m/d/y H:i"}}'
+ static_data_template='{{data.srt_created | date:"m/d/y H:i"}}'
)
self.add_column(title="Category",
@@ -1981,14 +2341,70 @@ class NotificationsTable(ToasterTable):
# static_data_template='''{{data.author.name}}''',
# )
- manage_link_template = '''
- <span class="glyphicon glyphicon-edit edit-notify" id="notify_edit_'+{{data.id}}+'" x-data="{{data.id}}"></span>
- '''
-# <span class="glyphicon glyphicon-trash trash-notify" id="notify_trash_'+{{data.id}}+'" x-data="{{data.id}}"></span>
- self.add_column(title="Manage",
- static_data_name="manage",
- static_data_template=manage_link_template,
- )
+ if False:
+ manage_link_template = '''
+ <span class="glyphicon glyphicon-edit edit-notify" id="notify_edit_'+{{data.id}}+'" x-data="{{data.id}}"></span>
+ '''
+# <span class="glyphicon glyphicon-trash trash-notify" id="notify_trash_'+{{data.id}}+'" x-data="{{data.id}}"></span>
+ self.add_column(title="Manage",
+ static_data_name="manage",
+ static_data_template=manage_link_template,
+ )
+
+
+class ErrorLogsTable(ToasterTable):
+ """Table of ErrorLogs in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(ErrorLogsTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-srt_created"
+
+ def get_context_data(self,**kwargs):
+ context = super(ErrorLogsTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = ErrorLog.objects.all()
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.pk}}" value="{{data.pk}}" name="select-notify" />',
+ )
+
+ self.add_column(title="SRT Created",
+ hideable=False,
+ orderable=True,
+ field_name="srt_created",
+ static_data_name="srt_created",
+ static_data_template='{{data.srt_created | date:"m/d/y H:i"}}'
+ )
+
+ self.add_column(title="Severity",
+ field_name="severity",
+ orderable=True,
+ static_data_name="severity",
+ static_data_template='''{{ data.get_severity_text }}''',
+ )
+
+ self.add_column(title="Description",
+ field_name="description",
+ hideable=False,
+ orderable=True,
+ )
+
+ if False:
+ manage_link_template = '''
+ <span class="glyphicon glyphicon-trash trash-errorlog" id="errorlog_trash_'+{{data.id}}+'" x-data="{{data.id}}"></span>
+ '''
+ self.add_column(title="Manage",
+ static_data_name="manage",
+ static_data_template=manage_link_template,
+ )
class PackageFilterTable(ToasterTable):
@@ -2193,3 +2609,693 @@ class PackageFilterDetailTable(ToasterTable):
static_data_name="defects",
static_data_template=defect_link_template,
)
+
+class HistoryCveTable(ToasterTable):
+ """Table of History Cves Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryCveTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryCveTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = CveHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Cve",
+ hideable=False,
+ orderable=True,
+ static_data_name="name",
+ static_data_template='''<a href="{% url 'cve' data.cve.name %}">{{data.cve.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryVulnerabilityTable(ToasterTable):
+ """Table of History Vulnerability Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryVulnerabilityTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryVulnerabilityTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = VulnerabilityHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Vulnerability",
+ hideable=False,
+ orderable=True,
+ static_data_name="vulnerability",
+ static_data_template='''<a href="{% url 'vulnerability' data.vulnerability.name %}">{{data.vulnerability.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryInvestigationTable(ToasterTable):
+ """Table of History Investigation Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryInvestigationTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryInvestigationTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = InvestigationHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Investigation",
+ hideable=False,
+ orderable=True,
+ static_data_name="investigation",
+ static_data_template='''<a href="{% url 'investigation' data.investigation.name %}">{{data.investigation.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+class HistoryDefectTable(ToasterTable):
+ """Table of History Defect Details in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(HistoryDefectTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date"
+
+ def get_context_data(self,**kwargs):
+ context = super(HistoryDefectTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = DefectHistory.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Defect",
+ hideable=False,
+ orderable=True,
+ static_data_name="defect",
+ static_data_template='''<a href="{% url 'defect_name' data.defect.name %}">{{data.defect.name}}</a>''',
+ )
+ self.add_column(title="Comment",
+ field_name="comment",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Date",
+ field_name="date",
+ hideable=False,
+ orderable=True,
+ )
+ self.add_column(title="Author",
+ field_name="author",
+ hideable=False,
+ orderable=True,
+ )
+
+
+class PublishListTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishListTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "cve"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishListTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter CVE's by 'Status'")
+ for status in range(len(Cve.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Cve.STATUS[status][1].lower().replace(' ','_'),
+ Cve.STATUS[status][1],
+ Q(cve__status=Cve.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ # Is State filter
+ is_state = TableFilter(name="is_state",
+ title="Filter items by 'State'")
+ for state in range(len(PublishSet.PUBLISH_SET_STATE)):
+ if PublishSet.PUBLISH_SET_ERROR == PublishSet.PUBLISH_SET_STATE[state][0]:
+ continue
+ is_state.add_action(TableFilterActionToggle(
+ PublishSet.PUBLISH_SET_STATE[state][1].lower().replace(' ','_'),
+ PublishSet.PUBLISH_SET_STATE[state][1],
+ Q(state=PublishSet.PUBLISH_SET_STATE[state][0]))
+ )
+ self.add_filter(is_state)
+
+#Record.objects.filter( Q(parameter__icontains="wd2") | ~Q(parameter__icontains="wd") )
+
+ # V3 filter
+ is_v3 = TableFilter(name="is_v3",title="Filter items by 'V3'")
+ exec_v3 = TableFilterActionToggle(
+ "v3",
+ "Severity_V3 change",
+ Q(reason__icontains="Severity_V3"))
+ is_v3.add_action(exec_v3)
+ self.add_filter(is_v3)
+
+ # V2 filter
+ is_v2 = TableFilter(name="is_v2",title="Filter items by 'V2'")
+ exec_v2 = TableFilterActionToggle(
+ "v2",
+ "Severity_V2 change",
+ Q(reason__icontains="Severity_V2"))
+ is_v2.add_action(exec_v2)
+ self.add_filter(is_v2)
+
+ # Product filters
+ # Gather the supported products
+ product_query = Product.objects.filter()
+ product_filter = []
+ for product in product_query:
+ if "support" == product.get_product_tag('mode').order_by('-order'):
+ product_filter.append(product.get_defect_tag('key'))
+ for product_key in product_filter:
+ is_filter = TableFilter(name="is_%s" % product_key,title="Filter CVE's by '%s'" % product_key)
+ for status in range(len(Cve.STATUS)):
+ is_filter.add_action(TableFilterActionToggle(
+ product_key.lower(),
+ product_key,
+ Q(reason__icontains=product_key))
+ )
+ self.add_filter(is_filter)
+
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = PublishSet.objects.all()
+
+ def apply_row_customization(self, row):
+ data = super(PublishListTable, self).apply_row_customization(row)
+ # data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
+ def get_key(key,dict):
+ if key in dict:
+ return(dict[key])
+ return ''
+ # {'Severity_V2': '["", "MEDIUM"]', 'Severity_V3': '["", "MEDIUM"]'}
+ for i in range(len(data['rows'])):
+ reason = data['rows'][i]['reason']
+ if not reason:
+ continue
+ try:
+ # CvssV3
+ reason_dict = json.loads(reason)
+ cvssV3 = get_key('Severity_V3',reason_dict)
+ if cvssV3:
+ data['rows'][i]['cvssV3'] = "%s,%s" % (cvssV3[0],cvssV3[1])
+ # CvssV2
+ cvssV2 = get_key('Severity_V2',reason_dict)
+ if cvssV2:
+ data['rows'][i]['cvssV2'] = "%s,%s" % (cvssV2[0],cvssV2[1])
+ # Products
+ for product_key in ('LIN5','CGP5','SCP5','OVP','LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018'):
+ product_col = get_key(product_key,reason_dict)
+ if product_col:
+ data['rows'][i][product_key] = "%s" % (product_col)
+ except Exception as e:
+ continue
+ return data
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.cve.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ filter_name="is_state",
+ static_data_name="state",
+ static_data_template='''{{data.state_text}}''',
+ )
+
+ self.add_column(title="CVE",
+ field_name="cve__name",
+ hideable=False,
+ orderable=True,
+ static_data_name="cve__name",
+ static_data_template='''<a href="{% url 'cve' data.cve.name %}">{{data.cve.name}}</a>''',
+ )
+
+ self.add_column(title="CVE Published",
+ field_name="cve__publishedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Modified",
+ field_name="cve__lastModifiedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Status",
+ field_name="cve_status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="cve_status",
+ static_data_template='''{{data.cve.get_status_text}}''',
+ )
+
+ self.add_column(title="cvssV3",
+ hideable=True,
+ hidden=False,
+ filter_name="is_v3",
+ static_data_name="cvssV3",
+ static_data_template='',
+ )
+
+ self.add_column(title="cvssV2",
+ hideable=True,
+ hidden=False,
+ filter_name="is_v2",
+ static_data_name="cvssV2",
+ static_data_template='',
+ )
+
+ self.add_column(title="CVE Description",
+ field_name="cve__description",
+ hideable=False,
+ orderable=False,
+ )
+
+ # Product columns
+ for product_key in ('LIN5','CGP5','SCP5','OVP','LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018'):
+ self.add_column(title=product_key,
+# hideable=True,
+# hidden=True,
+ filter_name="is_%s" % product_key,
+ static_data_name=product_key,
+ static_data_template='',
+ )
+
+ self.add_column(title="CVE Acknowledge",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ static_data_name="cve_acknowledge",
+ static_data_template='''{{data.cve.acknowledge_date|date:'Y-m-d'}}''',
+ )
+
+ self.add_column(title="Public Comments",
+ hideable=False,
+ orderable=False,
+ static_data_name="public_comments",
+ static_data_template='''{{data.cve.get_public_comments}}''',
+ )
+
+ self.add_column(title="reason",
+ field_name="reason",
+ hideable=False,
+ orderable=False,
+ )
+
+
+class PublishCveTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishCveTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-status"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishCveTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter CVE's by 'Status'")
+ for status in range(len(Cve.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Cve.STATUS[status][1].lower().replace(' ','_'),
+ Cve.STATUS[status][1],
+ Q(status=Cve.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = Cve.objects.all()
+ exclude_list = [Cve.NEW, Cve.HISTORICAL, Cve.NEW_RESERVED] # CVE.NEW
+ self.queryset = self.queryset.exclude(status__in=exclude_list)
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ static_data_name="state",
+ static_data_template='''{{data.get_publishset_state}}''',
+ )
+
+ self.add_column(title="Name",
+ field_name="name",
+ hideable=False,
+ orderable=True,
+ static_data_name="cve_name",
+ static_data_template='''<a href="{% url 'cve' data.name %}">{{data.name}}</a>''',
+ )
+
+ self.add_column(title="CVE Published",
+ field_name="publishedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Modified",
+ field_name="lastModifiedDate",
+ hideable=False,
+ orderable=True,
+ )
+
+ self.add_column(title="CVE Status",
+ field_name="status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="cve_status",
+ static_data_template='''{{data.get_status_text}}''',
+ )
+
+ self.add_column(title="CVE Description",
+ field_name="description",
+ hideable=False,
+ orderable=False,
+ )
+
+ self.add_column(title="CVE Acknowledge",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ static_data_name="cve_acknowledge",
+ static_data_template='''{{data.acknowledge_date|date:'Y-m-d'}}''',
+ )
+
+
+class PublishDefectTable(ToasterTable):
+ """Table of Publish View in SRTool"""
+
+ def __init__(self, *args, **kwargs):
+ super(PublishDefectTable, self).__init__(*args, **kwargs)
+ self.default_orderby = "-date_updated"
+
+ def get_context_data(self,**kwargs):
+ context = super(PublishDefectTable, self).get_context_data(**kwargs)
+ return context
+
+ def setup_filters(self, *args, **kwargs):
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter Defects by 'Status'")
+ for status in range(len(Defect.DEFECT_STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Defect.DEFECT_STATUS[status][1].lower().replace(' ','_'),
+ Defect.DEFECT_STATUS[status][1],
+ Q(status=Defect.DEFECT_STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_queryset(self, *args, **kwargs):
+ self.queryset = Defect.objects.all()
+
+ def setup_columns(self, *args, **kwargs):
+
+ self.add_column(title="Select",
+ field_name="Select",
+ hideable=False,
+ static_data_name="select",
+ static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ )
+
+ self.add_column(title="State",
+ hideable=False,
+ orderable=True,
+ static_data_name="state",
+ static_data_template='''{{data.get_publishset_state}}''',
+ )
+
+ self.add_column(title="Name",
+ hideable=False,
+ orderable=True,
+ static_data_name="name",
+ static_data_template='''<a href="{% url 'defect_name' data.name %}">{{data.name}}</a>''',
+ )
+
+ self.add_column(title="Created",
+ hideable=False,
+ orderable=True,
+ static_data_name="date_created",
+ static_data_template='''{{data.get_date_created_text}}''',
+ )
+
+ self.add_column(title="Modified",
+ hideable=False,
+ orderable=True,
+ static_data_name="date_updated",
+ static_data_template='''{{data.get_date_updated_text}}''',
+ )
+
+ self.add_column(title="Status",
+ field_name="status",
+ hideable=False,
+ orderable=True,
+ filter_name="is_status",
+ static_data_name="status",
+ static_data_template='''{{data.get_defect_status_text}}''',
+ )
+
+ self.add_column(title="Summary",
+ field_name="summary",
+ hideable=False,
+ orderable=False,
+ )
+
+ self.add_column(title="Release Version",
+ field_name="release_version",
+ orderable=True,
+ )
+
+ self.add_column(title="CVE List",
+ field_name="get_cve_names",
+ hideable=False,
+ orderable=False,
+ )
+
+class ManageJobsTable(ToasterTable):
+ """Table of All Audits """
+
+ def __init__(self, *args, **kwargs):
+ super(ManageJobsTable, self).__init__(*args, **kwargs)
+ _log("MANAGEJOBSTABLE:INIT|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ self.default_orderby = "-started_on"
+# _log("TRACE:%s" % str(traceback.print_stack()))
+
+ def get_context_data(self, **kwargs):
+ context = super(ManageJobsTable, self).get_context_data(**kwargs)
+ _log("MANAGEJOBSTABLE:GET_CONTEXT_DATA|%s|%s|" % (str(self),json.dumps(kwargs) ))
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_QUERYSET|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ self.queryset = Job.objects.all()
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_FILTERS|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter Jobs by 'Status'")
+ for status in range(len(Job.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Job.STATUS[status][1].lower().replace(' ','_'),
+ Job.STATUS[status][1],
+ Q(status=Job.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_columns(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_COLUMNS|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+
+ # Fetch pid run data
+ pid_table = {}
+ pid_table[0] = 0
+ pid_table['0'] = 0
+ pid_table[''] = 0
+ result_returncode,result_stdout,result_stderr = execute_process(['bin/common/srtool_job.py','--job-pid-status'])
+ for line in result_stdout.splitlines():
+ pid = line[:line.index(':')]
+ value = line[line.index(':')+1:]
+ pid_table[pid] = value
+ self.pid_table = pid_table
+ _log("FOO:%s" % pid_table)
+
+ self.add_column(title="ID",
+ field_name="id",
+ orderable=True,
+ )
+
+ self.add_column(title="Name",
+ field_name="name",
+ orderable=True,
+ )
+
+ self.add_column(title="Status",
+ filter_name="is_status",
+ static_data_name="status",
+ static_data_template='{{data.get_status_text}}',
+ )
+
+ self.add_column(title="Description",
+ field_name="description",
+ hideable=True,
+ )
+
+ self.add_column(title="Command",
+ field_name="command",
+ hideable=True,
+ )
+
+ self.add_column(title="Message",
+ field_name="message",
+ hideable=True,
+ hidden=True,
+ )
+
+ job_pid_template = '''
+ {{data.pid}}
+ '''
+ self.add_column(title="PID",
+ hideable=True,
+ static_data_name="pid",
+ static_data_template=job_pid_template,
+ )
+
+ self.add_column(title="Started_On",
+ field_name="started_on",
+ hideable=True,
+ orderable=True,
+ )
+
+ self.add_column(title="Completed_On",
+ field_name="completed_on",
+ hideable=True,
+ )
+
+ self.add_column(title="Count",
+ field_name="count",
+ hideable=True,
+ hidden=True,
+ )
+ self.add_column(title="Max",
+ field_name="max",
+ hideable=True,
+ hidden=True,
+ )
+ self.add_column(title="Errors",
+ hideable=True,
+ hidden=True,
+ static_data_name="errors",
+ static_data_template='{{data.errors|default:"0"}}',
+ )
+ self.add_column(title="Warnings",
+ hideable=True,
+ hidden=True,
+ static_data_name="warnings",
+ static_data_template='{{data.warnings|default:"0"}}',
+ )
+
+ self.add_column(title="Log_File",
+ field_name="log_file",
+ hideable=True,
+ )
+
+ if UserSafe.is_creator(self.request.user):
+ trash_job_template = '''
+ <span class="glyphicon glyphicon-trash trash-job" x-data="{{data.name}}|{{data.id}}"></span>
+ '''
+ self.add_column(title="Manage",
+ hideable=False,
+ static_data_name="trash_job",
+ static_data_template=trash_job_template,
+ )
+
+ def apply_row_customization(self, row, **kwargs):
+ # data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
+ data = super(ManageJobsTable, self).apply_row_customization(row)
+ for i in range(len(data['rows'])):
+ pid = data['rows'][i]['pid'].strip()
+ status = data['rows'][i]['status'].strip()
+ if (pid in self.pid_table) and ('1' == self.pid_table[pid]):
+ if 'Success' == status:
+ data['rows'][i]['pid'] = '%s <span style="color:green"> (Done)</span>' % (pid)
+ else:
+ data['rows'][i]['pid'] = '%s <span style="color:red"> (Dead)</span>' % (pid)
+ else:
+ data['rows'][i]['pid'] = '%s <span style="color:blue"> (Running)</span>' % (pid)
+ return data
+
diff --git a/lib/srtgui/templates/base.html b/lib/srtgui/templates/base.html
index f8b43194..8967a104 100644
--- a/lib/srtgui/templates/base.html
+++ b/lib/srtgui/templates/base.html
@@ -1,7 +1,6 @@
<!DOCTYPE html>
{% load static %}
-{% load projecttags %}
-{% load project_url_tag %}
+{% load jobtags %}
<html lang="en">
<head>
<title>
@@ -39,6 +38,9 @@
libtoaster.ctx = {
jsUrl : "{% static 'js/' %}",
htmlUrl : "{% static 'html/' %}",
+ recipeTypeAheadUrl: {% url 'xhr_recipetypeahead' as paturl %}{{paturl|json}},
+ xhrJobRequestUrl: "{% url 'xhr_jobrequest' %}",
+ mostRecentJobsUrl: "{% url 'most_recent_jobs' %}",
};
</script>
{% block extraheadcontent %}
@@ -71,7 +73,7 @@
display: none;
position: absolute;
background-color: #f9f9f9;
- min-width: 160px;
+ min-width: 260px;
overflow: auto;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
z-index: 1;
@@ -112,6 +114,11 @@ toggle between hiding and showing the dropdown content */
function myFunction() {
document.getElementById("myDropdown").classList.toggle("show");
}
+/* When the user clicks on the Product,
+toggle between hiding and showing the dropdown content */
+function selectProduct() {
+ document.getElementById("ProductDropdown").classList.toggle("show");
+}
// Close the dropdown if the user clicks outside of it
window.onclick = function(event) {
@@ -158,7 +165,7 @@ window.onclick = function(event) {
<img class="logo" src="{{ srt_logo.1 }}" alt="{{srt_logo.0}}"/>
</a>
{% endif %}
- <a class="brand" href="/">SRTool:Security Response Tool</a>
+ <a class="brand" href="/">SRTool:Security Response Tool {{srt_mode}}</a>
{% if DEBUG %}
<span class="glyphicon glyphicon-info-sign" title="<strong>SRTool version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
{% endif %}
@@ -168,7 +175,7 @@ window.onclick = function(event) {
<ul class="nav navbar-nav">
<li id="navbar-home" {% if request.resolver_match.url_name == 'landing' %}class="active"{% endif %}>
- <a href="{% url 'landing' %}">
+ <a href="/"> <!--href="{ % url 'landing' % }"> -->
<i class="glyphicon glyphicon-tasks"></i>
Home
</a>
@@ -226,15 +233,15 @@ window.onclick = function(event) {
<li id="navbar-export">
{% if request.resolver_match.url_name == 'landing' %}
- <a href="{% url 'report' request.resolver_match.url_name %}"><i class="glyphicon glyphicon-tasks"></i> Export</a>
+ <a href="{% url 'report' request.resolver_match.url_name %}" target="_blank"><i class="glyphicon glyphicon-tasks"></i> Export</a>
{% else %}
- <a id="report_link" href="{% url 'report' request.resolver_match.url_name %}"><i class="glyphicon glyphicon-tasks"></i> Export</a>
+ <a id="report_link" href="{% url 'report' request.resolver_match.url_name %}" target="_blank"><i class="glyphicon glyphicon-tasks"></i> Export</a>
{% endif %}
</li>
<div class="dropdown navbar-right">
{% if user.is_authenticated %}
- <button onclick="myFunction()" class="dropbtn ">Hello '{{user.username}}'</button>
+ <button onclick="myFunction()" class="dropbtn ">Hello '{{user.username}}'</button>{% if user.timezone %}({{user.timezone}}){% endif %}
{% else %}
<button onclick="myFunction()" class="dropbtn ">Hello 'Guest' (Login here)</button>
{% endif %}
@@ -247,9 +254,11 @@ window.onclick = function(event) {
<a href="{% url 'password_reset' %}">Reset password</a>
<a href="{% url 'tbd' %}">Request permissions</a>
-->
+ <a href="{% url 'email_admin' %}">Request admin help</a>
{% else %}
<a href="{% url 'login' %}">Login</a>
<a href="{% url 'signup' %}">Request account</a>
+ <a href="{% url 'email_admin' %}">Request admin help</a>
{% endif %}
</div>
</div>
diff --git a/lib/srtgui/templates/basetable_top.html b/lib/srtgui/templates/basetable_top.html
index 5a9076d2..db9ca7ed 100644
--- a/lib/srtgui/templates/basetable_top.html
+++ b/lib/srtgui/templates/basetable_top.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- component to display a generic table -->
<script>
@@ -140,7 +140,7 @@
if ( !editColTimer ) {
//
- // we don't have a timer active so set one up
+ // we do not have a timer active so set one up
// and clear the action list
//
@@ -173,8 +173,9 @@
<form class="navbar-form navbar-left" id="searchform">
<div class="form-group">
<div class="btn-group">
- <input class="form-control" id="search" name="search" type="text" placeholder="Search {%if object_search_display %}{{object_search_display}}{%else%}{{objectname}}{%endif%}" value="{%if request.GET.search %}{{request.GET.search}}{% endif %}"/>
+ <input class="form-control" id="search" name="search" type="text" placeholder="Search! {%if object_search_display %}{{object_search_display}}{%else%}{{objectname}}{%endif%}" value="{%if request.GET.search %}{{request.GET.search}}{% endif %}"/>
{% if request.GET.search %}<a href="javascript:$('#search').val('');searchform.submit()" tabindex="-1"><span class="remove-search-btn-variables glyphicon glyphicon-remove-circle"></span></a>{%endif%}
+ <span class="glyphicon glyphicon-question-sign get-help" title="Default is an 'and' search; use 'OR' keyword to 'or' the terms"></span>
</div>
</div>
<input type="hidden" name="orderby" value="{{request.GET.orderby}}">
diff --git a/lib/srtgui/templates/create_vulnerability.html b/lib/srtgui/templates/create_vulnerability.html
index f8e56d24..c62aed2d 100644
--- a/lib/srtgui/templates/create_vulnerability.html
+++ b/lib/srtgui/templates/create_vulnerability.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Create New Vulnerability {% endblock %}
diff --git a/lib/srtgui/templates/cve-edit-local.html b/lib/srtgui/templates/cve-edit-local.html
index 7dde6b68..6c8bcdce 100755
--- a/lib/srtgui/templates/cve-edit-local.html
+++ b/lib/srtgui/templates/cve-edit-local.html
@@ -12,7 +12,7 @@
</div>
<div class="col-md-5">
<div class="well" style="width: 400px;">
- <h3>Quick Info: </h3>
+ <h3>EDIT Quick Info: </h3>
<p/>
<dl class="dl-horizontal">
diff --git a/lib/srtgui/templates/cve-nist-local.html b/lib/srtgui/templates/cve-nist-local.html
index 31236a42..3fe16e74 100755
--- a/lib/srtgui/templates/cve-nist-local.html
+++ b/lib/srtgui/templates/cve-nist-local.html
@@ -5,7 +5,7 @@
<div class="col-md-5">
<div>
<h3>Description</h3>
- <textarea rows="9" style="min-width: 100%" class="localblue">{{details.description}}</textarea>
+ <textarea rows="9" readonly style="min-width: 100%" class="localblue">{{details.description}}</textarea>
</div>
<p/>
</div>
@@ -31,7 +31,8 @@
<dd>
{% if object.cve_to_vulnerability.all %}
{% for cv in object.cve_to_vulnerability.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-vulnerability" id="detach_vulnerability_{{cv.vulnerability.id}}" x-data="{{cv.vulnerability.id}}"></span>
{% endfor %}
{% endif %}
<button class="execute btn btn-info" id="submit-create-vulnerability" style="margin-bottom: 5px; margin-top: 5px;">Create Vulnerability</button>
@@ -61,10 +62,10 @@
<dd class="localblue">{{details.cvssV3_vectorString}}</dd>
<dt>Impact Score:</dt>
- <dd class="localblue">{{details.cvssV3_impactScore}}</dd>
+ <dd class="localblue">{{details.cvssV3_impactScore|floatformat:2}}</dd>
<dt>Exploitability Score:</dt>
- <dd class="localblue">{{details.cvssV3_exploitabilityScore}}</dd>
+ <dd class="localblue">{{details.cvssV3_exploitabilityScore|floatformat:2}}</dd>
</dl>
<h3>CVSS Version 3 Metrics:</h3>
<dl class="dl-horizontal">
@@ -103,10 +104,10 @@
<dd class="localblue">{{details.cvssV2_vectorString}}</dd>
<dt>Impact Subscore:</dt>
- <dd class="localblue">{{details.cvssV2_impactScore}}</dd>
+ <dd class="localblue">{{details.cvssV2_impactScore|floatformat:2}}</dd>
<dt>Exploitability Subscore:</dt>
- <dd class="localblue">{{details.cvssV2_exploitabilityScore}}</dd>
+ <dd class="localblue">{{details.cvssV2_exploitabilityScore|floatformat:2}}</dd>
</dl>
<h3>CVSS Version 2 Metrics:</h3>
diff --git a/lib/srtgui/templates/cve-nist.html b/lib/srtgui/templates/cve-nist.html
index ead11ca4..0b8cf3f0 100755
--- a/lib/srtgui/templates/cve-nist.html
+++ b/lib/srtgui/templates/cve-nist.html
@@ -1,13 +1,13 @@
<!-- vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv -->
-{% load projecttags %}
+{% load jobtags %}
<!-- Row: Description and Quick Info -->
<div class="row" style="padding-left: 25px;">
<div class="col-md-6">
<h3>Description</h3>
<div>
- <textarea rows="9" style="min-width: 100%" {{cve_html|get_dict_value:'description'}}>{{details.description}}</textarea>
+ <textarea rows="9" readonly style="min-width: 100%" {{cve_html|get_dict_value:'description'}}>{{details.description}}</textarea>
</div>
<p/>
</div>
@@ -33,7 +33,8 @@
<dd>
{% if object.cve_to_vulnerability.all %}
{% for cv in object.cve_to_vulnerability.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-vulnerability" id="detach_vulnerability_{{cv.vulnerability.id}}" x-data="{{cv.vulnerability.id}}"></span>
{% endfor %}
{% endif %}
<button class="execute btn btn-info" id="submit-create-vulnerability" style="margin-bottom: 5px; margin-top: 5px;">Create Vulnerability</button>
@@ -64,10 +65,10 @@
<dd {{cve_html|get_dict_value:'cvssV3_vectorString'}}>{{details.cvssV3_vectorString}}</dd>
<dt>Impact Score:</dt>
- <dd {{cve_html|get_dict_value:'cvssV3_impactScore'}}>{{details.cvssV3_impactScore}}</dd>
+ <dd {{cve_html|get_dict_value:'cvssV3_impactScore'}}>{{details.cvssV3_impactScore|floatformat:2}}</dd>
<dt>Exploitability Score:</dt>
- <dd {{cve_html|get_dict_value:'cvssV3_exploitabilityScore'}}>{{details.cvssV3_exploitabilityScore}}</dd>
+ <dd {{cve_html|get_dict_value:'cvssV3_exploitabilityScore'}}>{{details.cvssV3_exploitabilityScore|floatformat:2}}</dd>
</dl>
<h3>CVSS Version 3 Metrics:</h3>
<dl class="dl-horizontal">
@@ -107,10 +108,10 @@
<dd {{cve_html|get_dict_value:'cvssV2_vectorString'}}>{{details.cvssV2_vectorString}}</dd>
<dt>Impact Subscore:</dt>
- <dd {{cve_html|get_dict_value:'cvssV2_impactScore'}}>{{details.cvssV2_impactScore}}</dd>
+ <dd {{cve_html|get_dict_value:'cvssV2_impactScore'}}>{{details.cvssV2_impactScore|floatformat:2}}</dd>
<dt>Exploitability Subscore:</dt>
- <dd {{cve_html|get_dict_value:'cvssV2_exploitabilityScore'}}>{{details.cvssV2_exploitabilityScore}}</dd>
+ <dd {{cve_html|get_dict_value:'cvssV2_exploitabilityScore'}}>{{details.cvssV2_exploitabilityScore|floatformat:2}}</dd>
</dl>
<h3>CVSS Version 2 Metrics:</h3>
@@ -231,6 +232,7 @@
{% for cpe in details.get_cpe_list %}
{% if not cpe %}
{% elif not cpe.0 %}
+ No CPE configurations
{% elif '[config' in cpe.0 %}
<div style="padding-left: 25px;">
<h4>&bull; Configuration </h3>
diff --git a/lib/srtgui/templates/cve.html b/lib/srtgui/templates/cve.html
index ecbcf39e..e3fe0ca0 100644
--- a/lib/srtgui/templates/cve.html
+++ b/lib/srtgui/templates/cve.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
@@ -29,8 +29,11 @@
<div class="col-md-12">
<div class="page-header build-data">
<span id="cve-name-container">
- <span id="cve-name" class="srt_h1">{{object.name}} {% if not object.public %} <font color="red">[PRIVATE]</font> {% endif %}</span>
- {% if object.is_local and request.user.is_contributor %}<span class="glyphicon glyphicon-edit" id="cve-change-form-toggle"></span>{% endif %}
+ &nbsp;&nbsp;
+ <span id="cve-name" class="srt_h1">{{object.name}}
+ {% if object.is_local and request.user.is_contributor %}&nbsp;&nbsp;<span class="glyphicon glyphicon-edit" id="cve-change-form-toggle"></span>{% endif %}
+ {% if not object.public %}&nbsp;&nbsp;<font color="red" >[PRIVATE]</font> {% endif %}
+ </span>
{% if request.user.is_creator %}
<span style="padding-left:30px;"><button id="select-quickedit" class="btn btn-default" type="button">Edit Status...</button></span>
<span style="padding-left:30px;"><button id="select-notification" class="btn btn-default" type="button">Create Notification ...</button></span>
@@ -40,6 +43,10 @@
{% else %}
<span style="padding-left:30px;"><button id="select-cveedit" class="btn btn-default" type="button">Edit CVE Data ...</button></span>
{% endif %}
+ <span style="padding-left:30px;"><button id="submit-delete-cve" class="btn btn-default" type="button">Delete CVE</button></span>
+ {% if object.is_local %}
+ <span style="padding-left:30px;"><button id="select-merge-cve" class="btn btn-default" type="button">Merge CVE</button></span>
+ {% endif %}
{% endif %}
</span>
{% if not is_edit %}
@@ -58,13 +65,26 @@
<!-- include SRtool Metadata/Notification -->
{% include "srtool_metadata_include.html" with default_category="CVE" default_url="cve" %}
+<!-- CVE Merge -->
+{% if object.is_local %}
+ <div id="details-cve-merge" style="display:none;padding-left:25px;">
+ <fieldset style="border: 1px solid Blue; background-color:LightBlue; padding-left: 25px; padding-right: 20px;"> <!-- class="fieldset-auto-width" -->
+ <p><p>
+ <button class="btn btn-primary btn-lg" id="submit-merge-cve"> Submit Merge </button>
+ <p>Target CVE: <input type="text" placeholder="CVE Number" id="target-cve-name" size="40" ></p>
+ </fieldset>
+ <p>
+ <p>
+ </div>
+{% endif %}
+
<div class="row">
<div class="col-md-12 tabbable">
<ul class="nav nav-tabs">
- {% for details,state,id,cve_html in cve_list_table %}
+ {% for details,state,id,cve_html,ds_id in cve_list_table %}
<li class="{{state}}">
<a href="#{{id}}" data-toggle="tab">
- {{id}}
+ {{id}}{% if request.user.is_admin %}({{ds_id}}){% endif %}
<span class="glyphicon glyphicon-question-sign get-help" title="{{id}} CVE data"></span>
</a>
</li>
@@ -72,7 +92,7 @@
</ul>
<div class="tab-content">
- {% for details,state,id,cve_html in cve_list_table %}
+ {% for details,state,id,cve_html,ds_id in cve_list_table %}
<div class="tab-pane {{state}}" id="{{id}}">
{% if 'Local' == id %}
@@ -94,6 +114,73 @@
</form>{% csrf_token %}
{% endif %}
+{% if not object.public %}
+ {% if request.user.is_creator %}
+
+ <div class="row" style="padding-left: 25px;">
+ <h3>User Access
+ {% if request.user.is_creator %}
+ <button id="select-adduseraccess" class="btn btn-default" type="button">Add user access ...</button>
+ {% endif %}
+ </h3>
+
+ <div id="details-adduseraccess" style="padding-left: 50px; display:none;">
+ <p><p>
+ <button class="execute" id="submit-adduseraccess"> Submit </button>
+ <div class="row">
+ <p>
+ <div id="all-users" class="scrolling" style="width: 300px;">
+ {% for user in users %}
+ <div class="checkbox">
+ <label>
+ <input class="checkbox-users" name="access-users" value="{{user.pk}}" type="checkbox">{{user.name}}
+ </label>
+ <p>
+ </div>
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>User</th>
+ <th>Manage</th>
+ </tr>
+ </thead>
+
+ {% if object.public %}
+ <tr>
+ <td>All</td>
+ <td>
+ </td>
+ </tr>
+ {% endif %}
+
+ {% if object.cve_users.all %}
+ {% for u in object.cve_users.all %}
+ <tr>
+ <td>{{ u.user.username }}</td>
+ <td>
+ <span id="attachment_entry_{{u.id}}" class="js-config-var-name"></span>
+ <span class="glyphicon glyphicon-trash trash-useraccess" id="attachment_trash_{{u.id}}" x-data="{{u.id}}"></span>
+ </td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ {% if not object.public %}
+ <tr>
+ <td>No users found</td>
+ </tr>
+ {% endif %}
+ {% endif %}
+ </table>
+
+ </div>
+ {% endif %}
+{% endif %}
+
<div class="row" style="padding-left: 25px;">
<h3>History</h3>
@@ -106,8 +193,8 @@
</tr>
</thead>
- {% if cve_list_table.1.0.cve_history.all %}
- {% for c in cve_list_table.1.0.cve_history.all %}
+ {% if object.cve_history.all %}
+ {% for c in object.cve_history.all %}
<tr>
<td>{{ c.comment }}</td>
<td>{{ c.date }}</td>
@@ -123,10 +210,13 @@
</div>
<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
<script>
var selected_quickedit=false;
var selected_notifyedit=false;
+ var selected_adduseraccess=false;
+ var selected_mergecve=false;
/* CVE Name change support */
var cveNameForm = $("#cve-name-change-form");
@@ -149,8 +239,10 @@
}
// reload the page with the updated tables
- if (('new_name' in data) && ("" != data.new_name)) {
- var new_url = "{% url 'cve' object.name %}".replace("{{object.name}}",data.new_name);
+ if (('new_name' in data) && (0 == data.new_name.indexOf("url:"))) {
+ window.location.replace(data.new_name.replace("url:",""));
+ } else if (('new_name' in data) && ("" != data.new_name)) {
+ var new_url = "{% url 'cve' 123 %}".replace("123",data.new_name);
window.location.replace(new_url);
} else {
location.reload(true);
@@ -186,7 +278,7 @@
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -199,18 +291,41 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim();
var private_note=$('#text-private-note').val().trim();
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
+ var public=$('#select-public-state').val();
var publish_state=$('#select-publish-state').val();
var publish_date=$('#select-publish-date').val();
+ var acknowledge_date=$('#text-acknowledge-date').val();
+ var affected_components=$('#text-affected-components').val();
+ /* Double check any public status changes */
+ {% if object.public %}
+ if ("0" == public) {
+ if (! confirm("Are you sure you want to make this CVE and all its children PRIVATE?")) {
+ return
+ }
+ }
+ {% endif %}
+ {% if not object.public %}
+ if ("1" == public) {
+ if (! confirm("Are you sure you want to make this CVE and all its children as PUBLIC?")) {
+ return
+ }
+ }
+ {% endif %}
postCommitAjaxRequest({
"action" : 'submit-quickedit',
"priority" : priority,
"status" : status,
+ "public" : public,
"note" : note,
"private_note" : private_note,
+ "tags" : tags,
"publish_state" : publish_state,
"publish_date" : publish_date,
+ "acknowledge_date" : acknowledge_date,
+ "affected_components" : affected_components
});
});
@@ -291,8 +406,84 @@
"vul_name" : $("#vulnerability_name").val(),
});
});
+ $('.detach-vulnerability').click(function() {
+ var result = confirm("Are you sure you want to detach this Vulnerability?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-detach-vulnerability',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+
+ $("#submit-delete-cve").click(function(){
+ var result = confirm("Are you sure you want to permanently delete '{{object.name}}' and all its related records?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-delete-cve'
+ });
+ }
+ });
+
+ $('#select-merge-cve').click(function(){
+ if (selected_mergecve) {
+ selected_mergecve=false;
+ $("#details-cve-merge").slideUp();
+ document.getElementById("select-merge-cve").innerText = "Merge CVE";
+ $("#select-merge-cve").removeClass("blueborder");
+ } else {
+ selected_mergecve=true;
+ $("#details-cve-merge").slideDown();
+ document.getElementById("select-merge-cve").innerText = "Close merge CVE";
+ $("#select-merge-cve").addClass("blueborder");
+ document.getElementById("target-cve-name").focus();
+ }
+ });
+ $("#submit-merge-cve").click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-merge-cve',
+ "cve_merge_name" : $("#target-cve-name").val(),
+ });
+ });
+ $('#select-adduseraccess').click(function(){
+ if (selected_adduseraccess) {
+ selected_adduseraccess=false;
+ $("#details-adduseraccess").slideUp();
+ } else {
+ selected_adduseraccess=true;
+ $("#details-adduseraccess").slideDown();
+ }
+ });
+
+ $('#submit-adduseraccess').click(function(){
+ var user_list=[];
+ $('input[name="access-users"]').each(function(){
+ if ($(this).is(':checked')) {
+ user_list.push($(this).prop('value'));
+ }
+ });
+ user_list = user_list.join(",");
+ if ("" == user_list) {
+ alert("No users were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'submit-adduseraccess',
+ "users" : user_list,
+ });
+ });
+ $('.trash-useraccess').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashuseraccess',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ })
/* Set the report link */
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
diff --git a/lib/srtgui/templates/cve.html_orig b/lib/srtgui/templates/cve.html_orig
index e5ec7eff..f674c1d3 100755
--- a/lib/srtgui/templates/cve.html_orig
+++ b/lib/srtgui/templates/cve.html_orig
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{cve_list_table.0.0.name}} - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/cves-select-toastertable.html b/lib/srtgui/templates/cves-select-toastertable.html
index 38828359..d1b89f34 100644
--- a/lib/srtgui/templates/cves-select-toastertable.html
+++ b/lib/srtgui/templates/cves-select-toastertable.html
@@ -1,4 +1,7 @@
{% extends 'base.html' %}
+{% load jobtags %}
+{% load humanize %}
+
{% load static %}
@@ -6,8 +9,8 @@
<link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
<link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
<link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
- <script src="{% static 'js/jquery-ui.min.js' %}">
- </script>
+ <script src="{% static 'js/jquery-ui.min.js' %}"></script>
+ <script src="{% static 'js/typeahead_affected_components.js' %}"></script>
<script>
// Toggle the row checkbox if any column element is clicked
function toggle_select(toggle_id) {
@@ -29,7 +32,7 @@
/* Define the columns that floats next to each other */
.column1 {
float: left;
- width: 280px;
+ width: 380px;
padding: 10px;
}
.column2 {
@@ -48,7 +51,7 @@
{% endblock %}
-{% block title %} Select CVE's - SRTool {% endblock %}
+{% block title %} Select CVEs - SRTool {% endblock %}
{% block pagecontent %}
@@ -58,8 +61,8 @@
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
<li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li><a href="{% url 'triage_cves' %}">Triage CVE's</a></li><span class="divider">&rarr;</span>
- <li>Select CVE's</li>
+ <li><a href="{% url 'triage_cves' %}">Triage CVEs</a></li><span class="divider">&rarr;</span>
+ <li>Select CVEs</li>
</ul>
</div>
</div>
@@ -79,17 +82,18 @@
</div>
-->
- <div id="details-isvulnerable" style="display:none;">
+ <div id="details-isvulnerable-investigate" style="display:none;">
<p><p>
- <button class="execute" id="submit-isvulnerable"> Submit Vulnerable CVE's </button>
+ <button class="execute" id="submit-isvulnerable-investigate"> Submit Vulnerable CVEs </button>
<div class="row">
<div class="column1">
<p><b><label id="products_count">Optional: Add Vulnerable Products (0):</label></b>
- <div id="all-products" class="scrolling" style="width: 250px;">
- {% for product in products %}
+ <div id="all-products" class="scrolling" style="width: 350px;">
+ {% for product in products|dictsort:"order" %}
<div class="checkbox">
<label>
<input class="checkbox-products" name="{{product.pk}}" type="checkbox">{{product.long_name}}
+ &nbsp;({{product.defect_tags|get_strdict_value:"found_version"}})
</label>
<p>
</div>
@@ -100,10 +104,13 @@
<p><b><label id="priority">Set Priority:</label></b>
<div id="priority-list" class="scrolling" style="width: 120px;">
<div class="checkbox"> <label>
+ <input type="radio" name="priority" value="99" checked="yes"> Auto <span class="glyphicon glyphicon-question-sign get-help" title="CVSSv3, else CVSSv2, else Medium"></span>
+ </label><p></div>
+ <div class="checkbox"> <label>
<input type="radio" name="priority" value="4" type="checkbox"> High
</label><p></div>
<div class="checkbox"> <label>
- <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> Medium
+ <input type="radio" name="priority" value="3" type="checkbox"> Medium
</label><p></div>
<div class="checkbox"> <label>
<input type="radio" name="priority" value="2" type="checkbox"> Low
@@ -127,11 +134,23 @@
</div>
<p><input id="create_defects" type="checkbox"> Create Defect(s) </input>
- Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. Defect summary: "Security Advisory [- REASON -] CVE-2020-1234")
&nbsp;&nbsp;<input id="create_notifications" type="checkbox" checked>&nbsp;Create Notifications</input>
<p><b><big>Reason: </big></b>
<input type="text" id="input-isvulnerable-reason" name="reason" size="40">&nbsp;&nbsp;<input id="markPublishIs" type="checkbox">&nbsp;Mark for Publish</input>&nbsp;&nbsp;<input id="markFor" type="checkbox"> Add Keywords to 'For' </input>
+ <p><b><big>Affected Components: </big></b>
+ <input type="text" id="input-isvulnerable-components" name="components" size="40" autocomplete="off"> (e.g. space-separated list of packages, recipes, sub-system list, applications)
+
+ <div id="published-date-list">
+ <p><i>Acknowledge Date</i> =
+ <select name="Acknowledge_Date" id="select-acknowledge-date">
+ <option value="today" selected>Today</option>
+ <option value="publish">CVE's original release date</option>
+ <option value="update">CVE's last revised date</option>
+ <option value="no_change">No change</option>
+ </select>
+ </div>
<div id="group_vulnerability" style="border: 1px solid #dddddd; padding: 0 5px; width: 400px; margin-bottom: 10px; margin-top: 0px; " >
<div class="checkbox"> <label>
@@ -151,15 +170,15 @@
<div id="details-notvulnerable" style="display:none;">
<p><p>
- <button class="execute" id="submit-notvulnerable"> Submit Not-vulnerable CVE's </button>
- <input id="notship" type="checkbox"> "We do not ship ..." </input>
+ <button class="execute" id="submit-notvulnerable"> Submit Not-vulnerable CVEs </button>
+ <input id="notship" type="checkbox"> "{{doesnotimpact_text|safe}}" </input>
<p><b><big>Reason: </big></b>
<p><input type="text" id="input-notvulnerable-reason" name="reason" size="40">&nbsp;&nbsp;<input id="markPublishNot" type="checkbox">&nbsp;Mark for Publish</input>&nbsp;&nbsp;<input id="markAgainst" type="checkbox"> Add Keywords to 'Against' </input>
</div>
<div id="details-investigate" style="display:none;">
<p><p>
- <button class="execute" id="submit-investigate"> Submit Investigation CVE's </button>
+ <button class="execute" id="submit-investigate"> Submit Investigation CVEs </button>
</div>
<div id="details-other" style="display:none;">
@@ -188,7 +207,7 @@
</div>
<!-- Javascript support -->
- <script>
+ <script type="text/javascript">
//# sourceURL=somename.js
@@ -198,9 +217,19 @@
var selected_investigate=false;
var selected_other=false;
var cve_total=0;
+ var lastChecked = null;
+ var $selectboxes = null;
$(document).ready(function() {
+ // Init the recipe typeahead
+ try {
+ autocompInit();
+ } catch (e) {
+ document.write("Sorry, An error has occurred initiating the autocomplete feature");
+ console.warn(e);
+ }
+
function onCommitAjaxSuccess(data, textstatus) {
if (window.console && window.console.log) {
console.log("XHR returned:", data, "(" + textstatus + ")");
@@ -251,13 +280,11 @@
}
document.getElementById("unselect-these").innerText = "Un-select "+cve_checked_count+" checked";
if (0 == cve_checked_count) {
- //$("#submit-isvulnerable").attr("disabled","disabled");
document.getElementById("submit-notvulnerable").disabled = true;
- document.getElementById("submit-isvulnerable").disabled = true;
+ document.getElementById("submit-isvulnerable-investigate").disabled = true;
} else {
- //$("#submit-isvulnerable").removeAttr("disabled");
document.getElementById("submit-notvulnerable").disabled = false;
- document.getElementById("submit-isvulnerable").disabled = false;
+ document.getElementById("submit-isvulnerable-investigate").disabled = false;
}
}
@@ -282,19 +309,20 @@
$("#select-these").removeAttr("disabled");
$("#unselect-these").removeAttr("disabled");
$("#select-notvulnerable").removeAttr("disabled");
- $("#select-investigate").removeAttr("disabled");
$("#select-other").removeAttr("disabled");
- $("#details-isvulnerable").slideUp();
+ $("#details-isvulnerable-investigate").slideUp();
} else {
selected_isvulnerable=true;
$("#select-these").attr("disabled","disabled");
$("#unselect-these").attr("disabled","disabled");
$("#select-notvulnerable").attr("disabled","disabled");
- $("#select-investigate").attr("disabled","disabled");
$("#select-other").attr("disabled","disabled");
- $("#input-isvulnerable-reason").val($("#search-input-selectcvetable").val())
+ /* preset reason/components from search */
+ /* $("#input-isvulnerable-reason").val($("#search-input-selectcvetable").val()) */
+ /* $("#input-isvulnerable-components").val($("#search-input-selectcvetable").val()) */
update_vulnerable_status();
- $("#details-isvulnerable").slideDown();
+ document.getElementById("submit-isvulnerable-investigate").innerText = " Submit Vulnerable CVEs ";
+ $("#details-isvulnerable-investigate").slideDown();
}
});
@@ -330,7 +358,7 @@
$("#select-isvulnerable").removeAttr("disabled");
$("#select-notvulnerable").removeAttr("disabled");
$("#select-other").removeAttr("disabled");
- $("#details-investigate").slideUp();
+ $("#details-isvulnerable-investigate").slideUp();
} else {
selected_investigate=true;
$("#select-these").attr("disabled","disabled");
@@ -339,7 +367,8 @@
$("#select-notvulnerable").attr("disabled","disabled");
$("#select-other").attr("disabled","disabled");
update_vulnerable_status();
- $("#details-investigate").slideDown();
+ document.getElementById("submit-isvulnerable-investigate").innerText = " Submit Investigation CVEs ";
+ $("#details-isvulnerable-investigate").slideDown();
}
});
@@ -367,19 +396,19 @@
$('#notship').click(function(){
- not_ship = "We do not ship '";
+ not_ship = "{{doesnotimpact_text|safe}}";
var reason = $("#input-notvulnerable-reason").val().trim();
reason = reason.replace(not_ship,'');
- if ("'" == reason.slice(-1)) {
- reason = reason.slice(0,-1)
- }
if ($('#notship').is(':checked')) {
- reason = not_ship + reason + "'";
+ reason = not_ship + reason;
}
$("#input-notvulnerable-reason").val(reason);
});
- $('#submit-isvulnerable').click(function(){
+ $('#submit-isvulnerable-investigate').click(function(){
+
+ /* Disable the button so the we do not get double clicks */
+ $("#submit-isvulnerable-investigate").attr("disabled","disabled");
var cve_list=[];
$('#selectcvetable input:checked').each(function(){
@@ -387,7 +416,7 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
var product_list=[];
@@ -409,9 +438,15 @@
$('#group_vulnerability input:checked').each(function(){
group_vulnerability = $(this).attr('value');
});
+ if (selected_isvulnerable) {
+ action = 'submit-isvulnerable';
+ } else {
+ action = 'submit-investigate';
+ }
postCommitAjaxRequest({
- "action" : 'submit-isvulnerable',
+ "action" : action,
"reason" : $("#input-isvulnerable-reason").val(),
+ "affected_components" : $("#input-isvulnerable-components").val(),
"defect_reason" : $("#input-defect-reason").val(),
"cves" : cve_list,
"products": product_list,
@@ -420,6 +455,7 @@
"pub" : $('#markPublishIs').is(':checked') ? "yes" : "no",
"for" : $('#markFor').is(':checked') ? "yes" : "no",
"mk_d" : $('#create_defects').is(':checked') ? "yes" : "no",
+ "acknowledge_date" : $('#select-acknowledge-date').val(),
"vul_group": group_vulnerability,
"vul_name": $("#vulnerability_name").val(),
"notify" : $('#create_notifications').is(':checked') ? "yes" : "no",
@@ -435,36 +471,20 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
+ reason = $("#input-notvulnerable-reason").val()
+ reason = reason.replace(/:$/, "");
postCommitAjaxRequest({
"action" : 'submit-notvulnerable',
- "reason" : $("#input-notvulnerable-reason").val(),
+ "reason" : reason,
"cves" : cve_list,
"pub" : $('#markPublishNot').is(':checked') ? "yes" : "no",
"against" : $('#markAgainst').is(':checked') ? "yes" : "no",
});
});
- $('#submit-investigate').click(function(){
- var cve_list=[];
- $('#selectcvetable input').each(function(){
- if ($(this).is(':checked')) {
- cve_list.push($(this).prop('name'));
- }
- });
- cve_list = cve_list.join(",");
- if ("" == cve_list) {
- alert("No CVE's were selected");
- return;
- }
- postCommitAjaxRequest({
- "action" : 'submit-investigate',
- "cves" : cve_list,
- });
- });
-
$('#submit-other').click(function(){
var cve_list=[];
$('#selectcvetable input').each(function(){
@@ -474,7 +494,7 @@
});
cve_list = cve_list.join(",");
if ("" == cve_list) {
- alert("No CVE's were selected");
+ alert("No CVEs were selected");
return;
}
var status=$('#select-status-state').val();
@@ -501,11 +521,11 @@
var titleElt = $("[data-role='page-title']");
tableElt.on("table-done", function (e, total, tableParams) {
- var title = "Triage CVE's";
+ var title = "Triage CVEs";
if (tableParams.search || tableParams.filter) {
if (total === 0) {
- title = "No CVE's found";
+ title = "No CVEs found";
}
else if (total > 0) {
title = total + " CVE" + (total > 1 ? "'s" : '') + " found";
@@ -526,6 +546,21 @@
});
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+ /* Enable shift-select ranges */
+ $selectboxes = $('.selectbox');
+ $selectboxes.click(function(e) {
+ if (!lastChecked) {
+ lastChecked = this;
+ return;
+ }
+ if (e.shiftKey) {
+ var start = $selectboxes.index(this);
+ var end = $selectboxes.index(lastChecked);
+ $selectboxes.slice(Math.min(start,end), Math.max(start,end)+ 1).prop('checked', lastChecked.checked);
+ }
+ lastChecked = this;
+ });
+
});
});
</script>
diff --git a/lib/srtgui/templates/cves-toastertable.html b/lib/srtgui/templates/cves-toastertable.html
index 45dce261..1f6548f5 100644
--- a/lib/srtgui/templates/cves-toastertable.html
+++ b/lib/srtgui/templates/cves-toastertable.html
@@ -47,7 +47,7 @@
$("#table-loading").slideDown();
tableElt.on("table-done", function (e, total, tableParams) {
- var title = "All CVE's";
+ var title = "All CVE's (" + total + ")";
if (tableParams.search || tableParams.filter) {
if (total === 0) {
diff --git a/lib/srtgui/templates/date-time-test.html b/lib/srtgui/templates/date-time-test.html
new file mode 100755
index 00000000..d123f79c
--- /dev/null
+++ b/lib/srtgui/templates/date-time-test.html
@@ -0,0 +1,88 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} DateTime Test {% endblock %}
+{% block pagecontent %}
+
+ <div class="col-md-5">
+ <b>DateTime Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>UTC Current Time:</dt>
+ <dd>{{current_utc}}</dd>
+ <dt>Alameda Current Time:</dt>
+ <dd>{{current_ala}}</dd>
+ <dt>Your Current Time:</dt>
+ <dd>{{current_local}}</dd>
+ <dt>Datetime Shift</dt>
+ <dd>"2021-05-10 13:38:22"|shift_timezone:"{{user.get_timezone_offset}}" => {{ "2021-05-10 13:38:22"|shift_timezone:user.get_timezone_offset }},{{user.get_timezone_offset}}</dd>
+ </dl>
+ </div>
+ </div>
+ <p><i>Timezone</i> = {{user_timezone}}</p>
+
+ <fieldset style="border: 1px solid Blue; background-color:LightBlue; padding-left: 25px; padding-right: 20px;">
+ <button class="execute btn btn-primary btn-lg" id="submit-timezone"> Submit Changes </button>
+
+<select name="timezone" id="select-timezone">
+
+ {% for tz in timezone_list %}
+
+ <option value="{{tz}}" {% if user_timezone == tz %}selected{% endif %}>{{tz}}</option>
+
+ {% endfor %}
+
+</select>
+</fieldset>
+
+<script type="text/javascript">
+
+ $(document).ready(function() {
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_date_time_test' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+ $('#submit-timezone').click(function(){
+ var timezone=$('#select-timezone').val();
+
+ /* Double check any public status changes */
+ postCommitAjaxRequest({
+ "action" : 'submit-timezone',
+ "timezone" : timezone,
+ });
+ });
+
+ });
+
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/defect.html b/lib/srtgui/templates/defect.html
index aa13b0dd..b1ccae31 100644
--- a/lib/srtgui/templates/defect.html
+++ b/lib/srtgui/templates/defect.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
@@ -38,14 +38,33 @@
<dt>URL:</dt>
<dd><a href="{{object.url}}" id="dataid_{{object.id}}" target="_blank">{{object.url}}</a></dd>
- <dt>Priority:</dt>
+ <dt>Defect Priority:</dt>
+ <dd>{{object.get_defect_priority_text}}</dd>
+
+ <dt>Defect Status:</dt>
+ <dd>{{object.get_defect_status_text}}</dd>
+
+ <dt>Defect Resolution:</dt>
+ <dd>{{object.get_defect_resolution_text}}</dd>
+
+ <dt>Duplicate Of:</dt>
+ <dd>
+ {% if object.duplicate_of %}
+ <a href="{% url 'defect_name' object.duplicate_of %}">{{object.duplicate_of}}</a>&nbsp;(<a href="{{SRTOOL_DEFECT_URLBASE}}/{{object.duplicate_of}}">{{SRTOOL_DEFECT_URLBASE}}/{{object.duplicate_of}}</a>)
+ {% endif %}
+ </dd>
+
+ <dt>SRTool Priority:</dt>
<dd>{{object.get_priority_text}}</dd>
- <dt>Status:</dt>
+ <dt>SRTool Status:</dt>
<dd>{{object.get_status_text}}</dd>
- <dt>Resolution:</dt>
- <dd>{{object.get_resolution_text}}</dd>
+ <dt>SRTool Outcome:</dt>
+ <dd>{{object.get_outcome_text}}</dd>
+
+ <dt>Affected Components:</dt>
+ <dd>{{object.packages}}</dd>
<dt>Publish:</dt>
<dd>{{object.publish}}</dd>
@@ -74,6 +93,36 @@
</div>
</div>
+<div class="row" style="padding-left: 25px;">
+ <h3>History</h3>
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Comment</th>
+ <th>Date</th>
+ <th>Author</th>
+ </tr>
+ </thead>
+ {% if object.defect_history.all %}
+ {% for c in object.defect_history.all %}
+ <tr>
+ <td>{{ c.comment }}</td>
+ <td>{{ c.date }}</td>
+ <td>{{ c.author }}</td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No history found</td>
+ </tr>
+ {% endif %}
+ </table>
+</div>
+
+<HR ALIGN="center" WIDTH="100%">
+Updated={{object.srt_updated}}
+
+
<!-- Javascript support -->
<script>
diff --git a/lib/srtgui/templates/detail_search_header.html b/lib/srtgui/templates/detail_search_header.html
index 7a986590..6c61996a 100644
--- a/lib/srtgui/templates/detail_search_header.html
+++ b/lib/srtgui/templates/detail_search_header.html
@@ -30,12 +30,13 @@ $(document).ready(function() {
<div class="form-group">
<div class="btn-group">
- <input id="search" class="form-control" type="text" placeholder="Search {{search_what}}" name="search" value="{% if request.GET.search %}{{request.GET.search}}{% endif %}">
+ <input id="search" class="form-control" type="text" placeholder="@Search {{search_what}}" name="search" value="{% if request.GET.search %}{{request.GET.search}}{% endif %}">
<input type="hidden" value="name:+" name="orderby">
<input type="hidden" value="l" name="page">
{% if request.GET.search %}
<span class="remove-search-btn-detail-search search-clear glyphicon glyphicon-remove-circle"></span>
{% endif %}
+ <span class="glyphicon glyphicon-question-sign get-help" title="Default is an 'and' search; use 'OR' keyword to 'or' the terms"></span>
</div>
</div>
<button type="submit" class="btn btn-default">Search</button>
diff --git a/lib/srtgui/templates/detail_sorted_header.html b/lib/srtgui/templates/detail_sorted_header.html
index 4434df43..6554df2e 100644
--- a/lib/srtgui/templates/detail_sorted_header.html
+++ b/lib/srtgui/templates/detail_sorted_header.html
@@ -4,7 +4,7 @@
Must be followed by <tbody>...</tbody></table>.
Requires tablecols setup column fields dclass, clclass, qhelp, orderfield.
{% endcomment %}
-{% load projecttags %}
+{% load jobtags %}
{# <table class="table table-bordered table-hover tablesorter" id="otable"> #}
<thead>
<!-- Table header row; generated from "tablecols" entry in the context dict -->
diff --git a/lib/srtgui/templates/email_admin.html b/lib/srtgui/templates/email_admin.html
new file mode 100755
index 00000000..7c5f0fda
--- /dev/null
+++ b/lib/srtgui/templates/email_admin.html
@@ -0,0 +1,70 @@
+{% extends "base.html" %}
+
+{% load jobtags %}
+
+{% block title %} Admin Help - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Admin Help</li>
+ </ul>
+ </div>
+</div>
+
+<!-- Begin container -->
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header build-data">
+ <h1>Admin Help</h1>
+ Send an email to the SRTool Admin staff for assistance.
+ </div>
+ </div>
+</div>
+
+<form method="post">
+{% csrf_token %}
+
+ {% if error_message %} <h3 style="color:red">{{error_message}}</h3><br> {% endif %}
+
+ <button class="btn btn-primary btn-lg" name="action" value="submit">Submit Request</button>
+ <button class="btn btn-primary btn-lg" name="action" value="cancel">Cancel</button>
+ <br>
+
+ <h3>Request:
+ <select name="request-type">
+ <option value="Request account verification">Request account verification</option>
+ <option value="Request password reset">Request password reset</option>
+ <option value="Request new group name">Request new group name</option>
+ <option value="Request new repo source name">Request new repo source name</option>
+ <option value="Request 'Contributor' status">Request 'Contributor' status</option>
+ <option value="Request 'Creator' status">Request 'Creator' status</option>
+ <option value="Request general Help">Request general help</option>
+ </select>
+ </h2><br>
+
+ <p>Your name: <input type="text" placeholder="your name" name="user-name" size="80" value="{% if request.user.user_fullname %}{{request.user.user_fullname}}{% endif %}"></p>
+ <p>Your email: <input type="text" placeholder="your name" name="user-email" size="80" value="{% if request.user.user_fullname %}{{request.user.email}}{% endif %}"></p>
+
+ <h3>Message text:</h2>
+ <textarea rows="9" style="min-width: 50%" name="message"></textarea>
+
+<hr>
+
+</form>
+
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function() {
+
+ });
+</script>
+
+
+{% endblock %}
diff --git a/lib/srtgui/templates/email_success.html b/lib/srtgui/templates/email_success.html
new file mode 100755
index 00000000..baa44163
--- /dev/null
+++ b/lib/srtgui/templates/email_success.html
@@ -0,0 +1,49 @@
+{% extends "base.html" %}
+
+{% load jobtags %}
+
+{% block title %} Admin Help - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Admin Help</li>
+ </ul>
+ </div>
+</div>
+
+<!-- Begin container -->
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header build-data">
+ <h1>Admin Success</h1>
+ Email sent to the SRTool Admin staff for assistance.
+ </div>
+ </div>
+</div>
+
+<form method="post">
+{% csrf_token %}
+
+ <h3> Email successfully sent!</h3>
+ <br>
+ <button class="btn btn-primary btn-lg" name="action" value="close">Close</button>
+ <br>
+
+</form>
+
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function() {
+
+ });
+</script>
+
+
+{% endblock %}
diff --git a/lib/srtgui/templates/errorlog-toastertable.html b/lib/srtgui/templates/errorlog-toastertable.html
new file mode 100755
index 00000000..91cf8d55
--- /dev/null
+++ b/lib/srtgui/templates/errorlog-toastertable.html
@@ -0,0 +1,142 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Error Logs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li>Error Logs</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="delete-notification" class="btn btn-default" type="button">Delete Selected</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script type="text/javascript">
+ var selected_notifyedit=false;
+ var lastChecked = null;
+ var $selectboxes = null;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Pending Notifications";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Error Logs found";
+ }
+ else if (total > 0) {
+ title = total + " Error" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Enable shift-select ranges */
+ $selectboxes = $('.selectbox');
+ $selectboxes.click(function(e) {
+ if (!lastChecked) {
+ lastChecked = this;
+ return;
+ }
+ if (e.shiftKey) {
+ var start = $selectboxes.index(this);
+ var end = $selectboxes.index(lastChecked);
+ $selectboxes.slice(Math.min(start,end), Math.max(start,end)+ 1).prop('checked', lastChecked.checked);
+ }
+ lastChecked = this;
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_errorlogs'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#delete-notification').click(function(){
+ log_list = [];
+ $('#errorlogstable input').each(function(){
+ if ($(this).is(':checked')) {
+ log_list.push($(this).prop('value'));
+ }
+ });
+ log_list = log_list.join(",");
+ if ("" == log_list) {
+ alert("No Error Logs were selected");
+ return;
+ }
+ if ("" != log_list) {
+ postCommitAjaxRequest({
+ "action" : 'delete-errorlogs',
+ "log_list" : log_list,
+ });
+ }
+ });
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/export.html b/lib/srtgui/templates/export.html
index 8b2309ca..82f48016 100644
--- a/lib/srtgui/templates/export.html
+++ b/lib/srtgui/templates/export.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Export Report {% endblock %}
diff --git a/lib/srtgui/templates/filtersnippet.html b/lib/srtgui/templates/filtersnippet.html
index 1286ca31..eb835c1a 100644
--- a/lib/srtgui/templates/filtersnippet.html
+++ b/lib/srtgui/templates/filtersnippet.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- '{{f.class}}' filter -->
{% with f.class as key %}
diff --git a/lib/srtgui/templates/generic-toastertable-page.html b/lib/srtgui/templates/generic-toastertable-page.html
index b3eabe1a..cecfacc9 100644
--- a/lib/srtgui/templates/generic-toastertable-page.html
+++ b/lib/srtgui/templates/generic-toastertable-page.html
@@ -1,5 +1,5 @@
{% extends "baseprojectpage.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
diff --git a/lib/srtgui/templates/guided_tour.html b/lib/srtgui/templates/guided_tour.html
index bdc8987e..68c114ef 100644
--- a/lib/srtgui/templates/guided_tour.html
+++ b/lib/srtgui/templates/guided_tour.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Guided Tour of SRTool {% endblock %}
diff --git a/lib/srtgui/templates/history-cve-toastertable.html b/lib/srtgui/templates/history-cve-toastertable.html
new file mode 100755
index 00000000..78319466
--- /dev/null
+++ b/lib/srtgui/templates/history-cve-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} CVE Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History CVE</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History CVE";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History CVE found";
+ }
+ else if (total > 0) {
+ title = total + " History CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/history-defect-toastertable.html b/lib/srtgui/templates/history-defect-toastertable.html
new file mode 100755
index 00000000..63e9ea4d
--- /dev/null
+++ b/lib/srtgui/templates/history-defect-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Defect Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Defect</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Defect";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Defect found";
+ }
+ else if (total > 0) {
+ title = total + " History Defect" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/history-investigation-toastertable.html b/lib/srtgui/templates/history-investigation-toastertable.html
new file mode 100755
index 00000000..bde11ec7
--- /dev/null
+++ b/lib/srtgui/templates/history-investigation-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Investigation Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Investigation</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Investigation";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Investigation found";
+ }
+ else if (total > 0) {
+ title = total + " History Investigation" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/history-vulnerability-toastertable.html b/lib/srtgui/templates/history-vulnerability-toastertable.html
new file mode 100755
index 00000000..bc3b7881
--- /dev/null
+++ b/lib/srtgui/templates/history-vulnerability-toastertable.html
@@ -0,0 +1,73 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Vulnerability Histories - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>History Vulnerability</li>
+ </ul>
+ </div>
+</div>
+
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script>
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "History Vulnerability";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No History Vulnerability found";
+ }
+ else if (total > 0) {
+ title = total + " History Vulnerabilit" + (total > 1 ? 'ies' : 'y') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Set the report link */
+ var record_list=""
+ $(".data > span").each(function(){
+ var this_id=$(this).prop('id');
+ if (this_id.startsWith("dataid_")) {
+ record_list +=this_id.replace(/dataid_/,"") + ",";
+ }
+ });
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+
+ });
+ });
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/investigation.html b/lib/srtgui/templates/investigation.html
index b662c5e1..bd974796 100644
--- a/lib/srtgui/templates/investigation.html
+++ b/lib/srtgui/templates/investigation.html
@@ -29,7 +29,7 @@
</style>
{% endblock %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
@@ -107,18 +107,32 @@
</div>
<div class="row">
<div class="column1">
- <p><b><label id="priority">Set Priority:</label></b>
+ <p><b><label id="priority">Set Priority ({{object.priority}}):</label></b>
+
<div id="priority-list" class="scrolling" style="width: 120px;">
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="4" type="checkbox"> High
- </label><p></div>
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> Medium
- </label><p></div>
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="2" type="checkbox"> Low
- </label><p></div>
+ <div>
+ <label for="priority_p4">P4</label>
+ <input type="radio" id="priority_p4" name="priority_defect" value="1"
+ {% if 1 == object.priority %}checked{% endif %}
+ {% if 0 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p3">P3</label>
+ <input type="radio" id="priority_p3" name="priority_defect" value="2"
+ {% if 2 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p2">P2</label>
+ <input type="radio" id="priority_p2" name="priority_defect" value="3"
+ {% if 3 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p1">P1</label>
+ <input type="radio" id="priority_p1" name="priority_defect" value="4"
+ {% if 4 == object.priority %}checked{% endif %}>
+ </div>
</div>
+
</div>
<div class="column2">
<p><b><label id="components">Set Components:</label></b>
@@ -135,8 +149,14 @@
</div>
</div>
</div>
- Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ Defect Reason: <input type="text" id="input-defect-reason" name="defect-reason" size="20" placeholder="(optional)"> (e.g. "Security Advisory [- REASON -] CVE-2020-1234")
+ <p><p>
+ <b><big>Affected Components: </big></b>
+ <input type="text" id="input-affected-components" name="components" size="40" value="{{affected_components}}"> (e.g. space-separated list of packages, recipes, sub-system list, applications, )
+ <p><p>
+ Found Version: {{found_version}}
<p><p>
+
</div>
</div>
@@ -163,7 +183,7 @@
<td>{{ id.defect.summary }}</td>
<td>{{ id.defect.get_priority_text }}</td>
<td>{{ id.defect.get_status_text }}</td>
- <td>{{ id.defect.get_resolution_text }}</td>
+ <td>{{ id.defect.get_defect_resolution_text }}</td>
<td>{{ id.defect.release_version }}</td>
<td><a href="{{id.defect.url}}" id="dataid_{{id.defect.id}}" target="_blank">{{id.defect.url}}</a></td>
{% if request.user.is_creator %}
@@ -268,10 +288,10 @@
<td>{{ u.author }}</td>
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
- <form id="downloadbanner" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
<input type="hidden" id="action" name="action" value="download">
<input type="hidden" id="record_id" name="record_id" value={{u.id}}>
- <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{u.id}}"></span>
+ <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{forloop.counter}}"></span>
{% if request.user.is_creator %}
<span class="glyphicon glyphicon-trash trash-attachment" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
{% endif %}
@@ -378,9 +398,11 @@
<thead>
<tr>
<th>User</th>
+<!--
{% if request.user.is_creator %}
<th>Manage</th>
{% endif %}
+-->
</tr>
</thead>
@@ -395,7 +417,8 @@
{% if object.investigation_users.all %}
{% for u in object.investigation_users.all %}
<tr>
- <td>{{ u.user.name }}</td>
+ <td>{{ u.user.username }}</td>
+<!--
{% if request.user.is_creator %}
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
@@ -404,6 +427,7 @@
</td>
{% endif %}
</tr>
+-->
{% endfor %}
{% else %}
{% if not object.public %}
@@ -442,6 +466,9 @@
</table>
</div>
+<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
+
<script>
var selected_newcomment=false;
var selected_addusernotify=false;
@@ -465,6 +492,9 @@
alert("error on request:\n" + data.error);
return;
}
+ if (data.note.startsWith("DEFECT-")) {
+ alert("ERROR:Defect creation failed, temporary defect '" + data.note + "' created. See Error Log.");
+ }
// reload the page with the updated tables
location.reload(true);
}
@@ -539,6 +569,7 @@
postCommitAjaxRequest({
"action" : 'submit-createdefect',
"defect_reason" : $("#input-defect-reason").val(),
+ "affected_components" : $("#input-affected-components").val(),
"components": component_list,
"priority": priority,
});
@@ -615,7 +646,7 @@
});
$('.submit-downloadattachment').click(function() {
- $("#downloadbanner").submit();
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
});
$('.trash-attachment').click(function() {
@@ -719,7 +750,7 @@
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -728,16 +759,20 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim()
var private_note=$('#text-private-note').val().trim()
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
var outcome=$('#select-outcome-state').val();
+ var affected_components=$('#text-affected-components').val();
postCommitAjaxRequest({
- "action" : 'submit-quickedit',
- "note" : note,
- "private_note" : private_note,
- "status" : status,
- "outcome" : outcome,
- "priority" : priority,
+ "action" : 'submit-quickedit',
+ "priority" : priority,
+ "status" : status,
+ "note" : note,
+ "private_note" : private_note,
+ "tags" : tags,
+ "outcome" : outcome,
+ "affected_components" : affected_components,
});
});
@@ -746,13 +781,13 @@
selected_notifyedit=false;
$("#details-notify-edit").slideUp();
$("#display-status").slideDown();
- document.getElementById("select-notification").innerText = "Create Notification ...";
+ document.getElementById("select-notification").innerText = "Create notification ...";
$("#select-notification").removeClass("blueborder");
} else {
selected_notifyedit=true;
$("#display-status").slideUp();
$("#details-notify-edit").slideDown();
- document.getElementById("select-notification").innerText = "Close notification";
+ document.getElementById("select-notification").innerText = "Cancel notification";
$("#select-notification").addClass("blueborder");
document.getElementById("select-category-notify").focus();
}
diff --git a/lib/srtgui/templates/joblog.html b/lib/srtgui/templates/joblog.html
new file mode 100755
index 00000000..1e4abca0
--- /dev/null
+++ b/lib/srtgui/templates/joblog.html
@@ -0,0 +1,39 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Job Log {% endblock %}
+{% block pagecontent %}
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Job Log: {{object.name}} : {{log_date}}
+ <form id="downloadbanner-log" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download-job-log">
+ <input type="hidden" id="report_path" name="report_path" value="JOBLOG">
+ <span class="glyphicon glyphicon-download-alt submit-download-log" x-data="log"></span>
+ </form>
+ </h1>
+ </div>
+</div>
+
+<div class="row" style="padding-left: 25px;">
+ <textarea id="log-text" readonly placeholder="Job log" cols="120" rows="30" style="background-color: #cccccc;">{{ log_text }}</textarea>
+</div>
+
+
+<!-- Javascript support -->
+<script>
+ $(document).ready(function() {
+
+ $('.submit-download-log').click(function() {
+ $("#downloadbanner-log").submit();
+ });
+
+ /* Set the report link */
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
+ });
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/js-unit-tests.html b/lib/srtgui/templates/js-unit-tests.html
index ca248962..6ebca39f 100644
--- a/lib/srtgui/templates/js-unit-tests.html
+++ b/lib/srtgui/templates/js-unit-tests.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
{% block pagecontent %}
diff --git a/lib/srtgui/templates/landing.html b/lib/srtgui/templates/landing.html
index 67c61b1e..f0e4f13d 100644
--- a/lib/srtgui/templates/landing.html
+++ b/lib/srtgui/templates/landing.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Welcome to SRTool{% endblock %}
@@ -9,7 +9,7 @@
<div class="row">
<div class="col-md-7" style="padding-left: 50px;">
<h1>Security Response Tool (SRTool)</h1>
- <p>A web interface to SRTool CVE investigations</p>
+ <p>A web interface to SRTool CVE investigations ({{this_landing}})</p>
</div>
</div>
<div class="row">
@@ -67,6 +67,13 @@
<td>SRTool Products<td>
</tr>
+ {% for ext_url,ext_title,ext_description in landing_extensions_table %}
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url ext_url %}">{{ext_title}}</a></td>
+ <td>{{ext_description}}</td>
+ </tr>
+ {% endfor %}
+
</table>
</div>
diff --git a/lib/srtgui/templates/landing_not_managed.html b/lib/srtgui/templates/landing_not_managed.html
index baa4b72c..25e7f713 100644
--- a/lib/srtgui/templates/landing_not_managed.html
+++ b/lib/srtgui/templates/landing_not_managed.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Welcome to Toaster {% endblock %}
diff --git a/lib/srtgui/templates/login.html b/lib/srtgui/templates/login.html
index 96fb6fe1..49d4ab30 100644
--- a/lib/srtgui/templates/login.html
+++ b/lib/srtgui/templates/login.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Login Page {% endblock %}
diff --git a/lib/srtgui/templates/maintenance.html b/lib/srtgui/templates/maintenance.html
new file mode 100755
index 00000000..c35d6961
--- /dev/null
+++ b/lib/srtgui/templates/maintenance.html
@@ -0,0 +1,216 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Maintenance tools {% endblock %}
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li>Maintenance</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Maintenance</h1>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+<div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'error_logs' %}">Error Logs</a></td>
+ <td>Examine Error Logs ({{errorlog_total}})</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_cve' %}">History CVE</a></td>
+ <td>Examine History for CVEs</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_vulnerability' %}">History Vulnerabilities</a></td>
+ <td>Examine History for Vulnerabilities</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_investigation' %}">History Investigations</a></td>
+ <td>Examine History for Investigations</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_defect' %}">History Defects</a></td>
+ <td>Examine History for Defects</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_jobs' 77 %}">Manage Jobs</a></td>
+ <td>Manage the Jobs table</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-clearjobs"> Clear Jobs</button>
+ <td>Clear the Jobs table of all entries</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-testjob1"> Test Job #1 Progress </button>
+ <td>Test job progress bars support with default job #1</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-testjob1-2"> Test Job #2 Progress </button>
+ <td>Test job progress bars support with job #2</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-parent-child"> Test Parent/Child Jobs</button>
+ <td>Test Progress using parent and child jobs</td>
+ </tr>
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+
+ <dt>CVE History: Total Count =</dt>
+ <dd>
+ {{history_cve_total}}
+ </dd>
+ <dt>Vulnerability History: Total Count =</dt>
+ <dd>
+ {{history_vulnerability_total}}
+ </dd>
+ <dt>Investigation: Total Count =</dt>
+ <dd>
+ {{history_investigation_total}}
+ </dd>
+ <dt>Defect: Total Count =</dt>
+ <dd>
+ {{defect_investigation_total}}
+ </dd>
+
+ </dl>
+ </div>
+
+ <form method="post"> {% csrf_token %}
+ <b>Remote Backup Path
+ <button class="execute btn btn-primary">Update</button> <!-- btn-lg -->
+ </b>
+ <div class="well">
+ <input type="hidden" name="action" value="submit-remote-backup-path">
+ Path = <input type="text" placeholder="remote backup path" name="text-remote-backup-path" size="60" value="{{remote_backup_path}}">
+ </div>
+ </form>
+ </div>
+
+</div>
+</div>
+
+
+<script type="text/javascript">
+
+ $(document).ready(function() {
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url: url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+
+ $('#submit-clearjobs').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-clearjobs',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-testjob1').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob',
+ "command" : 'SELFTEST',
+ "name" : 'Basic self test',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-testjob1-2').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob-j2',
+ "command" : 'SELFTEST',
+ "name" : 'Basic self test',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-parent-child').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob-parent',
+ "command" : 'PARENTTEST',
+ "name" : 'Test Parent/Children',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+
+ /* Set the report link */
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}");
+ });
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/manage-jobs-toastertable.html b/lib/srtgui/templates/manage-jobs-toastertable.html
new file mode 100755
index 00000000..34e89c57
--- /dev/null
+++ b/lib/srtgui/templates/manage-jobs-toastertable.html
@@ -0,0 +1,126 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Manage Jobs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+ <li>Manage Jobs</li>
+ </ul>
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+<a class="btn btn-default navbar-btn " id="submit-clearjobs" href="">Clear Jobs</a>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Manage Jobs ("+total+")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Jobs found";
+ }
+ else if (total > 0) {
+ title = total + " Job" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ /* Add handler into the Toaster Table context */
+ $('.trash-job').click(function() {
+ var result = confirm("Are you sure you want to remove Job '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trash-job',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ url = url || "{% url 'xhr_job_post' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#project_refresh').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-projects',
+ "audit_id" : '{% if hb_audit %}{{hb_audit.id}}{% endif %}',
+ },"");
+ });
+
+ $('#submit-clearjobs').click(function(){
+ var result = confirm("Are you sure you want to remove all jobs?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-clearjobs',
+ },"");
+ }
+ });
+
+ });
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/management.html b/lib/srtgui/templates/management.html
index fe40ecb3..60769873 100644
--- a/lib/srtgui/templates/management.html
+++ b/lib/srtgui/templates/management.html
@@ -1,163 +1,188 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Manage Resources {% endblock %}
{% block pagecontent %}
- <div class="row">
- <div class="col-md-7" style="padding-left: 50px;">
- <h1>Management</h1>
- </div>
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Management</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Management</h1>
</div>
- <div class="row">
- <div class="jumbotron well-transparent">
-
- <div class="col-md-6">
- <div>
- <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
- <thead>
- <tr>
- <th>Action</th>
- <th>Description</th>
- </tr>
- </thead>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
- <td>Triage the CVE's</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
- <td>Triage the pending notifications</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
- <td>Report on the over all response system status</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Request</a></td>
- <td>Process the items that are ready to be published from SRTool</td>
- </tr>
-
- {% if request.user.is_admin %}
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
- <td>Add, edit, and remove users</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
- <td>Manage source list, perform manual pulls</td>
- </tr>
- {% endif %}
-
- </table>
- </div>
-
- </div>
-
- <div class="col-md-5">
- <b>Quick Info</b>
- <div class="well">
- <dl class="dl-horizontal">
- <dt>CVE's: Total Count =</dt>
- <dd>
- <a href="{% url 'cves' %}"> {{cve_total}} </a>
- </dd>
- <dt>Pending triaged =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
- </dd>
- <dt>Investigate =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
- </dd>
- <dt>Vulnerable =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
- </dd>
- <dt>Not Vulnerable =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
- </dd>
- <dt>Vulnerabilities: Total Count =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
- </dd>
- <dt>High active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
- </dd>
- <dt>Medium active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
- </dd>
- <dt>Low active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_severity:low&default_orderby=name&filter_value=on&" %}> {{vulnerability_low}} </a>
- </dd>
-
- <dt>Investigations: Total Count =</dt>
- <dd>
- <a href="{% url 'investigations' %}" %}> {{investigation_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
- </dd>
- <dt>High active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
- </dd>
- <dt>Medium active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
- </dd>
- <dt>Low active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_severity:low&default_orderby=name&filter_value=on&" %}> {{investigation_low}} </a>
- </dd>
-
- <dt>Defects: Total Count =</dt>
- <dd>
- <a href="{% url 'defects' %}" %}> {{defect_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
- </dd>
- <dt>InProgress =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
- </dd>
- <dt>P1 active =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
- </dd>
- <dt>P2 active =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
- </dd>
-
- <dt>Packages: Affected=</dt>
- <dd>
- <a href="{% url 'cpes_srtool' %}" %}> {{package_total}} </a>
- </dd>
-
- </dl>
- </div>
- </div>
-
- </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
+ <td>Triage the CVE's ({{cve_new}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
+ <td>Triage the pending notifications ({{notification_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
+ <td>Report on the over all response system status</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
+ <td>Process items to be published from the SRTool</td>
+ </tr>
+
+ {% if request.user.is_admin %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
+ <td>Add, edit, and remove users</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
+ <td>Manage source list, perform manual pulls</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
+ <td>Maintenance utilities ({{errorlog_total}})</td>
+ </tr>
+ {% endif %}
+
+ </table>
+ </div>
+
</div>
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>CVE's: Total Count =</dt>
+ <dd>
+ <a href="{% url 'cves' %}"> {{cve_total}} </a>
+ </dd>
+ <dt>Pending triaged =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
+ </dd>
+ <dt>Investigate =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
+ </dd>
+ <dt>Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
+ </dd>
+ <dt>Not Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
+ </dd>
+ <dt>Vulnerabilities: Total Count =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{vulnerability_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
+ </dd>
+
+ <dt>Investigations: Total Count =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}" %}> {{investigation_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{investigation_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
+ </dd>
+
+ <dt>Defects: Total Count =</dt>
+ <dd>
+ <a href="{% url 'defects' %}" %}> {{defect_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
+ </dd>
+ <dt>InProgress =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
+ </dd>
+ <dt>P1 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
+ </dd>
+ <dt>P2 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
+ </dd>
+
+ <dt>Packages: Affected=</dt>
+ <dd>
+ <a href="{% url 'cpes_srtool' %}" %}> {{package_total}} </a>
+ </dd>
+
+ </dl>
+ </div>
+ </div>
+
+ </div>
+</div>
+
{% endblock %}
diff --git a/lib/srtgui/templates/mrj_section.html b/lib/srtgui/templates/mrj_section.html
new file mode 100755
index 00000000..480dfef2
--- /dev/null
+++ b/lib/srtgui/templates/mrj_section.html
@@ -0,0 +1,194 @@
+{% load static %}
+{% load humanize %}
+<script src="{% static 'js/mrjsection.js' %}"></script>
+
+{% if mru %}
+ <div id="latest-jobs">
+ {% for job in mru %}
+ <div id="job-instance-{{job.id}}" data-latest-job-result="{{job.id}}" class="alert job-result {% if job.status == job.SUCCESS %}alert-success{% elif job.status == job.ERRORS %}alert-danger{% else %}alert-info{% endif %}">
+ <!-- job title -->
+ <div class="row job-name">
+ <div class="col-md-12">
+ <small>
+ {{job.name}}{% if request.user.is_admin %} ({{job.id}}){% endif %}
+ </small>
+ </div>
+ </div>
+
+ <div class="row" data-role="job-status-container">
+ <div class="col-md-12">
+ Loading...
+ </div>
+ </div>
+ </div>
+ {% endfor %}
+ </div>
+{% endif %}
+
+<!-- job main template -->
+<script id="job-template" type="text/x-jsrender">
+ <div class="col-md-3">
+ <!-- only show link for completed jobs -->
+ <%if state == 'Success' || state == 'Errors'%>
+ <%:targets%>
+ <%else%>
+ <span data-toggle="tooltip" id="job-message-done-<%:id%>" data-role="targets-text" title="Job: <%:targets%>">
+ <%:targets%>
+ </span>
+ <%/if%>
+ </div>
+
+ <div data-job-state="<%:state%>">
+ <%if state == 'Success' || state == 'Errors'%>
+ <%include tmpl='#succeeded-or-failed-job-template'/%>
+ <%else state == 'Cancelling'%>
+ <%include tmpl='#cancelling-job-template'/%>
+ <%else state == 'NotStarted'%>
+ <%include tmpl='#starting-template'/%>
+ <%else state == 'InProgress'%>
+ <%include tmpl='#in-progress-job-template'/%>
+ <%else state == 'Cancelled'%>
+ <%include tmpl='#cancelled-job-template'/%>
+ <%/if%>
+ </div>
+</script>
+
+<!-- queued job -->
+<script id="queued-job-template" type="text/x-jsrender">
+ <div class="col-md-5">
+ <span class="glyphicon glyphicon-question-sign get-help get-help-blue"
+ title="This job is waiting for the background application to start">
+ </span>
+
+ Job queued
+ </div>
+
+ <div class="col-md-4">
+ <!-- cancel button -->
+ <%include tmpl='#cancel-template'/%>
+ </div>
+</script>
+
+<!-- in progress job; at least one task finished -->
+<script id="in-progress-job-template" type="text/x-jsrender">
+ <!-- progress bar and task completion percentage -->
+ <div data-role="job-status" class="col-md-4 col-md-offset-1 progress-info">
+ <!-- progress bar -->
+ <div class="progress" id="job-pc-done-title-<%:id%>">
+ <div id="job-pc-done-bar-<%:id%>"
+ style="width: <%:tasks_complete_percentage%>%;"
+ class="progress-bar">
+ </div>
+ </div>
+ </div>
+
+ <div class="col-md-4 progress-info">
+ <!-- task completion percentage -->
+ <span id="job-pc-done-<%:id%>"><%:tasks_complete_percentage%></span>% of
+ tasks complete
+
+ <!-- cancel button -->
+ <%include tmpl='#cancel-template'/%>
+ </div>
+</script>
+
+<!-- cancelling job -->
+<script id="cancelling-job-template" type="text/x-jsrender">
+ <div class="col-md-9">
+ Cancelling the job ...
+ </div>
+</script>
+
+<!-- succeeded or failed job -->
+<script id="succeeded-or-failed-job-template" type="text/x-jsrender">
+ <!-- completed_on -->
+ <div class="col-md-2">
+ <%:completed_on%>
+ </div>
+
+ <!-- errors -->
+ <div class="col-md-2">
+ <%if errors%>
+ <span class="glyphicon glyphicon-minus-sign"></span>
+ <a href="<%:dashboard_errors_url%>" class="alert-link">
+ <%:errors%> error<%:errors_pluralise%>
+ </a>
+ <%/if%>
+ </div>
+
+ <!-- warnings -->
+ <div class="col-md-2">
+ <%if warnings%>
+ <span class="glyphicon glyphicon-warning-sign job-warnings"></span>
+ <a href="<%:dashboard_warnings_url%>" class="alert-link job-warnings">
+ <%:warnings%> warning<%:warnings_pluralise%>
+ </a>
+ <%/if%>
+ </div>
+
+<!-- <%if errors == 0 and warnings == 0%>
+ <div class="col-md-2">
+ No Errors
+ </div>
+ <%/if%>
+-->
+
+ <!-- job time -->
+ <div class="col-md-3">
+ Job time:
+ <span data-role="data-recent-job-jobtime-field">
+ <b><%:jobtime%></b>
+ </span>
+ <form id="downloadbanner-log" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download-log">
+ <input type="hidden" name="report_path" value="JOBLOG">
+ <a href="/srtgui/joblog/<%:id%>" class="glyphicon glyphicon-download-alt submit-download-joblog" x-data="log" target="_blank"></a>
+ </form>
+ </div>
+</script>
+
+<!-- cancelled job -->
+<script id="cancelled-job-template" type="text/x-jsrender">
+ <!-- job cancelled message -->
+ <div class="col-md-6">
+ Job cancelled
+ </div>
+</script>
+
+<!-- cancel button or no cancel icon -->
+<script id="cancel-template" type="text/x-jsrender">
+ <!-- cancel button -->
+ <span class="cancel-job-btn pull-right alert-link"
+ data-jobrequest-id="<%:id%>" data-request-url="<%:cancel_url%>">
+ <span class="glyphicon glyphicon-remove-circle"></span>
+ Cancel
+ </span>
+</script>
+
+<script>
+ $(document).ready(function () {
+ var ctx = {
+ }
+
+ try {
+ mrjSectionInit(ctx);
+ $('.submit-download-joblog').click(function() {
+ alert("submit-download-joblog:"+this.getAttribute("x-data"));
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+ } catch (e) {
+ document.write("Sorry, An error has occurred loading this page");
+ console.warn(e);
+ }
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('.submit-download-joblog').click(function() {
+ alert("submit-download-joblog:"+this.getAttribute("x-data"));
+ $("#downloadbanner-job").submit();
+ });
+
+ });
+</script>
diff --git a/lib/srtgui/templates/notifications-toastertable.html b/lib/srtgui/templates/notifications-toastertable.html
index dde76482..c9e572df 100755
--- a/lib/srtgui/templates/notifications-toastertable.html
+++ b/lib/srtgui/templates/notifications-toastertable.html
@@ -19,7 +19,7 @@
<ul class="breadcrumb" id="breadcrumb">
<li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
<li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li>Pending To-do Notifications</li>
+ <li>Pending Notifications</li>
</ul>
</div>
</div>
diff --git a/lib/srtgui/templates/product.html b/lib/srtgui/templates/product.html
index 7f8d1b1f..64234778 100644
--- a/lib/srtgui/templates/product.html
+++ b/lib/srtgui/templates/product.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/publish-cve-toastertable.html b/lib/srtgui/templates/publish-cve-toastertable.html
new file mode 100755
index 00000000..c46128cc
--- /dev/null
+++ b/lib/srtgui/templates/publish-cve-toastertable.html
@@ -0,0 +1,162 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table via CVEs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table via CVEs</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table via CVEs";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var cve_list=[];
+ $('#publishcvetable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "cves" : cve_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/publish-defect-toastertable.html b/lib/srtgui/templates/publish-defect-toastertable.html
new file mode 100755
index 00000000..c31e3b6a
--- /dev/null
+++ b/lib/srtgui/templates/publish-defect-toastertable.html
@@ -0,0 +1,168 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table via Defects {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table via Defects</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<div id="table-loading">
+<h3><font color="blue">[ Table Loading... ]</font></h3>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ $("#table-loading").slideDown();
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table via Defects";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVEs via Defect" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ $("#table-loading").slideUp();
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "defects" : defect_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "defects" : defect_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var defect_list=[];
+ $('#publishdefecttable input').each(function(){
+ if ($(this).is(':checked')) {
+ defect_list.push($(this).prop('name'));
+ }
+ });
+ defect_list = defect_list.join(",");
+ if ("" == defect_list) {
+ alert("No Defects were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "defects" : defect_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/publish-list-toastertable.html b/lib/srtgui/templates/publish-list-toastertable.html
new file mode 100755
index 00000000..b5a88323
--- /dev/null
+++ b/lib/srtgui/templates/publish-list-toastertable.html
@@ -0,0 +1,162 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Publish Table {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'publish' %}">Publish</a></li><span class="divider">&rarr;</span>
+ <li>Publish Table</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="mark-new" class="btn btn-default" type="button">Mark New</button>
+ <button id="mark-modified" class="btn btn-default" type="button">Mark Updated</button>
+ <button id="unmark" class="btn btn-default" type="button">Unmark</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script>
+ var selected_notifyedit=false;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Publish Table";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Publish CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " Publish CVE" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#mark-new').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-new',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#mark-modified').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'mark-modified',
+ "cves" : cve_list,
+ });
+ });
+
+ $('#unmark').click(function(){
+ var cve_list=[];
+ $('#publishlisttable input').each(function(){
+ if ($(this).is(':checked')) {
+ cve_list.push($(this).prop('name'));
+ }
+ });
+ cve_list = cve_list.join(",");
+ if ("" == cve_list) {
+ alert("No CVE's were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'unmark',
+ "cves" : cve_list,
+ });
+ });
+
+
+ }); <!-- $(document).ready() -->
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/publish.html b/lib/srtgui/templates/publish.html
index b1f3d83f..6c915f85 100644
--- a/lib/srtgui/templates/publish.html
+++ b/lib/srtgui/templates/publish.html
@@ -1,44 +1,52 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
-{% block title %} Publish Requests {% endblock %}
-
+{% block title %} Publish Reports {% endblock %}
{% block pagecontent %}
-<div class="row">
- <!-- Breadcrumbs -->
- <div class="col-md-12">
- <ul class="breadcrumb" id="breadcrumb">
- <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
- <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
- <li>Publish (Proposals)</li>
- </ul>
- </div>
-</div>
-
-<h2> Manage Publish Requests</h2>
-<ul>
- <li>The SRTool supports an external publishing tool, for example a business table or the vendor's public website</li>
- <li>These tools can be used to (a) submit CVEs to that tool, and (b) update the CVEs when they have been published</li>
-</ul>
-
-<h2> Publishing Actions</h2>
-<ul>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'select-publish' %}">Publish Request</a></td>
- <td>Process the items that are ready to be published from SRTool</td>
- </tr>
-
- <br>
- <br>
- <br>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'update-published' %}">Published Update</a></td>
- <td>Process the items that have been published</td>
- </tr>
-</ul>
+ <div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Publish the CVE Database Status</h1>
+ </div>
+ </div>
+ <div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'report' 'publish-summary' %}">Summary of CVEs</a></td>
+ <td>Summary across CVEs and Products</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish_diff_snapshot' %}">Difference Snapshots</a></td>
+ <td>Difference Report via Snapshots [UNDER DEVELOPMENT]</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish_diff_history' %}">Difference History</a></td>
+ <td>Difference Report via History [UNDER DEVELOPMENT]</td>
+ </tr>
+
+ </table>
+ </div>
+
+ </div>
+
+ </div>
+
+ </div>
+ </div>
{% endblock %}
diff --git a/lib/srtgui/templates/publish_diff_snapshot.html b/lib/srtgui/templates/publish_diff_snapshot.html
new file mode 100644
index 00000000..44958632
--- /dev/null
+++ b/lib/srtgui/templates/publish_diff_snapshot.html
@@ -0,0 +1,365 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Publish Requests {% endblock %}
+
+{% block pagecontent %}
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li>Publish Report Management</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<h2>Publish Report Management</h2>
+<ul>
+ <li>The SRTool supports exporting new and updated CVEs to external publishing tools</li>
+</ul>
+
+<hr>
+
+<h2>Publish Via Database Snapshots</h2>
+<h3> On Demand</h3>
+<ul>
+ <li>This extracts the changes from a 'base' database backup snapshot to a more recent 'top' snapshot</li>
+ <li>The 'start' and 'stop' dates can extract a subset of those changes. Normally they are set to the 'base' and 'top' dates</li>
+</ul>
+
+<div style="padding-left:30px;">
+ <div>
+ <label> Start Snapshot: </label>
+ <select id="snap_date_base">
+ {% for snap in snapshot_list %}
+ <option value="{{snap.date}}" {% if snap_start_index == snap.index %}selected{% endif %}>
+ ({{snap.mode}}) {{snap.date}} {{snap.time}} | {{snap.day}}
+ </option>
+ {% endfor %}
+ </select>
+ </div>
+ <div>
+ <label> Stop Snapshot: </label>
+ <select id="snap_date_top">
+ {% for snap in snapshot_list %}
+ <option value="{{snap.date}}" {% if snap_stop_index == snap.index %}selected{% endif %}>
+ ({{snap.mode}}) {{snap.date}} {{snap.time}} | {{snap.day}}
+ </option>
+ {% endfor %}
+ </select>
+ </div>
+ <div>
+ Start Date: <input type="text" id="snap_date_start" value="{{snap_date_start}}">&nbsp;&nbsp;
+ Stop Date: <input type="text" id="snap_date_stop" value="{{snap_date_stop}}">&nbsp;&nbsp;
+ <I>(Format: yyyy-mm-dd)</I>
+ </div>
+<br>
+</div>
+
+<div>
+ <span style="padding-left:30px;"><button id="export-snapshot" class="btn btn-default" type="button">Generate</button></span>
+ <span style="padding-left:30px;"><button id="export-snapshot-progress" class="btn btn-default" type="button">Generate (Progress)</button></span>
+ <!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
+ <span id="export-snapshot-text">Generate the publish table on-demand (using snapshots)</span>
+ <span id="generating-report" hidden style="color:red"><I>... Generating the report - this will take a few minutes ...</I></span>
+</div>
+<br>
+
+<form method="POST"> {% csrf_token %}
+<h3>Automatic (Under Development)</h3>
+<div style="padding-left: 25px;">
+ <label> Frequency: </label>
+ <select id="snap_frequency">
+ {% for snap in snapshot_frequency_list %}
+ <option value="{{snap}}" {% if snap == snap_frequency_select %}selected{% endif %}>
+ {{snap}}
+ </option>
+ {% endfor %}
+ </select>
+ <span style="padding-left:30px;"><button id="export-snapshot-XXX" class="btn btn-default" type="button" disabled>Save</button></span>
+ <!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
+ Save the automatic publishing frequency
+</div>
+</form>
+
+<h3>Generated Reports</h3>
+<div style="padding-left: 25px;">
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>Size</th>
+ <th>Date</th>
+ <th>Manage</th>
+ </tr>
+ </thead>
+ {% if generated_report_list %}
+ {% for report in generated_report_list %}
+ <tr>
+ <td>{{report.name}}</td>
+ <td>{{report.size}}</td>
+ <td>{{report.date}}</td>
+ <td>
+ <span id="attachment_entry_'+{{report.name}}+'" class="js-config-var-name"></span>
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download">
+ <input type="hidden" id="report_id" name="report_name" value={{report.name}}>
+ <span class="glyphicon glyphicon-download-alt submit-downloadreport" id="report_download_'+{{report.name}}+'" x-data="{{forloop.counter}}"></span>
+ {% if request.user.is_creator %}
+ <span class="glyphicon glyphicon-trash trash-report" id="report_trash_'+{{report.name}}+'" x-data="{{report.name}}"></span>
+ {% endif %}
+ </form>
+ </td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No report files found</td>
+ </tr>
+ {% endif %}
+ </table>
+ (last report = {{snap_last_calc}})
+</div>
+
+<hr>
+
+<form method="POST"> {% csrf_token %}
+<h2>Publish Via History Tables (Under development)</h2>
+<ul>
+ <li>These tools can be used to (a) gather the candidate CVEs, (b) review and edit the list if needed, (c) generate the report when ready</li>
+ <li>The user can explicitly include and exclude CVEs from the "New" list and the "Updated" list, in case the automatic calculations need adjustment</li>
+ <li>These mark-ups are inserted into the respective CVE's history at a mid-point date of the period, so they are both persistent and period-specific</li>
+ <li>The user can clear the markups from the given period and start over, but this will not affect any other period</li>
+</ul>
+<h3> Publish Preparation</h3>
+<ul>
+ <div>
+ Start Date: <input type="text" name="date_start" value="{{date_start}}">&nbsp;&nbsp;
+ Stop Date: <input type="text" name="date_stop" value="{{date_stop}}">
+ </div>
+ <br>
+ <div>
+ Product filter:
+ <select name="product-filter" id="select-product-filter">
+ <option value="0">WR Linux Supported Products</option>
+ </select>
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="recalculate">Recalculate publish table</button>
+ Gather the items for this period to be published from SRTool, with user changes (last done {{last_calc}})
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="reset">Reset user edits, Recalculate</button>
+ Remove the user changes for this period, recalculate the table
+ </div>
+</ul>
+<h3> Publish Preview and Modifications</h3>
+<ul>
+ <div>
+ <button type="submit" name="action" value="view">View the publish table</button>
+ View the publish table, prune entries
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="add-cve">Add via CVEs</button>
+ Add recent CVEs to the table
+ </div>
+ <br>
+ <div>
+ <button type="submit" name="action" value="add-defect">Add via defects</button>
+ Add CVEs of recent defects to the table
+ </div>
+ <br>
+</ul>
+<h3> Publish the Report</h3>
+<ul>
+ <div>
+ <button type="submit" name="action" value="export">Export</button>
+ Export the publish table (using history)
+ </div>
+ <br>
+</ul>
+</form>
+
+<script>
+ var selected_newcomment=false;
+
+ $(document).ready(function() {
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ document.getElementById("export-snapshot").disabled = false;
+ /* document.getElementById("download-snapshot").disabled = false;*/
+ document.getElementById("export-snapshot-text").innerText = "Generate the publish table on-demand (using snapshots)";
+ document.getElementById("generating-report").style.display = "block";
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored:\n" + error + "\n(" + textstatus + ")");
+ document.getElementById("export-snapshot").disabled = false;
+ document.getElementById("export-snapshot-text").innerText = "Generate the publish table on-demand (using snapshots)";
+ /* document.getElementById("download-snapshot").disabled = false; */
+ document.getElementById("generating-report").style.display = "block";
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_publish' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $("#snap_date_base").change(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ if (snap_date_base > snap_date_top) {
+ $("#snap_date_base").val(snap_date_top);
+ $("#snap_date_top").val(snap_date_base);
+ $("#snap_date_start").val(snap_date_top);
+ $("#snap_date_stop").val(snap_date_base);
+ } else {
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ $("#snap_date_start").val(snap_date_base);
+ if (snap_date_stop < snap_date_base) {
+ $("#snap_date_stop").val(snap_date_top);
+ }
+ }
+ });
+
+ $("#snap_date_top").change(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ if (snap_date_base > snap_date_top) {
+ $("#snap_date_base").val(snap_date_top);
+ $("#snap_date_top").val(snap_date_base);
+ $("#snap_date_start").val(snap_date_top);
+ $("#snap_date_stop").val(snap_date_base);
+ } else {
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_top) {
+ $("#snap_date_start").val(snap_date_base);
+ }
+ $("#snap_date_stop").val(snap_date_top);
+ }
+ });
+
+ $('#export-snapshot').click(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_stop) {
+ alert("Error: the start date is after the stop date");
+ return;
+ }
+ if (snap_date_start < snap_date_base) {
+ alert("Error: the start date is before the snapshot base date");
+ return;
+ }
+ if (snap_date_stop > snap_date_top) {
+ alert("Error: the stop date is after the snapshot top date");
+ return;
+ }
+ var result = confirm("Generate the report? This will take several minutes.");
+ if (result){
+ document.getElementById("export-snapshot").disabled = true;
+ document.getElementById("export-snapshot-text").innerText = "... Generating the report - this will take a few minutes ...";
+
+ /* document.getElementById("download-snapshot").disabled = true; */
+ document.getElementById("generating-report").style.display = "none";
+ postCommitAjaxRequest({
+ "action" : 'export-snapshot',
+ "snap_date_base" : snap_date_base,
+ "snap_date_top" : snap_date_top,
+ "snap_date_start" : snap_date_start,
+ "snap_date_stop" : snap_date_stop
+ });
+ }
+ });
+
+ $('#export-snapshot-progress').click(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_stop) {
+ alert("Error: the start date is after the stop date");
+ return;
+ }
+ if (snap_date_start < snap_date_base) {
+ alert("Error: the start date is before the snapshot base date");
+ return;
+ }
+ if (snap_date_stop > snap_date_top) {
+ alert("Error: the stop date is after the snapshot top date");
+ return;
+ }
+ var result = confirm("Generate the report? This will take several minutes.");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'export-snapshot-progress',
+ "snap_date_base" : snap_date_base,
+ "snap_date_top" : snap_date_top,
+ "snap_date_start" : snap_date_start,
+ "snap_date_stop" : snap_date_stop
+ });
+ }
+ });
+
+
+ /* Manage report files */
+
+ $('.submit-downloadreport').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('.trash-report').click(function() {
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashreport',
+ "report_name" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+
+
+ });
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/report.html b/lib/srtgui/templates/report.html
index d4d27f76..f89628fe 100644
--- a/lib/srtgui/templates/report.html
+++ b/lib/srtgui/templates/report.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Report/Export {% endblock %}
@@ -13,10 +13,16 @@
</div>
</div>
- <form method="POST">{% csrf_token %}
+ <form method="POST"> {% csrf_token %}
<input type="hidden" name="parent_page" value="{{parent_page}}">
<input type="hidden" name="record_list" value="{{record_list}}">
+ {% if error_message %}
+ <br>
+ <font size="3" color="red">{{error_message}}</font>
+ <br>
+ {% endif %}
+
{% if report_type_list %}
<hr>
Report Type:<br>
@@ -28,6 +34,7 @@
Note: There is no report defined for this page.<br>
{% endif %}
+ <!--
{% if report_get_title %}
<hr>
Title:<br>
@@ -35,6 +42,7 @@
<br>
{% endif %}
<hr>
+ -->
{% if report_recordrange_list %}
Record Range:<br>
@@ -53,6 +61,12 @@
<hr>
{% endif %}
+ {% if report_date_list %}
+ Date Range (mm/dd/yyyy):<br>
+ {{report_date_list|safe}}
+ <hr>
+ {% endif %}
+
{% if report_custom_list %}
Page Specific Settings:<br>
{{report_custom_list|safe}}
@@ -68,6 +82,31 @@
</form>
-<br>
+ <br>
+ <!-- <input type="submit" id="submit-report-button" class="btn btn-primary btn-lg" value="Generate and Download Report"/> -->
+ <!-- <button type="button" id="submit-report-button" class="btn btn-primary btn-lg"> Generate and Download Report </button> -->
+ <!-- <button id="select-these" class="btn btn-default" type="button">Select these</button> -->
+
+ <!-- Javascript support -->
+ <script>
+
+ $(document).ready(function() {
+
+ /* Handle the post button */
+ $('#test-submit-report-button').click(function(){
+ document.getElementById("submit-report-button").innerText = " ... working ... ";
+ data = {
+ "action" : 'foo',
+ "pub" : $('#bar').is(':checked') ? "yes" : "no",
+ }
+
+ $.post("wr/report/management", data, function(data, status){
+ alert("Data: " + data + "\nStatus: " + status);
+ });
+ });
+
+ });
+ </script>
+
{% endblock %}
diff --git a/lib/srtgui/templates/snippets/gitrev_popover.html b/lib/srtgui/templates/snippets/gitrev_popover.html
index c1e3dabf..445c39cd 100644
--- a/lib/srtgui/templates/snippets/gitrev_popover.html
+++ b/lib/srtgui/templates/snippets/gitrev_popover.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
{% if vcs_ref|is_shaid %}
<a class="btn btn-default" data-content="{{vcs_ref}}">
{{vcs_ref|truncatechars:10}}
diff --git a/lib/srtgui/templates/snippets/investigations_popover.html b/lib/srtgui/templates/snippets/investigations_popover.html
index 0f65d3d4..22197a13 100644
--- a/lib/srtgui/templates/snippets/investigations_popover.html
+++ b/lib/srtgui/templates/snippets/investigations_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the Investigations related to this Product #}
-{% load projecttags %}
+{% load jobtags %}
{% with investigations='Wind River Linux 9' %}
{% with count_investigations=1 %}
diff --git a/lib/srtgui/templates/snippets/pkg_dependencies_popover.html b/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
index 273437e3..eefbc122 100644
--- a/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
+++ b/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the dependences and sizes of a package 'data' used in the Packages table #}
-{% load projecttags %}
+{% load jobtags %}
{% with package_deps=data.package_dependencies_source|for_target:extra.target_name %}
{% with count_package=package_deps.packages|length %}
diff --git a/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html b/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
index e6ef816e..8eca0357 100644
--- a/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
+++ b/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the reverse dependences and sizes of a package 'data' used in the Packages table #}
-{% load projecttags %}
+{% load jobtags %}
{% with package_deps=data.package_dependencies_target|for_target:extra.target_name %}
{% with count_package=package_deps.packages|length %}
diff --git a/lib/srtgui/templates/sources-toastertable.html b/lib/srtgui/templates/sources-toastertable.html
index 1721e3b0..279f279c 100644
--- a/lib/srtgui/templates/sources-toastertable.html
+++ b/lib/srtgui/templates/sources-toastertable.html
@@ -1,5 +1,8 @@
{% extends 'base.html' %}
+
{% load static %}
+{% load jobtags %}
+{% load humanize %}
{% block extraheadcontent %}
<link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
@@ -24,6 +27,14 @@
</div>
</div>
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
<div class="row">
<div class="col-md-12">
@@ -66,6 +77,76 @@
});
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+ //
+ // Listeners that must reside in the Toaster table context
+ //
+
+ /* Dynamically run the job function */
+ $('.run-update-job').click(function(){
+ var datasource_id = $(this).attr('x-data');
+ var result = confirm("Are you sure you want to force update this datasource right now?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-run-update-job',
+ "id" : datasource_id,
+ });
+ }
+ });
+
+ // Toggle the data source init/update enables
+ $('.source-enabled').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-toggle-enable',
+ "id" : $(this).attr('x-data'),
+ });
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error == "no_refresh") {
+ if (data.data_message) {
+ const nv_pair = data.data_message.split("=");
+ document.getElementById('attr_'+nv_pair[0]).innerText = nv_pair[1];
+ if (0 <= nv_pair[1].indexOf("DISABLE ")) {
+ document.getElementById('next_on_'+nv_pair[0]).style.display = 'none';
+ document.getElementById('next_off_'+nv_pair[0]).style.display = 'inline';
+ } else {
+ document.getElementById('next_on_'+nv_pair[0]).style.display = 'inline';
+ document.getElementById('next_off_'+nv_pair[0]).style.display = 'none';
+ };
+ };
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_sources_commit' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
});
});
</script>
diff --git a/lib/srtgui/templates/sources.html b/lib/srtgui/templates/sources.html
index 1b017c06..df2852a2 100644
--- a/lib/srtgui/templates/sources.html
+++ b/lib/srtgui/templates/sources.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} Data Sources - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/srtool_metadata_include.html b/lib/srtgui/templates/srtool_metadata_include.html
index 7471f0f9..297510a1 100755
--- a/lib/srtgui/templates/srtool_metadata_include.html
+++ b/lib/srtgui/templates/srtool_metadata_include.html
@@ -9,15 +9,19 @@
<i>Status:</i> {{object.get_status_text}},&nbsp;&nbsp;
{% if default_category == "CVE" %}
{% if request.user.is_creator %}
- <i>Publish</i> = {{object.get_publish_text}}, <i>Publish Date</i> = {{object.publish_date}}
+ <i>Publish</i> = {{object.get_publish_text}}, <i>Publish Date</i> = {{object.publish_date}}, <i>Acknowledge Date</i> = {{object.acknowledge_date|date:'Y-m-d'}}, <i>Initial Release</i> = {{object.publishedDate}}, <i>Last Modified</i> = {{object.lastModifiedDate}}
<!--<a class="btn btn-default navbar-btn " id="login-button" href="">Publish Now</a> -->
+ , <i>Public = </i> {{object.get_public_text}}&nbsp;&nbsp;
{% else %}
<i>Publish = {{object.get_publish_text}}</i>
{% endif %}
</LI>
- <LI>
- <i>Packages:</i> {{object.packages}}
- </LI>
+ {% elif default_category == "VULNERABILITY" %}
+ {% if request.user.is_creator %}
+ <i>Public = </i> {{object.get_public_text}}&nbsp;&nbsp;
+ {% endif %}
+ <i>Outcome:</i> {{object.get_outcome_text}}
+ <p>
{% else %}
<i>Outcome:</i> {{object.get_outcome_text}}
<p>
@@ -26,10 +30,16 @@
<i>Public Notes:</i> {{object.comments}}
</LI>
{% if request.user.is_creator %}
+ <LI>
+ <i>Private Notes:</i> {{object.comments_private}}
+ </LI>
+ {% endif %}
<LI>
- <i>Private Notes:</i> {{object.comments_private}}
+ <i>Tags:</i> {{object.tags}}
+ </LI>
+ <LI>
+ <i>Affected Components:</i> {{object.packages}}
</LI>
- {% endif %}
</UL>
</fieldset>
@@ -42,10 +52,10 @@
<p><i>Priority</i> =
<select name="Priority" id="select-priority-state">
<option value="0" {% if 0 == object.priority %}selected{% endif %}>Undefined</option>
- <option value="1" {% if 1 == object.priority %}selected{% endif %}>Minor</option>
- <option value="2" {% if 2 == object.priority %}selected{% endif %}>Low</option>
- <option value="3" {% if 3 == object.priority %}selected{% endif %}>Medium</option>
- <option value="4" {% if 4 == object.priority %}selected{% endif %}>High</option>
+ <option value="1" {% if 1 == object.priority %}selected{% endif %}>Low</option>
+ <option value="2" {% if 2 == object.priority %}selected{% endif %}>Medium</option>
+ <option value="3" {% if 3 == object.priority %}selected{% endif %}>High</option>
+ <option value="4" {% if 4 == object.priority %}selected{% endif %}>Critical</option>
</select>
&nbsp;&nbsp;
<i>Status</i> =
@@ -56,7 +66,21 @@
<option value="3" {% if 3 == object.status %}selected{% endif %}>Investigate</option>
<option value="4" {% if 4 == object.status %}selected{% endif %}>Vulnerable</option>
<option value="5" {% if 5 == object.status %}selected{% endif %}>Not Vulnerable</option>
+ <option value="6" {% if 6 == object.status %}selected{% endif %}>(New)</option>
+ <option value="7" {% if 7 == object.status %}selected{% endif %}>(Investigate)</option>
+ <option value="8" {% if 8 == object.status %}selected{% endif %}>(Vulnerable)</option>
+ <option value="9" {% if 9 == object.status %}selected{% endif %}>(Not Vulnerable)</option>
+ </select>
+
+ {% if default_category == "CVE" or default_category == "VULNERABILITY" %}
+ &nbsp;&nbsp;
+ <i>Public</i> =
+ <select name="Public" id="select-public-state">
+ <option value="1" {% if object.public %}selected{% endif %}>Public</option>
+ <option value="0" {% if not object.public %}selected{% endif %}>Private</option>
</select>
+ {% endif %}
+
<p>
{% if default_category == "CVE" %}
<i>Publish</i> =
@@ -78,10 +102,21 @@
<option value="3" {% if 3 == object.outcome_state %}selected{% endif %}>Closed (Won't Fix)</option>
</select>
{% endif %}
- <p>Note: <input type="text" placeholder="Edit Note" id="text-note" size="80" value="{{object.comments}}"></p>
+ <p>Notes: <input type="text" placeholder="Edit comments" id="text-note" size="80" value="{{object.comments}}"></p>
{% if request.user.is_creator %}
- <p>Private Note: <input type="text" placeholder="Edit Private Note" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
+ <p>Private Notes: <input type="text" placeholder="Edit private comments" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
{% endif %}
+ <p>Tags: <input type="text" placeholder="Edit tags" id="text-tags" size="80" value="{{object.tags}}"></p>
+ <p>Affected Components: <input type="text" placeholder="Edit affected components" id="text-affected-components" size="80" value="{{object.packages}}"></p>
+ {% if default_category == "CVE" %}
+ <i>Acknowledge Date</i> = <input type="text" placeholder="Acknowledge Date" id="text-acknowledge-date" size="40" value="{{object.acknowledge_date|date:'Y-m-d'}}"> (YYYY-MM-DD, or empty string for None)<p>
+ {% endif %}
+
+ {% if default_category == "VULNERABILITY" %}
+ <p>Description:<p>
+ <textarea name="description" rows="9" style="min-width: 100%" class="localblue" id="text-description">{{object.description}}</textarea>
+ {% endif %}
+
<p><p>
</fieldset>
</div>
diff --git a/lib/srtgui/templates/tablesort.html b/lib/srtgui/templates/tablesort.html
index 36247429..1224b3bf 100644
--- a/lib/srtgui/templates/tablesort.html
+++ b/lib/srtgui/templates/tablesort.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- component to display a generic table -->
{% if disable_sort %}
<table class="table table-bordered table-hover" id="detail_table">
diff --git a/lib/srtgui/templates/tbd.html b/lib/srtgui/templates/tbd.html
index d8979f6e..a50d806f 100644
--- a/lib/srtgui/templates/tbd.html
+++ b/lib/srtgui/templates/tbd.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} TBD {% endblock %}
diff --git a/lib/srtgui/templates/toastertable-simple.html b/lib/srtgui/templates/toastertable-simple.html
index 56cd2ce3..858c87e6 100644
--- a/lib/srtgui/templates/toastertable-simple.html
+++ b/lib/srtgui/templates/toastertable-simple.html
@@ -1,6 +1,6 @@
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
<script src="{% static 'js/table.js' %}"></script>
<script src="{% static 'js/layerBtn.js' %}"></script>
diff --git a/lib/srtgui/templates/toastertable.html b/lib/srtgui/templates/toastertable.html
index 6882b394..a33321a9 100644
--- a/lib/srtgui/templates/toastertable.html
+++ b/lib/srtgui/templates/toastertable.html
@@ -1,10 +1,28 @@
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
<script src="{% static 'js/table.js' %}"></script>
<script src="{% static 'js/layerBtn.js' %}"></script>
<script>
+
+ var lstFilterval = [];
+ function ClearFilter(test) {
+ (function(){
+ var ctx = {
+ tableName : "{{table_name}}",
+ url : "{{ xhr_table_url }}?format=json",
+ title : "{{title}}",
+ };
+
+ try {
+ tableInit(ctx,test);
+ } catch (e) {
+ document.write("Problem loading table widget: " + e);
+ }
+ })();
+ }
+
$(document).ready(function() {
(function(){
@@ -26,7 +44,9 @@
{% include 'toastertable-filter.html' %}
<div class="row-fluid" id="empty-state-{{table_name}}" style="display:none">
- <div class="alert alert-info">{{empty_state|safe}}</div>
+ <div class="alert alert-info">{{empty_state|safe}}
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<button id="clear-all-filter" class="btn btn-default navbar-btn" align="right">Clear All Filters</button>
+ </div>
</div>
<div id="no-results-{{table_name}}" style="display:none">
@@ -69,6 +89,7 @@
</div>
</div>
<button class="btn btn-default" id="search-submit-{{table_name}}" >Search</button>
+ <span class="glyphicon glyphicon-question-sign get-help" title="Default is to 'and' terms. Use 'OR' to 'or' terms. Use '-' to exclude terms. Example:abc OR 'def ghi' AND -jkl"></span>
</form>
<form class="navbar-form navbar-right">
<div clas="form-group">
@@ -82,6 +103,11 @@
</select>
</div>
</form>
+
+ <div class="btn-group navbar-right">
+ <button id="clear-all-filter" class="btn btn-default navbar-btn " >Clear All Filters</button>&nbsp;&nbsp;&nbsp;&nbsp;
+ </div>
+
<div class="btn-group navbar-right">
<button id="edit-columns-button" class="btn btn-default navbar-btn dropdown-toggle" data-toggle="dropdown">Edit columns
<span class="caret"></span>
diff --git a/lib/srtgui/templates/triage_cves.html b/lib/srtgui/templates/triage_cves.html
index ddef1501..0cc774d3 100644
--- a/lib/srtgui/templates/triage_cves.html
+++ b/lib/srtgui/templates/triage_cves.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Export Report {% endblock %}
diff --git a/lib/srtgui/templates/unavailable_artifact.html b/lib/srtgui/templates/unavailable_artifact.html
index fc77e405..dedaa41b 100644
--- a/lib/srtgui/templates/unavailable_artifact.html
+++ b/lib/srtgui/templates/unavailable_artifact.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
diff --git a/lib/srtgui/templates/users.html b/lib/srtgui/templates/users.html
index fd2c8c18..970291b6 100644
--- a/lib/srtgui/templates/users.html
+++ b/lib/srtgui/templates/users.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} Users - SRTool {% endblock %}
@@ -34,46 +34,56 @@
<div style="padding-left: 25px;">
<p><b>Reader</b>: User that can read the content (Field, TechPubs)</p>
<p><b>Contributor</b>: Reader that can can add notes and attachements (Engineers, Test, Managers)</p>
- <p><b>Creator</b>: Contributor that can create Investiations and defect records </p>
+ <p><b>Creator</b>: Contributor that can create Investigations and defect records </p>
<p><b>Admin</b>: Creator that can manage users, data sources</p>
</div>
</div>
- <p/>
</div>
</div>
<div class="row" style="padding-left: 25px;">
<h3>User List
- <a class="btn btn-default navbar-btn " id="new-investigation-attachement" href="{% url 'edit_user' 0 %}">Add user</a>
+ <a class="btn btn-default navbar-btn " href="{% url 'edit_user' 0 %}">Add user</a>
</h3>
- <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <table class="table table-striped table-condensed">
<thead>
<tr>
+ {% if user.is_admin %}
+ <th>ID</th>
+ {% endif %}
<th>User</th>
<th>First</th>
<th>Last</th>
<th>Email</th>
<th>Role</th>
+ <th>Time zone</th>
<th>Group</th>
+ <th>Last Login</th>
<th>Manage</th>
</tr>
</thead>
{% if object.all %}
- {% for user in object.all %}
+ {% for user_obj in object.all %}
<tr>
- <td>{{ user.username }} </td>
- <td>{{ user.first_name }} </td>
- <td>{{ user.last_name }} </td>
- <td>{{ user.email }} </td>
- <td>{{ user.role }} </td>
- <td>{{ user.get_groups }} </td>
+ {% if user.is_admin %}
+ <td>{{ user_obj.id }}</td>
+ {% endif %}
+ <td>{{ user_obj.username }}</td>
+ <td>{{ user_obj.first_name }}</td>
+ <td>{{ user_obj.last_name }}</td>
+ <td>{{ user_obj.email }}</td>
+ <td>{{ user_obj.role }}</td>
+ <td>{{ user_obj.timezone }}</td>
+ <td>{% if user_obj.is_superuser %}SuperUser{% else %}{{ user_obj.get_groups }}{%endif %}</td>
+ <td>{{ user_obj.last_login|date:'Y-m-d'}}</td>
<td>
- {% if user.is_superuser or not user.is_staff %}
+ {% if user_obj.is_superuser or not user_obj.is_staff %}
<span id="user_'+{{user.id}}+'" class="js-user-name"></span>
- <a href="{% url 'edit_user' user.id %}"><span class="glyphicon glyphicon-edit js-icon-pencil-config_var"></span></a>
- <span class="glyphicon glyphicon-trash trash-user" id="user_trash_'+{{user.id}}+'" x-data="{{user.username}}:{{user.id}}"></span>
+ <a href="{% url 'edit_user' user_obj.id %}"><span class="glyphicon glyphicon-edit js-icon-pencil-config_var"></span></a>
+ &nbsp;&nbsp;
+ <span class="glyphicon glyphicon-trash trash-user" id="user_trash_'+{{user_obj.id}}+'" x-data="{{user_obj.username}}:{{user_obj.id}}"></span>
{% else %}
Built-in
{% endif %}
@@ -91,6 +101,89 @@
</div>
+<!-- pass the full user list here -->
+{% for user in object.all %}
+<input type="hidden" class="js-checkbox-users-list" value="{{user.id}}|{{user.user_fullname}}">
+{% endfor %}
+
+<div class="row" id="group-section" style="padding-left: 25px;width:70%;">
+
+ <h3 style="white-space: nowrap;">Group List ({{builtin_groups}})
+ <a class="btn btn-default navbar-btn" id="add_group">Add group</a>
+ <!--<button class="execute" id="add_group" style="display:inline-block;"> Add group: </button>-->
+ <input type="text" value="" style="width:16%;display:inline-block;" class="form-control" id="add-group-name" placeholder="Name for new group">
+ </h3>
+
+ <div class="row" id="edit_group_options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <h3>Group Edit:
+ <a class="btn btn-default navbar-btn" style="color:DarkCyan;" id="edit-save" >Save</a>
+ <a class="btn btn-default navbar-btn" style="color:DarkCyan;" id="edit-cancel" >Cancel</a>
+ </h3>
+ <label style="width:100px;height:24px;">Group name:</label>
+ <input type="text" value="" style="width:25%;" class="form-control" id="new-group-name" placeholder="Name for the group">
+ <input type="text" style="display:none;" id="new-group-id" >
+ <br>
+ <label style="width:100px;height:24px;">User list:</label>
+ <div id="all-users" class="scrolling"></div>
+ <br>
+ <hr>
+ </div>
+
+ <table class="table table-striped table-condensed">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>User</th>
+ <th>Manage User</th>
+ <th>Manage Group</th>
+ </tr>
+ </thead>
+
+ {% if groups.all %}
+ {% for group in groups.all %}
+ <tr>
+ <td>{{ group.name }} </td>
+ <td></td>
+ <td></td>
+ <td>
+ {% if group.name in builtin_groups %}
+ Built-in
+ {% else %}
+ <span id="group_'+{{group.id}}+'" class="js-group-name"></span>
+ <a id="edit_group">
+ <span class="glyphicon glyphicon-edit js-icon-pencil-config_var edit_group"
+ x-data="{{group.id}}|{{group.name}}|{% for user in group.user_set.all %}{{user.user_fullname}},{% endfor %}">
+ </span></a>
+ &nbsp;&nbsp;
+ <span class="glyphicon glyphicon-trash trash-group" x-data="{{group.id}}|{{group.name}}"></span>
+ {% endif %}
+ </td>
+ </tr>
+ {% for user in group.user_set.all %}
+ <tr>
+ <td></td>
+ <td>{{ user.user_fullname }} </td>
+ <td>
+ {% if group.name in builtin_groups %}
+ (Managed above)
+ {% else %}
+ <span class="glyphicon glyphicon-trash trash-user-from-group" x-data="{{group.id}}|{{group.name}}|{{user.id}}|{{user.user_fullname}}"></span>
+ {% endif %}
+ </td>
+ <td></td>
+ </tr>
+ {% endfor %}
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No groups found</td>
+ </tr>
+ {% endif %}
+
+ </table>
+
+</div>
+
<!-- Javascript support -->
<script>
$(document).ready(function() {
@@ -137,8 +230,86 @@
}
});
- });
+ $('.edit_group').click(function() {
+ document.getElementById("new-group-name").value= $(this).attr('x-data').split('|')[1];
+ document.getElementById("new-group-id").value= $(this).attr('x-data').split('|')[0];
+ $("#edit_group_options").slideDown();
+ // build the user list: avoid false substring matches by including comma separators
+ var html = "";
+ var group_user_set = "," + $(this).attr('x-data').split('|')[2] + ",";
+ var users_list = document.getElementsByClassName('js-checkbox-users-list');
+ // Add the checked boxes first
+ for (var i = 0, length = users_list.length; i < length; i++) {
+ var status = '" >';
+ var user_id = users_list[i].value.split("|")[0];
+ var user_name = users_list[i].value.split("|")[1];
+ if (0 <= group_user_set.indexOf(","+user_name+",")) {
+ status = '" checked="checked">';
+ };
+ html += '<div class="checkbox"><label><input type="checkbox" class="checkbox-users" x-data="'+user_id+'" value="'+users_list[i].value+status+user_name+'</label></div>';
+ }
+ document.getElementById("all-users").innerHTML = html;
+ //document.getElementById("edit_group_options").focus();
+ document.getElementById("group-section").scrollIntoView();
+ });
+
+ $('#edit-save').click(function() {
+ $("#edit_group_options").slideUp();
+ var user_id_list = "";
+ $("input[type='checkbox']").each(function(){
+ var user_id = $(this).attr('x-data');
+ var ischecked = $(this).is(":checked");
+ if (ischecked) {
+ user_id_list = user_id_list + user_id + ',';
+ }
+ });
+ postCommitAjaxRequest({
+ "action" : 'submit-group-users',
+ "group_id" : document.getElementById("new-group-id").value,
+ "user_id_list" : user_id_list,
+ });
+ });
+
+ $('#edit-cancel').click(function() {
+ $("#edit_group_options").slideUp();
+ });
+
+ $('#add_group').click(function() {
+ var new_group_name = document.getElementById("add-group-name").value;
+ var result = confirm("Create new group '"+new_group_name+"'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-group-create',
+ "group_name" : new_group_name,
+ });
+ };
+ });
+
+ $('.trash-group').click(function() {
+ var result = confirm("Are you sure you want to remove group '" + $(this).attr('x-data').split('|')[1] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashgroup',
+ "record_id" : $(this).attr('x-data').split('|')[0],
+ });
+ }
+ });
+ $('.trash-user-from-group').click(function() {
+ var group_id = $(this).attr('x-data').split('|')[0];
+ var group_name = $(this).attr('x-data').split('|')[1];
+ var user_id = $(this).attr('x-data').split('|')[2];
+ var user_name = $(this).attr('x-data').split('|')[3];
+ var result = confirm("Are you sure you want to remove user '" + user_name + "' from group '" + group_name + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashusergroup',
+ "group_id" : group_id,
+ "record_id" : user_id,
+ });
+ }
+ });
+ });
</script>
diff --git a/lib/srtgui/templates/vulnerability.html b/lib/srtgui/templates/vulnerability.html
index 35b2c0e6..c8fdd995 100644
--- a/lib/srtgui/templates/vulnerability.html
+++ b/lib/srtgui/templates/vulnerability.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block extraheadcontent %}
<style>
@@ -45,12 +45,25 @@
<div class="row">
<div class="col-md-12">
<div class="page-header build-data">
- <span class="srt_h1">Vulnerability {{object.get_long_name}} {% if not object.public %} <font color="red">[PRIVATE]</font> {% endif %}</span>
+ <span id="vul-name-container">
+ &nbsp;&nbsp;
+ <span id="vulnerability-name" class="srt_h1">Vulnerability {{object.get_long_name}}
+ {% if request.user.is_contributor %}&nbsp;&nbsp;<span class="glyphicon glyphicon-edit" id="vul-change-form-toggle"></span>{% endif %}
+ {% if not object.public %}&nbsp;&nbsp;<font color="red" >[PRIVATE]</font> {% endif %}
+ </span>
{% if request.user.is_creator %}
<span style="padding-left:30px;"><button id="select-quickedit" class="btn btn-default" type="button">Edit Status...</button></span>
<span style="padding-left:30px;"><button id="select-notification" class="btn btn-default" type="button">Create Notification ...</button></span>
<span style="padding-left:30px;"><button id="select-delete" class="btn btn-default" type="button" x-data="{{object.id}}">Delete</button></span>
{% endif %}
+ </span>
+ <form id="vul-name-change-form" class="form-inline" style="display: none;">
+ <div class="form-group">
+ <input class="form-control input-lg" type="text" id="vul-name-change-input" autocomplete="off" value="{{object.name}}">
+ </div>
+ <button id="vul-name-change-btn" class="btn btn-default btn-lg" type="button">Save</button>
+ <a href="#" id="vul-name-change-cancel" class="btn btn-lg btn-link">Cancel</a>
+ </form>
</div>
</div>
</div>
@@ -73,7 +86,8 @@
<dt>CVE Dictionary Entry:</dt>
<dd>
{% for vc in object.vulnerability_to_cve.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'cve' vc.cve.name %}">{{vc.cve.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'cve' vc.cve.name %}">{{vc.cve.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-cve" id="detach_cve_'+{{vc.cve.id}}+'" x-data="{{vc.cve.id}}"></span>
{% endfor %}
</dd>
@@ -104,6 +118,15 @@
{% if not forloop.first %}| {% endif %}{{vc.cve.cvssV2_baseScore}},{{vc.cve.cvssV2_severity}} </a>
{% endfor %}
</dd>
+
+ {% if request.user.is_creator %}
+ <dt>Attach CVE:</dt>
+ <dd>
+ <input type="text" id="cve_name" name="cve_name" size="20" placeholder="(CVE name)">
+ <button class="execute btn btn-info" id="submit-attach-cve" style="margin-bottom: 5px; margin-top: 0px;">Attach CVE</button>
+ </dd>
+ {% endif %}
+
</dl>
</div>
</div>
@@ -148,26 +171,26 @@
</tr>
</thead>
- {% if object.vulnerability_investigation.all %}
- {% for investigation in object.vulnerability_investigation.all %}
+ {% if object.investigation_list %}
+ {% for v2i in object.investigation_list %}
<tr>
- <td><a href="{% url 'product' investigation.product.id %}">{{ investigation.product.long_name }}<a></td>
- <td><a href="{% url 'investigation' investigation.id %}">{{ investigation.name }}<a></td>
- <td>{{ investigation.get_status_text }}</td>
- <td>{{ investigation.get_outcome_text }}</td>
+ <td><a href="{% url 'product' v2i.investigation.product.id %}">{{ v2i.investigation.product.long_name }}<a></td>
+ <td><a href="{% url 'investigation' v2i.investigation.id %}">{{ v2i.investigation.name }}<a></td>
+ <td>{{ v2i.investigation.get_status_text }}</td>
+ <td>{{ v2i.investigation.get_outcome_text }}</td>
<td>
- {% for ij in investigation.investigation_to_defect.all %}
+ {% for ij in v2i.investigation.investigation_to_defect.all %}
{% if not forloop.first %}| {% endif %}<a href="{% url 'defect' ij.defect.id %}">{{ij.defect.name}} </a>
{% endfor %}
</td>
<td>
- {% for ij in investigation.investigation_to_defect.all %}
+ {% for ij in v2i.investigation.investigation_to_defect.all %}
{% if not forloop.first %}| {% endif %}<a href="{% url 'defect' ij.defect.id %}">{{ij.defect.release_version}} </a>
{% endfor %}
</td>
{% if request.user.is_creator %}
<td>
- <span class="glyphicon glyphicon-trash trash-investigation" id="affected_trash_'+{{investigation.id}}+'" x-data="{{investigation.id}}"></span>
+ <span class="glyphicon glyphicon-trash trash-investigation" id="affected_trash_'+{{v2i.investigation.id}}+'" x-data="{{v2i.investigation.id}}"></span>
</td>
{% endif %}
</tr>
@@ -268,10 +291,10 @@
<td>{{ u.author }}</td>
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
- <form id="downloadbanner" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <form id="downloadbanner-{{forloop.counter}}" enctype="multipart/form-data" method="post" >{% csrf_token %}
<input type="hidden" id="action" name="action" value="download">
<input type="hidden" id="record_id" name="record_id" value={{u.id}}>
- <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{u.id}}"></span>
+ <span class="glyphicon glyphicon-download-alt submit-downloadattachment" id="attachment_download_'+{{u.id}}+'" x-data="{{forloop.counter}}"></span>
{% if request.user.is_creator %}
<span class="glyphicon glyphicon-trash trash-attachment" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
{% endif %}
@@ -379,7 +402,9 @@
<thead>
<tr>
<th>User</th>
+<!--
<th>Manage</th>
+-->
</tr>
</thead>
@@ -394,11 +419,13 @@
{% if object.vulnerability_users.all %}
{% for u in object.vulnerability_users.all %}
<tr>
- <td>{{ u.user.name }}</td>
+ <td>{{ u.user.username }}</td>
+<!--
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
<span class="glyphicon glyphicon-trash trash-useraccess" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
</td>
+-->
</tr>
{% endfor %}
{% else %}
@@ -440,6 +467,9 @@
</table>
</div>
+<HR ALIGN="center" WIDTH="100%">
+Created={{object.srt_created}} Updated={{object.srt_updated}}
+
<!-- Javascript support -->
<script>
var selected_addrelatedproduct=false;
@@ -452,6 +482,13 @@
var selected_quickedit=false;
var selected_notifyedit=false;
+ /* Vulnerability Name change support */
+ var vulNameForm = $("#vul-name-change-form");
+ var vulNameContainer = $("#vul-name-container");
+ var vulName = $(".vul-name");
+ var vulNameFormToggle = $("#vul-change-form-toggle");
+ var vulNameChangeCancel = $("#vul-name-change-cancel");
+
window.onload = function() {
$("input[name=status][value=" + {{ object.status }} + "]").prop('checked', true);
$("input[name=outcome][value=" + {{ object.outcome }} + "]").prop('checked', true);
@@ -470,8 +507,15 @@
alert("error on request:\n" + data.error);
return;
}
- // reload the page with the updated tables
- location.reload(true);
+ // reload the page with the updated tables
+ if (('new_name' in data) && (0 == data.new_name.indexOf("url:"))) {
+ window.location.replace(data.new_name.replace("url:",""));
+ } else if (('new_name' in data) && ("" != data.new_name)) {
+ var new_url = "{% url 'vulnerability' 123 %}".replace("123",data.new_name);
+ window.location.replace(new_url);
+ } else {
+ location.reload(true);
+ }
}
function onCommitAjaxError(jqXHR, textstatus, error) {
@@ -583,7 +627,7 @@
});
$('.submit-downloadattachment').click(function() {
- $("#downloadbanner").submit();
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
});
$('.trash-attachment').click(function() {
@@ -685,7 +729,7 @@
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -694,16 +738,39 @@
$('#submit-quickedit').click(function(){
var note=$('#text-note').val().trim()
var private_note=$('#text-private-note').val().trim()
+ var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
+ var public=$('#select-public-state').val();
var outcome=$('#select-outcome-state').val();
+ var affected_components=$('#text-affected-components').val();
+ var description=$('#text-description').val();
+ /* Double check any public status changes */
+ {% if object.public %}
+ if ("0" == public) {
+ if (! confirm("Are you sure you want to make this Vulnerability and all its related records as PRIVATE?")) {
+ return
+ }
+ }
+ {% endif %}
+ {% if not object.public %}
+ if ("1" == public) {
+ if (! confirm("Are you sure you want to make this Vulnerability and all its related records as PUBLIC?")) {
+ return
+ }
+ }
+ {% endif %}
postCommitAjaxRequest({
"action" : 'submit-quickedit',
"note" : note,
"private_note" : private_note,
+ "tags" : tags,
"status" : status,
+ "public" : public,
"outcome" : outcome,
"priority" : priority,
+ "affected_components" : affected_components,
+ "description" : description,
});
});
@@ -712,13 +779,13 @@
selected_notifyedit=false;
$("#details-notify-edit").slideUp();
$("#display-status").slideDown();
- document.getElementById("select-notification").innerText = "Create Notification ...";
+ document.getElementById("select-notification").innerText = "Create notification ...";
$("#select-notification").removeClass("blueborder");
} else {
selected_notifyedit=true;
$("#display-status").slideUp();
$("#details-notify-edit").slideDown();
- document.getElementById("select-notification").innerText = "Close notification";
+ document.getElementById("select-notification").innerText = "Cancel notification";
$("#select-notification").addClass("blueborder");
document.getElementById("select-category-notify").focus();
}
@@ -769,6 +836,47 @@
}
});
+ /* Vulnerability name change functionality */
+ vulNameFormToggle.click(function(e){
+ e.preventDefault();
+ vulNameContainer.hide();
+ vulNameForm.fadeIn();
+ });
+ vulNameChangeCancel.click(function(e){
+ e.preventDefault();
+ vulNameForm.hide();
+ vulNameContainer.fadeIn();
+ });
+ $("#vul-name-change-btn").click(function(){
+ var newvulName = $("#vul-name-change-input").val();
+ postCommitAjaxRequest({
+ "action" : 'submit-newname',
+                "old_name" : '{{object.name|escapejs}}',
+ "new_name" : newvulName,
+ });
+ });
+
+ $("#submit-attach-cve").click(function(){
+ var cve_name=$("#cve_name").val();
+ if ("" == cve_name) {
+ alert("No CVE name was entered");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'submit-attach-cve',
+ "cve_name" : cve_name,
+ });
+ });
+ $('.detach-cve').click(function() {
+ var result = confirm("Are you sure you want to detach this CVE?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-detach-cve',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
/* Set the report link */
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
});
diff --git a/lib/srtgui/templatetags/projecttags.py b/lib/srtgui/templatetags/jobtags.py
index d7bc5319..4a987d99 100644..100755
--- a/lib/srtgui/templatetags/projecttags.py
+++ b/lib/srtgui/templatetags/jobtags.py
@@ -23,6 +23,7 @@ import os
from os.path import relpath
import re
import json as JsonLib
+from datetime import datetime, timedelta
from django import template
from django.template.defaultfilters import filesizeformat
@@ -275,6 +276,24 @@ def get_dict_value(dictionary, key):
return ''
@register.filter
+def get_strdict_value(dictionary_str, key):
+ """ return the value of a dictionary key
+ where the dictionary is in string form
+ """
+ try:
+ dictionary = JsonLib.loads(dictionary_str)
+ return dictionary[key]
+    except (KeyError, IndexError, TypeError, JsonLib.JSONDecodeError):
+ return ''
+
+def get_tag_key(tag,key,default=None):
+    d = JsonLib.loads(tag)
+ if key in d:
+ return d[key]
+ return default
+
+
+@register.filter
def is_shaid(text):
""" return True if text length is 40 characters and all hex-digits
"""
@@ -324,3 +343,13 @@ def has_group(user, group_name):
group = Group.objects.get(name=group_name)
return group in user.groups.all()
+@register.filter(name='shift_timezone')
+def shift_timezone(datetime_str, hours_offset):
+ # do some calculation (offset + time passed)
+ try:
+ dt = datetime.strptime(datetime_str,'%Y-%m-%d %H:%M:%S')
+ dt += timedelta(hours=int(hours_offset))
+ return dt.strftime('%Y-%m-%d %H:%M:%S')
+    except (ValueError, TypeError):
+ return("TIME_ERROR:%s" % datetime_str)
+
diff --git a/lib/srtgui/templatetags/multi_tags.py b/lib/srtgui/templatetags/multi_tags.py
new file mode 100755
index 00000000..6a436825
--- /dev/null
+++ b/lib/srtgui/templatetags/multi_tags.py
@@ -0,0 +1,22 @@
+import os
+
+from django import template
+from django.utils.safestring import mark_safe
+
+ml_register = template.Library()
+
+@ml_register.filter(name = 'multitag')
+def multitag(tags):
+ """
+ Convert a comma-delimited list into HTML separate lines.
+ """
+ return mark_safe(tags.replace(',','<p>'))
+
+@ml_register.filter(name = 'get_dict_value')
+def get_dict_value(dictionary, key):
+ """ return the value of a dictionary key
+ """
+ try:
+ return dictionary[key]
+ except (KeyError, IndexError):
+ return ''
diff --git a/lib/srtgui/templatetags/project_url_tag.py b/lib/srtgui/templatetags/project_url_tag.py
deleted file mode 100644
index 51ccc560..00000000
--- a/lib/srtgui/templatetags/project_url_tag.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from django import template
-from django.urls import reverse
-
-register = template.Library()
-
-def project_url(parser, token):
- """
- Create a URL for a project's main page;
- for non-default projects, this is the configuration page;
- for the default project, this is the project builds page
- """
- try:
- tag_name, project = token.split_contents()
- except ValueError:
- raise template.TemplateSyntaxError(
- "%s tag requires exactly one argument" % tag_name
- )
- return ProjectUrlNode(project)
-
-class ProjectUrlNode(template.Node):
- def __init__(self, project):
- self.project = template.Variable(project)
-
- def render(self, context):
- try:
- project = self.project.resolve(context)
- if project.is_default:
- return reverse('projectbuilds', args=(project.id,))
- else:
- return reverse('project', args=(project.id,))
- except template.VariableDoesNotExist:
- return ''
-
-register.tag('project_url', project_url)
diff --git a/lib/srtgui/typeaheads.py b/lib/srtgui/typeaheads.py
index e32c16ad..800e9b0e 100644
--- a/lib/srtgui/typeaheads.py
+++ b/lib/srtgui/typeaheads.py
@@ -19,6 +19,8 @@
import subprocess
from srtgui.widgets import ToasterTypeAhead
+from orm.models import RecipeTable
+
from django.urls import reverse
from django.core.cache import cache
@@ -184,3 +186,27 @@ class GitRevisionTypeAhead(ToasterTypeAhead):
'detail': '[ %s ]' % str(rev)})
return results
+
+
+class RecipeTypeAhead(ToasterTypeAhead):
+ """ Typeahead for all the recipes """
+ def apply_search(self, search_term, cve, request):
+
+ recipes = RecipeTable.objects.all().order_by("recipe_name")
+
+ primary_results = recipes.filter(recipe_name__icontains=search_term)
+
+ results = []
+
+ for recipe in list(primary_results):
+
+ detail = ''
+ needed_fields = {
+ 'id': recipe.pk,
+ 'name': recipe.recipe_name,
+ 'detail': detail,
+ }
+
+ results.append(needed_fields)
+
+ return results
diff --git a/lib/srtgui/urls.py b/lib/srtgui/urls.py
index 26c484d8..45d15fde 100644
--- a/lib/srtgui/urls.py
+++ b/lib/srtgui/urls.py
@@ -16,11 +16,13 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView
from srtgui import tables
from srtgui import views
+from srtgui import typeaheads
+from srtgui import widgets
urlpatterns = [
# landing page
@@ -34,12 +36,15 @@ urlpatterns = [
url(r'^cves/$',
tables.CvesTable.as_view(template_name="cves-toastertable.html"),
name='cves'),
+
+ # "cve_status" is passed by URL: redirect("/srtgui/select-cves/?cve_status=%d" % cve_select_status)
url(r'^select-cves/$',
tables.SelectCveTable.as_view(template_name="cves-select-toastertable.html"),
name='select-cves'),
url(r'^select-cves/(?P<cve_status>\d+)$',
tables.SelectCveTable.as_view(template_name="cves-select-toastertable.html"),
name='select-cves'),
+
url(r'^cve-create/$', views.cve_create, name="cve_create"),
url(r'^cve-alternates/(?P<cve_pk>\d+)$', views.cve_alternates, name="cve_alternates"),
@@ -88,16 +93,26 @@ urlpatterns = [
tables.PackageFilterDetailTable.as_view(template_name="package-filter-detail-toastertable.html"),
name='package-filter-detail'),
+ url(r'^publish-list/$',
+ tables.PublishListTable.as_view(template_name="publish-list-toastertable.html"),
+ name='publish-list'),
+ url(r'^publish-cve/$',
+ tables.PublishCveTable.as_view(template_name="publish-cve-toastertable.html"),
+ name='publish-cve'),
+ url(r'^publish-defect/$',
+ tables.PublishDefectTable.as_view(template_name="publish-defect-toastertable.html"),
+ name='publish-defect'),
+
url(r'^select-publish/$',
tables.SelectPublishTable.as_view(template_name="publish-select-toastertable.html"),
name='select-publish'),
-
url(r'^update-published/$',
tables.UpdatePublishedTable.as_view(template_name="published-select-toastertable.html"),
name='update-published'),
url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
+ # XHR URLs
url(r'^xhr_triage_commit/$', views.xhr_triage_commit,
name='xhr_triage_commit'),
@@ -117,9 +132,16 @@ urlpatterns = [
url(r'^xhr_notifications/$', views.xhr_notifications,
name='xhr_notifications'),
+ url(r'^xhr_errorlogs/$', views.xhr_errorlogs,
+ name='xhr_errorlogs'),
+
url(r'^xhr_packages/$', views.xhr_packages,
name='xhr_packages'),
+ url(r'^xhr_publish/$', views.xhr_publish,
+ name='xhr_publish'),
+
+ # Management URLs
url(r'^manage/$', views.management, name='manage'),
url(r'^manage_cpes/$',
@@ -130,18 +152,71 @@ urlpatterns = [
name='manage_notifications'),
url(r'^triage_cves/$', views.triage_cves, name='triage_cves'),
url(r'^create_vulnerability/$', views.create_vulnerability, name='create_vulnerability'),
- url(r'^publish/$', views.publish, name='publish'),
url(r'^manage_report/$', views.manage_report, name='manage_report'),
url(r'^sources/$',
tables.SourcesTable.as_view(template_name="sources-toastertable.html"),
name='sources'),
url(r'^users/$', views.users, name='users'),
+ url(r'^publish/$', views.publish, name='publish'),
+ url(r'^publish_summary/$', views.publish_summary, name='publish_summary'),
+ url(r'^publish_diff_snapshot/$', views.publish_diff_snapshot, name='publish_diff_snapshot'),
+ url(r'^publish_diff_history/$', views.publish_diff_history, name='publish_diff_history'),
+
+ url(r'^maintenance/$', views.maintenance, name='maintenance'),
+ url(r'^error_logs/$',
+ tables.ErrorLogsTable.as_view(template_name="errorlog-toastertable.html"),
+ name='error_logs'),
+ url(r'^history_cve/$',
+ tables.HistoryCveTable.as_view(template_name="history-cve-toastertable.html"),
+ name='history_cve'),
+ url(r'^history_vulnerability/$',
+ tables.HistoryVulnerabilityTable.as_view(template_name="history-vulnerability-toastertable.html"),
+ name='history_vulnerability'),
+ url(r'^history_investigation/$',
+ tables.HistoryInvestigationTable.as_view(template_name="history-investigation-toastertable.html"),
+ name='history_investigation'),
+ url(r'^history_defect/$',
+ tables.HistoryDefectTable.as_view(template_name="history-defect-toastertable.html"),
+ name='history_defect'),
+
+ # typeahead api end points
+ url(r'^xhr_recipetypeahead/recipes$',
+ typeaheads.RecipeTypeAhead.as_view(), name='xhr_recipetypeahead'),
+
+ #
+ # Job progress URLs
+ #
+
+ url(r'^joblog/(?P<job_pk>\d+)$', views.joblog, name='joblog'),
+ url(r'^mostrecentjobs$', widgets.MostRecentJobsView.as_view(),
+ name='most_recent_jobs'),
+ url(r'^xhr_maintenance_commit/$', views.xhr_maintenance_commit,
+ name='xhr_maintenance_commit'),
+ url(r'^xhr_jobrequest/$',
+ widgets.XhrJobRequest.as_view(),
+ name='xhr_jobrequest'),
+ url(r'^xhr_job_post/$', views.xhr_job_post,
+ name='xhr_job_post'),
+ url(r'^xhr_sources_commit/$', views.xhr_sources_commit,
+ name='xhr_sources_commit'),
+
+ url(r'^manage_jobs/(?P<foo_id>\d+)$',
+ tables.ManageJobsTable.as_view(template_name="manage-jobs-toastertable.html"),
+ name='manage_jobs'),
+
+ #
+ # Extra
+ #
+
+ url(r'^email_admin/$', views.email_admin, name='email_admin'),
+ url(r'^email_success/$', views.email_success, name='email_success'),
url(r'^guided_tour/$', views.guided_tour, name='guided_tour'),
url(r'^quicklink/$', views.quicklink, name='quicklink'),
+ url(r'^date_time_test/$', views.date_time_test, name='date_time_test'),
url(r'^tbd/$', views.tbd, name='tbd'),
# default redirection
diff --git a/lib/srtgui/views.py b/lib/srtgui/views.py
index d87dca38..2cfe0e19 100644
--- a/lib/srtgui/views.py
+++ b/lib/srtgui/views.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,41 +23,52 @@ import os
import traceback
import subprocess
from datetime import timedelta, datetime
+from datetime import timezone as datetime_timezone
from decimal import Decimal
import mimetypes
import json
import re
+import time
+import pytz
from django.db.models import Q
from django.shortcuts import render, redirect
from django.db.models.functions import Lower
-from orm.models import Cve, CveLocal, CveSource, CveHistory
+from django.contrib.auth.models import Group
+from django.urls import reverse, resolve
+from django.core.paginator import EmptyPage, PageNotAnInteger
+from django.http import HttpResponse
+from django.utils import timezone
+
+from orm.models import Cve, CveLocal, CveSource, CveHistory, CveAccess
from orm.models import Vulnerability, VulnerabilityHistory, CveToVulnerablility, VulnerabilityToInvestigation, VulnerabilityNotification, VulnerabilityAccess, VulnerabilityComments, VulnerabilityUploads
from orm.models import Investigation, InvestigationHistory, InvestigationToDefect, InvestigationComments, InvestigationNotification, InvestigationAccess, InvestigationUploads
from orm.models import SrtSetting, Product
from orm.models import Package
from orm.models import DataSource
-from orm.models import Defect, PublishPending
+from orm.models import Defect, DefectHistory, PublishPending, PublishSet
from orm.models import Notify, NotifyAccess, NotifyCategories
-
+from orm.models import SRTool, Update
+from orm.models import ErrorLog
+from orm.models import Job
from users.models import SrtUser, UserSafe
-
from srtgui.reports import ReportManager
from srtgui.api import readCveDetails, writeCveDetails, summaryCveDetails, execute_process
-
-from django.urls import reverse, resolve
-from django.core.paginator import EmptyPage, PageNotAnInteger
-from django.http import HttpResponse
-from django.utils import timezone
+from srtgui.api import publishCalculate, publishReset, publishMarkNew, publishMarkModified, publishMarkNone
import logging
-SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
logger = logging.getLogger("srt")
# quick development/debugging support
-from srtgui.api import _log
+from srtgui.api import error_log, _log
+
+#
+# ================= Helper Routines ============================================
+#
def get_name_sort(cve_name):
try:
@@ -67,6 +78,10 @@ def get_name_sort(cve_name):
cve_name_sort = cve_name
return cve_name_sort
+#
+# ================= Page Helper Routines ============================================
+#
+
class MimeTypeFinder(object):
# setting this to False enables additional non-standard mimetypes
# to be included in the guess
@@ -94,17 +109,28 @@ def managedcontextprocessor(request):
ret['srt_logo'] = SrtSetting.objects.get(name='SRTOOL_LOGO').value.split(',')
# Add optional local logo link
ret['srt_local_logo'] = SrtSetting.objects.get(name='SRTOOL_LOCAL_LOGO').value.split(',')
+ # Add optional SRTool mode
+ ret['srt_mode'] = ' (%s)' % os.environ['SRT_MODE'] if (('SRT_MODE' in os.environ) and os.environ['SRT_MODE']) else ''
return ret
# determine in which mode we are running in, and redirect appropriately
def landing(request):
- # we only redirect to projects page if there is a user-generated project
-# num_builds = Build.objects.all().count()
-# user_projects = Project.objects.filter(is_default = False)
-# has_user_project = user_projects.count() > 0
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "srtgui"):
+ return redirect(f"/{SRT_MAIN_APP}/landing/")
- context = {}
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'srtgui',
+ }
return render(request, 'landing.html', context)
@@ -450,14 +476,15 @@ def management(request):
return redirect(landing)
# Keep it simple now, later use Q sets
- defect_open = Defect.objects.filter(status=Defect.OPEN)
- defects_inprogress = Defect.objects.filter(status=Defect.IN_PROGRESS)
- defect_p1 = defect_open.filter(priority=Defect.HIGH).count() + defects_inprogress.filter(priority=Defect.HIGH).count()
- defect_p2 = defect_open.filter(priority=Defect.MEDIUM).count() + defects_inprogress.filter(priority=Defect.MEDIUM).count()
+ defect_open = Defect.objects.filter(status=Defect.DEFECT_STATUS_OPEN)
+ defects_inprogress = Defect.objects.filter(status=Defect.DEFECT_STATUS_IN_PROGRESS)
+ defect_p1 = defect_open.filter(priority=Defect.CRITICAL).count() + defects_inprogress.filter(priority=Defect.CRITICAL).count()
+ defect_p2 = defect_open.filter(priority=Defect.HIGH).count() + defects_inprogress.filter(priority=Defect.HIGH).count()
defect_open = defect_open.count()
defects_inprogress = defects_inprogress.count()
context = {
+ 'mru' : Job.get_recent(),
'cve_total' : Cve.objects.all().count(),
'cve_new' : Cve.objects.filter(status=Cve.NEW).count(),
# 'cve_open' : Cve.objects.filter( Q(status=Cve.INVESTIGATE) & Q(status=Cve.VULNERABLE) ).count(),
@@ -467,15 +494,15 @@ def management(request):
'vulnerability_total' : Vulnerability.objects.all().count(),
'vulnerability_open' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).count(),
+ 'vulnerability_critical' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.CRITICAL).count(),
'vulnerability_high' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.HIGH).count(),
'vulnerability_medium' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.MEDIUM).count(),
- 'vulnerability_low' : Vulnerability.objects.filter(outcome=Vulnerability.OPEN).filter(priority=Vulnerability.HIGH).count(),
'investigation_total' : Investigation.objects.all().count(),
'investigation_open' : Investigation.objects.filter(outcome=Investigation.OPEN).count(),
+ 'investigation_critical' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.CRITICAL).count(),
'investigation_high' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.HIGH).count(),
'investigation_medium' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.MEDIUM).count(),
- 'investigation_low' : Investigation.objects.filter(outcome=Investigation.OPEN).filter(priority=Investigation.HIGH).count(),
'defect_total' : Defect.objects.all().count(),
'defect_open' : defect_open,
@@ -484,9 +511,39 @@ def management(request):
'defect_p2' : defect_p2,
'package_total' : Package.objects.all().count(),
+
+ 'notification_total' : Notify.objects.all().count(),
+ 'errorlog_total' : ErrorLog.objects.all().count(),
+
}
return render(request, 'management.html', context)
+def maintenance(request):
+ _log("MAINTENANCE: %s" % request)
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect(landing)
+
+ if request.method == "GET":
+ context = {
+ 'errorlog_total' : ErrorLog.objects.all().count(),
+ 'history_cve_total' : CveHistory.objects.all().count(),
+ 'history_vulnerability_total' : VulnerabilityHistory.objects.all().count(),
+ 'history_investigation_total' : InvestigationHistory.objects.all().count(),
+ 'defect_investigation_total' : DefectHistory.objects.all().count(),
+ 'mru' : Job.get_recent(),
+ 'remote_backup_path' : SrtSetting.get_setting('SRT_REMOTE_BACKUP_PATH',''),
+ }
+ return render(request, 'maintenance.html', context)
+ elif request.method == "POST":
+ _log("EXPORT_POST:MAINTENANCE: %s" % request)
+
+ if request.POST["action"] == "submit-remote-backup-path":
+ SrtSetting.set_setting('SRT_REMOTE_BACKUP_PATH',request.POST["text-remote-backup-path"].strip()),
+ return redirect(maintenance)
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
def cve(request, cve_pk, active_tab="1"):
if request.method == "GET":
template = "cve.html"
@@ -501,10 +558,17 @@ def cve(request, cve_pk, active_tab="1"):
_log("CVE_ERROR(%s)(%s):" % (cve_pk,e))
return redirect(landing)
- # does this user have permission to see this record?
+ # Does this user have permission to see this record?
if (not cve_object.public) and (not UserSafe.is_admin(request.user)):
- _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
- return redirect(landing)
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_object,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("CVE_PASS_PERMISSIONS:(%s)" % request.user)
# Set up the investigation link
investigation_records = Investigation.objects.filter(name=cve_object.name)
@@ -520,29 +584,45 @@ def cve(request, cve_pk, active_tab="1"):
cve_index = ord('1')
is_edit = ('Edit' == active_tab)
- # Prepend summary page?
+ # Fetch source tabs list
cve_sources = CveSource.objects.filter(cve=cve_object.id).order_by('datasource__key')
- if True or (1 < len(cve_sources)):
- tab_states[chr(cve_index)] = ''
- cveDetails,cve_html = summaryCveDetails(cve_object,cve_sources)
- cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html])
- cve_index += 1
+ # Always pre-pend a summary page
+ tab_states[chr(cve_index)] = 'active'
+ cveDetails,cve_html = summaryCveDetails(cve_object,cve_sources)
+ cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html,''])
+ cve_index += 1
# Add the source/edit tabs
- for cs in cve_sources:
+ for i in range(len(cve_sources)):
+ if (i < (len(cve_sources)-1)) and (cve_sources[i].datasource.source == cve_sources[i+1].datasource.source):
+            # Ensure one source per vendor where the highest key wins (e.g. NIST Modified)
+ continue
+ pass
+
+ cs = cve_sources[i]
if active_tab == cs.datasource.name:
active_tab = chr(cve_index)
if ('Edit' == active_tab) and ('Local' == cs.datasource.name):
- tab_states[chr(cve_index)] = 'active'
- cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],'Edit',{}])
+ if False:
+ tab_states[chr(cve_index)] = ''
+ else:
+ # Force the 'Edit' tab to start active
+ tab_states[chr(cve_index)] = 'active'
+ # Force the 'Summary' tab to start inactive
+ tab_states[chr(ord('1'))] = ''
+ cve_list_table[0][1] = ''
+ cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],'Edit',{},''])
else:
- tab_states[chr(cve_index)] = 'active' if (active_tab == chr(cve_index)) else ''
- cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],cs.datasource.name,{}])
+ tab_states[chr(cve_index)] = ''
+ #tab_states[chr(cve_index)] = 'active' if (active_tab == chr(cve_index)) else ''
+ tab_name = cs.datasource.name
+ cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],tab_name,{},cs.datasource.id])
cve_index += 1
if 0 == len(cve_sources):
_log("CVE_0_Sources??:(%s,%s)" % (cve_pk, active_tab))
- tab_states['1'] = 'active'
- cve_list_table.append([readCveDetails(cve_object,None),tab_states['1'],'(No Source)',{}])
+ tab_states['1'] = ''
+ details = readCveDetails(cve_object,None)
+ cve_list_table.append([readCveDetails(cve_object,None),tab_states['1'],'No_Source',{},''])
# Check to make sure active_tab was applied
for tab in tab_states.keys():
@@ -552,11 +632,6 @@ def cve(request, cve_pk, active_tab="1"):
tab_states['1'] = 'active'
cve_list_table[0][1] = 'active'
-
- # cve_summary = copy.copy(cve_object)
- # cve_summary_detail = copy.copy(cve_object_detail)
- # cve_summary.source = 'Summary'
- #
context = {
'object' : cve_object,
'cve_list_table' : cve_list_table,
@@ -583,18 +658,26 @@ def cve(request, cve_pk, active_tab="1"):
# Is this not a save?
if not request.POST.get('cve-edit','').startswith('Save'):
- return redirect(cve, cve_object.id, "Local")
+ return redirect(cve, cve_object.id, "Summary")
+
# does this user have permission to see this record?
if (not cve_object.public) and (not UserSafe.is_admin(request.user)):
- _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
- return redirect(landing)
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_object,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("CVE_PASS_PERMISSIONS:(%s)" % request.user)
# update the local CVE record
writeCveDetails(cve_object.name,request)
# show the results
- return redirect(cve, cve_object.id, "Local")
+ return redirect(cve, cve_object.id, "Summary")
def cve_edit(request, cve_pk):
_log("CVE_EDIT1(%s):" % cve_pk)
@@ -615,15 +698,23 @@ def cve_edit(request, cve_pk):
cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=source)
return cve(request, cve_object.name, active_tab="Edit")
-def cve_create(request):
+def _create_local_cve():
# Create the local CVE edit record
new_cve_name = CveLocal.new_cve_name()
cve_object = Cve.objects.create(name=new_cve_name,name_sort=get_name_sort(new_cve_name))
+ cve_object.save()
cve_local_object = CveLocal.objects.create(name=new_cve_name)
+ cve_local_object.save()
# Add the source mapping
source = DataSource.objects.get(name='Local')
cve_source_object = CveSource.objects.create(cve=cve_object,datasource=source)
- # Open the new CVE
+ cve_source_object.save()
+ return cve_object,cve_local_object
+
+def cve_create(request):
+ # Create the local CVE edit record
+ cve_object,cve_local_object = _create_local_cve()
+ # Open the new CVE page
return redirect(cve, cve_object.id, "Local")
@@ -641,11 +732,19 @@ def vulnerability(request, vulnerability_pk):
except:
return redirect(landing)
- products = Product.objects.all()
+ products = Product.objects.all().order_by('order')
# does this user have permission to see this record?
if (not vulnerability_object.public) and (not UserSafe.is_admin(request.user)):
- return redirect(landing)
+ try:
+ vul_access = VulnerabilityAccess.objects.get(vulnerability=vulnerability_object,user=request.user)
+ except:
+ vul_access = None
+ if not vul_access:
+ _log("VUL_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("VUL_PASS_PERMISSIONS:(%s)" % request.user)
context = {
'object' : vulnerability_object,
@@ -677,12 +776,14 @@ def vulnerability(request, vulnerability_pk):
_log("EXPORT_POST:'fileupload' does not exist: %s" % e)
try:
- with open(path + "/" + file.name, 'xb+') as destination:
+ local_file_path = path + "/" + file.name
+ with open(local_file_path, 'xb+') as destination:
for line in file:
destination.write(line)
username = UserSafe.user_name(request.user)
- VulnerabilityUploads.objects.get_or_create(vulnerability_id=vulnerability_object.id, description=description, path=path + "/" + file.name, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ VulnerabilityUploads.objects.get_or_create(vulnerability_id=vulnerability_object.id, description=description, path=local_file_path, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ VulnerabilityHistory.objects.create(vulnerability_id=vulnerability_object.id, comment=Update.ATTACH_DOC % file.name, date=datetime.now().strftime(SRTool.DATE_FORMAT), author=username)
except Exception as e:
_log("EXPORT_POST:FILE ALREADY EXISTS: %s" % e)
return redirect(vulnerability,vulnerability_pk)
@@ -722,16 +823,48 @@ def investigation(request, investigation_pk):
except:
return redirect(landing)
+ # does this user have permission to see this record?
+ if (not investigation_object.public) and (not UserSafe.is_admin(request.user)):
+ try:
+ inv_access = InvestigationAccess.objects.get(investigation=investigation_object,user=request.user)
+ except:
+ inv_access = None
+ if not inv_access:
+ _log("INV_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("INV_PASS_PERMISSIONS:(%s)" % request.user)
+
+
+ ### TO-DO: replace with dynamic lookahead instead of static huge list
defects = Defect.objects.all()
+
+ # Calculate the default 'affected_components' list, if any
+ affected_components = ''
+ affected_components_list = {}
+ for package in investigation_object.packages.split():
+ affected_components_list[package] = True
+ vulnerability = investigation_object.vulnerability
+ vc_list = vulnerability.vulnerability_to_cve.all()
+ for vc in vc_list:
+ if vc.cve.packages:
+ for package in vc.cve.packages.split():
+ affected_components_list[package] = True
+ if affected_components_list:
+ affected_components = ' '.join(affected_components_list)
+
+ # Pass Investigation's defect list
investigation_to_defect = investigation_object.investigation_to_defect.all()
context = {
'object' : investigation_object,
'defects' : defects,
'investigation_to_defect' : investigation_to_defect,
+ 'affected_components' : affected_components,
'defect_example' : SrtSetting.objects.get(name='SRTOOL_DEFECT_SAMPLENAME').value,
'notify_categories' : NotifyCategories.objects.all(),
'users' : UserSafe.get_safe_userlist(True),
'components' : Defect.Components,
+ 'found_version' : investigation_object.product.get_defect_tag('found_version'),
}
return render(request, template, context)
elif request.method == "POST":
@@ -757,11 +890,13 @@ def investigation(request, investigation_pk):
_log("EXPORT_POST:'fileupload' does not exist: %s" % e)
try:
- with open(path + "/" + file.name, 'xb+') as destination:
+ local_file_path = path + "/" + file.name
+ with open(local_file_path, 'xb+') as destination:
for line in file:
destination.write(line)
username = UserSafe.user_name(request.user)
- InvestigationUploads.objects.get_or_create(investigation_id=investigation_object.id, description=description, path=path + "/" + file.name, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ InvestigationUploads.objects.get_or_create(investigation_id=investigation_object.id, description=description, path=local_file_path, size=file.size, date=datetime.today().strftime('%Y-%m-%d'), author=username)
+ InvestigationHistory.objects.create(investigation_id=investigation_object.id, comment=Update.ATTACH_DOC % file.name, date=datetime.now().strftime(SRTool.DATE_FORMAT), author=username)
except Exception as e:
_log("EXPORT_POST:FILE ALREADY EXISTS: %s" % e)
return redirect(investigation,investigation_pk)
@@ -797,6 +932,7 @@ def defect(request, defect_pk):
context = {
'object' : defect_object,
'users' : users,
+ 'SRTOOL_DEFECT_URLBASE' : SrtSetting.objects.get(name='SRTOOL_DEFECT_URLBASE').value
}
return render(request, template, context)
@@ -824,7 +960,8 @@ def sources(request):
object = DataSource.objects.all()
context = {
- 'object' : object,
+ 'object' : object,
+ 'mru' : Job.get_recent(),
}
return render(request, template, context)
@@ -845,7 +982,7 @@ def login(request):
try:
### USER CONTROL
- user = SrtUser.objects.get(name=user_name)
+ user = SrtUser.objects.get(username=user_name)
request.session['srt_user_id'] = user.id
request.session.modified = True
_log("LOGIN_POST_SET:%s,%s" % (user.name,user.id))
@@ -872,10 +1009,13 @@ def users(request):
context = {
'object' : object,
+ 'groups' : Group.objects.all().order_by(Lower('name')),
+ 'builtin_groups' : ('Reader','Contributor','Creator','Admin'),
}
return render(request, template, context)
def report(request,page_name):
+ _log("REPORT!:%s" % (request))
if request.method == "GET":
context = ReportManager.get_context_data(page_name,request=request)
record_list = request.GET.get('record_list', '')
@@ -883,7 +1023,7 @@ def report(request,page_name):
context['record_list'] = record_list
return render(request, 'report.html', context)
elif request.method == "POST":
- _log("EXPORT_POST!:%s|%s" % (request,request.FILES))
+ _log("EXPORT_POST!:%s" % (request))
parent_page = request.POST.get('parent_page', '')
file_name,response_file_name = ReportManager.exec_report(parent_page,request=request)
@@ -926,14 +1066,311 @@ def create_vulnerability(request):
context = {}
return render(request, 'create_vulnerability.html', context)
+class Snap():
+ def __init__(self,snap_index=0,snap_mode='None',snap_dir='',snap_date='',snap_time='',snap_day=''):
+ self.index = '%02d' % snap_index
+ self.mode = snap_mode
+ self.dir = snap_dir
+ self.date = snap_date
+ self.time = snap_time
+ self.day = snap_day
+
+class ReportFile():
+ def __init__(self,name='',size=0,date=None):
+ self.name = name
+ self.size = size
+ self.date = date
+
def publish(request):
# does this user have permission to see this record?
if not UserSafe.is_creator(request.user):
return redirect(landing)
- context = {}
+ context = {
+ }
return render(request, 'publish.html', context)
+def publish_summary(request):
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect(landing)
+
+ context = {
+ }
+ return render(request, 'management.html', context)
+
+def publish_diff_snapshot(request):
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect(landing)
+
+ if request.method == "GET":
+
+ # Prepare available snapshots
+ snapshot_list = []
+ snap_start_index = 0
+ snap_stop_index = 0
+ snap_date_base = SrtSetting.get_setting('publish_snap_date_base','2019-06-08')
+ snap_date_top = SrtSetting.get_setting('publish_snap_date_top','2019-06-16')
+ snap_date_start = SrtSetting.get_setting('publish_snap_date_start','2019-06-08')
+ snap_date_stop = SrtSetting.get_setting('publish_snap_date_stop','2019-06-16')
+ snap_last_calc = SrtSetting.get_setting('publish_snap_last_calc','')
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if 'Now' != backup_mode:
+ snap = Snap(i,backup_mode,backup_dir,backup_date,backup_time,backup_day)
+ snapshot_list.append(snap)
+ if snap_date_base == snap.date:
+ snap_start_index = i
+ if snap_date_start < snap.date:
+ snap_date_start = snap.date
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ if snap_date_top == snap.date:
+ snap_stop_index = i
+ if snap_date_stop > snap.date:
+ snap_date_stop = snap.date
+ if not snap_stop_index:
+ snap_stop_index = i
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ # Report automation
+ snap_frequency_select = SrtSetting.get_setting('publish_snap_frequency','Off')
+ snapshot_frequency_list = [
+ 'Off',
+ 'Monthly',
+ 'Bi-monthly',
+ 'Weekly',
+ 'Daily',
+ ]
+ # List of available reports
+ generated_report_list = []
+ if os.path.isdir('data/publish'):
+ for entry in os.scandir('data/publish'):
+ if entry.name.startswith('cve-svns-srtool'):
+ generated_report_list.append(ReportFile(entry.name,entry.stat().st_size,datetime.fromtimestamp(entry.stat().st_mtime)))
+# generated_report_list.sort()
+ generated_report_list = sorted(generated_report_list,key=lambda x: x.name)
+
+ # Prepare History data
+ last_calc = SrtSetting.get_setting('publish_last_calc','06/08/2019')
+ date_start = SrtSetting.get_setting('publish_date_start','06/08/2019')
+ date_stop = SrtSetting.get_setting('publish_date_stop','06/21/2019')
+
+ context = {
+ 'date_start' : date_start,
+ 'date_stop' : date_stop,
+ 'last_calc' : last_calc,
+
+ 'snap_date_start' : snap_date_start,
+ 'snap_date_stop' : snap_date_stop,
+ 'snap_date_base' : snap_date_base,
+ 'snap_date_top' : snap_date_top,
+ 'snapshot_list' : snapshot_list,
+ 'snap_start_index' : '%02d' % snap_start_index,
+ 'snap_stop_index' : '%02d' % snap_stop_index,
+ 'snap_last_calc' : snap_last_calc,
+ 'generated_report_list' : generated_report_list,
+
+ 'snapshot_frequency_list' : snapshot_frequency_list,
+ 'snap_frequency_select' : snap_frequency_select,
+ 'mru' : Job.get_recent(),
+ }
+ return render(request, 'publish_diff_snapshot.html', context)
+ elif request.method == "POST":
+ action = request.POST['action']
+
+ if request.POST["action"] == "download":
+ report_name = request.POST['report_name']
+ file_path = 'data/publish/%s' % (report_name)
+ if file_path:
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = 'attachment; filename="{}"'.format(file_path)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(file_path)
+ _log("EXPORT_POST_Q{%s} %s || %s " % (response, response['Content-Disposition'], disposition))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
+ # Dates (make as no timezone)
+ msg = ''
+ try:
+ msg = 'Start:%s' % request.POST.get('date_start', '')
+ date_start = datetime.strptime(request.POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request.POST.get('date_stop', '')
+ date_stop = datetime.strptime(request.POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+# return 'Error:stop date is before start date'
+ _log('Error:stop date is before start date')
+ pass
+ except Exception as e:
+# return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+ _log('Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e))
+ pass
+ SrtSetting.set_setting('publish_date_start',date_start.strftime('%m/%d/%Y'))
+ SrtSetting.set_setting('publish_date_stop',date_stop.strftime('%m/%d/%Y'))
+ if 'recalculate' == action:
+ # Calculate
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'view' == action:
+ # Go to publish list page
+ return redirect('publish-list')
+ if 'add-cve' == action:
+ # Go to publish list page
+ return redirect('publish-cve')
+ if 'add-defect' == action:
+ # Go to publish list page
+ return redirect('publish-defect')
+ if 'reset' == action:
+ publishReset(date_start,date_stop)
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'export' == action:
+ return redirect('/%s/report/publish' % SRT_MAIN_APP)
+ return redirect('publish')
+
+def publish_diff_history(request):
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect(landing)
+
+ if request.method == "GET":
+
+ # Prepare available snapshots
+ snapshot_list = []
+ snap_start_index = 0
+ snap_stop_index = 0
+ snap_date_base = SrtSetting.get_setting('publish_snap_date_base','2019-06-08')
+ snap_date_top = SrtSetting.get_setting('publish_snap_date_top','2019-06-16')
+ snap_date_start = SrtSetting.get_setting('publish_snap_date_start','2019-06-08')
+ snap_date_stop = SrtSetting.get_setting('publish_snap_date_stop','2019-06-16')
+ snap_last_calc = SrtSetting.get_setting('publish_snap_last_calc','')
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if 'Now' != backup_mode:
+ snap = Snap(i,backup_mode,backup_dir,backup_date,backup_time,backup_day)
+ snapshot_list.append(snap)
+ if snap_date_base == snap.date:
+ snap_start_index = i
+ if snap_date_start < snap.date:
+ snap_date_start = snap.date
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ if snap_date_top == snap.date:
+ snap_stop_index = i
+ if snap_date_stop > snap.date:
+ snap_date_stop = snap.date
+ if not snap_stop_index:
+ snap_stop_index = i
+ if snap_date_stop < snap.date:
+ snap_date_stop = snap.date
+ # Report automation
+ snap_frequency_select = SrtSetting.get_setting('publish_snap_frequency','Off')
+ snapshot_frequency_list = [
+ 'Off',
+ 'Monthly',
+ 'Bi-monthly',
+ 'Weekly',
+ 'Daily',
+ ]
+ # List of available reports
+ generated_report_list = []
+ if os.path.isdir('data/publish'):
+ for entry in os.scandir('data/publish'):
+ if entry.name.startswith('cve-svns-srtool'):
+ generated_report_list.append(ReportFile(entry.name,entry.stat().st_size,datetime.fromtimestamp(entry.stat().st_mtime)))
+# generated_report_list.sort()
+ generated_report_list = sorted(generated_report_list,key=lambda x: x.name)
+
+ # Prepare History data
+ last_calc = SrtSetting.get_setting('publish_last_calc','06/08/2019')
+ date_start = SrtSetting.get_setting('publish_date_start','06/08/2019')
+ date_stop = SrtSetting.get_setting('publish_date_stop','06/21/2019')
+
+ context = {
+ 'date_start' : date_start,
+ 'date_stop' : date_stop,
+ 'last_calc' : last_calc,
+
+ 'snap_date_start' : snap_date_start,
+ 'snap_date_stop' : snap_date_stop,
+ 'snap_date_base' : snap_date_base,
+ 'snap_date_top' : snap_date_top,
+ 'snapshot_list' : snapshot_list,
+ 'snap_start_index' : '%02d' % snap_start_index,
+ 'snap_stop_index' : '%02d' % snap_stop_index,
+ 'snap_last_calc' : snap_last_calc,
+ 'generated_report_list' : generated_report_list,
+
+ 'snapshot_frequency_list' : snapshot_frequency_list,
+ 'snap_frequency_select' : snap_frequency_select,
+ }
+ return render(request, 'publish.html', context)
+ elif request.method == "POST":
+ action = request.POST['action']
+
+ if request.POST["action"] == "download":
+ report_name = request.POST['report_name']
+ file_path = 'data/publish/%s' % (report_name)
+ if os.path.exists(file_path):
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = 'attachment; filename="{}"'.format(file_path)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(file_path)
+ _log("EXPORT_POST_Q{%s} %s || %s " % (response, response['Content-Disposition'], disposition))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
+ # Dates (make as no timezone); default to today so a parse failure below cannot leave them unbound
+ msg = ''
+ date_start = datetime.today()
+ date_stop = datetime.today()
+ try:
+ msg = 'Start:%s' % request.POST.get('date_start', '')
+ date_start = datetime.strptime(request.POST.get('date_start', ''), '%m/%d/%Y')
+ msg = 'Stop:%s' % request.POST.get('date_stop', '')
+ date_stop = datetime.strptime(request.POST.get('date_stop', ''), '%m/%d/%Y')
+ if date_stop < date_start:
+# return 'Error:stop date is before start date'
+ _log('Error:stop date is before start date')
+ pass
+ except Exception as e:
+# return 'Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e),''
+ _log('Error:bad format for dates (must be mm/dd/yyyy) (%s)(%s)' % (msg,e))
+ pass
+ SrtSetting.set_setting('publish_date_start',date_start.strftime('%m/%d/%Y'))
+ SrtSetting.set_setting('publish_date_stop',date_stop.strftime('%m/%d/%Y'))
+ if 'recalculate' == action:
+ # Calculate
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'view' == action:
+ # Go to publish list page
+ return redirect('publish-list')
+ if 'add-cve' == action:
+ # Go to publish list page
+ return redirect('publish-cve')
+ if 'add-defect' == action:
+ # Go to publish list page
+ return redirect('publish-defect')
+ if 'reset' == action:
+ publishReset(date_start,date_stop)
+ publishCalculate(date_start,date_stop)
+ return redirect('publish')
+ if 'export' == action:
+ return redirect('/%s/report/publish' % SRT_MAIN_APP)
+ return redirect('publish')
+
+
+
+
def manage_report(request):
# does this user have permission to see this record?
if not UserSafe.is_creator(request.user):
@@ -952,31 +1389,70 @@ def guided_tour(request):
def quicklink(request):
return redirect("/srtgui/select-publish")
-def _create_defect(investigation,defect_reason,components):
- _log("SRT_DEFECT=%s|%s|%s|" % (investigation.name,defect_reason,components))
+# Return defect_name,isCreated
+def _create_defect(investigation,reason,defect_reason,domain_components,affected_components,username):
+ _log("SRT_DEFECT=%s|%s|%s|%s|" % (investigation.name,defect_reason,domain_components,affected_components))
+
+ # Check to see if defect creation is allowed for this product
+ if 'no' == investigation.product.get_defect_tag('auto_create','yes'):
+ _log("SRT_DEFECT_SKIPPED:NO_auto_create:%s" % (investigation.product.defect_tags))
+ return '(%s skipped)' % investigation.product.key,False
+
+ # Check to see if a defect already is created for this investigation
+ try:
+ for id in InvestigationToDefect.objects.filter(investigation=investigation):
+ # First defect wins
+ _log("SRT_DEFECT_EXISTING:%s" % (id.defect.name))
+ return id.defect.name, False
+ except:
+ pass
vulnerability = investigation.vulnerability
vc_list = vulnerability.vulnerability_to_cve.all()
- # gather name(s) and link(s) of parent CVE(s)
+ # Gather name(s) and link(s) of parent CVE(s)
cve_list = [vc.cve.name for vc in vc_list]
cves = ','.join(cve_list)
+
+ # Offer a default defect description
description = ['%s\n' % vc.cve.description for vc in vc_list]
+
### TODO: normal NIST link might not always work
- link_list = ['https://nvd.nist.gov/vuln/detail/%s' % vc.cve.name for vc in vc_list]
- links = ','.join(cve_list)
+ link_list = []
+ for vc in vc_list:
+ link_list.append('https://nvd.nist.gov/vuln/detail/%s' % vc.cve.name)
+
+ # Fix links to make them Jira friendly
+ # CREATE(Triage): {Link=https://nvd.nist.gov/vuln/detail/CVE-2019-8934 User=admin}
+# links = "%s {%sLink=%s User=%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,"Reason='%s' " % reason if reason else '',' '.join(link_list),username)
+ # CREATE(Triage):(User=admin) [CVE-2019-8934|https://nvd.nist.gov/vuln/detail/CVE-2019-8934]
+ links = "%s%s(User=%s)" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,"(Reason='%s')" % reason if reason else '',username)
+ for link in link_list:
+ links += ' [%s|%s]' % (os.path.basename(link),link)
# Assign the defect the same priority as the Investigation
priority = investigation.get_priority_text
-
- # Component string (e.g. 'kernel', 'userspace', ...)
- if not components:
- components = 'unknown'
- # Offer a defect summary
+ # Protect Jira from undefined priorities
+ if priority == SRTool.priority_text(SRTool.UNDEFINED):
+ _log("WARNING:_create_defect:FIX_PRIORITY:'%s' to '%s'" % (priority,SRTool.priority_text(SRTool.LOW)))
+ priority = SRTool.priority_text(SRTool.LOW)
+ _log("_create_defect:%s:%s:%s" % (investigation.name,priority,links))
+
+ # Offer a default defect summary
+ if not defect_reason:
+ defect_reason = affected_components
if defect_reason:
summary = "Security Advisory - %s - %s" % (defect_reason,cves)
else:
summary = "Security Advisory %s" % (cves)
+
+ # Add the affected components
+ if affected_components:
+ affected_components = affected_components.replace(',',' ').replace(';',' ').replace(' ',' ')
+ components = "%s {COMPONENTS:%s}" % (domain_components,affected_components)
+ else:
+ components = domain_components
+
defect_tool = SrtSetting.objects.get(name='SRTOOL_DEFECT_TOOL').value
result_returncode,result_stdout,result_stderr = execute_process(
defect_tool, '--new',
@@ -1000,6 +1476,8 @@ def _create_defect(investigation,defect_reason,components):
d_name = params[0]
d_url = params[1]
_log("SRT_DEFECT3c|%s|%s|" % (d_name,d_url))
+ else:
+ error_log(ErrorLog.ERROR,"DEFECT_CREATION_FAIL(%d)'%s':'%s'" % (result_returncode,result_stdout,result_stderr))
### TO-DO: Trigger dialog in a production system if not defect created at this point
### For now provide a defect number simulation
if not d_name:
@@ -1016,17 +1494,39 @@ def _create_defect(investigation,defect_reason,components):
d_name = "DEFECT-%s-%d" % (investigation.product.get_defect_tag('key'),index)
d_url = "%s%s" % (SrtSetting.objects.get(name='SRTOOL_DEFECT_URLBASE').value,d_name)
# create new defect entry
- d = Defect.objects.create(name=d_name)
+ d = Defect.objects.create(name=d_name,product=investigation.product)
d.summary = summary
d.priority = investigation.priority
- d.product = investigation.product
+ d.status = Defect.DEFECT_STATUS_OPEN
+ d.resolution = Defect.DEFECT_UNRESOLVED
+ d.srt_priority = investigation.priority
+ d.srt_status = Defect.VULNERABLE
+ d.srt_outcome = Defect.OPEN
d.url = d_url
+ d.packages = investigation.packages
d.save()
_log("NEW_DEFECT:%s|%s|%s|%s" % (d.name,summary,components,priority))
# Create Investigation to Defect
id = InvestigationToDefect.objects.create(investigation=investigation,defect=d,product=investigation.product)
id.save()
- return d.name
+ return d.name,True
+
+def _auto_map_cve_priority(cve,force=True):
+ if not force and (SRTool.UNDEFINED != cve.priority):
+ return(cve.priority)
+ severity = cve.cvssV3_baseSeverity.strip()
+ if not severity:
+ severity = cve.cvssV2_severity.strip()
+ if not severity:
+ severity = 'MEDIUM'
+ if 'CRITICAL' == severity:
+ return(SRTool.CRITICAL)
+ elif 'HIGH' == severity:
+ return(SRTool.HIGH)
+ elif 'MEDIUM' == severity:
+ return(SRTool.MEDIUM)
+ else:
+ return(SRTool.LOW)
def xhr_triage_commit(request):
_log("xhr_triage_commit(%s)" % request.POST)
@@ -1035,7 +1535,8 @@ def xhr_triage_commit(request):
try:
username = UserSafe.user_name(request.user)
action = request.POST['action']
- today = datetime.today().strftime("%Y-%m-%d")
+ srtool_today_time = datetime.today()
+ srtool_today = datetime.today().strftime("%Y-%m-%d")
if 'submit-notvulnerable' == action:
reason = request.POST['reason']
cves = request.POST['cves']
@@ -1044,85 +1545,83 @@ def xhr_triage_commit(request):
created_list = ''
for cve_name in cves.split(','):
cve = Cve.objects.get(name=cve_name)
+ history_update = []
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(Cve.NOT_VULNERABLE)))
+ cve.priority = _auto_map_cve_priority(cve,False)
cve.status = Cve.NOT_VULNERABLE
if cve.comments:
cve.comments += ', ' + reason
else:
cve.comments = reason
+ cve.acknowledge_date = srtool_today_time
cve.save()
created_list += ' %s' % cve_name
# add audit comment
cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: marked not vulnerable, reason='%s'" % (reason)
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update),"Set by triage, reason='%s'" % reason)
cc.author = username
cc.save()
if created_list:
created_list = "NotVulnerable:" + created_list
- if 'submit-investigate' == action:
- cves = request.POST['cves']
- created_list = ''
- for cve_name in cves.split(','):
- cve = Cve.objects.get(name=cve_name)
- cve.status = Cve.INVESTIGATE
- cve.save()
- created_list += ' %s' % cve_name
- # add audit comment
- cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: marked investigate"
- cc.author = username
- cc.save()
- if created_list:
- created_list = "Investigate:" + created_list
-
if 'submit-other' == action:
cves = request.POST['cves']
status = int(request.POST['status'])
created_list = ''
for cve_name in cves.split(','):
cve = Cve.objects.get(name=cve_name)
+ history_update = []
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(status)))
+ cve.priority = _auto_map_cve_priority(cve,False)
cve.status = status
+ cve.acknowledge_date = srtool_today_time
cve.save()
created_list += ' %s' % cve_name
# add audit comment
cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: set status to %s" % cve.get_status_text
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update),"Set by triage")
cc.author = username
cc.save()
if created_list:
created_list = "Status=%s:%s" % (cve.get_status_text,created_list)
- if 'submit-isvulnerable' == action:
+ if action in ('submit-isvulnerable','submit-investigate'):
+ if 'submit-isvulnerable' == action:
+ notify_message = 'Triage:Vulnerable:'
+ new_status = SRTool.VULNERABLE
+ elif 'submit-investigate' == action:
+ notify_message = 'Triage:Investigate:'
+ new_status = SRTool.INVESTIGATE
reason = request.POST['reason'].strip()
defect_reason = request.POST['defect_reason'].strip()
cves = request.POST['cves']
products = request.POST['products']
components = request.POST['components']
- priority = request.POST['priority']
+ affected_components = request.POST['affected_components'].strip()
+ priority = int(request.POST['priority'])
make_defects = ('yes' == request.POST['mk_d'])
mark_publish = ('yes' == request.POST['pub'])
group_vulnerability = int(request.POST['vul_group'])
group_vulnerability_name = request.POST['vul_name'].strip()
notifications = ('yes' == request.POST['notify'])
+ acknowledge_date = request.POST['acknowledge_date']
add_for = request.POST['for']
_log("xhr_triage_commit:IS:%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|" % (reason,defect_reason,cves,products,components,make_defects,mark_publish,add_for,priority,group_vulnerability,group_vulnerability_name))
# Set up
- investigation_names = {}
created_list = ''
- notify_message = 'Triage:Vulnerable:'
+
# Map vulnerability grouping
- v = None
+ vulnerability = None
if 2 == group_vulnerability:
# Existing V all C
first_vulnerability = False
group_vulnerabilities = True
try:
- v = Vulnerability.objects.get(name=group_vulnerability_name)
- created_list += ' %s(found)' % v.name
- notify_message += ' Found:%s' % v.name
+ vulnerability = Vulnerability.objects.get(name=group_vulnerability_name)
+ created_list += ' %s(found)' % vulnerability.name
+ notify_message += ' Found:%s' % vulnerability.name
except Exception as e:
_log("xhr_triage_commit:No such Vulnerability name found (%s,%s)" % (group_vulnerability_name,e))
return HttpResponse(json.dumps({"error":"No such Vulnerability name found (%s)" % (group_vulnerability_name)}), content_type = "application/json")
@@ -1134,91 +1633,159 @@ def xhr_triage_commit(request):
# One V per C
first_vulnerability = True
group_vulnerabilities = False
+
# Process the CVE list
for cve_name in cves.split(','):
# update CVE
cve = Cve.objects.get(name=cve_name)
- cve.status = Cve.VULNERABLE
- cve.priority = priority
+ # Auto priority?
+ cve_priority = _auto_map_cve_priority(cve) if 99 == priority else priority
if cve.comments:
- cve.comments += ', ' + reason
+ cve_comments = '%s, %s' % (cve.comments,reason)
else:
- cve.comments = reason
+ cve_comments = reason
+ # Acknowledge date selection
+ try:
+ if ('publish' == acknowledge_date) and cve.publishedDate:
+ cve_acknowledge_date = datetime.strptime(cve.publishedDate, '%Y-%m-%d')
+ elif ('update' == acknowledge_date) and cve.lastModifiedDate:
+ cve_acknowledge_date = datetime.strptime(cve.lastModifiedDate, '%Y-%m-%d')
+ elif ('no_change' == acknowledge_date):
+ cve_acknowledge_date = cve.acknowledge_date
+ else:
+ cve_acknowledge_date = srtool_today_time
+ except:
+ cve_acknowledge_date = srtool_today_time
+ # Update history changes
+ history_update = []
+ if cve.status != new_status:
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(new_status)))
+ if cve.priority != cve_priority:
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(cve.priority),SRTool.priority_text(cve_priority)))
+ if cve.acknowledge_date != cve_acknowledge_date:
+ history_update.append(Update.ACKNOWLEDGE_DATE % (cve.acknowledge_date.strftime("%Y/%m/%d") if cve.acknowledge_date else '',cve_acknowledge_date.strftime("%Y/%m/%d")))
+ # Update record
+ cve.status = new_status
+ cve.priority = cve_priority
+ cve.comments = cve_comments
+ cve.acknowledge_date = cve_acknowledge_date
+ cve.packages = affected_components
cve.save()
notify_message += " %s" % cve_name
- # create vulnerability
+ # Add history comment
+ if history_update:
+ cc = CveHistory.objects.create(cve=cve)
+ cc.date = srtool_today
+ cc.comment = "%s%s {%s}" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update), "Triage:reason='%s'" % reason)
+ cc.author = username
+ cc.save()
+
+ # Find or create vulnerability
if first_vulnerability or not group_vulnerabilities:
first_vulnerability = False
- v_name = Vulnerability.new_vulnerability_name()
- v = Vulnerability.objects.create(name=v_name)
- v.public = True
- v.status = Vulnerability.VULNERABLE
- v.priority = priority
- v.comments = reason
- v.save()
- notify_message += " %s" % v_name
- created_list += ' %s' % v.name
- _log("Create First Vulnerability:%s" % v.name)
- # add audit comment
- cc = CveHistory.objects.create(cve=cve)
- cc.date = today
- cc.comment = "ACTION: created vulnerability '%s', reason='%s'" % (v.name,reason)
- cc.author = username
- cc.save()
+
+ # Check to see if a vulnerability already is created for this cve
+ vulnerability = None
+ try:
+ for cv in CveToVulnerablility.objects.filter(cve=cve):
+ # First vulnerability wins
+ vulnerability = cv.vulnerability
+ created_list += ' (%s)' % vulnerability.name
+ break
+ except:
+ pass
+
+ if not vulnerability:
+ v_name = Vulnerability.new_vulnerability_name()
+ vulnerability = Vulnerability.objects.create(name=v_name)
+ vulnerability.public = True
+ vulnerability.priority = cve_priority
+ vulnerability.status = new_status
+ vulnerability.outcome = Vulnerability.OPEN
+ vulnerability.comments = reason
+ vulnerability.packages = cve.packages
+ vulnerability.save()
+ notify_message += " %s" % v_name
+ created_list += ' %s' % vulnerability.name
+ _log("Create First Vulnerability:%s" % vulnerability.name)
+
+ # add audit comment
+ vh = VulnerabilityHistory.objects.create(vulnerability=vulnerability)
+ vh.date = srtool_today
+ vh.comment = "%s {%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,'Created from triage')
+ vh.author = username
+ vh.save()
# map vulnerability to CVE
- cv = CveToVulnerablility.objects.create(vulnerability=v,cve=cve)
- cv.save()
- # add audit comment
- vc = VulnerabilityHistory.objects.create(vulnerability=v)
- vc.date = today
- vc.comment = "ACTION: created vulnerability for '%s', reason='%s'" % (cve.name,reason)
- vc.author = username
- vc.save()
+ cv,created = CveToVulnerablility.objects.get_or_create(vulnerability=vulnerability,cve=cve)
+ if created:
+ cv.save()
if products:
for product_id in products.split(','):
# fetch product
- p = Product.objects.get(pk=product_id)
+ product = Product.objects.get(pk=product_id)
# create (or group) investigation
- investigation_key = "%s-%s" % (v_name,product_id)
- i_name = ''
- if investigation_key in investigation_names:
- i_name = investigation_names[investigation_key]
- if not i_name or not group_vulnerabilities:
+
+ # Check to see if a investigation for this product already is created for this vulnerability
+ investigation = None
+ try:
+ for vi in VulnerabilityToInvestigation.objects.filter(vulnerability=vulnerability,investigation__product=product):
+ # First Investigation for this product wins
+ investigation = vi.investigation
+ created_list += ' (%s)' % investigation.name
+ break
+ except:
+ pass
+
+ if not investigation:
i_name = Investigation.new_investigation_name()
- i = Investigation.objects.create(name=i_name)
- i.vulnerability = v
- i.product = p
- i.priority = priority
- i.save()
- notify_message += " %s" % i_name
- created_list += ' %s' % i.name
- investigation_names[investigation_key] = i_name
+ investigation = Investigation.objects.create(name=i_name,product=product,vulnerability = vulnerability)
+ investigation.priority = cve_priority
+ investigation.outcome = Investigation.OPEN
+ investigation.packages = cve.packages
+ # Check to see if product is active
+ _log("BOO1:")
+ if 'no' == product.get_product_tag('active','yes'):
+ _log("BOO2:%s,%s" % (investigation.status,SRTool.status_to_inactive(new_status)))
+ investigation.status = SRTool.status_to_inactive(new_status)
+ else:
+ _log("BOO3:")
+ investigation.status = new_status
+ _log("BOO4:%s" % investigation.status )
+
+ investigation.save()
+
+ notify_message += " %s" % investigation.name
+ created_list += ' %s' % investigation.name
# map vulnerability to investigation/product
- vi = VulnerabilityToInvestigation.objects.create(vulnerability=v,investigation = i)
+ vi = VulnerabilityToInvestigation.objects.create(vulnerability=vulnerability,investigation=investigation)
vi.save()
- else:
- i = Investigation.objects.get(name=i_name)
- # add audit comment
- ic = InvestigationHistory.objects.create(investigation=i)
- ic.date = today
- ic.comment = "ACTION: created investigation for '%s', reason='%s'" % (cve.name,reason)
- ic.author = username
- ic.save()
+
+ # add audit comment
+ ih = InvestigationHistory.objects.create(investigation=investigation)
+ ih.date = srtool_today
+ ih.comment = "%s {%s}" % (Update.CREATE_STR % Update.SOURCE_TRIAGE,'Created from triage')
+ ih.author = username
+ ih.save()
+
# create defects
if make_defects:
- defect_name = _create_defect(i,defect_reason,components)
- notify_message += " %s" % defect_name
- created_list += ' %s' % defect_name
- _log("NEW_DEFECT:%s|%s|%s|" % (defect_name,components,priority))
+ defect_name,created = _create_defect(investigation,reason,defect_reason,components,affected_components,username)
+ if created:
+ notify_message += ' %s' % defect_name
+ created_list += ' %s' % defect_name
+ else:
+ notify_message += ' (%s)' % defect_name
+ created_list += ' (%s)' % defect_name
+ _log("NEW_DEFECT:%s|%s|%s|" % (defect_name,components,cve_priority))
# Finish up
if notifications:
# Create the notify record
_log("xhr_notifications3")
notify = Notify()
notify.category = 'TRIAGE'
- notify.priority = priority
+ notify.priority = cve_priority
notify.description = notify_message
notify.url = ''
notify.author = username
@@ -1273,6 +1840,7 @@ def _submit_notification(request):
NotifyAccess.objects.get_or_create(notify=notify, user=user)
_log("xhr_notifications5")
+
def xhr_cve_commit(request):
_log("xhr_cve_commit(%s)" % request.POST)
if not 'action' in request.POST:
@@ -1280,45 +1848,99 @@ def xhr_cve_commit(request):
try:
cve = Cve.objects.get(id=request.POST['cve_id'])
action = request.POST['action']
- history_comment = ''
+ history_update = []
new_name = ''
+ error_text = "ok"
+ username = UserSafe.user_name(request.user)
+
if 'submit-quickedit' == action:
- note = request.POST['note']
priority = int(request.POST['priority'])
status = int(request.POST['status'])
- private_note = request.POST['private_note']
- publish_state = request.POST['publish_state']
- publish_date = request.POST['publish_date']
- if (priority != cve.priority):
+ public = (1 == int(request.POST['public']))
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
+ publish_state = int(request.POST['publish_state'])
+ publish_date = request.POST['publish_date'].strip()
+ acknowledge_date = request.POST['acknowledge_date'].strip()
+ affected_components = request.POST['affected_components'].strip()
+ # Convert simple date back to datetime
+ try:
+ if not acknowledge_date:
+ acknowledge_date = None
+ else:
+ acknowledge_date = datetime.strptime(acknowledge_date, '%Y-%m-%d')
+ except Exception as e:
+ acknowledge_date = cve.acknowledge_date
+ if (cve.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(cve.priority),SRTool.priority_text(priority)))
cve.priority = priority
- history_comment += "Priority, "
- if (status != cve.status):
+ if (cve.status != status):
+ history_update.append(Update.STATUS % (SRTool.status_text(cve.status),SRTool.status_text(status)))
cve.status = status
- history_comment += "Status, "
- if (note != cve.comments):
+ if (cve.comments != note):
+ history_update.append(Update.NOTE)
cve.comments = note
- history_comment += "Note, "
- if (private_note != cve.comments_private):
+ if (cve.comments_private != private_note):
+ history_update.append(Update.PRIVATE_NOTE)
cve.comments_private = private_note
- history_comment += "Private Note, "
- if (publish_state != cve.publish_state):
+ if ( cve.tags !=tags):
+ history_update.append(Update.TAG)
+ cve.tags = tags
+ if (cve.publish_state != publish_state):
+ history_update.append(Update.PUBLISH_STATE % (SRTool.publish_text(cve.publish_state),SRTool.publish_text(publish_state)))
cve.publish_state = publish_state
- history_comment += "Publish State, "
- if (publish_date != cve.publish_date):
+ if (cve.publish_date != publish_date):
+ history_update.append(Update.PUBLISH_DATE % (SRTool.date_ymd_text(cve.publish_date),SRTool.date_ymd_text(publish_date)))
cve.publish_date = publish_date
- history_comment += "Publish Date, "
- cve.save()
- if 'submit-notification' == action:
+ if (cve.packages != affected_components):
+ history_update.append(Update.AFFECTED_COMPONENT % (cve.packages,affected_components))
+ cve.packages = affected_components
+ # Allow for either acknowledge_date to be empty/None
+ if (cve.acknowledge_date and not acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % (SRTool.date_ymd_text(cve.acknowledge_date),''))
+ cve.acknowledge_date = None
+ elif (not cve.acknowledge_date and acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % ('',SRTool.date_ymd_text(acknowledge_date)))
+ cve.acknowledge_date = acknowledge_date
+ elif (cve.acknowledge_date != acknowledge_date):
+ history_update.append(Update.ACKNOWLEDGE_DATE % (SRTool.date_ymd_text(cve.acknowledge_date),SRTool.date_ymd_text(acknowledge_date)))
+ cve.acknowledge_date = acknowledge_date
+
+ # Process implications of 'public' change
+ if (cve.public != public):
+ history_update.append(Update.PUBLIC % (cve.public,public))
+ cve.public = public
+ # Insure newly private record has at least this user
+ if not public:
+ cve_access,created = CveAccess.objects.get_or_create(cve=cve, user=request.user)
+ if created:
+ cve_access.save()
+ # If we are about to propagate, save current state first and once
+ cve.save()
+ cve.propagate_private()
+ else:
+ # No propagation, normal save
+ cve.save()
+ elif 'submit-notification' == action:
+ # Note: no history update
_submit_notification(request)
- if 'submit-newname' == action:
+ elif 'submit-newname' == action:
old_name = request.POST['old_name']
- new_name = request.POST['new_name']
+ new_name_input = request.POST['new_name'].strip()
+ new_name = ''
+ for i in range(len(new_name_input)):
+ if not new_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_ '):
+ new_name += '_'
+ else:
+ new_name += new_name_input[i]
try:
# Is name already used?
Cve.objects.get(name=new_name)
return HttpResponse(json.dumps({"error":"name '%s' is already used\n" % new_name}), content_type = "application/json")
except:
_log("NewName3:%s -> %s" % (old_name,new_name))
+ history_update.append(Update.NEW_NAME % (old_name,new_name))
# Apply this unique name to CVE
cve.name = new_name
cve.name_sort = get_name_sort(new_name)
@@ -1327,7 +1949,7 @@ def xhr_cve_commit(request):
cveLocal = CveLocal.objects.get(name=old_name)
cveLocal.name = new_name
cveLocal.save()
- if 'submit-create-vulnerability' == action:
+ elif 'submit-create-vulnerability' == action:
_log("SUBMIT-CREATE-VULNERABILITY")
vname = Vulnerability.new_vulnerability_name()
vulnerability = Vulnerability.objects.create(
@@ -1335,12 +1957,20 @@ def xhr_cve_commit(request):
description = cve.description,
status = cve.status,
priority = cve.priority,
+ comments = cve.comments,
+ packages = cve.packages,
+ public = cve.public,
)
vulnerability.save()
+ # If private, add users
+ if not cve.public:
+ for cve_private_access in CveAccess.objects.filter(cve=cve,user=request.user):
+ VulnerabilityAccess.objects.create(vulnerability=vulnerability,user=cve_private_access.user)
+ history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
- if 'submit-attach-vulnerability' == action:
+ elif 'submit-attach-vulnerability' == action:
_log("SUBMIT-CREATE-VULNERABILITY")
vname = request.POST['vul_name'].strip()
try:
@@ -1348,20 +1978,103 @@ def xhr_cve_commit(request):
except Exception as e:
_log("xhr_triage_commit:No such Vulnerability name found (%s,%s)" % (vname,e))
return HttpResponse(json.dumps({"error":"No such Vulnerability name found (%s)" % (vname)}), content_type = "application/json")
+ history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.ATTACH_CVE % cve.name)
+ vul_hist = VulnerabilityHistory.objects.create(vulnerability_id=vulnerability.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ vul_hist.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
+ elif 'submit-detach-vulnerability' == action:
+ record_id = request.POST['record_id']
+ vulnerability = Vulnerability.objects.get(id=record_id)
+ c2v = CveToVulnerablility.objects.get(vulnerability=vulnerability,cve=cve)
+ c2v.delete()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.DETACH_CVE % cve.name)
+ vul_hist = VulnerabilityHistory.objects.create(vulnerability_id=vulnerability.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ vul_hist.save()
+ history_update.append(Update.DETACH_VUL % vulnerability.name)
+ elif 'submit-delete-cve' == action:
+ _log("SUBMIT-DELETE-CVE(%s)" % cve.name)
+ #history_update.append(Update.ATTACH_INV % (vname))
+
+ # Try to remove the datasource map first
+ try:
+ cvesource = CveSource.objects.get(cve=cve)
+ if cvesource:
+ cvesource.delete()
+ except:
+ # NO CveSource record
+ pass
+
+ # First delete the Cve record (and its related records automatically)
+ cve_name = cve.name
+ cve.delete()
+ _log("SUBMIT-DELETED-CVE(%s)!" % cve.name)
+ # Now remove any related cvelocal records
+ # CveLocal records are keyed by name, since they are created dynamically from a local edit
+ try:
+ cvelocal = CveLocal.objects.get(name=cve_name)
+ if cvelocal:
+ cvelocal.delete()
+ except:
+ # NO CveLocal record
+ pass
+ new_name = 'url:/srtgui/cves'
+
+ elif 'submit-adduseraccess' == action:
+ users = request.POST['users']
+ usernames = []
+ for user_id in users.split(','):
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
+ CveAccess.objects.get_or_create(cve=cve, user_id=user_id)
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
+ cve.propagate_private()
+ elif 'submit-trashuseraccess' == action:
+ record_id = request.POST['record_id']
+ access_record = CveAccess.objects.get(id=record_id)
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
+ access_record.delete()
+ cve.propagate_private()
+
+ elif 'submit-merge-cve' == action:
+ cve_merge_name = request.POST['cve_merge_name']
+ try:
+ cve_merge = Cve.objects.get(name=cve_merge_name)
+ # We found it, but does the user have access to it?
+ # TODO
+
+ # Merge/create the cvelocal data
+ pass
+
+ # Save the results
+ pass
+
+ # Delete the local CVE in favor of the merged CVE?
+ pass
+
+ # Jump to the new CVE
+ new_name = cve_merge_name
+ history_update.append(Update.MERGE_CVE % cve.name)
+ cve = cve_merge
+
+ except Exception as e:
+ error_text = "ERROR: unknown CVE name '%s'" % cve_merge_name
+ _log("ERROR:CVE_MERGE_NAME:%s" % e)
+
+ else:
+ error_text = "ERROR: unknown action '%s'" % action
+
+ _log("XHR_CVE_COMMIT:new_name=%s" % new_name)
return_data = {
- "error": "ok",
+ "error": error_text,
"new_name" : new_name,
}
- username = UserSafe.user_name(request.user)
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
- CveHistory.objects.create(cve_id=cve.id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ CveHistory.objects.create(cve_id=cve.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
_log("xhr_cve_commit:SUCCESS")
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
@@ -1416,34 +2129,105 @@ def xhr_vulnerability_commit(request):
action = request.POST['action']
v_id = request.POST['vulnerability_id']
username = UserSafe.user_name(request.user)
- history_comment = ''
+ new_name = ''
try:
+ history_update = []
if 'submit-quickedit' == action:
- note = request.POST['note']
- private_note = request.POST['private_note']
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
+ priority = int(request.POST['priority'])
+ status = int(request.POST['status'])
+ public = (1 == int(request.POST['public']))
+ outcome = int(request.POST['outcome'])
+ affected_components = request.POST['affected_components'].strip()
+ description = request.POST['description'].strip()
v = Vulnerability.objects.get(id=v_id)
+ if (v.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(v.priority),SRTool.priority_text(priority)))
+ v.priority = priority
+ if (v.status != status):
+ history_update.append(Update.STATUS % (SRTool.status_text(v.status),SRTool.status_text(status)))
+ v.status = status
+ if (v.outcome != outcome):
+ history_update.append(Update.OUTCOME % (SRTool.status_text(v.outcome),SRTool.status_text(outcome)))
+ v.outcome = outcome
if (v.comments != note):
+ history_update.append(Update.NOTE)
v.comments = note
- history_comment += "Note, "
if (v.comments_private != private_note):
+ history_update.append(Update.PRIVATE_NOTE)
v.comments_private = private_note
- history_comment += "Private Note, "
- if (v.status != request.POST['status']):
- v.status = request.POST['status']
- history_comment += "Status, "
- if (v.outcome != request.POST['outcome']):
- v.outcome = request.POST['outcome']
- history_comment += "Outcome, "
- if (v.priority != request.POST['priority']):
- v.priority = request.POST['priority']
- history_comment += "Priority, "
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
- v.save()
- if 'submit-addproduct' == action:
+ if (tags != v.tags):
+ history_update.append(Update.TAG)
+ v.tags = tags
+ if (affected_components != v.packages):
+ history_update.append(Update.AFFECTED_COMPONENT % (v.packages,affected_components))
+ v.packages = affected_components
+ if (description != v.description):
+ history_update.append(Update.DESCRIPTION)
+ v.description = description
+
+ # Process implications of 'public' change
+ _log("V2C:PRIVATE0:%s to %s" % (v.public,public))
+ if (public != v.public):
+ history_update.append(Update.PUBLIC % (v.public,public))
+ v.public = public
+ # Ensure newly private record has at least this user
+ if not public:
+ vul_access,created = VulnerabilityAccess.objects.get_or_create(vulnerability=v, user=request.user)
+ if created:
+ vul_access.save()
+ # Since we are about to propagate, save current state first and once
+ v.save()
+ _log("V2C:PRIVATE1:%s" % v.public)
+ # Propagate the 'public' change via the parent CVEs (if any)
+ for c2v in CveToVulnerablility.objects.filter(vulnerability=v):
+ _log("V2C:PRIVATE2:%s" % c2v.cve.name)
+ # If now private, ensure parent CVE has this user
+ if not public:
+ cve_access,created = CveAccess.objects.get_or_create(cve=c2v.cve, user=request.user)
+ if created:
+ cve_access.save()
+ c2v.cve.public = public
+ c2v.cve.save()
+ c2v.cve.propagate_private()
+ else:
+ # No propagation, normal save
+ v.save()
+ elif 'submit-newname' == action:
+ v = Vulnerability.objects.get(id=v_id)
+ old_name = request.POST['old_name']
+ new_name_input = request.POST['new_name'].strip()
+ new_name = ''
+ for i in range(len(new_name_input)):
+ if not new_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_'):
+ new_name += '_'
+ else:
+ new_name += new_name_input[i]
+ try:
+ # Is name already used?
+ is_existing_vul = Vulnerability.objects.get(name=new_name)
+ return HttpResponse(json.dumps({"error":"name '%s' is already used\n" % new_name}), content_type = "application/json")
+ except:
+ _log("NewName3:%s -> %s" % (old_name,new_name))
+ # Apply this unique name to the vulnerability
+ v.name = new_name
+ v.save()
+ # Move any attached documents
+ path_old = os.path.join(SRT_BASE_DIR, "downloads/%s" % old_name)
+ path_new = os.path.join(SRT_BASE_DIR, "downloads/%s" % new_name)
+ doc_found = False
+ for doc in VulnerabilityUploads.objects.filter(vulnerability=v):
+ doc_found = True
+ doc.path = doc.path.replace(path_old,path_new)
+ doc.save()
+ if doc_found:
+ os.rename(path_old, path_new)
+ history_update.append(Update.NEW_NAME % (old_name,new_name))
+ elif 'submit-addproduct' == action:
products = request.POST['products']
- product_names = ''
+ investigation_names = []
vulnerability_obj = Vulnerability.objects.get(id=v_id)
for product_id in products.split(','):
product_obj = Product.objects.get(pk=product_id)
@@ -1457,77 +2241,150 @@ def xhr_vulnerability_commit(request):
outcome = vulnerability_obj.outcome,
priority = vulnerability_obj.priority,
product = product_obj,
+ comments = vulnerability_obj.comments,
+ packages = vulnerability_obj.packages,
+ public = vulnerability_obj.public,
)
vul2inv = VulnerabilityToInvestigation.objects.create(vulnerability=vulnerability_obj,investigation=investigation_obj)
vul2inv.save()
- product_names += "%s " % product_obj.long_name
- product_names = product_names[:-2]
- history_comment = product_names + " added to affected products"
- if 'submit-trashinvestigation' == action:
+ investigation_names.append(iname)
+ # Assert parent CVE access rights
+ for c2v in CveToVulnerablility.objects.filter(vulnerability=vulnerability_obj):
+ c2v.cve.propagate_private()
+ history_update.append(Update.ATTACH_INV % ','.join(investigation_names))
+ elif 'submit-trashinvestigation' == action:
inv_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=inv_id)
vul2inv = VulnerabilityToInvestigation.objects.filter(investigation=investigation_obj)
vul2inv.delete()
- history_comment = investigation_obj.name + " investigation(s) removed"
+ history_update.append(Update.DETACH_INV % (investigation_obj.name))
investigation_obj.delete()
- if 'submit-newcomment' == action:
+ elif 'submit-newcomment' == action:
comment = request.POST['comment']
VulnerabilityComments.objects.create(vulnerability_id=v_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
- history_comment = "New comment submitted"
- if 'submit-trashcomment' == action:
+ #NOTE: No History for this
+ elif 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = VulnerabilityComments.objects.get(id=record_id)
- history_comment = "Comment from " + comment.author + " deleted"
comment.delete()
- if 'submit-trashattachment' == action:
+ #NOTE: No History for this
+ elif 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = VulnerabilityUploads.objects.get(id=record_id)
- history_comment = "Upload '" + upload.description + "' from " + upload.author + " deleted"
try:
os.remove(upload.path)
except OSError:
pass
+ history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
- if 'submit-addusernotify' == action:
+ elif 'submit-addusernotify' == action:
users = request.POST['users']
- usernames = ''
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ', '
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
VulnerabilityNotification.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " added to notifications"
- if 'submit-trashusernotification' == action:
+ history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
+ elif 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = VulnerabilityNotification.objects.get(id=record_id)
- removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
- history_comment = removed_user + " removed from notifications"
+ removed_user = SrtUser.objects.get(pk=notification_record.user_id).username
notification_record.delete()
- if 'submit-adduseraccess' == action:
+ history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
+ elif 'submit-adduseraccess' == action:
users = request.POST['users']
- usernames = ''
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ', '
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
VulnerabilityAccess.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " granted access"
- if 'submit-trashuseraccess' == action:
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
+ elif 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = VulnerabilityAccess.objects.get(id=record_id)
- removed_user = username
- history_comment = removed_user + "'s access removed"
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
access_record.delete()
- if 'submit-notification' == action:
+ elif 'submit-notification' == action:
_submit_notification(request)
- if 'submit-trashvulnerability' == action:
+ #NOTE: No History for this
+ elif 'submit-trashvulnerability' == action:
record_id = request.POST['record_id']
vulnerability_obj = Vulnerability.objects.get(pk=record_id)
- history_comment = "Vulnerability '%s' is deleted" % vulnerability_obj.name
+# history_update.append(Update.DETACH_VUL % vulnerability_obj.name)
vulnerability_obj.delete()
+ elif 'submit-attach-cve' == action:
+ vulnerability_obj = Vulnerability.objects.get(pk=v_id)
+ cve_name_input = request.POST['cve_name']
+ # Sanitize the CVE name
+ cve_name = ''
+ for i in range(len(cve_name_input)):
+ if not cve_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_ '):
+ cve_name += '_'
+ else:
+ cve_name += cve_name_input[i]
+ try:
+ cve_obj = Cve.objects.get(name=cve_name)
+ # Does the user have permission to see this CVE?
+ if (not cve_obj.public) and (not UserSafe.is_admin(request.user)):
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_obj,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ATTACHE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return HttpResponse(json.dumps( {"error":"Error: this CVE name is reserved"} ), content_type = "application/json")
+ except:
+ # Create local CVE with this name
+ cve_obj,cve_local_object = _create_local_cve()
+ old_name = cve_obj.name
+ cve_obj.description = vulnerability_obj.description
+ cve_obj.name = cve_name
+ cve_obj.save()
+ cve_local_object.description = vulnerability_obj.description
+ cve_local_object.save()
+ # Apply the new name to CveLocal
+ cveLocal = CveLocal.objects.get(name=old_name)
+ cveLocal.name = cve_name
+ cveLocal.save()
+
+ history_cve_update = []
+ if not vulnerability_obj.public:
+ history_cve_update.append(Update.PUBLIC % (cve_obj.public,vulnerability_obj.public))
+ cve_obj.public = vulnerability_obj.public
+ # Ensure newly private record has at least this user
+ cve_access,created = CveAccess.objects.get_or_create(cve=cve_obj, user=request.user)
+ cve_access.save()
+ cve_obj.propagate_private()
+ cve_obj.save()
+
+ # Attach the CVE to the Vulnerability
+ c2v,create = CveToVulnerablility.objects.get_or_create(vulnerability=vulnerability_obj,cve=cve_obj)
+ c2v.save()
+ # Add history to CVE
+ username = UserSafe.user_name(request.user)
+ history_cve_update.append(Update.ATTACH_INV % (vulnerability_obj.name))
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_cve_update))
+ cve_hist = CveHistory.objects.create(cve_id=cve_obj.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ cve_hist.save()
+ history_update.append(Update.ATTACH_CVE % cve_obj.name)
+ elif 'submit-detach-cve' == action:
+ vulnerability_obj = Vulnerability.objects.get(pk=v_id)
+ record_id = request.POST['record_id']
+ cve_obj = Cve.objects.get(id=record_id)
+ c2v = CveToVulnerablility.objects.get(vulnerability=vulnerability_obj,cve=cve_obj)
+ c2v.delete()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.DETACH_VUL % vulnerability_obj.name)
+ cve_hist = CveHistory.objects.create(cve_id=cve_obj.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ cve_hist.save()
+ history_update.append(Update.DETACH_CVE % cve_obj.name)
+ else:
+ # Action not found
+ return HttpResponse(json.dumps( {"error": "ERROR:unknown action '%s'" % action} ), content_type = "application/json")
- if (history_comment != ''):
- VulnerabilityHistory.objects.create(vulnerability_id=v_id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ VulnerabilityHistory.objects.create(vulnerability_id=v_id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
return_data = {
"error": "ok",
+ "new_name" : new_name,
}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
except Exception as e:
@@ -1609,6 +2466,30 @@ def xhr_notifications(request):
_log("xhr_notifications_commit:no(%s)" % e)
return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+def xhr_errorlogs(request):
+ _log("xhr_errorlogs(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+
+ action = request.POST['action']
+
+ _log("xhr_errorlogs1")
+
+ try:
+ results_msg = ''
+ if 'delete-errorlogs' == action:
+ log_list = request.POST['log_list']
+ for log_id in log_list.split(','):
+ ErrorLog.objects.get(pk=log_id).delete()
+ return_data = {
+ "error": "ok",
+ "results_msg": results_msg,
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ except Exception as e:
+ _log("xhr_errorlogs_commit:ERROR(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+
def xhr_packages(request):
_log("xhr_packages(%s)" % request.POST)
if not 'action' in request.POST:
@@ -1644,43 +2525,53 @@ def xhr_investigation_commit(request):
action = request.POST['action']
invst_id = request.POST['investigation_id']
username = UserSafe.user_name(request.user)
- history_comment = "Nothing happened."
try:
+ history_update = []
+ xhr_note = ''
if 'submit-quickedit' == action:
- note = request.POST['note']
- private_note = request.POST['private_note']
+ priority = int(request.POST['priority'])
+ status = int(request.POST['status'])
+ outcome = int(request.POST['outcome'])
+ note = request.POST['note'].strip()
+ private_note = request.POST['private_note'].strip()
+ tags = request.POST['tags'].strip()
+ affected_components = request.POST['affected_components'].strip()
invst = Investigation.objects.get(id=invst_id)
+ if (invst.priority != priority):
+ history_update.append(Update.PRIORITY % (SRTool.priority_text(invst.priority),SRTool.priority_text(priority)))
+ invst.priority = priority
+ if (invst.status != request.POST['status']):
+ history_update.append(Update.STATUS % (SRTool.status_text(invst.status),SRTool.status_text(status)))
+ invst.status = request.POST['status']
+ if (invst.outcome != outcome):
+ history_update.append(Update.OUTCOME % (SRTool.status_text(invst.outcome),SRTool.status_text(outcome)))
+ invst.outcome = outcome
if (invst.comments != note):
invst.comments = note
- history_comment += "Note, "
+ history_update.append(Update.NOTE)
if (invst.comments_private != private_note):
invst.comments_private = private_note
- history_comment += "Private Note, "
- if (invst.status != request.POST['status']):
- invst.status = request.POST['status']
- history_comment += "Status, "
- if (invst.outcome != request.POST['outcome']):
- invst.outcome = request.POST['outcome']
- history_comment += "Outcome, "
- if (invst.priority != request.POST['priority']):
- invst.priority = request.POST['priority']
- history_comment += "Priority, "
- if (history_comment != ''):
- history_comment = history_comment[:-2]
- history_comment += " edited"
+ history_update.append(Update.PRIVATE_NOTE)
+ if (invst.tags != tags):
+ invst.tags = tags
+ history_update.append(Update.TAG)
+ if (invst.packages != affected_components):
+ history_update.append(Update.AFFECTED_COMPONENT % (invst.packages,affected_components))
+ invst.packages = affected_components
invst.save()
- if 'submit-attachdefectlist' == action:
+ elif 'submit-attachdefectlist' == action:
defects = request.POST['defects']
product_id = Investigation.objects.get(id=invst_id).product_id
- defect_names = ""
+ defect_names = []
for defect_id in defects.split(','):
- defect_names += Defect.objects.get(pk=defect_id).name + ", "
+ defect_names.append(Defect.objects.get(pk=defect_id).name)
InvestigationToDefect.objects.get_or_create(investigation_id=invst_id, defect_id=defect_id)
- defect_names = defect_names[:-2]
- history_comment = defect_names + " added to defects"
- if 'submit-attachdefect' == action:
+ history_update.append(Update.ATTACH_DEV % ','.join(defect_names))
+ elif 'submit-attachdefect' == action:
query = request.POST['query'].upper()
product_id = Investigation.objects.get(id=invst_id).product_id
+ # Courtesy removal of URL (or other) prefix
+ query = re.sub(r".*/", "", query)
#check if defect already in SRTool data
try:
defect = Defect.objects.get(name=query)
@@ -1697,81 +2588,105 @@ def xhr_investigation_commit(request):
defect = Defect.objects.get(name=query)
except subprocess.CalledProcessError as e:
_log("ERROR:submit-attachdefect:%d:STDOUT='%s':" % (e.returncode, e.output))
- return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+ error_message = "Could not find defect with the name '%s'\n\n(detail:%s)\n" % (query,str(e))
+ return HttpResponse(json.dumps({"error":error_message}), content_type = "application/json")
if defect:
InvestigationToDefect.objects.get_or_create(investigation_id=invst_id, defect_id=defect.id, product_id=product_id)
- history_comment = "Attached " + defect.name
- if 'submit-createdefect' == action:
+ # Enforce minimum status on open defects
+ if Defect.DEFECT_UNRESOLVED == defect.resolution:
+ invst = Investigation.objects.get(id=invst_id)
+ if defect.srt_status < invst.status:
+ defect.srt_status = invst.status
+ defect.save()
+ history_update.append(Update.ATTACH_DEV % defect.name)
+ elif 'submit-createdefect' == action:
investigation = Investigation.objects.get(id=invst_id)
defect_reason = request.POST['defect_reason']
components = request.POST['components']
priority = request.POST['priority']
- defect_name = _create_defect(investigation,defect_reason,components)
- history_comment = "New defect '%s' created" % defect_name
- if 'submit-detachdefect' == action:
+ try:
+ # if explicit selected priority, reset Investigation to that
+ priority = int(priority)
+ if priority != investigation.priority:
+ investigation.priority = priority
+ investigation.save()
+ except Exception as e:
+ _log("WARINING:defect_create:priority issue:'%s'" % priority)
+
+ affected_components = request.POST['affected_components'].strip()
+ defect_name,created = _create_defect(investigation,'',defect_reason,components,affected_components,username)
+ history_update.append(Update.ATTACH_DEV % defect_name)
+ xhr_note = defect_name
+ elif 'submit-detachdefect' == action:
defect_name = request.POST['defect']
product_id = Investigation.objects.get(id=invst_id).product_id
defect_id = Defect.objects.get(name=defect_name).id
InvestigationToDefect.objects.get(investigation_id=invst_id, defect_id=defect_id).delete()
- history_comment = defect_name + " detached from investigation"
- if 'submit-newcomment' == action:
+ history_update.append(Update.DETACH_DEV % defect_name)
+ elif 'submit-newcomment' == action:
comment = request.POST['comment']
InvestigationComments.objects.create(investigation_id=invst_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
- history_comment = "New comment submitted"
- if 'submit-trashcomment' == action:
+ #NOTE: No History for this
+ elif 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = InvestigationComments.objects.get(id=record_id)
- history_comment = "Comment from " + comment.author + " deleted"
comment.delete()
- if 'submit-trashattachment' == action:
+ #NOTE: No History for this
+ elif 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = InvestigationUploads.objects.get(id=record_id)
- history_comment = "Upload '" + upload.description + "' from " + upload.author + " deleted"
try:
os.remove(upload.path)
except OSError:
pass
+ history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
- if 'submit-addusernotify' == action:
+ elif 'submit-addusernotify' == action:
users = request.POST['users']
- usernames = ""
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ", "
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
InvestigationNotification.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
- usernames = usernames[:-2]
- history_comment = usernames + " added to notifications"
- if 'submit-trashusernotification' == action:
+ history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
+ elif 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = InvestigationNotification.objects.get(id=record_id)
- removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
- history_comment = removed_user + " removed from notifications"
+ removed_user = SrtUser.objects.get(pk=notification_record.user_id).username
+ history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
notification_record.delete()
- if 'submit-adduseraccess' == action:
+ elif 'submit-adduseraccess' == action:
users = request.POST['users']
- usernames = ""
+ usernames = []
for user_id in users.split(','):
- usernames += SrtUser.objects.get(pk=user_id).name + ", "
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
InvestigationAccess.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
- history_comment = usernames + " granted access"
- if 'submit-trashuseraccess' == action:
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
+ elif 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = InvestigationAccess.objects.get(id=record_id)
- removed_user = username
- history_comment = removed_user + "'s access removed"
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
access_record.delete()
- if 'submit-notification' == action:
+ elif 'submit-notification' == action:
_submit_notification(request)
- history_comment = ''
- if 'submit-trashinvestigation' == action:
+ #NOTE: No History for this
+ elif 'submit-trashinvestigation' == action:
record_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=record_id)
- history_comment = "Investigation '%s' is deleted" % investigation_obj.name
+# history_update.append(Update.DETACH_INV % investigation_obj.name)
investigation_obj.delete()
-
- if history_comment:
- InvestigationHistory.objects.create(investigation_id=invst_id, comment=history_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ else:
+ return_data = {
+ "error": "ERROR:unknown action '%s'" % action,
+ "new_name" : new_name,
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ if history_update:
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
+ InvestigationHistory.objects.create(investigation_id=invst_id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
return_data = {
"error": "ok",
+ "note": xhr_note,
}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
@@ -1779,6 +2694,212 @@ def xhr_investigation_commit(request):
_log("xhr_investigation_commit:no(%s)" % e)
return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+def xhr_publish(request):
+ _log("xhr_publish(%s)" % request.POST)
+
+ def remove_mark(mark,line):
+ pos1 = line.find(mark)
+ if -1 == pos1:
+ return line
+ pos2 = line.find(')',pos1)
+ if -1 == pos2:
+ return line.replace(mark,'')
+ line = line[0:pos1] + line[pos2+1:]
+ return line
+
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ username = UserSafe.user_name(request.user)
+ action = request.POST['action']
+
+ if 'export-snapshot' == action:
+ snap_date_base = request.POST['snap_date_base']
+ snap_date_top = request.POST['snap_date_top']
+ snap_date_start = request.POST['snap_date_start']
+ snap_date_stop = request.POST['snap_date_stop']
+ _log("xhr_publish:export-snapshot:%s,%s,%s,%s" % (snap_date_base,snap_date_top,snap_date_start,snap_date_stop))
+
+ SrtSetting.set_setting('publish_snap_date_base',snap_date_base)
+ SrtSetting.set_setting('publish_snap_date_top',snap_date_top)
+ SrtSetting.set_setting('publish_snap_date_start',snap_date_start)
+ SrtSetting.set_setting('publish_snap_date_stop',snap_date_stop)
+
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ base_dir = ''
+ top_dir = ''
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if (not base_dir) and (snap_date_base == backup_date):
+ base_dir = 'backups/%s' % backup_dir
+ if (not top_dir) and (snap_date_top == backup_date) and ('Now' != backup_mode):
+ top_dir = 'backups/%s' % backup_dir
+
+ _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,base_dir))
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,'--srt2update',base_dir)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: base dir prep:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,top_dir))
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,'--srt2update',top_dir)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: top dir prep:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ _log('Publish:./bin/'+SRT_MAIN_APP+'/srtool_publish.py --validate-update-svns --previous '+base_dir+' --current '+top_dir+' --start '+snap_date_start+' --stop '+snap_date_stop)
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,
+ '--validate-update-svns','--previous',base_dir,'--current',top_dir,
+ '--start',snap_date_start,'--stop',snap_date_stop)
+ if 0 != report_returncode:
+ return_data = {"error": "Error: publish report:%s:%s" % (report_error,report_stdout),}
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ publish_snap_last_calc = 'Base:%s, Top:%s, Start:%s, Stop:%s, On:%s' % (
+ snap_date_base,snap_date_top,snap_date_start,snap_date_stop,
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ )
+ SrtSetting.set_setting('publish_snap_last_calc',publish_snap_last_calc)
+
+ _log('Publish:Done!')
+
+ if 'export-snapshot-progress' == action:
+ snap_date_base = request.POST['snap_date_base']
+ snap_date_top = request.POST['snap_date_top']
+ snap_date_start = request.POST['snap_date_start']
+ snap_date_stop = request.POST['snap_date_stop']
+ _log("xhr_publish:export-snapshot:%s,%s,%s,%s" % (snap_date_base,snap_date_top,snap_date_start,snap_date_stop))
+
+ SrtSetting.set_setting('publish_snap_date_base',snap_date_base)
+ SrtSetting.set_setting('publish_snap_date_top',snap_date_top)
+ SrtSetting.set_setting('publish_snap_date_start',snap_date_start)
+ SrtSetting.set_setting('publish_snap_date_stop',snap_date_stop)
+
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ base_dir = ''
+ top_dir = ''
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if (not base_dir) and (snap_date_base == backup_date):
+ base_dir = 'backups/%s' % backup_dir
+ if (not top_dir) and (snap_date_top == backup_date) and ('Now' != backup_mode):
+ top_dir = 'backups/%s' % backup_dir
+
+ _log('PublishProgress:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,base_dir))
+
+ command = [
+ './bin/%s/srtool_publish.py' % SRT_MAIN_APP,
+ '--validate-update-svns-progress','--previous',base_dir,'--current',top_dir,
+ '--start',snap_date_start,'--stop',snap_date_stop,
+ ' --progress'
+ ]
+ Job.start('Update svns progress','Create SVNS diff file',' '.join(command),'','update_logs/run_svns_job.log',job_id=2)
+
+ publish_snap_last_calc = 'Base:%s, Top:%s, Start:%s, Stop:%s, On:%s' % (
+ snap_date_base,snap_date_top,snap_date_start,snap_date_stop,
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ )
+ SrtSetting.set_setting('publish_snap_last_calc',publish_snap_last_calc)
+
+ _log('PublishProgress:Done!')
+
+ elif 'submit-trashreport' == action:
+ report_name = request.POST['report_name']
+ os.remove('data/%s/%s' % (SRT_MAIN_APP,report_name))
+ else:
+ srtool_today_time = datetime.today()
+ srtool_today = datetime.today().strftime("%Y-%m-%d")
+ reason_map = {}
+ if 'defects' in request.POST:
+ cve_table = []
+ for defect_name in request.POST['defects'].split(','):
+ try:
+ defect = Defect.objects.get(name = defect_name)
+ cve_names = defect.get_cve_names
+ for cve_name in cve_names.split(','):
+ cve_table.append(cve_name)
+ reason_map[cve_name] = defect_name
+ except Exception as e:
+ _log("ERROR:xhr_publish:defectlist:%s" % e)
+ cve_list = ','.join(cve_table)
+ else:
+ cve_list = request.POST['cves']
+ for cve_name in cve_list.split(','):
+ reason_map[cve_name] = ''
+ _log("xhr_publish_defect2cves3:%s:%d" % (cve_list,len(cve_list)))
+
+ date_start = datetime.strptime(SrtSetting.get_setting('publish_date_start','02/15/2019'), '%m/%d/%Y')
+ date_stop = datetime.strptime(SrtSetting.get_setting('publish_date_stop','03/15/2019'), '%m/%d/%Y')
+ # set date_stop to 11:59pm for end of 'inclusive' day
+ date_stop = date_stop.replace(hour=11, minute=59)
+ if 'mark-new' == action:
+ for cve_name in cve_list.split(','):
+ _log("xhr_publish_defect2cvesNEW:%s" % (cve_name))
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NEW_USER
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+ publish_object.reason += ' Mark_New(%s)' % reason_map[cve_name]
+ publish_object.reason = publish_object.reason.replace(' ',' ').strip()
+ publish_object.save()
+ publishMarkNew(cve_list,reason_map,date_start,date_stop)
+ if 'mark-modified' == action:
+ for cve_name in cve_list.split(','):
+ _log("xhr_publish_defect2cvesMOD:%s" % (cve_name))
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_MODIFIED_USER
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+ publish_object.reason += ' Mark_Updated(%s)' % reason_map[cve_name]
+ publish_object.reason = publish_object.reason.replace(' ',' ').strip()
+ publish_object.save()
+ publishMarkModified(cve_list,reason_map,date_start,date_stop)
+ if 'unmark' == action:
+ for cve_name in cve_list.split(','):
+ cve = Cve.objects.get(name=cve_name)
+ publish_object,created = PublishSet.objects.get_or_create(cve=cve)
+ publish_object.state = PublishSet.PUBLISH_SET_NONE
+ publish_object.reason = remove_mark('Mark_New',publish_object.reason)
+ publish_object.reason = remove_mark('Mark_Updated',publish_object.reason)
+ publish_object.reason = publish_object.reason.replace(' ',' ').strip()
+ publish_object.save()
+ publishMarkNone(cve_list,date_start,date_stop)
+
+ return_data = {
+ "error": "ok",
+ }
+
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ except Exception as e:
+ _log("xhr_publish:no(%s)(%s)" % (e,traceback.print_stack()))
+ return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+
+
+def attach_cve_alternates(cve,force_nist_update=True):
+ # Attach all matching CVE sources
+ #_log("Alternate1:%s" % (cve.name))
+ for ds in DataSource.objects.filter(data="cve"):
+ #_log("Alternate2:%s:%s:%s:%s:" % (ds.key,ds.cve_filter,cve.name,ds.cve_filter))
+ if ds.cve_filter and cve.name.startswith(ds.cve_filter):
+ try:
+ cve_source_object,created = CveSource.objects.get_or_create(cve=cve,datasource=ds)
+ except:
+ ### WORKAROUND TODO TOFIX
+ cve_source_object = CveSource.objects.filter(cve=cve,datasource=ds).first()
+ created = False
+ #_log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve.name,created))
+
+ # Force update the CVE summary data from sources
+ if force_nist_update:
+ result_returncode,result_stdout,result_stderr = execute_process(
+ './bin/nist/srtool_nist.py',
+ '--update-cve-list',
+ cve.name,
+ '--force'
+ )
+ #_log("CVE_ALT_REFRESH=%s|%s|%s" % (result_returncode,result_stdout,result_stderr))
def cve_alternates(request, cve_pk):
try:
@@ -1786,17 +2907,261 @@ def cve_alternates(request, cve_pk):
except Exception as e:
_log("CVE_ERROR(%s):" % e)
return redirect(landing)
-
# Attach all matching CVE sources
- _log("Alternate1:%s" % (cve_object.name))
- for ds in DataSource.objects.filter(data="cve"):
- _log("Alternate2:%s" % (ds.key))
- if ds.cve_filter and cve_object.name.startswith(ds.cve_filter):
- cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=ds)
- _log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
-
+ attach_cve_alternates(cve_object)
return redirect(cve, cve_pk)
+def xhr_maintenance_commit(request):
+ _log("xhr_maintenance_commit(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ if request.POST["action"] == "<some_action>":
+ pass
+
+ return_data = {
+ "error": "ok",
+ }
+ _log("xhr_maintenance_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_triage_commit:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_sources_commit(request):
+ _log("xhr_sources_commit(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ error_message = "ok";
+ data_message = "";
+ if request.POST["action"] == "submit-run-update-job":
+ ds_id = int(request.POST['id'])
+ datasource = DataSource.objects.get(id=ds_id)
+ #Job.start(name,description,command,options='',log_file=None,job_id=1):
+ name = datasource.name
+ description = datasource.description
+ options = ''
+ # Force update to execute now
+ command = datasource.update + ' --force'
+ _log("SUBMIT-RUN-UPDATE-JOB:Job.start(%s,%s,%s,%s)" % (name,description,command,options))
+ with open(f"{SRT_BASE_DIR}/update_logs/master_log.txt", "a") as update_log:
+ update_log.write("SRTOOL_UPDATE_MANUAL:%s:%s:%s:\n" % (datetime.now(),datasource.description,command))
+ Job.start(name,description,command,options)
+
+ elif request.POST["action"] == "submit-toggle-enable":
+ ds_id = int(request.POST['id'])
+ datasource = DataSource.objects.get(id=ds_id)
+ if 'DISABLE ' in datasource.attributes:
+ datasource.attributes = datasource.attributes.replace('DISABLE ','')
+ datasource.attributes = 'ENABLE ' + datasource.attributes
+ else:
+ datasource.attributes = 'DISABLE ' + datasource.attributes
+ datasource.attributes = datasource.attributes.replace('ENABLE ','')
+ datasource.save()
+ error_message = 'no_refresh'
+ data_message = '%d=%s' % (datasource.id,datasource.attributes)
+
+ else:
+ error_message = "ERROR:unknown action '%s'" % request.POST["action"]
+
+ return_data = {
+ "error": error_message,
+ "data_message": data_message,
+ }
+ _log("xhr_sources_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_triage_commit:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_job_post(request):
+ _log("xhr_job_post(%s)2" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ if request.POST["action"] == "submit-job":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'Submit Job',command,options,'update_logs/Job.start_user.log')
+ elif request.POST["action"] == "submit-testjob":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'This is a test',command,options,'update_logs/Job.start_user.log')
+ elif request.POST["action"] == "submit-testjob-j2":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'This is a test',command,options,'update_logs/Job.start_user.log',job_id=2)
+ elif request.POST["action"] == "submit-testjob-parent":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ # Preclear previously completed jobs from view
+ for job in Job.objects.all():
+ if not job.status in (Job.NOTSTARTED,Job.INPROGRESS):
+ job.status = Job.NOTSTARTED
+ job.save()
+ Job.start(name,'Parent/Children test',"./bin/common/srtool_job.py --test-parent-job",options,'update_logs/Job.start_user.log',job_id=9)
+ elif request.POST["action"] == "submit-trash-job":
+ record_id = int(request.POST.get('record_id', '0'))
+ if UserSafe.is_admin(request.user):
+ Job.objects.get(id=record_id).delete()
+ elif request.POST["action"] == "submit-clearjobs":
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+ else:
+ return_data = {
+ "error": "ERROR:unknown action '%s'" % request.POST["action"],
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ return_data = {
+ "error": "ok",
+ }
+ _log("xhr_maintenance_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_job_post:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def joblog(request,job_pk):
+ if request.method == "GET":
+ _log("GET_JOBLOG:%s" % job_pk)
+ job, created = Job.objects.get_or_create(id=job_pk)
+ template = "joblog.html"
+ log_text = ''
+ log_file = job.log_file if (job.log_file and ('/' == job.log_file[0])) else os.path.join(SRT_BASE_DIR,job.log_file)
+ if job.log_file:
+ with open(os.path.join(SRT_BASE_DIR,log_file),'r') as file:
+ log_text = file.read() #Note: keep EOL chars
+ context = {
+ 'object' : job,
+ 'log_text' : log_text,
+ 'log_date' : time.asctime(time.localtime(os.path.getmtime(log_file))),
+ }
+ return render(request, template, context)
+ # No action if no log
+ return HttpResponse(json.dumps( {"error": "ok",} ), content_type = "application/json")
+ elif request.method == "POST":
+ _log("POST_JOBLOG: %s" % request)
+
+ if request.POST["action"] == "download-job-log":
+ try:
+ job = Job.objects.get(id=job_pk)
+ file_path = job.log_file
+ except:
+ # In case job was cleaned up but old link for log was still visible
+ file_path = ''
+ if file_path:
+ fsock = open(file_path, "rb")
+ file_name = os.path.basename(file_path)
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = 'attachment; filename="{}"'.format(file_name)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(file_name)
+ _log("EXPORT_POST_Q{%s} %s || %s " % (response, response['Content-Disposition'], disposition))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
+ return redirect("/srtgui/joblog")
+
+ raise Exception("Invalid HTTP method for this page")
+
+def email_admin(request):
+ if request.method == "GET":
+ context = {
+ 'error_message' : '',
+ }
+ return render(request, 'email_admin.html', context)
+ elif request.method == "POST":
+ _log("EMAIL_ADMIN: %s" % request)
+
+ if request.POST["action"] == "submit":
+ request_type = request.POST.get('request-type', '')
+ user_name = request.POST.get('user-name', '').strip()
+ user_email = request.POST.get('user-email', '').strip()
+ message = request.POST.get('message', '').strip()
+ if (not user_name) or (not user_email):
+ return render(request, 'email_admin.html', {'error_message' : "Error:missing user name or email",})
+
+ email_list = []
+ for user in SrtUser.get_group_users('SRTool_Admins'):
+ if user.email:
+ email_list.append(user.email)
+ if not email_list:
+ return render(request, 'email_admin.html', {'error_message' : "Error:missing admin emails. Contact SRTool team",})
+# email_list.append(user_email)
+
+ email_temp_file = '.email.txt'
+ with open(email_temp_file, 'w') as file:
+ print("SRTool alert: %s for %s" % (request_type,user_name),file=file)
+ print("From: %s" % user_email,file=file)
+ for email in email_list:
+ print("To: %s" % email,file=file)
+ print("Subject: %s requests %s" % (user_name,request_type),file=file)
+ print("",file=file)
+ print("SRTool alert: %s" % request_type,file=file)
+ print("From: %s" % user_name,file=file)
+ print("Email: %s" % user_email,file=file)
+ print("",file=file)
+ print(message,file=file)
+
+ smtp_server = os.environ.get('SRT_EMAIL_SMTP', 'MISSING_SRT_EMAIL_SMTP')
+ email_command = ['git','send-email','--from='+user_email,'--thread','--quiet','--confirm=never',\
+ '--smtp-server',smtp_server,'--to=%s' % ','.join(email_list), email_temp_file]
+ email_returncode,email_stdout,email_stderr = execute_process(email_command)
+ if email_returncode:
+ return render(request, 'email_admin.html', {'error_message' : email_stderr,})
+ return redirect(email_success)
+
+ elif request.POST["action"] == "cancel":
+ return redirect('/')
+
+ else:
+ return render(request, 'email_admin.html', {'error_message' : "Error:no such action '%s'" % request.POST["action"]})
+
+ raise Exception("Invalid HTTP method for this page")
+
+def email_success(request):
+ if request.method == "GET":
+ context = {
+ }
+ return render(request, 'email_success.html', context)
+ elif request.method == "POST":
+ _log("EMAIL_SUCCESS: %s" % request)
+ if request.POST["action"] == "close":
+ return redirect('/')
+ return redirect('/')
+
+def date_time_test(request):
+ utc_dt = datetime.now(timezone.utc)
+ current_ala = utc_dt.astimezone(pytz.timezone('US/Pacific')).strftime(SRTool.DATETIME_FORMAT)
+ user_timezone_str = request.user.map_usertz_to_usertz_str()
+
+ # Replace with getting user_timezone_str from the user record
+ user_timezone = pytz.timezone(request.user.map_usertz_str_to_usertz(user_timezone_str))
+
+ epoch = time.time()
+ offset = utc_dt.astimezone(user_timezone).replace(tzinfo=None) - datetime.fromtimestamp(epoch)
+ local_time = utc_dt + offset
+ current_local = local_time.strftime(SRTool.DATETIME_FORMAT)
+
+ context = {
+ 'current_utc' : datetime.utcnow().strftime(SRTool.DATETIME_FORMAT),
+ 'current_ala' : current_ala,
+ 'current_local' : current_local,
+ 'timezone_list' : SrtUser.get_timezone_list(),
+ 'user_timezone' : user_timezone,
+ }
+ return render(request, 'date-time-test.html', context)
+
def tbd(request):
context = {}
diff --git a/lib/srtgui/widgets.py b/lib/srtgui/widgets.py
index 751f7f14..ec2fbd42 100644
--- a/lib/srtgui/widgets.py
+++ b/lib/srtgui/widgets.py
@@ -4,7 +4,7 @@
#
# BitBake Toaster Implementation
#
-# Copyright (C) 2015 Intel Corporation
+# Copyright (C) 2023 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,6 +21,7 @@
from django.views.generic import View, TemplateView
from django.views.decorators.cache import cache_control
+from django.utils.decorators import method_decorator
from django.shortcuts import HttpResponse
from django.core.cache import cache
from django.core.paginator import Paginator, EmptyPage
@@ -29,8 +30,15 @@ from django.template import Context, Template
from django.template import VariableDoesNotExist
from django.template import TemplateSyntaxError
from django.core.serializers.json import DjangoJSONEncoder
+from django.urls import reverse, resolve
+from django.utils import timezone
+from django.http import JsonResponse
+
+from srtgui.templatetags.jobtags import json as template_json
+from srtgui.templatetags.jobtags import sectohms
from orm.models import SrtSetting, Cve
+from orm.models import Job
import types
import json
@@ -51,6 +59,38 @@ class NoFieldOrDataName(Exception):
# quick development/debugging support
from srtgui.api import _log
+################################################
+### Helper Routines
+
+def isbalanced(s):
+ c= 0
+ ans=False
+ list1 = []
+ list2 = []
+ strcheck = ""
+ lstcheck = []
+ for i in range(len(s)):
+ if s[i] == "(":
+ strcheck = strcheck + "_" + s[i]
+ lstcheck.append(i)
+ elif s[i] == ")" and "(" in strcheck:
+ list1 = strcheck.split("_")
+ list1.pop()
+ lstcheck.pop()
+ strcheck = "_".join(list1)
+ elif s[i] == ")" and "(" not in strcheck:
+ strcheck = strcheck + "_" + s[i]
+ lstcheck.append(i)
+ list1 = strcheck.split("_")
+ list2[:0] = s
+ if len(lstcheck) > 0 :
+ for i2 in lstcheck:
+ list2.pop(lstcheck[0])
+ return "".join(list2)
+
+################################################
+### ToasterTable
+
class ToasterTable(TemplateView):
def __init__(self, *args, **kwargs):
super(ToasterTable, self).__init__()
@@ -60,7 +100,7 @@ class ToasterTable(TemplateView):
self.queryset = None
self.columns = []
- _log("ToasterTable:%s,%s" % (args,kwargs))
+# _log("ToasterTable:%s,%s" % (args,kwargs))
# map from field names to Filter instances
self.filter_map = TableFilterMap()
@@ -71,8 +111,8 @@ class ToasterTable(TemplateView):
self.default_orderby = ""
# prevent HTTP caching of table data
- @cache_control(must_revalidate=True,
- max_age=0, no_store=True, no_cache=True)
+ @method_decorator(cache_control(must_revalidate=True,
+ max_age=0, no_store=True, no_cache=True))
def dispatch(self, request, *args, **kwargs):
return super(ToasterTable, self).dispatch(request, *args, **kwargs)
@@ -96,6 +136,17 @@ class ToasterTable(TemplateView):
def get(self, request, *args, **kwargs):
if request.GET.get('format', None) == 'json':
+ # Add all URL parameters to kwargs, specifically for the
+ # case of the Toaster table JSON 'get' for the 'filter' AJAX
+ # call which does not include the request header parameters
+ tableParams = self.request.GET.get('tableParams','')
+ for param in tableParams.split(','):
+ pos = param.find('=')
+ if 0 < pos:
+ name = param[:param.find('=')]
+ value = param[param.find('=')+1:]
+ kwargs[name] = value
+
self.setup_queryset(*args, **kwargs)
# Put the project id into the context for the static_data_template
if 'pid' in kwargs:
@@ -228,25 +279,89 @@ class ToasterTable(TemplateView):
TableFilterAction* before its filter is applied and may modify the
queryset returned by the filter
"""
+
self.setup_filters(**kwargs)
try:
- filter_name, action_name = filters.split(':')
+# filter_name, action_name = filters.split(':')
+ if len(filters.split(",")) < 2 :
+ filter_name, action_name = filters.split(':')
action_params = unquote_plus(filter_value)
except ValueError:
return
- if "all" in action_name:
- return
-
- table_filter = self.filter_map.get_filter(filter_name)
- action = table_filter.get_action(action_name)
- action.set_filter_params(action_params)
- self.queryset = action.filter(self.queryset)
+# if "all" in action_name:
+# return
+
+ FilterString = ""
+ CriteriaString = ""
+ lstactionlist = []
+ if self.request.session.get('filterkey'):
+ if False:
+ if self.request.session['filterkey'].split("~")[0] != "":
+ CriteriaString = self.request.session['filterkey']
+ CriteriaString = CriteriaString.split("~")[0]
+ FilterString = str(filters) + str('|') + str(CriteriaString)
+ FilterList = FilterString.split('|')
+
+
+ # if self.request.session['filterkey'].split("~")[0] != "":
+ if len(filters.split(",")) > 1:
+ # CriteriaString = self.request.session['filterkey']
+ # CriteriaString = CriteriaString.split("~")[0]
+ # for CriteriaItem in filters.split(","):
+ # if not CriteriaItem in FilterString:
+ # FilterString = str(CriteriaItem) + str('|') + str(CriteriaString)
+ FilterList = filters.split(",")
+
+ q1 = None
+ q2 = self.queryset
+ for filterItem in FilterList:
+ #if counter == 0:
+ table_filter1 = self.filter_map.get_filter(filterItem.split(":")[0])
+ action1 = table_filter1.get_action(filterItem.split(":")[1])
+ action1.set_filter_params(action_params)
+ q1 = action1.filter(q2)
+ q2 = q1
+ lstactionlist.append(filterItem.split(":")[1])
+ self.queryset = q1
+ else:
+ table_filter = self.filter_map.get_filter(filter_name)
+ action = table_filter.get_action(action_name)
+ action.set_filter_params(action_params)
+ self.queryset = action.filter(self.queryset)
+ FilterString = str(filters)
+ lstactionlist.append(action_name)
+ else:
+ table_filter = self.filter_map.get_filter(filter_name)
+ action = table_filter.get_action(action_name)
+ action.set_filter_params(action_params)
+ self.queryset = action.filter(self.queryset)
+ FilterString = str(filters)
+ lstactionlist.append(action_name)
+
+ _log("FOO:APPLY_FILTER:FILTER:%s" % action_params)
+
+ strquerystring = self.queryset.query.__str__()
+ qstring1 = strquerystring.replace ('AND', 'AND\n')
+ qstring2 = qstring1.replace ('OR', 'OR\n')
+ tar = re.findall(r"(?<==).+(?= AND)|(?<==)(?<==).+(?= OR )|(?<==).+(?=[)])|(?<==).+(?= OR)", qstring2)
+ for item in tar:
+ if len(re.findall(r"[A-Za-z]", item.strip())) != 0 :
+ item = isbalanced(item)
+ strquerystring = strquerystring.replace(item, '"'+item.strip()+'"' )
+ self.request.session['filterkey'] = str(FilterString) + str('~') + strquerystring
def apply_orderby(self, orderby):
# Note that django will execute this when we try to retrieve the data
- self.queryset = self.queryset.order_by(orderby)
+ if False:
+ # Use parent order field if present (for column computed from existing column)
+ order_by = re.sub(r'.*__parent_', '', orderby)
+# order_by = orderby
+ self.queryset = self.queryset.order_by(order_by)
+ else:
+ self.queryset = self.queryset.order_by(orderby)
+ # self.request.session['filterkey'] = str('~') + str(self.queryset.query)
def apply_search(self, search_term):
"""Creates a query based on the model's search_allowed_fields"""
@@ -285,9 +400,19 @@ class ToasterTable(TemplateView):
else:
search_queries = queries
+ _log("FOO:APPLY_SEARCH:FILTER:%s" % search_queries)
self.queryset = self.queryset.filter(search_queries)
-
- def apply_row_customization(self, row):
+ strquerystring = self.queryset.query.__str__()
+ qstring1 = strquerystring.replace ('AND', 'AND\n')
+ qstring2 = qstring1.replace ('OR', 'OR\n')
+ tar = re.findall(r"(?<==).+(?= AND)|(?<==)(?<==).+(?= OR )|(?<==).+(?=[)])|(?<==).+(?= OR)", qstring2)
+ for item in tar:
+ if len(re.findall(r"[A-Za-z]", item.strip())) != 0 :
+ item = isbalanced(item)
+ strquerystring = strquerystring.replace(item, '"'+item.strip()+'"' )
+ self.request.session['filterkey'] = str('~') + str(strquerystring)
+
+ def apply_row_customization(self, row, **kwargs):
""" function to implement in the subclass which supports
row data customization in the respective table handler """
return row
@@ -311,6 +436,11 @@ class ToasterTable(TemplateView):
orderby = request.GET.get("orderby", None)
nocache = request.GET.get("nocache", None)
+ # Test if clear filters from session
+ if filters == "":
+ if request.session.get('filterkey'):
+ del request.session['filterkey']
+
# Make a unique cache name
cache_name = self.__class__.__name__
@@ -327,7 +457,8 @@ class ToasterTable(TemplateView):
cache_name = re.sub(r'["\']', "-", cache_name)
cache_name = re.sub(r'[^A-Za-z0-9-]', "", cache_name)
- if nocache:
+ # Forcibly disable caching for all pages, since SRTool data is too dynamic
+ if True or nocache:
cache.delete(cache_name)
data = cache.get(cache_name)
@@ -338,6 +469,8 @@ class ToasterTable(TemplateView):
self.setup_columns(**kwargs)
+ self.request.session['nofilterkey'] = str('~') + str(self.queryset.query)
+
if search:
self.apply_search(search)
if filters:
@@ -427,7 +560,7 @@ class ToasterTable(TemplateView):
data['rows'].append(required_data)
# apply any row data customization override before converted to JSON
- data = self.apply_row_customization(data)
+ data = self.apply_row_customization(data, **kwargs)
data = json.dumps(data, indent=2, cls=DjangoJSONEncoder)
cache.set(cache_name, data, 60*30)
@@ -490,3 +623,136 @@ class ToasterTypeAhead(View):
pass
+class MostRecentJobsView(View):
+ def _was_yesterday_or_earlier(self, completed_on):
+ now = timezone.now()
+ delta = now - completed_on
+
+ if delta.days >= 1:
+ return True
+
+ return False
+
+ def get(self, request, *args, **kwargs):
+ """
+ Returns a list of jobs in JSON format.
+ """
+
+ recent_job_objs = Job.get_recent()
+ recent_jobs = []
+
+ for job_obj in recent_job_objs:
+## cancel_url = \
+## reverse('xhr_jobrequest', args=(job_obj.sprint.pk,))
+# cancel_url = \
+# reverse('xhr_jobrequest', )
+ cancel_url = \
+ ''
+
+ job = {}
+ job['id'] = job_obj.pk
+
+ tasks_complete_percentage = 0
+ if job_obj.status in (Job.SUCCESS, Job.ERRORS):
+ tasks_complete_percentage = 100
+ elif job_obj.status == Job.INPROGRESS:
+ tasks_complete_percentage = job_obj.completeper()
+
+ job['tasks_complete_percentage'] = tasks_complete_percentage
+
+ job['state'] = job_obj.get_status_text
+
+ job['errors'] = job_obj.errors
+
+ job['warnings'] = job_obj.warnings
+
+ if job_obj.completed_on and job_obj.started_on:
+ timespent = job_obj.completed_on - job_obj.started_on
+ job['jobtime'] = sectohms(timespent.total_seconds())
+ else:
+ job['jobtime'] = 0
+
+ job['cancel_url'] = cancel_url
+
+ job['job_targets_json'] = \
+ template_json(job_obj.name)
+
+ # convert completed_on time to user's timezone
+ if job_obj.completed_on:
+ completed_on = job_obj.completed_on
+
+ completed_on_template = '%H:%M'
+ if self._was_yesterday_or_earlier(completed_on):
+ completed_on_template = '%d/%m/%Y ' + completed_on_template
+ else:
+ completed_on_template = 'Today ' + completed_on_template
+ job['completed_on'] = completed_on.strftime(
+ completed_on_template)
+ else:
+ job['completed_on'] = 'In progress...'
+
+ job['targets'] = job_obj.message #current remote command
+
+ if job_obj.refresh:
+ # Right now a binary flag, later maybe a timeout counter
+ job['refresh'] = '1' #remote page refresh request
+ job_obj.refresh = 0
+ job_obj.save()
+ else:
+ job['refresh'] = '0'
+
+ recent_jobs.append(job)
+
+ return JsonResponse(recent_jobs, safe=False)
+
+class XhrJobRequest(View):
+
+ def error_response(error):
+ return JsonResponse({"error": error})
+
+ def get(self, request, *args, **kwargs):
+ return HttpResponse()
+
+ def post(self, request, *args, **kwargs):
+ """
+ Job control
+
+ Entry point: /xhr_jobrequest/<project_id>
+ Method: POST
+
+ Args:
+ id: id of job to change
+ jobCancel = job_request_id ...
+ jobDelete = id ...
+
+ Returns:
+ {"error": "ok"}
+ or
+ {"error": <error message>}
+ """
+
+ if 'jobCancel' in request.POST:
+ for i in request.POST['jobCancel'].strip().split(" "):
+ try:
+ job = Job.objects.get(pk=i)
+ job.cancel()
+ except Job.DoesNotExist:
+ return error_response('No such job request id %s' % i)
+
+ return JsonResponse({"error": 'ok'})
+
+ if 'jobDelete' in request.POST:
+ for i in request.POST['jobDelete'].strip().split(" "):
+ try:
+ Job.objects.select_for_update().get(
+ pk=i,
+ state__lte=Job.INPROGRESS).delete()
+
+ except Job.DoesNotExist:
+ pass
+ return error_response("ok")
+
+ response = HttpResponse()
+ response.status_code = 500
+ return response
+
diff --git a/lib/srtmain/management/commands/checksocket.py b/lib/srtmain/management/commands/checksocket.py
index 19e75cb5..803009bc 100644
--- a/lib/srtmain/management/commands/checksocket.py
+++ b/lib/srtmain/management/commands/checksocket.py
@@ -25,7 +25,7 @@ import errno
import socket
from django.core.management.base import BaseCommand, CommandError
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
DEFAULT_ADDRPORT = "0.0.0.0:8000"
@@ -63,7 +63,7 @@ class Command(BaseCommand):
if hasattr(err, 'errno') and err.errno in errors:
errtext = errors[err.errno]
else:
- errtext = force_text(err)
+ errtext = force_str(err)
raise CommandError(errtext)
self.stdout.write("OK")
diff --git a/lib/srtmain/management/commands/update.py b/lib/srtmain/management/commands/update.py
index 8304e199..7da17acd 100755
--- a/lib/srtmain/management/commands/update.py
+++ b/lib/srtmain/management/commands/update.py
@@ -7,36 +7,45 @@ class Command(BaseCommand):
help = "Trigger a data source update"
def add_arguments(self, parser):
+ print("UPDATE:add_arguments")
parser.add_argument('--cron-start', action='store_const', const='cron_start', dest='command', help='Start the SRTool backgroud updater')
parser.add_argument('--cron-stop', action='store_const', const='cron_stop', dest='command', help='Stop the SRTool backgroud updater')
parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List data sources')
parser.add_argument('--run-updates', '-u', action='store_const', const='run-updates', dest='command', help='update scheduled data sources')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
- parser.add_argument('--name-filter', '-n', nargs='+', type=str, dest='name_filter', help='Filter for datasource name')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates for cumulative status')
parser.add_argument('--verbose', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
+ # NOTE: we have to do shenanigans with name_filter to support spaces
+ parser.add_argument('--name-filter', '-n', nargs='+', dest='name_filter', help='Filter for datasource name')
+
def handle(self, *args, **options):
- #print("UPDATE:%s|%s" % (str(args),str(options)))
+ print("UPDATE:%s|%s" % (str(args),str(options)))
command = ''
if 'cron_start' == options['command']: command = '--cron-start'
if 'cron_stop' == options['command']: command = '--cron-stop'
if 'list' == options['command']: command = '--list'
if 'run-updates' == options['command']: command = '--run-updates'
-
- # NOTE: we have to do shenanigans with name_filter to support spaces
- name_filter = '--name-filter "%s"' % ' '.join(options['name_filter']) if options['name_filter'] else ''
-
- force = '--force' if options['force'] else ''
- is_trial = '--trial' if options['is_trial'] else ''
- verbose = '--verbose' if options['verbose'] or (options['verbosity'] > 1) else ''
- context = '> /dev/null 2>&1 &' if 'cron_start' == options['command'] else ''
-
- update_command = "./bin/common/srtool_update.py %s %s %s %s %s %s" % (command,name_filter,force,is_trial,verbose,context)
- if verbose:
- print("RUN UPDATE SCRIPT: %s" % (update_command))
- os.chdir(os.environ['SRT_BASE_DIR'])
- os.system("%s" % (update_command))
+ if not command:
+ print("manage update: missing command '%s %s'" % (str(args),str(options)))
+ else:
+ if options['verbose'] or (options['verbosity'] > 1):
+ command += ' --verbose'
+ verbose = True
+ else:
+ verbose = False
+ if options['force']: command += ' --force'
+ if options['update_skip_history']: command += ' --update-skip-history'
+ if options['is_trial']: command += ' --trial'
+ # NOTE: we have to do shenanigans with name_filter to support spaces
+ if options['name_filter']: command += ' --name-filter "%s"' % ' '.join(options['name_filter'])
+ if 'cron_start' == options['command']: command += ' > /dev/null 2>&1 &'
+ update_command = "./bin/common/srtool_update.py %s" % (command)
+ if verbose:
+ print("RUN UPDATE SCRIPT: %s" % (update_command))
+ os.chdir(os.environ['SRT_BASE_DIR'])
+ os.system("%s" % (update_command))
diff --git a/lib/srtmain/settings.py b/lib/srtmain/settings.py
index 0607fe9a..abd115cb 100644
--- a/lib/srtmain/settings.py
+++ b/lib/srtmain/settings.py
@@ -22,8 +22,8 @@
# Django settings for SRT
import os
-
from django import VERSION as DJANGO_VERSION
+import yaml
DEBUG = True
@@ -40,27 +40,47 @@ ADMINS = (
MANAGERS = ADMINS
-SRT_SQLITE_DEFAULT_DIR = os.environ.get('SRT_BASE_DIR')
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', ".")
+with open(f"{SRT_BASE_DIR}/srt_dbconfig.yml", "r") as ymlfile:
+ SRT_DBCONFIG = yaml.safe_load(ymlfile)
+ SRT_DBSELECT = SRT_DBCONFIG['dbselect']
+ srt_dbconfig = SRT_DBCONFIG[SRT_DBSELECT]
+ srt_dbtype = srt_dbconfig['dbtype']
DATABASES = {
'default': {
- # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'ENGINE': 'django.db.backends.sqlite3',
# DB name or full path to database file if using sqlite3.
- 'NAME': "%s/srt.sqlite" % SRT_SQLITE_DEFAULT_DIR,
+ 'NAME': f"{SRT_BASE_DIR}/{srt_dbconfig['path']}",
'USER': '',
'PASSWORD': '',
- #'HOST': '127.0.0.1', # e.g. mysql server
- #'PORT': '3306', # e.g. mysql port
},
# Sqlite database lock problem
'OPTIONS': {
'timeout': 20,
}
+} if srt_dbtype == "sqlite" else {
+ 'default': {
+ 'ENGINE': 'django.db.backends.mysql',
+ 'NAME': srt_dbconfig["name"],
+ 'USER': srt_dbconfig["user"],
+ 'PASSWORD': srt_dbconfig["passwd"],
+ 'HOST': srt_dbconfig["host"],
+ 'PORT': srt_dbconfig["port"],
+ },
+} if srt_dbtype == "mysql" else {
+ 'default': {
+ 'ENGINE': 'django.db.backends.postgresql_psycopg2',
+ 'NAME': srt_dbconfig["name"],
+ 'USER': srt_dbconfig["user"],
+ 'PASSWORD': srt_dbconfig["passwd"],
+ 'HOST': srt_dbconfig["host"],
+ 'PORT': srt_dbconfig["port"],
+ },
}
# Needed when Using sqlite especially to add a longer timeout for waiting
-# for the database lock to be released
+# for the database lock to be released
# https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors
if 'sqlite' in DATABASES['default']['ENGINE']:
DATABASES['default']['OPTIONS'] = { 'timeout': 20 }
@@ -180,7 +200,7 @@ TEMPLATES = [
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
- os.path.join(SRT_SQLITE_DEFAULT_DIR, 'lib/srtmain/templates'),
+ os.path.join(SRT_BASE_DIR, 'lib/srtmain/templates'),
],
'OPTIONS': {
'context_processors': [
@@ -278,7 +298,7 @@ INSTALLED_APPS = (
'srtgui',
'users',
)
-#print("DEBUG:INSTALLED_APPS:%s,%s" % (SRT_MAIN_APP,INSTALLED_APPS))
+##print("DEBUG:INSTALLED_APPS:%s,%s" % (SRT_MAIN_APP,INSTALLED_APPS))
INTERNAL_IPS = ['127.0.0.1', '192.168.2.28']
@@ -319,7 +339,7 @@ if os.environ.get('SRT_DEVEL', None) is not None:
SOUTH_TESTS_MIGRATE = False
-# We automatically detect and install applications here if
+# We automatically detect and install other applications here if
# they have a 'models.py' or 'views.py' file
import os
currentdir = os.path.dirname(__file__)
@@ -331,6 +351,7 @@ for t in os.walk(os.path.dirname(currentdir)):
if ("views.py" in t[2] or "models.py" in t[2]) and not modulename in INSTALLED_APPS:
INSTALLED_APPS = INSTALLED_APPS + (modulename,)
+##print("INSTALLED_APPS:%s" % ','.join(INSTALLED_APPS))
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
@@ -401,3 +422,10 @@ EMAIL_FILE_PATH = os.path.join(SITE_ROOT, "sent_emails")
# Custom SRTool Users
AUTH_USER_MODEL = 'users.SrtUser'
+DEFAULT_AUTO_FIELD='django.db.models.AutoField'
+
+# SSL support (Django 4)
+SRT_CSRF_TRUSTED_ORIGINS = os.environ.get("SRT_CSRF_TRUSTED_ORIGINS", None)
+if SRT_CSRF_TRUSTED_ORIGINS is not None:
+ CSRF_TRUSTED_ORIGINS = SRT_CSRF_TRUSTED_ORIGINS.split(',')
+
diff --git a/lib/srtmain/urls.py b/lib/srtmain/urls.py
index 2f330154..6bdc1581 100644
--- a/lib/srtmain/urls.py
+++ b/lib/srtmain/urls.py
@@ -22,7 +22,7 @@
import os
from django import VERSION as DJANGO_VERSION
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView, TemplateView
from django.views.decorators.cache import never_cache
@@ -39,6 +39,9 @@ logger = logging.getLogger("srt")
from django.contrib import admin
admin.autodiscover()
+# Fetch the main app URL
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', 'srtgui')
+
urlpatterns = [
# Examples:
@@ -50,7 +53,7 @@ urlpatterns = [
url(r'^health$', TemplateView.as_view(template_name="health.html"), name='Toaster Health'),
# if no application is selected, we have the magic srtgui app here
- url(r'^$', never_cache(RedirectView.as_view(url='/srtgui/', permanent=True))),
+ url(r'^$', never_cache(RedirectView.as_view(url='/'+SRT_MAIN_APP+'/', permanent=True)), name='Default URL=/'+SRT_MAIN_APP+'/'),
]
import srtmain.settings
@@ -76,12 +79,12 @@ if DJANGO_VERSION >= (2,0):
# Uncomment the next lines to enable the admin:
path('admin/', admin.site.urls),
- # Main application
- path(SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
# Default applications
path('srtgui/', include('srtgui.urls')),
path('users/', include('users.urls')),
path('users/', include('django.contrib.auth.urls')),
+ # Main application
+ path(SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
] + urlpatterns
else:
urlpatterns = [
@@ -96,7 +99,26 @@ else:
url('^' + SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
] + urlpatterns
-#print("DEBUG:INSTALLED_URL_PATTERNS:%s,%s" % (SRT_MAIN_APP,urlpatterns))
+
+# We automatically detect and install other applications here
+# (at a lower precedence) if they have a 'urls.py'
+currentdir = os.path.dirname(__file__)
+urlpatterns_str = str(urlpatterns)
+for t in os.walk(os.path.dirname(currentdir)):
+ modulename = os.path.basename(t[0])
+ if 'srtmain' == modulename:
+ # Avoid infinite recursion
+ continue
+ if "urls.py" in t[2]:
+ found = False
+ for url in urlpatterns:
+ if modulename+"/urls.py" in str(url):
+ found = True
+ if not found:
+# urlpatterns.append(path(modulename + '/', include(modulename + '.urls')))
+ urlpatterns.insert(0,path(modulename + '/', include(modulename + '.urls')))
+
+##print("DEBUG:INSTALLED_URL_PATTERNS:%s,%s" % (SRT_MAIN_APP,urlpatterns))
currentdir = os.path.dirname(__file__)
diff --git a/lib/srtmain/wsgi.py b/lib/srtmain/wsgi.py
index 6b468e4f..42259a79 100644
--- a/lib/srtmain/wsgi.py
+++ b/lib/srtmain/wsgi.py
@@ -17,12 +17,29 @@ framework.
"""
import os
+from dotenv import load_dotenv
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "Toaster.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "srtmain.settings")
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+PIDFILE = os.environ.get('PIDFILE','.')
+
+# quick development/debugging support
+def _log(msg):
+ f1=open(f"{SRT_BASE_DIR}/gunicorn_env.txt", 'w')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Spawn the updater, if not already running
+_log(str(os.environ))
+is_update_pid = os.path.isfile(f"{SRT_BASE_DIR}/.srtupdate.pid")
+if False and SRT_BASE_DIR and PIDFILE and (not is_update_pid):
+ cmnd = [f"{SRT_BASE_DIR}/bin/srt","start_update",f"update_follow_pid={PIDFILE}"]
+ _log(f"COMMAND:{cmnd}")
+ os.spawnv(os.P_NOWAIT, cmnd[0], cmnd)
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
diff --git a/lib/users/migrations/0002_last_name.py b/lib/users/migrations/0002_last_name.py
new file mode 100644
index 00000000..11560e6a
--- /dev/null
+++ b/lib/users/migrations/0002_last_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2020-11-25 05:02
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='last_name',
+ field=models.CharField(blank=True, max_length=150, verbose_name='last name'),
+ ),
+ ]
diff --git a/lib/users/migrations/0003_srtuser_timezone.py b/lib/users/migrations/0003_srtuser_timezone.py
new file mode 100644
index 00000000..d69f62d6
--- /dev/null
+++ b/lib/users/migrations/0003_srtuser_timezone.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.20 on 2021-05-07 18:18
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0002_last_name'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='srtuser',
+ name='timezone',
+ field=models.CharField(blank=True, max_length=32),
+ ),
+ ]
diff --git a/lib/users/migrations/0004_timezone_default.py b/lib/users/migrations/0004_timezone_default.py
new file mode 100755
index 00000000..1d53468c
--- /dev/null
+++ b/lib/users/migrations/0004_timezone_default.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2021-10-31 02:24
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0003_srtuser_timezone'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='timezone',
+ field=models.CharField(blank=True, default='US/Pacific', max_length=32, null=True),
+ ),
+ ]
diff --git a/lib/users/migrations/0005_alter_srtuser_first_name.py b/lib/users/migrations/0005_alter_srtuser_first_name.py
new file mode 100644
index 00000000..9aa275ba
--- /dev/null
+++ b/lib/users/migrations/0005_alter_srtuser_first_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-01-30 18:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0004_timezone_default'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='first_name',
+ field=models.CharField(blank=True, max_length=150, verbose_name='first name'),
+ ),
+ ]
diff --git a/lib/users/models.py b/lib/users/models.py
index e91f317b..60391303 100755
--- a/lib/users/models.py
+++ b/lib/users/models.py
@@ -8,6 +8,7 @@ from srtgui.api import _log
class SrtUser(AbstractUser):
# add additional fields in here
role = models.CharField(max_length=128, verbose_name='security role')
+ timezone = models.CharField(max_length=32, default='US/Pacific', null=True, blank=True)
def __str__(self):
return "%s,%s" % (self.email,self.role)
@@ -49,14 +50,104 @@ class SrtUser(AbstractUser):
def get_groups(self):
groups = [ group.name for group in self.groups.all() ]
if not groups:
+ # Dynamically assign a group if none attached, in particular
+ # for (super)users created on command line
if self.is_superuser:
- return 'Superuser'
+ command_line_fixup = False
+ if not self.role:
+ self.role = "Superuser"
+ command_line_fixup = True
+ if not self.last_name:
+ self.last_name = self.username
+ command_line_fixup = True
+ if command_line_fixup:
+ self.save()
+ group = Group.objects.get(name = 'Admin')
+ group.user_set.add(self)
+ return group.name
else:
- return ''
+ group = Group.objects.get(name = 'Reader')
+ group.user_set.add(self)
+ return group.name
return ",".join(groups)
+    @staticmethod
+    def get_people_users():
+        names_to_exclude = ['admin','Guest','SRTool','All']  # built-in/system accounts, not real people
+        return(SrtUser.objects.exclude(username__in=names_to_exclude))  # queryset of human users only
+    @staticmethod
+    def get_group_users(group_name):
+        try:
+            group = Group.objects.get(name=group_name)
+        except Group.DoesNotExist:  # fixed: bare 'except' hid unrelated errors; missing group => []
+            return([])
+        return([user for user in group.user_set.all()])
@property
def get_group_perm(self):
return self.get_group_permissions()
+    DEFAULT_TIMEZONE_INDEX = 21  # index of the '-07:US/Pacific' entry in get_timezone_list()
+    @staticmethod
+    def get_timezone_list():
+        #timezone_list.split(':') = [GMT offset][pytz entry(key:can use ezlookup against pytz list)][Example cities/regions]
+        timezone_list = [
+            '+14:Pacific/Kiritimati:',
+            '+13:Pacific/Apia:',
+            '+12:Pacific/Auckland:NZ/Standard',
+            '+11:Pacific/Guadalcanal:SB/Standard RU/Magadan',
+            '+10:Pacific/Port_Moresby:PG/Standard RU/Vladivostok',
+            '+09:Asia/Tokyo:JP/Standard RU/Yakutsk ID/Jakarta',
+            '+08:Asia/Singapore:SG/Standard HK/Standard MN/Ulanbaatar',
+            '+07:Asia/Bangkok:TH/Bangkok RU/Krasnoyarsk',
+            '+06:Asia/Dhaka:BD/Dhaka RU/Omsk',
+            '+05:Asia/Karachi:PK/Karachi RU/Yekaterinburg',
+            '+04:Asia/Dubai:AE/Dubai MU/Standard RU/Samara',
+            '+03:Europe/Moscow:RU/Moscow KE/Nairobi',
+            '+02:Europe/Berlin:RU/Kaliningrad EG/Cairo RW/Kigali',
+            '+01:Europe/Dublin:NG/Lagos IE/Dublin',
+            '+00:Atlantic/Reykjavik:LR/Monrovia PT/Azores MA/Casablancas',
+            '-01:Atlantic/Cape_Verde:CV/Praia',
+            '-02:Etc/GMT-2:',
+            '-03:America/Argentina/Buenos_Aires:AR/Buenos Aires SR/Commewijne',
+            '-04:US/Eastern:US/New York City PR/San Juan',
+            '-05:US/Central:US/Houston MX/Mexico City',
+            '-06:US/Mountain:Denver CA/Edmonton',
+            '-07:US/Pacific:Los Angeles CA/Vancouver MX/Tijuana',
+            '-08:Pacific/Pitcairn:PN/Adamstown',
+            '-09:America/Anchorage:US/Anchorage',
+            '-10:Pacific/Tahiti:PF/Tahiti',
+            '-11:Pacific/Pago_Pago:AS/Pago Pago US/Midway',
+            '-12:Pacific/Kwajalein:',
+        ]
+        return timezone_list
+
+    # Set user timezone string from long string
+    def map_usertz_str_to_usertz(self, long_str):
+        short_timezone = ""
+        for tz in SrtUser.get_timezone_list():
+            if tz == long_str:
+                short_timezone = tz.split(':')[1]
+                break
+        if not short_timezone:
+            short_timezone = SrtUser.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX].split(':')[1]  # fixed: was indexing the function object (TypeError)
+        self.timezone = short_timezone
+        return self.timezone
+
+    # Return long string format
+    def map_usertz_to_usertz_str(self):
+        for tz in SrtUser.get_timezone_list():
+            if self.timezone == tz.split(':')[1]:
+                return tz
+        # No match: fall back to the default entry (removed unreachable 'break')
+        return self.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX]
+
+    # Return offset from UTC -> jobtags.py
+    @property
+    def get_timezone_offset(self):
+        for tz in SrtUser.get_timezone_list():
+            if self.timezone == tz.split(':')[1]:
+                return int(tz.split(':')[0])
+        # No match: fall back to the default entry's offset (removed unreachable 'break')
+        return int(self.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX].split(':')[0])
+
# Minimal and safe User object to pass to web pages (no passwords)
class UserSafe():
diff --git a/lib/users/templates/user_edit.html b/lib/users/templates/user_edit.html
index 26b18ea8..4d57ac97 100755
--- a/lib/users/templates/user_edit.html
+++ b/lib/users/templates/user_edit.html
@@ -1,8 +1,11 @@
<!-- templates/signup.html -->
{% extends 'base.html' %}
-{% block title %}{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}{% endblock %}
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+{% block title %}{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}{% endblock %}
{% block pagecontent %}
<div>
<h2>{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}</h2>
@@ -32,6 +35,16 @@
<dt>Role:</dt>
<dd><input type="text" placeholder="Edit role" name="user_role" size="80" value="{{user_role}}"></dd>
+ <!--Insert tz dropdown here-->
+ <dt>Timezone:</dt>
+ <dd>
+ <select name="timezone" id="select-timezone">
+ {% for tz in timezone_list %}
+ <option value="{{tz}}" {% if user_timezone == tz %}selected{% endif %}>{{tz}}</option>
+ {% endfor %}
+ </select>
+ </dd>
+
<dt>Group:</dt>
<dd>
{% if 'edit_user' == mode %}
@@ -42,6 +55,7 @@
<option value="Contributor" {% if 'Contributor' == group_name %}selected{% endif %}>Contributor</option>
<option value="Creator" {% if 'Creator' == group_name %}selected{% endif %}>Creator</option>
<option value="Admin" {% if 'Admin' == group_name %}selected{% endif %}>Admin</option>
+ <option value="SuperUser" {% if user_super %}selected{% endif %}>SuperUser</option>
</select>
{% endif %}
</dd>
diff --git a/lib/users/urls.py b/lib/users/urls.py
index 4c33cb18..f936d1d0 100755
--- a/lib/users/urls.py
+++ b/lib/users/urls.py
@@ -1,4 +1,4 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url, include
from . import views
urlpatterns = [
@@ -9,6 +9,6 @@ urlpatterns = [
url(r'^edit_user/(?P<user_pk>\d+)$', views.edit_user, name="edit_user"),
url(r'^xhr_user_commit/$', views.xhr_user_commit, name='xhr_user_commit'),
-
+ url(r'^xhr_date_time_test/$', views.xhr_date_time_test, name='xhr_date_time_test'),
]
diff --git a/lib/users/views.py b/lib/users/views.py
index 62163822..c5cce77d 100755
--- a/lib/users/views.py
+++ b/lib/users/views.py
@@ -103,6 +103,9 @@ def edit_user(request,user_pk):
'user_last' : '' if not pk else srtuser.last_name,
'user_email' : '' if not pk else srtuser.email,
'user_role' : '' if not pk else srtuser.role,
+ 'user_super' : False if not pk else srtuser.is_superuser,
+ 'user_timezone' : SrtUser.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX] if not pk else srtuser.map_usertz_to_usertz_str(),
+ 'timezone_list': SrtUser.get_timezone_list(),
'group_name' : 'Reader' if not pk else srtuser.get_groups.split(',')[0],
'validation_errors' : '',
}
@@ -115,12 +118,14 @@ def edit_user(request,user_pk):
else:
return redirect('/')
+ # added user_tz to POST method
mode = request.POST.get('mode', '')
user_name = request.POST.get('user_name', '')
user_first = request.POST.get('user_first', '')
user_last = request.POST.get('user_last', '')
user_email = request.POST.get('user_email', '')
user_role = request.POST.get('user_role', '')
+ user_tz = request.POST.get('timezone', '')
user_group = request.POST.get('user_group', '')
user_pass1 = request.POST.get('user_pass1', '')
user_pass2 = request.POST.get('user_pass2', '')
@@ -151,12 +156,18 @@ def edit_user(request,user_pk):
'user_last' : user_last,
'user_email' : user_email,
'user_role' : user_role,
+ 'user_tz' : user_tz,
'group_name' : user_group,
'validation_errors' : validation_errors[2:],
}
return render(request, 'user_edit.html', context)
# Process the post
+ if 'SuperUser' == user_group:
+ user_group = 'Admin'
+ is_superuser = True
+ else:
+ is_superuser = False
if 'new_admin' == mode:
srtuser = SrtUser(username=user_name)
else:
@@ -168,6 +179,8 @@ def edit_user(request,user_pk):
srtuser.last_name = user_last
srtuser.email = user_email
srtuser.role = user_role
+ srtuser.is_superuser = is_superuser
+ srtuser.timezone = srtuser.map_usertz_str_to_usertz(user_tz)
srtuser.save()
# Update Group
if user_group and (user_group != srtuser.get_groups.split(',')[0]):
@@ -198,14 +211,70 @@ def xhr_user_commit(request):
action = request.POST['action']
history_comment = ''
try:
+ error_message = "ok";
if 'submit-trashuser' == action:
record_id = request.POST['record_id']
user = SrtUser.objects.get(pk=record_id).delete()
+
+ elif 'submit-trashgroup' == action:
+ record_id = request.POST['record_id']
+ group = Group.objects.get(pk=record_id).delete()
+
+ elif 'submit-trashusergroup' == action:
+ group_id = int(request.POST.get('group_id','0'))
+ record_id = request.POST['record_id']
+ group = Group.objects.get(pk=group_id)
+ srtuser = SrtUser.objects.get(pk=record_id)
+ group.user_set.remove(srtuser)
+ ret=group.save()
+
+ elif 'submit-group-users' == action:
+ group_id = int(request.POST.get('group_id','0'))
+ user_id_list = request.POST['user_id_list']
+ group = Group.objects.get(pk=group_id)
+ # Add new users
+ for user_id in user_id_list.split(','):
+ if user_id:
+ srtuser = SrtUser.objects.get(id=int(user_id))
+ group.user_set.add(srtuser)
+ group.save()
+ # Remove old users
+ for srtuser in group.user_set.all():
+ if not str(srtuser.id) in user_id_list:
+ group.user_set.remove(srtuser)
+ group.save()
+
+ elif 'submit-group-create' == action:
+ group_name = request.POST['group_name'].strip()
+ group,created = Group.objects.get_or_create(name=group_name)
+ group.save()
+
+ else:
+ error_message = "ERROR:unknown action '%s'" % request.POST["action"]
+
return_data = {
- "error": "ok",
+ "error": error_message,
}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
except Exception as e:
_log("xhr_user_commit:no(%s)" % e)
return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_date_time_test(request):
+    # AJAX test endpoint for date/time/timezone experiments (wired in users/urls.py)
+    _log("xhr_date_time_test(%s)" % request.POST)
+    if not 'action' in request.POST:
+        return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+    action = request.POST['action']
+    try:
+        error_message = "ok"
+        if 'submit-timezone' == action:
+            timezone = request.POST['timezone']
+        else:
+            # Match xhr_user_commit: report unknown actions instead of silently returning "ok"
+            error_message = "ERROR:unknown action '%s'" % action
+        return HttpResponse(json.dumps({"error": error_message}), content_type = "application/json")
+    except Exception as e:
+        _log("xhr_date_time_test:no(%s)" % e)
+        return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
diff --git a/lib/yp/reports.py b/lib/yp/reports.py
new file mode 100755
index 00000000..565c2dc0
--- /dev/null
+++ b/lib/yp/reports.py
@@ -0,0 +1,381 @@
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2020 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+# Please run flake8 on this file before sending patches
+
+import os
+import re
+import logging
+import json
+from collections import Counter
+from datetime import datetime, date
+import csv
+
+from orm.models import Cve, Vulnerability, Investigation, Defect, Product
+from orm.models import SRTool, PublishSet
+from srtgui.api import execute_process, readCveDetails, writeCveDetails, summaryCveDetails
+
+from srtgui.reports import Report, ReportManager, ProductsReport, ManagementReport, DefectsReport, PublishListReport
+
+
+from django.db.models import Q, F
+from django.db import Error
+from srtgui.templatetags.jobtags import filtered_filesizeformat
+
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
+
+# quick development/debugging support
+from srtgui.api import _log
+
+def _log_args(msg, *args, **kwargs):
+ s = '%s:(' % msg
+ if args:
+ for a in args:
+ s += '%s,' % a
+ s += '),('
+ if kwargs:
+ for key, value in kwargs.items():
+ s += '(%s=%s),' % (key,value)
+ s += ')'
+ _log(s)
+
+###############################################################################
+#
+# YPPublishListReport: Yocto Project Management reports
+#
+
+class YPPublishListReport(PublishListReport):
+ """Report for the Publish Cve Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("REPORT_YPPUBLISHLIST_INIT(%s)" % parent_page, *args, **kwargs)
+ super(YPPublishListReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("REPORT_YPPUBLISHLIST_CONTEXT", *args, **kwargs)
+ context = super(YPPublishListReport, self).get_context_data(*args, **kwargs)
+
+ # Add a custom extension report type
+ context['report_type_list'] = '\
+ <option value="yp_summary">YP Summary Report</option> \
+ '
+
+ context['report_custom_list'] = ''
+ # Add scope
+ context['report_custom_list'] += '\
+ <input type="checkbox" id="new" name="new" checked>&nbsp;New CVEs</input> <br>\
+ <input type="checkbox" id="investigate" name="investigate" checked>&nbsp;Investigate CVEs</input> <br>\
+ <input type="checkbox" id="vulnerable" name="vulnerable" checked>&nbsp;Vulnerable CVEs</input> <br>\
+ <input type="checkbox" id="not-vulnerable" name="not-vulnerable" checked>&nbsp;Not Vulnerable CVEs</input> <br>\
+ <input type="checkbox" id="new-reserved" name="new-reserved" >&nbsp;New-Reserved CVEs</input> <br>\
+ <input type="checkbox" id="historical" name="historical" >&nbsp;Historical CVEs</input> <br>\
+ '
+ # Add extra
+ context['report_custom_list'] += '<br>'
+ context['report_custom_list'] += '\
+ <input type="checkbox" id="truncate" name="truncate" checked>&nbsp;Truncate fields (for simple text reports)</input> <BR>\
+ '
+
+ return context
+
+ def get_product_status_matrix(self,product_list,cve):
+ # Preset the default product status labels
+ status_table = {}
+ product_top_order = 99
+ product_top_defect = []
+ # Default all product status to the CVE's status
+ for product in product_list:
+ status_table[product.key] = SRTool.status_text(SRTool.NOT_VULNERABLE)
+ # Set the specific status for the child investigations
+ for cv in cve.cve_to_vulnerability.all():
+ #status_text = cv.vulnerability.get_status_text
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+# product_key = investigation.product.key
+ release_version_list = []
+ # Gather release versions, find the highest product's respective defect
+ for id in investigation.investigation_to_defect.all():
+ # Find defect(s) for higest ordered product
+ if product_top_order > investigation.product.order:
+ product_top_order = investigation.product.order
+ product_top_defect = []
+ if product_top_order == investigation.product.order:
+ product_top_defect.append(id.defect.name)
+ # Gather the status or release version
+ if id.defect.release_version:
+ release_version_list.append(id.defect.release_version)
+ release_version = '/'.join(release_version_list)
+ # Set investigation status, unless there are release versions
+ status_table[investigation.product.key] = investigation.get_status_text
+ if release_version:
+ status_table[investigation.product.key] = release_version
+ return status_table,product_top_defect
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("REPORT_YPPUBLISHLIST_EXEC", *args, **kwargs)
+ super(YPPublishListReport, self).exec_report(*args, **kwargs)
+
+ request_POST = self.request.POST
+ format = request_POST.get('format', '')
+ report_type = request_POST.get('report_type', '')
+ csv_separator = request_POST.get('csv_separator', 'semi')
+ truncate = ('on' == request_POST.get('truncate', 'off'))
+ status_list = []
+ if ('on' == request_POST.get('new', 'off')): status_list.append(Cve.NEW)
+ if ('on' == request_POST.get('investigate', 'off')): status_list.append(Cve.INVESTIGATE)
+ if ('on' == request_POST.get('vulnerable', 'off')): status_list.append(Cve.VULNERABLE)
+ if ('on' == request_POST.get('not-vulnerable', 'off')): status_list.append(Cve.NOT_VULNERABLE)
+ if ('on' == request_POST.get('new-reserved', 'off')): status_list.append(Cve.NEW_RESERVED)
+ if ('on' == request_POST.get('historical', 'off')): status_list.append(Cve.HISTORICAL)
+
+ # Default to the regular report output if not our custom extension
+ if not report_type in ('yp_summary'):
+ return(super(YPPublishListReport, self).exec_report(*args, **kwargs))
+
+ if 'csv' == format:
+ separator = ';'
+ if csv_separator == 'comma': separator = ','
+ if csv_separator == 'tab': separator = '\t'
+ report_name = '%s/cve-svns-srtool-%s.csv' % (SRT_REPORT_DIR,datetime.today().strftime('%Y_%m_%d'))
+ else:
+ separator = ","
+ report_name = '%s/cve-svns-srtool-%s.txt' % (SRT_REPORT_DIR,datetime.today().strftime('%Y_%m_%d'))
+
+ # Get the desired product list
+ product_list = Product.objects.order_by('-order')
+
+ if 'yp_summary' == report_type:
+ with open(report_name, 'w', newline='') as csvfile:
+ writer = None
+
+ # Assemble the header
+ text_format = '%-18s,%16s,%-8s,%-8s,%-15s,%-15s,%-30s,%-25s,%15s,%15s,%20s,'
+ header = [
+ 'CVE Number',
+ 'Status',
+ 'CVSSv2_Severity',
+ 'CVSSv2_Score',
+ 'CVSSv3_Severity',
+ 'CVSSv3_Score',
+ 'CVE Description',
+ 'YP Comments',
+ 'Created Date',
+ 'Modified Date',
+ 'YP Acknowledged Date',
+ ]
+ # Assemble the product column names
+ for product in product_list:
+ product_title = product.key
+ header.append(product_title)
+ min_len = max(16,len(product_title)+1)
+ str_format = "%s%ds," % ('%',min_len)
+ text_format += str_format
+# # Add Top Defect
+# header.append('Top Defect')
+# text_format += '%s'
+
+ # Print the header
+ if 'csv' == format:
+ writer = csv.writer(csvfile, delimiter=separator, quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ writer.writerow(header)
+ else:
+ writer = csvfile
+ print(text_format % tuple(header), file=csvfile)
+
+ for i,cve in enumerate(Cve.objects.filter(status__in=status_list).order_by('name_sort')):
+ # Compute the product columns
+ status_table,product_top_defect = self.get_product_status_matrix(product_list,cve)
+ # Assemble the row data
+ if cve.description:
+ if truncate:
+ description = cve.description[:26] + '...'
+ else:
+ description = cve.description
+ else:
+ description = ''
+
+ # Use publish date if acknowledge date not available
+ try:
+ acknowledge_date = cve.acknowledge_date
+ if not acknowledge_date:
+ acknowledge_date = datetime.strptime(cve.publishedDate, '%Y-%m-%d')
+ acknowledge_date = acknowledge_date.strftime('%m/%d/%Y')
+ except:
+ acknowledge_date = ''
+ _log("NO ACK:%s,%s" % (cve.acknowledge_date,cve.publishedDate))
+
+ row = [
+ cve.name,
+ cve.get_status_text,
+ cve.cvssV2_severity,
+ cve.cvssV2_baseScore,
+ cve.cvssV3_baseSeverity,
+ cve.cvssV3_baseScore,
+ description,
+ cve.get_public_comments[:20] if truncate else cve.get_public_comments,
+ cve.srt_created.strftime('%Y/%m/%d') if cve.srt_created else '',
+ cve.srt_updated.strftime('%Y/%m/%d') if cve.srt_updated else '',
+ acknowledge_date,
+ ]
+ # Append the product columns
+ for product in product_list:
+ # Show inactive status as normal status
+ row.append(status_table[product.key].replace('(','').replace(')',''))
+# row.append('/'.join(product_top_defect))
+ # Print the row
+ if 'csv' == format:
+ writer.writerow(row)
+ else:
+ print(text_format % tuple(row), file=writer)
+
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+# EXAMPLE: simple custom extension to the Products report
+#
+class YPProductsReport(ProductsReport):
+ """Report for the Products Page"""
+
+ def __init__(self, parent_page, *args, **kwargs):
+ _log_args("YP_REPORT_PRODUCTS_INIT(%s)" % parent_page, *args, **kwargs)
+ super(YPProductsReport, self).__init__(parent_page, *args, **kwargs)
+
+ def get_context_data(self, *args, **kwargs):
+ _log_args("YP_REPORT_PRODUCTS_CONTEXT", *args, **kwargs)
+
+ # Fetch the default report context definition
+ context = super(YPProductsReport, self).get_context_data(*args, **kwargs)
+
+ # Add a custom extension report type
+ context['report_type_list'] += '\
+ <option value="wr_summary">YP Products Table</option> \
+ '
+
+ # Done!
+ return context
+
+ def exec_report(self, *args, **kwargs):
+ _log_args("YP_REPORT_PRODUCTS_EXEC", *args, **kwargs)
+
+ request_POST = self.request.POST
+
+ records = request_POST.get('records', '')
+ format = request_POST.get('format', '')
+ title = request_POST.get('title', '')
+ report_type = request_POST.get('report_type', '')
+ record_list = request_POST.get('record_list', '')
+
+ # Default to the regular report output if not our custom extension
+ if 'wr_summary' != report_type:
+ return(super(YPProductsReport, self).exec_report(*args, **kwargs))
+
+ # CUSTOM: prepend "wr" to the generated file name
+ report_name = '%s/wr_products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
+
+ if 'csv' == format:
+ separator = "\t"
+ else:
+ separator = ","
+
+ if ('wr_summary' == report_type):
+ if 'csv' == format:
+ # CUSTOM: prepend "YP" to the generated header
+ file.write("YP Name\tVersion\tProfile\tCPE\tSRT SPE\tInvestigations\tDefects\n")
+ if 'txt' == format:
+ # CUSTOM: prepend "YP" to the generated title
+ file.write("Report : YP Products Table\n")
+ file.write("\n")
+ # CUSTOM: prepend "YP" to the generated header
+ file.write("YP Name,Version,Profile,CPE,SRT SPE,Investigations,Defects\n")
+
+ for product in Product.objects.all():
+ # CUSTOM: prepend "YP" to the product name
+ file.write("YP %s%s" % (product.name,separator))
+ file.write("%s%s" % (product.version,separator))
+ file.write("%s%s" % (product.profile,separator))
+ file.write("%s%s" % (product.cpe,separator))
+ file.write("%s%s" % (product.defect_tags,separator))
+ file.write("%s%s" % (product.product_tags,separator))
+
+ for i,pi in enumerate(product.product_investigation.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pi.name))
+ file.write("%s" % separator)
+ for i,pd in enumerate(product.product_defect.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pd.name))
+ #file.write("%s" % separator)
+ file.write("\n")
+
+ return report_name,os.path.basename(report_name)
+
+###############################################################################
+#
+# Yocto Projects reports
+#
+# Available 'parent_page' values:
+# cve
+# vulnerability
+# investigation
+# defect
+# cves
+# select-cves
+# vulnerabilities
+# investigations
+# defects
+# products
+# select-publish
+# update-published
+# package-filters
+# cpes_srtool
+#
+
+class YPReportManager():
+ @staticmethod
+ def get_report_class(parent_page, *args, **kwargs):
+ _log("FOO:YPReportManager:'%s'" % parent_page)
+
+ if 'products' == parent_page:
+ # Extend the Products report
+ return YPProductsReport(parent_page, *args, **kwargs)
+
+ elif 'publish-summary' == parent_page:
+ return YPPublishListReport(parent_page, *args, **kwargs)
+
+ else:
+ # Return the default for all other reports
+ return ReportManager.get_report_class(parent_page, *args, **kwargs)
+
+ @staticmethod
+ def get_context_data(parent_page, *args, **kwargs):
+ _log_args("YP_REPORTMANAGER_CONTEXT", *args, **kwargs)
+ reporter = YPReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.get_context_data(*args, **kwargs)
+
+ @staticmethod
+ def exec_report(parent_page, *args, **kwargs):
+ _log_args("YP_REPORTMANAGER_EXEC", *args, **kwargs)
+ reporter = YPReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.exec_report(*args, **kwargs)
diff --git a/lib/yp/templates/landing.html b/lib/yp/templates/landing.html
new file mode 100755
index 00000000..0a488398
--- /dev/null
+++ b/lib/yp/templates/landing.html
@@ -0,0 +1,93 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Welcome to SRTool{% endblock %}
+{% block pagecontent %}
+ <div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Security Response Tool (SRTool)</h1>
+ <p>A web interface to SRTool CVE investigations ({{this_landing}})</p>
+ </div>
+ </div>
+ <div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Table</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_audits' %}">CVE Check Audits</a></td>
+ <td>CVE Check Audits</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cves' %}">CVE's</a></td>
+ <td>Common Vulnerability Enumeration</td>
+ </tr>
+
+<!-- <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'investigations' %}">Investigations</a></td>
+ <td>SRTool Investigations (product level)</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'defects' %}">Defects</a></td>
+ <td>SRTool Defects</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cpes_srtool' %}">Package CPE's</a></td>
+ <td>Affected packages (Common Platform Enumeration)</td>
+ </tr>
+
+-->
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cwes' %}">CWE's</a></td>
+ <td>Common Weakness Enumeration</td>
+ </tr>
+
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url 'products' %}">Products</a></td>
+ <td>SRTool Products</td>
+ </tr>
+
+ {% for ext_url,ext_title,ext_description in landing_extensions_table %}
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url ext_url %}">{{ext_title}}</a></td>
+ <td>{{ext_description}}</td>
+ </tr>
+ {% endfor %}
+
+ {% if request.user.is_creator %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage' %}">Management</a></td>
+ <td>Triage CVE's, Create Vulnerabilities, Manage Users</td>
+ </tr>
+ {% endif %}
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-6">
+ <div align="center"><a class="btn btn-primary btn-lg" href="{% url 'guided_tour' %}">Click here to take a Guided Tour!</a></div>
+ <p />
+ <p />
+ <img alt="CVE preview" class="img-thumbnail" src="{% static 'img/cve_splash.png' %}"/>
+ </div>
+
+ </div>
+ </div>
+
+{% endblock %}
diff --git a/lib/yp/templates/management.html b/lib/yp/templates/management.html
new file mode 100755
index 00000000..013a9d87
--- /dev/null
+++ b/lib/yp/templates/management.html
@@ -0,0 +1,199 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Manage Resources {% endblock %}
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Management</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Management</h1>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_import_manager' %}">Import Manager</a></td>
+ <td>Manage the CVE Check report import modes</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
+ <td>Triage the CVE's ({{cve_new}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
+ <td>Triage the pending notifications ({{notification_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
+ <td>Report on the over all response system status</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
+ <td>Process items to be published from the SRTool</td>
+ </tr>
+
+ {% if request.user.is_admin %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
+ <td>Add, edit, and remove users</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
+ <td>Manage source list, perform manual pulls</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
+ <td>Maintenance utilities ({{errorlog_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_clear_jobs' %}">Clear Jobs</a></td>
+ <td>Clear the Jobs table of all entries</td>
+ </tr>
+
+ {% endif %}
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>CVE's: Total Count =</dt>
+ <dd>
+ <a href="{% url 'cves' %}"> {{cve_total}} </a>
+ </dd>
+ <dt>Pending triaged =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
+ </dd>
+ <dt>Investigate =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
+ </dd>
+ <dt>Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
+ </dd>
+ <dt>Not Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
+ </dd>
+ <dt>Vulnerabilities: Total Count =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{vulnerability_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
+ </dd>
+
+ <dt>Investigations: Total Count =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}" %}> {{investigation_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{investigation_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
+ </dd>
+
+ <dt>Defects: Total Count =</dt>
+ <dd>
+ <a href="{% url 'defects' %}" %}> {{defect_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
+ </dd>
+ <dt>InProgress =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
+ </dd>
+ <dt>P1 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
+ </dd>
+ <dt>P2 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
+ </dd>
+
+ <dt>Packages: Affected=</dt>
+ <dd>
+ <a href="{% url 'cpes_srtool' %}" %}> {{package_total}} </a>
+ </dd>
+
+ </dl>
+ </div>
+ </div>
+
+ </div>
+</div>
+
+{% endblock %}
diff --git a/lib/yp/templates/yp_hello.html b/lib/yp/templates/yp_hello.html
index 15ab9a7b..95ee8e43 100755
--- a/lib/yp/templates/yp_hello.html
+++ b/lib/yp/templates/yp_hello.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Yocto Project {% endblock %}
diff --git a/lib/yp/urls.py b/lib/yp/urls.py
index 586b87b6..d60360b7 100755
--- a/lib/yp/urls.py
+++ b/lib/yp/urls.py
@@ -1,8 +1,14 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url,include
+from django.views.generic import RedirectView
from . import views
urlpatterns = [
- url(r'^hello/$', views.yp_hello, name='yp_hello'),
+ # landing page
+ url(r'^landing/$', views.landing, name='landing'),
- url(r'^$', views.yp_hello, name='yp_default'),
+ url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
+ url(r'^manage_report/$', views.manage_report, name='manage_report'),
+
+ # default redirection
+ url(r'^$', RedirectView.as_view(url='landing', permanent=True)),
]
diff --git a/lib/yp/views.py b/lib/yp/views.py
index 2d6d0043..6f722479 100755
--- a/lib/yp/views.py
+++ b/lib/yp/views.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2020 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -19,17 +19,87 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import os
+
#from django.urls import reverse_lazy
#from django.views import generic
-#from django.http import HttpResponse, HttpResponseNotFound, JsonResponse, HttpResponseRedirect
+from django.http import HttpResponse, HttpResponseNotFound, JsonResponse, HttpResponseRedirect
from django.shortcuts import render, redirect
+from users.models import SrtUser, UserSafe
+from orm.models import SrtSetting
+from srtgui.views import MimeTypeFinder
+from yp.reports import YPReportManager
+
#from orm.models import SrtSetting
# quick development/debugging support
from srtgui.api import _log
-def yp_hello(request):
- context = {}
- _log("Note:yp_hello")
- return render(request, 'yp_hello.html', context)
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
+
+# determine in which mode we are running in, and redirect appropriately
+def landing(request):
+
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "yp"):
+ return redirect(f"/{SRT_MAIN_APP}/landing/")
+
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'yp',
+ }
+
+ return render(request, 'landing.html', context)
+
+def report(request,page_name):
+ if request.method == "GET":
+ context = YPReportManager.get_context_data(page_name,request=request)
+ record_list = request.GET.get('record_list', '')
+ _log("EXPORT_GET!:%s|%s|" % (request,record_list))
+ context['record_list'] = record_list
+ return render(request, 'report.html', context)
+ elif request.method == "POST":
+ _log("EXPORT_POST!:%s|%s" % (request,request.FILES))
+ parent_page = request.POST.get('parent_page', '')
+ file_name,response_file_name = YPReportManager.exec_report(parent_page,request=request)
+
+ if file_name.startswith("Error"):
+ # Refresh the page with the error message
+ context = YPReportManager.get_context_data(page_name,request=request)
+ context['error_message'] = file_name
+ record_list = request.GET.get('record_list', '')
+ _log("EXPORT_GET_WITH_ERROR!:%s|%s|" % (request,record_list))
+ context['record_list'] = record_list
+ return render(request, 'report.html', context)
+ elif file_name and response_file_name:
+ fsock = open(file_name, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_name)
+
+ response = HttpResponse(fsock, content_type = content_type)
+
+ disposition = "attachment; filename=" + response_file_name
+ response["Content-Disposition"] = disposition
+
+ _log("EXPORT_POST_Q{%s|" % (response))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", {})
+
+ return redirect('/')
+ raise Exception("Invalid HTTP method for this page")
+
+def manage_report(request):
+ # does this user have permission to see this record?
+ if not UserSafe.is_creator(request.user):
+ return redirect('/')
+
+ return redirect(report,'management')