aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xbin/acme/srtool_acme.py6
-rwxr-xr-xbin/acme/srtool_defect.py6
-rwxr-xr-xbin/acme/srtool_env.sh1
-rwxr-xr-xbin/acme/srtool_jira_acme.py20
-rwxr-xr-xbin/common/datasource.json56
-rwxr-xr-xbin/common/srtool_backup.py122
-rwxr-xr-xbin/common/srtool_common.py395
-rwxr-xr-xbin/common/srtool_email.py2
-rw-r--r--bin/common/srtool_jira_template.py20
-rwxr-xr-xbin/common/srtool_job.py791
-rwxr-xr-xbin/common/srtool_progress.py75
-rwxr-xr-xbin/common/srtool_sanity_test.py31
-rwxr-xr-xbin/common/srtool_sql.py492
-rwxr-xr-xbin/common/srtool_test.py204
-rwxr-xr-xbin/common/srtool_update.py363
-rwxr-xr-xbin/common/srtool_utils.py550
-rwxr-xr-xbin/cve_checker/datasource.json25
-rwxr-xr-xbin/cve_checker/patcher.json41
-rwxr-xr-xbin/cve_checker/srtool_cvechecker.py950
-rwxr-xr-xbin/cve_checker/srtool_cvechecker_util.py465
-rwxr-xr-xbin/cve_checker/srtool_env.sh4
-rwxr-xr-xbin/dev_tools/db_migration_config_sample.yml42
-rwxr-xr-xbin/dev_tools/db_migrations.py339
-rwxr-xr-xbin/dev_tools/dump_jason.py29
-rwxr-xr-xbin/dev_tools/dump_jason.sh29
-rwxr-xr-xbin/dev_tools/history.py6
-rwxr-xr-xbin/dev_tools/lssrt.sh3
-rwxr-xr-xbin/dev_tools/master_app.sh13
-rwxr-xr-xbin/dev_tools/migrate.sh5
-rwxr-xr-xbin/dev_tools/nohup_start.sh12
-rwxr-xr-xbin/dev_tools/prepare_environment.sh64
-rwxr-xr-xbin/dev_tools/quick_find.sh23
-rwxr-xr-xbin/dev_tools/restart.sh4
-rwxr-xr-xbin/dev_tools/srt_env.sh79
-rwxr-xr-xbin/dev_tools/start.sh4
-rwxr-xr-xbin/dev_tools/stop.sh3
-rwxr-xr-xbin/dev_tools/tail.sh12
-rwxr-xr-xbin/dev_tools/update_status.sh7
-rwxr-xr-xbin/mitre/datasource_2010.json5
-rwxr-xr-xbin/mitre/datasource_2011.json5
-rwxr-xr-xbin/mitre/datasource_2012.json5
-rwxr-xr-xbin/mitre/datasource_2013.json5
-rwxr-xr-xbin/mitre/datasource_2014.json5
-rwxr-xr-xbin/mitre/datasource_2015.json5
-rwxr-xr-xbin/mitre/datasource_2016.json5
-rwxr-xr-xbin/mitre/datasource_2017.json5
-rwxr-xr-xbin/mitre/datasource_2018.json5
-rwxr-xr-xbin/mitre/datasource_2019.json5
-rwxr-xr-xbin/mitre/datasource_2020.json5
-rwxr-xr-xbin/mitre/datasource_2021.json19
-rwxr-xr-xbin/mitre/datasource_2022.json19
-rwxr-xr-xbin/mitre/datasource_2023.json19
-rwxr-xr-xbin/mitre/srtool_mitre.py100
-rw-r--r--bin/nist/datasource.json6
-rwxr-xr-xbin/nist/datasource_2002.json4
-rwxr-xr-xbin/nist/datasource_2003.json4
-rwxr-xr-xbin/nist/datasource_2004.json4
-rwxr-xr-xbin/nist/datasource_2005.json4
-rwxr-xr-xbin/nist/datasource_2006.json4
-rwxr-xr-xbin/nist/datasource_2007.json4
-rwxr-xr-xbin/nist/datasource_2008.json4
-rwxr-xr-xbin/nist/datasource_2009.json4
-rwxr-xr-xbin/nist/datasource_2010.json4
-rwxr-xr-xbin/nist/datasource_2011.json4
-rwxr-xr-xbin/nist/datasource_2012.json4
-rwxr-xr-xbin/nist/datasource_2013.json4
-rwxr-xr-xbin/nist/datasource_2014.json4
-rwxr-xr-xbin/nist/datasource_2015.json4
-rwxr-xr-xbin/nist/datasource_2016.json4
-rwxr-xr-xbin/nist/datasource_2017.json4
-rwxr-xr-xbin/nist/datasource_2018.json4
-rwxr-xr-xbin/nist/datasource_2019.json4
-rwxr-xr-xbin/nist/datasource_2020.json4
-rwxr-xr-xbin/nist/datasource_2021.json18
-rwxr-xr-xbin/nist/datasource_2022.json18
-rwxr-xr-xbin/nist/datasource_2023.json18
-rwxr-xr-xbin/nist/srtool_nist.py242
-rwxr-xr-xbin/redhat/srtool_redhat.py30
-rwxr-xr-xbin/srt95
-rw-r--r--bin/srt_dbconfig.yml41
-rwxr-xr-xbin/srtool-requirements.txt10
-rwxr-xr-xbin/ubuntu_trivy/datasource.json19
-rwxr-xr-xbin/ubuntu_trivy/license.txt4
-rwxr-xr-xbin/ubuntu_trivy/srtool_ubuntu_trivy.py295
-rwxr-xr-xbin/wr_trivy/datasource.json19
-rwxr-xr-xbin/wr_trivy/license.txt4
-rwxr-xr-xbin/wr_trivy/srtool_wr_trivy.py264
-rwxr-xr-xbin/yp/srtool_cve_checker.py277
-rwxr-xr-xbin/yp/srtool_defect.py6
-rwxr-xr-xbin/yp/srtool_publish.py1052
-rwxr-xr-xbin/yp/srtool_yp.py6
-rwxr-xr-xbin/yp/yocto-project-products.json48
-rwxr-xr-xdata/recipe_names_from_layer_index.txt3844
-rwxr-xr-xlib/acme/reports.py2
-rwxr-xr-xlib/acme/tables.py2
-rwxr-xr-xlib/acme/templates/acme_hello.html2
-rwxr-xr-xlib/acme/templates/acme_product.html2
-rwxr-xr-xlib/acme/templates/base.html3
-rwxr-xr-xlib/acme/urls.py2
-rwxr-xr-xlib/cve_checker/__init__.py0
-rwxr-xr-xlib/cve_checker/admin.py3
-rwxr-xr-xlib/cve_checker/apps.py5
-rw-r--r--lib/cve_checker/migrations/0001_initial.py71
-rw-r--r--lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py19
-rw-r--r--lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py19
-rw-r--r--lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py28
-rw-r--r--lib/cve_checker/migrations/0005_ckuploadmanager.py27
-rw-r--r--lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py18
-rw-r--r--lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py23
-rwxr-xr-xlib/cve_checker/migrations/__init__.py0
-rwxr-xr-xlib/cve_checker/models.py165
-rwxr-xr-xlib/cve_checker/reports.py511
-rwxr-xr-xlib/cve_checker/tables.py695
-rwxr-xr-xlib/cve_checker/templates/ck-audit-toastertable.html223
-rwxr-xr-xlib/cve_checker/templates/ck-auditcve-toastertable.html431
-rwxr-xr-xlib/cve_checker/templates/ck-audits-toastertable.html425
-rwxr-xr-xlib/cve_checker/templates/ck-import_manager-toastertable.html266
-rwxr-xr-xlib/cve_checker/templates/ck-issue-toastertable.html347
-rwxr-xr-xlib/cve_checker/templates/ck-product-toastertable.html309
-rwxr-xr-xlib/cve_checker/tests.py3
-rwxr-xr-xlib/cve_checker/urls.py47
-rwxr-xr-xlib/cve_checker/views.py325
-rw-r--r--lib/orm/management/commands/checksettings.py14
-rw-r--r--lib/orm/management/commands/lsupdates.py23
-rwxr-xr-xlib/orm/migrations/0007_components_errorlog.py39
-rw-r--r--lib/orm/migrations/0008_cveaccess.py24
-rw-r--r--lib/orm/migrations/0009_recipetable.py20
-rw-r--r--lib/orm/migrations/0010_job.py35
-rw-r--r--lib/orm/migrations/0011_extend_field_sizes.py33
-rwxr-xr-xlib/orm/migrations/0012_job_user.py21
-rwxr-xr-xlib/orm/migrations/0013_update_preinit.py18
-rw-r--r--lib/orm/migrations/0014_alter_packagetocve_applicable.py18
-rw-r--r--lib/orm/models.py295
-rw-r--r--lib/srtgui/api.py153
-rw-r--r--lib/srtgui/reports.py407
-rw-r--r--lib/srtgui/static/js/libtoaster.js133
-rwxr-xr-xlib/srtgui/static/js/mrjsection.js131
-rw-r--r--lib/srtgui/static/js/table.js107
-rwxr-xr-xlib/srtgui/static/js/typeahead_affected_components.js9
-rw-r--r--lib/srtgui/tables.py410
-rw-r--r--lib/srtgui/templates/base.html21
-rw-r--r--lib/srtgui/templates/basetable_top.html2
-rw-r--r--lib/srtgui/templates/create_vulnerability.html2
-rwxr-xr-xlib/srtgui/templates/cve-edit-local.html2
-rwxr-xr-xlib/srtgui/templates/cve-nist-local.html5
-rwxr-xr-xlib/srtgui/templates/cve-nist.html8
-rw-r--r--lib/srtgui/templates/cve.html188
-rwxr-xr-xlib/srtgui/templates/cve.html_orig2
-rw-r--r--lib/srtgui/templates/cves-select-toastertable.html35
-rw-r--r--lib/srtgui/templates/cves-toastertable.html2
-rwxr-xr-xlib/srtgui/templates/date-time-test.html88
-rw-r--r--lib/srtgui/templates/defect.html2
-rw-r--r--lib/srtgui/templates/detail_sorted_header.html2
-rwxr-xr-xlib/srtgui/templates/email_admin.html70
-rwxr-xr-xlib/srtgui/templates/email_success.html49
-rwxr-xr-xlib/srtgui/templates/errorlog-toastertable.html142
-rw-r--r--lib/srtgui/templates/export.html2
-rw-r--r--lib/srtgui/templates/filtersnippet.html2
-rw-r--r--lib/srtgui/templates/generic-toastertable-page.html2
-rw-r--r--lib/srtgui/templates/guided_tour.html2
-rw-r--r--lib/srtgui/templates/investigation.html53
-rwxr-xr-xlib/srtgui/templates/joblog.html39
-rw-r--r--lib/srtgui/templates/js-unit-tests.html2
-rw-r--r--lib/srtgui/templates/landing.html11
-rw-r--r--lib/srtgui/templates/landing_not_managed.html2
-rw-r--r--lib/srtgui/templates/login.html2
-rwxr-xr-xlib/srtgui/templates/maintenance.html280
-rwxr-xr-xlib/srtgui/templates/manage-jobs-toastertable.html126
-rw-r--r--lib/srtgui/templates/management.html334
-rwxr-xr-xlib/srtgui/templates/mrj_section.html194
-rw-r--r--lib/srtgui/templates/product.html2
-rw-r--r--lib/srtgui/templates/publish.html2
-rw-r--r--lib/srtgui/templates/publish_diff_snapshot.html42
-rw-r--r--lib/srtgui/templates/report.html4
-rw-r--r--lib/srtgui/templates/snippets/gitrev_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/investigations_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/pkg_dependencies_popover.html2
-rw-r--r--lib/srtgui/templates/snippets/pkg_revdependencies_popover.html2
-rw-r--r--lib/srtgui/templates/sources-toastertable.html81
-rw-r--r--lib/srtgui/templates/sources.html2
-rwxr-xr-xlib/srtgui/templates/srtool_metadata_include.html27
-rw-r--r--lib/srtgui/templates/tablesort.html2
-rw-r--r--lib/srtgui/templates/tbd.html2
-rw-r--r--lib/srtgui/templates/toastertable-simple.html2
-rw-r--r--lib/srtgui/templates/toastertable.html29
-rw-r--r--lib/srtgui/templates/triage_cves.html2
-rw-r--r--lib/srtgui/templates/unavailable_artifact.html2
-rw-r--r--lib/srtgui/templates/users.html203
-rw-r--r--lib/srtgui/templates/vulnerability.html134
-rwxr-xr-x[-rw-r--r--]lib/srtgui/templatetags/jobtags.py (renamed from lib/srtgui/templatetags/projecttags.py)11
-rwxr-xr-xlib/srtgui/templatetags/multi_tags.py22
-rw-r--r--lib/srtgui/templatetags/project_url_tag.py34
-rw-r--r--lib/srtgui/typeaheads.py26
-rw-r--r--lib/srtgui/urls.py43
-rw-r--r--lib/srtgui/views.py864
-rw-r--r--lib/srtgui/widgets.py297
-rw-r--r--lib/srtmain/management/commands/checksocket.py4
-rw-r--r--lib/srtmain/settings.py48
-rw-r--r--lib/srtmain/urls.py32
-rw-r--r--lib/srtmain/wsgi.py17
-rw-r--r--lib/users/migrations/0002_last_name.py18
-rw-r--r--lib/users/migrations/0003_srtuser_timezone.py18
-rwxr-xr-xlib/users/migrations/0004_timezone_default.py18
-rw-r--r--lib/users/migrations/0005_alter_srtuser_first_name.py18
-rwxr-xr-xlib/users/models.py76
-rwxr-xr-xlib/users/templates/user_edit.html16
-rwxr-xr-xlib/users/urls.py4
-rwxr-xr-xlib/users/views.py71
-rwxr-xr-xlib/yp/reports.py4
-rwxr-xr-xlib/yp/templates/landing.html93
-rwxr-xr-xlib/yp/templates/management.html199
-rwxr-xr-xlib/yp/templates/yp_hello.html2
-rwxr-xr-xlib/yp/urls.py10
-rwxr-xr-xlib/yp/views.py30
214 files changed, 20549 insertions, 1401 deletions
diff --git a/bin/acme/srtool_acme.py b/bin/acme/srtool_acme.py
index f815ae17..7f4aeda0 100755
--- a/bin/acme/srtool_acme.py
+++ b/bin/acme/srtool_acme.py
@@ -33,7 +33,7 @@
import os
import sys
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
# load the srt.sqlite schema indexes
@@ -85,7 +85,7 @@ def init_products(source_file):
with open(source_doc) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
Product_Items = dct['Product_Items']
@@ -103,7 +103,7 @@ def init_products(source_file):
product = cur.execute(sql).fetchone()
if product is None:
# NOTE: 'order' is a reserved SQL keyword, so we have to quote it
- sql = ''' INSERT into orm_product ("order", key, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
+ sql = ''' INSERT INTO orm_product (`order`, `key`, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
cur.execute(sql, (order, key, name, version, profile, cpe, defect_tags, product_tags))
else:
sql = ''' UPDATE orm_product
diff --git a/bin/acme/srtool_defect.py b/bin/acme/srtool_defect.py
index 0e189a3a..e5ac0a7a 100755
--- a/bin/acme/srtool_defect.py
+++ b/bin/acme/srtool_defect.py
@@ -26,7 +26,7 @@
import os
import sys
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
# load the srt.sqlite schema indexes
@@ -133,7 +133,7 @@ class Defect:
#
def new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -147,7 +147,7 @@ def new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "DEFECT-%s-%05d" % (product_prefix,index)
return defect_name
diff --git a/bin/acme/srtool_env.sh b/bin/acme/srtool_env.sh
index 4eac83b0..e72ccb5a 100755
--- a/bin/acme/srtool_env.sh
+++ b/bin/acme/srtool_env.sh
@@ -1,4 +1,5 @@
# Main application shell settings
export SRT_MAIN_APP="acme"
+export SRT_MAIN_URL="acme"
diff --git a/bin/acme/srtool_jira_acme.py b/bin/acme/srtool_jira_acme.py
index f11af1df..313a5f1e 100755
--- a/bin/acme/srtool_jira_acme.py
+++ b/bin/acme/srtool_jira_acme.py
@@ -53,7 +53,7 @@ import os
import sys
import re
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
from datetime import datetime, date
@@ -196,7 +196,7 @@ def do_update_jira():
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
today = datetime.today()
@@ -252,7 +252,7 @@ def do_update_jira():
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#############################################################################3
###
@@ -468,7 +468,7 @@ def update_project_issues(project, issues, conn, log):
except:
cve_name_sort = cve.name
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
c.execute(sql, (cve_name, cve_name_sort, d.priority, cve_status, '', '', '', '', '', 1, 0, '', 'Created from defect %s' % d.name, '', '', 0, '', '', '', '', '', '', datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
@@ -562,7 +562,7 @@ def jira_update_list(jira_list):
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
products = c.execute('''SELECT * FROM orm_product''').fetchall()
@@ -629,7 +629,7 @@ def jira_add_to_defect_db(jira_name):
#try connecting to jira
try:
jira = JIRA(JIRA_PRODUCTION_LINK, auth=(srt_user, srt_passwd))
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
except Exception as e:
print("xhr_investigation_commit:CONNECTION TO JIRA FAILED:(%s)\n" % e, file=sys.stderr)
@@ -679,7 +679,7 @@ def jira_add_to_defect_db(jira_name):
c.execute(sql, (d.name, d.summary, d.url, d.priority, d.status, d.resolution, str(d.publish), d.release_version, d.product_id, d.date_created, d.date_updated))
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
except Exception as e:
print("ERROR:could not find/import defect(%s)" % e, file=sys.stderr)
return 1
@@ -701,7 +701,7 @@ JIRA_IS_TEST = True
JIRA_IS_SIMULATE = True
def simulate_new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -715,7 +715,7 @@ def simulate_new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "%s-%05d" % (product_prefix,index)
return defect_name
@@ -746,7 +746,7 @@ def jira_new_defect(product_defect_tags,summary,cve_list,description,reason,prio
return 1
#srt_error_log("Jira connection made")
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# append the jira link to description
diff --git a/bin/common/datasource.json b/bin/common/datasource.json
index 81d5c289..34ca5404 100755
--- a/bin/common/datasource.json
+++ b/bin/common/datasource.json
@@ -55,6 +55,19 @@
"_comment_" : "Update on Saturdays at 2:00 am",
"update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
},
+ {
+ "key" : "0001-common-recipes",
+ "data" : "common_recipes",
+ "source" : "common",
+ "name" : "Common Recipes",
+ "description" : "Common recipe names for typeahead",
+ "cve_filter" : "",
+ "init" : "bin/common/srtool_common.py --import-recipe-names",
+ "update" : "bin/common/srtool_common.py --import-recipe-names",
+ "lookup" : "",
+ "update_frequency" : "6",
+ "update_time" : "{}"
+ },
{
"key" : "0900-common-local",
@@ -97,6 +110,20 @@
"_comment_" : "Update daily at 7:00 am",
"update_time" : "{\"hour\":\"7\"}"
},
+ {
+ "key" : "0912-common-log-daily",
+ "data" : "backup_log_daily",
+ "source" : "common",
+ "name" : "Daily Logs Backup",
+ "description" : "Daily logs backup",
+ "cve_filter" : "",
+ "init" : "",
+ "update" : "bin/common/srtool_backup.py --backup-logs",
+ "lookup" : "",
+ "update_frequency" : "2",
+ "_comment_" : "Update daily at 11:00 pm",
+ "update_time" : "{\"hour\":\"23\"}"
+ },
{
"_comment_" : "Only score 100 at a time to prevent run-away database overloading",
@@ -105,14 +132,37 @@
"source" : "common",
"name" : "Score",
"description" : "Score CVEs",
+ "attributes" : "DISABLE ",
"cve_filter" : "",
- "init" : "bin/common/srtool_common.py --score-new-cves NEW --count=100",
- "update" : "bin/common/srtool_common.py --score-new-cves NEW --count=100",
+ "init" : "bin/common/srtool_common.py --score-new-cves NEW --count=100 --progress",
+ "update" : "bin/common/srtool_common.py --score-new-cves NEW --count=100 --progress",
"lookup" : "",
- "update_frequency" : "0",
+ "update_frequency" : "5",
"_comment_" : "Update every 10 minutes",
"update_time" : "{\"minutes\":\"10\"}"
+ },
+ {
+ "_comment1_" : "Test srtool_update.py execution",
+ "key" : "0930-updater-test",
+ "data" : "unit_test",
+ "source" : "common",
+ "name" : "updater_test",
+ "description" : "<Updater test>",
+ "_comment2_" : "By default, disable this datasource",
+ "attributes" : "DISABLE ",
+ "cve_filter" : "",
+ "init" : "",
+ "_comment3_" : "Use '!' prefix for built-ins to stop attempted CWD path insertion",
+ "update" : "!date > foo.txt",
+ "lookup" : "",
+ "_comment4_" : "Update every two minutes MINUTELY(=0)",
+ "update_frequency" : "5",
+ "update_time" : "{\"minutes\":\"2\"}"
}
+
+
+
+
],
"permissions" : [
diff --git a/bin/common/srtool_backup.py b/bin/common/srtool_backup.py
index 1b93637c..f065e2f2 100755
--- a/bin/common/srtool_backup.py
+++ b/bin/common/srtool_backup.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018-2019 Wind River Systems
+# Copyright (C) 2018-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -27,7 +27,6 @@
import os
import sys
import argparse
-import sqlite3
import json
from datetime import datetime, date
@@ -35,13 +34,17 @@ from datetime import datetime, date
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
+force_update = False
srtDbName = 'srt.sqlite'
BACKUP_DIR = 'backups'
BACKUP_PREFIX = 'backup_'
+BACKUP_LOG_DIR = 'update_logs'
+BACKUP_LOG_PREFIX = 'backup_log_'
#################################
# Common routines
@@ -118,6 +121,105 @@ def backup_db(is_daily):
# Set stamp file
backup_stamp(backup_dir)
+ # Additional remote backup directory copy
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
+ setting = SQL_EXECUTE(cur, sql, ('SRT_REMOTE_BACKUP_PATH',)).fetchone()
+ print("* Check for remote backup (%s) " % str(setting))
+ cur.close()
+ conn.close()
+ if setting and setting[ORM.SRTSETTING_VALUE].strip():
+ remote_backup_dir = setting[ORM.SRTSETTING_VALUE]
+ if is_daily:
+ remote_backup_dir = os.path.join(script_pathname, "%s/%s%s" % (remote_backup_dir,BACKUP_PREFIX,weekday))
+ else:
+ remote_backup_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (remote_backup_dir,BACKUP_PREFIX,year,weeknum))
+
+ print("* Safety copy to remote location '%s'" % remote_backup_dir)
+ try:
+ os.makedirs(remote_backup_dir)
+ except:
+ # If exists, clean it out
+ os.system("rm -rf %s/*" % (remote_backup_dir))
+
+ # Is path a failure?
+ if not os.path.isdir(remote_backup_dir):
+ print("ERROR: no such remote backup path '%s'" % remote_backup_dir)
+ return(-1)
+
+ # Copy the backup directory to the remote location
+ cmd = 'cp -r -p %s %s' % (backup_dir,remote_backup_dir)
+ print(cmd)
+ os.system(cmd)
+
+ os.system('bash -c "echo \"BACKUP:`date`:%s\" >> backup_db.log"' % cmd)
+
+
+
+#######################################################################
+# backup_logs:
+#
+# Back up the logs to a daily wheel, and reset the logs
+# to empty for the next day.
+# If end of week, concatenate the respective daily logs into a
+# week-of-year log
+#
+
+def backup_logs():
+ today = datetime.today()
+ date_str = today.strftime("%Y/%m/%d")
+ weeknum = today.strftime("%W") # 00, 01, ... , 53
+ weekday = today.strftime("%A") # Sunday, Monday, ... , Saturday
+ weekday_num = today.strftime("%w") # 0, 1, ..., 6
+ year = today.strftime("%Y")
+ log_name_list = ('srt_web.log','srt_update.log','srt_dbg.log','update_logs/master_log.txt')
+
+ # Perform the daily back up
+ backup_day_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,weekday))
+ # Make sure directory exists
+ try:
+ os.makedirs(backup_day_dir)
+ except:
+ # If exists, clean it out
+ os.system("rm -rf %s/*" % (backup_day_dir))
+ pass
+ os.makedirs(os.path.join(backup_day_dir,'data'))
+
+ print("*** Backup dir[%s]='%s' ***" % (weekday_num,backup_day_dir))
+ print("* Copy logs")
+ for logname in log_name_list:
+ logname_base = os.path.basename(logname)
+ # Backup log file
+ cmd = 'echo "=== Backup: %s ===" > %s' % (date_str,os.path.join(backup_day_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ cmd = 'cat %s | grep -v "/srtgui/mostrecentjobs?format=json" >> %s' % (os.path.join(script_pathname,logname),os.path.join(backup_day_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ # Reset log file to empty
+ cmd = 'echo "" > %s' % (os.path.join(script_pathname,logname))
+ print(cmd)
+ os.system(cmd)
+
+ # If week's end, perform the weekly backup
+ if (6 == weekday) or force_update:
+ backup_week_dir = os.path.join(script_pathname, "%s/%s%s_%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,year,weeknum))
+ if not os.path.isdir(backup_week_dir):
+ os.makedirs(backup_week_dir)
+ day_names = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
+ concat = '>'
+ for weekday_num,weekday in enumerate(day_names):
+ backup_day_dir = os.path.join(script_pathname, "%s/%s%s" % (BACKUP_LOG_DIR,BACKUP_LOG_PREFIX,weekday))
+ if os.path.isdir(backup_day_dir):
+ # Concatenate the respective logs
+ for logname in log_name_list:
+ logname_base = os.path.basename(logname)
+ cmd = 'cat %s %s %s' % (os.path.join(backup_day_dir,logname_base),concat,os.path.join(backup_week_dir,logname_base))
+ print(cmd)
+ os.system(cmd)
+ concat = '>>'
+
#######################################################################
# list
#
@@ -176,12 +278,15 @@ def main(argv):
global verbose
global cmd_skip
global cmd_count
+ global force_update
# setup
parser = argparse.ArgumentParser(description='srtool_backup.py: backup the SRTool database')
parser.add_argument('--backup-db', '-b', action='store_const', const='backup', dest='command', help='Backup the database, save to year_weeknum dir')
parser.add_argument('--backup-db-daily', '-d', action='store_const', const='backup-daily', dest='command', help='Backup the database, save to weekday dir')
+ parser.add_argument('--backup-logs', '-B', action='store_const', const='backup_logs', dest='command', help='Backup the logs, save to year_weeknum and weekday dir')
+
parser.add_argument('--init-stamps', '-I', action='store_const', const='init-stamps', dest='command', help='Initialize the backup directory timestamps')
parser.add_argument('--init-dir-stamp', '-D', dest='init_dir_stamp', help='Initialize a specific backup directory timestamp')
@@ -199,6 +304,7 @@ def main(argv):
master_log = open(os.path.join(script_pathname, "update_logs/master_log.txt"), "a")
verbose = args.verbose
+ force_update = args.force
cmd_skip = 0
if None != args.skip:
cmd_skip = int(args.skip)
@@ -214,14 +320,22 @@ def main(argv):
except Exception as e:
print ("DATABASE BACKUP FAILED ... %s" % e)
master_log.write("SRTOOL:%s:DATABASE BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
+
+ elif 'backup_logs' == args.command:
+ try:
+ backup_logs()
+ except Exception as e:
+ print ("Log BACKUP FAILED ... %s" % e)
+ master_log.write("SRTOOL:%s:LOGS BACKUP:\t\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), e))
elif 'list' == args.command:
list()
elif 'list-db' == args.command:
list(True)
elif 'init-stamps' == args.command:
init_stamps()
- elif args.command.init_dir_stamp:
- backup_stamp(args.command.init_dir_stamp)
+ elif args.init_dir_stamp:
+ backup_stamp(args.init_dir_stamp)
+
else:
print("Command not found")
master_log.close()
diff --git a/bin/common/srtool_common.py b/bin/common/srtool_common.py
index bd2f7f28..b9765cfa 100755
--- a/bin/common/srtool_common.py
+++ b/bin/common/srtool_common.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -32,7 +32,6 @@ import re
import csv
import json
import argparse
-import sqlite3
import subprocess
from time import sleep
from datetime import datetime
@@ -42,8 +41,12 @@ from datetime import datetime
# it may not exist on the first pass
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
+from common.srtool_progress import *
+from common.srtool_sql import *
+is_orm = False
try:
from common.srt_schema import ORM
+ is_orm = True
except:
# Do a pass so that '--generate-schema-header' can fix it
print("Warning: srt_schema not yet created or bad format")
@@ -54,6 +57,8 @@ verbose = False
cmd_skip = 0
cmd_count = 0
cmd_test = False
+debug_sql = False
+is_progress = False
srtDbName = 'srt.sqlite'
packageKeywordsFile = 'data/package_keywords.csv'
@@ -95,6 +100,27 @@ def get_tag_key(tag,key,default=None):
return default
#################################
+# Global error log registration
+#
+
+def log_error(description,severity=99):
+ # Bootstrap protection if ORM not yet generated
+ if not is_orm:
+ return
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ if (severity < int(ORM.ERRORLOG_INFO)) or (severity > int(ORM.ERRORLOG_ERROR)):
+ severity = int(ORM.ERRORLOG_ERROR)
+ sql = ''' INSERT INTO orm_errorlog (severity,description,srt_created) VALUES (?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (severity,description,datetime.now()))
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
# Load the package keyword source into the database
#
@@ -110,8 +136,8 @@ def init_package_keywords(filename):
print("ERROR: DB NOT FOUND '%s'" % filename)
return
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
is_first_row = True
lookupTable = []
@@ -143,26 +169,28 @@ def init_package_keywords(filename):
# ARG!: we have to use an escaped "LIKE", because even simple 'WHERE' applies
# wild card on random '-' in the text
sql = '''SELECT 1 FROM orm_package WHERE name LIKE ? ESCAPE '-' '''
- package = cur.execute(sql, ( name, )).fetchone()
+ package = SQL_EXECUTE(cur, sql, ( name, )).fetchone()
PACKAGE_ID = 0
if package is None:
- sql = ''' INSERT into orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (mode,name,realname,invalidname,weight,0,0,0,0))
+ sql = ''' INSERT INTO orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (mode,name,realname,invalidname,weight,0,0,0,0))
else:
sql = ''' UPDATE orm_package
SET mode = ?, realname = ?, invalidname = ?, weight = ?
WHERE id = ?'''
- cur.execute(sql, (mode,realname,invalidname,weight,package[PACKAGE_ID]))
+ SQL_EXECUTE(cur, sql, (mode,realname,invalidname,weight,package[PACKAGE_ID]))
- if 0 == (i % 10):
- print("%04d:%30s\r" % (i,name), end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if 0 == (i % 10):
+ print("%04d:%30s" % (i,name), end='\r')
i += 1
print("%04d:%30s" % (i,name))
- conn.commit()
- cur.close()
- conn.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#################################
# Score new CVEs for the triage review
@@ -250,9 +278,9 @@ def attach_packages(cur, cve, recommend_list):
# Find or create a package record (WARNING: some package names have <'>)
pkg_name = pkg_name.replace('"',"'")
- sql = '''SELECT * FROM orm_package where name = "%s" AND mode = "%s";''' % (pkg_name,mode)
+ sql = f"""SELECT * FROM orm_package where name = '{pkg_name}' AND mode = '{mode}';"""
if verbose: print("PKG_TEST:%s" % sql)
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
package = cur.fetchone()
if package:
if verbose: print("FOUND PACKAGE ID for %s" % (pkg_name))
@@ -261,17 +289,17 @@ def attach_packages(cur, cve, recommend_list):
# Create Package
if verbose: print("INSERTING PACKAGE for %s,%s" % (cve[ORM.CVE_NAME],pkg_name))
sql = '''INSERT INTO orm_package (mode, name, realname, invalidname, weight, cve_count, vulnerability_count, investigation_count,defect_count ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (mode, pkg_name, pkg_name, '', 1 if FOR==mode else -1,0,0,0,0,))
- pkg_id = cur.lastrowid
+ SQL_EXECUTE(cur, sql, (mode, pkg_name, pkg_name, '', 1 if FOR==mode else -1,0,0,0,0,))
+ pkg_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
# Also create Package2CVE
sql = "SELECT * FROM orm_packagetocve where package_id = '%s' AND cve_id = '%s';" % (pkg_id,cve_id)
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
package2cve = cur.fetchone()
if not package2cve:
- AFFECTED = 0
- RELATED = 1
+ AFFECTED = False
+ RELATED = True
sql = '''INSERT INTO orm_packagetocve (package_id, cve_id, applicable) VALUES (?,?,?)'''
- cur.execute(sql, (pkg_id,cve_id,AFFECTED))
+ SQL_EXECUTE(cur, sql, (pkg_id,cve_id,AFFECTED))
# Add FOR packages to field in CVE
if FOR == mode:
@@ -286,14 +314,17 @@ def score_new_cves(cve_filter):
global recommends
global cmd_skip
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
- cur_ds = conn.cursor()
+ if debug_sql:
+ SQL_DEBUG(True,'SCR')
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
+ cur_ds = SQL_CURSOR(conn)
# Load the package filter table
sql = "SELECT * FROM orm_package"
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
for package in cur:
# Fixup notation not intended to be regex
name = package[ORM.PACKAGE_NAME].replace('++',r'\+\+')
@@ -304,16 +335,16 @@ def score_new_cves(cve_filter):
if 'NEW' == cve_filter:
# sql = "SELECT * FROM orm_cve WHERE (status='%s' OR status='%s') AND score_date IS NULL;" % (ORM.STATUS_NEW,ORM.STATUS_NEW_RESERVED)
sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NULL;" % (ORM.STATUS_NEW)
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
elif cve_filter.startswith('CVE-'):
- cur.execute('SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
+ SQL_EXECUTE(cur, 'SELECT * FROM orm_cve WHERE name LIKE "'+cve_filter+'%"')
else:
print("ERROR: Unrecognized filter '%s'" % filter)
exit(1)
# Pre-gather the potential data sources
sql = "SELECT * FROM orm_datasource WHERE data = ?"
- cur_ds.execute(sql, ('cve',))
+ SQL_EXECUTE(cur_ds, sql, ('cve',))
ds_list = []
for ds in cur_ds:
if not "ALT-SOURCE" in ds[ORM.DATASOURCE_ATTRIBUTES]:
@@ -330,34 +361,46 @@ def score_new_cves(cve_filter):
ds_count = 0
is_change = False
time_now = datetime.now()
+
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+        progress_set_max(len(cur))  # FIXME(review): DB cursors (sqlite3) have no len(); confirm SQL_CURSOR wrapper supports it, else use rowcount/fetchall
+
for i,cve in enumerate(cur):
cve_name = cve[ORM.CVE_NAME]
+ print("C=%s,I=%s" % (cve_name,i), file=sys.stderr)
+        sys.stderr.flush()
+
+ if 0 == (i % 10):
+ progress_set_current(i)
+ progress_show(cve_name)
+
# if cve[ORM.CVE_SCORE_DATE]:
# #cve_score_date = datetime.strptime(source[ORM.CVE_SCORE_DATE], '%Y-%m-%d %H:%M:%S')
# # If there is any score_date, then nothing to do here
# continue
#
# Progress indicator support
- if 0 == i % 10:
- print('%04d: %20s\r' % (i,cve_name), end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if 0 == i % 10:
+ print('%04d: %20s' % (i,cve_name), end='\r')
if (0 == i % 200) and (not cmd_skip) and is_change:
- conn.commit()
print("%4d: COMMIT" % i)
- sleep(2)
+ sys.stdout.flush()
+ SQL_COMMIT(conn)
+ #sleep(0.5)
is_change = False
# Development/debug support
- if cmd_skip:
- if i < cmd_skip:
- continue
- else:
- cmd_skip = 0
- if cmd_count:
- if record_count < cmd_count:
- record_count += 1
- else:
- print("Count return: %s,%s,%s" % (i,record_count,cmd_count))
- break
+ if cmd_skip > i:
+ continue
+        if cmd_count and (cmd_count < (i - cmd_skip)):
+ print("Count return: %s,%s" % (i,cmd_count))
+ sys.stdout.flush()
+ break
if verbose: print("TEST CVE = %20s" % (cve[ORM.CVE_NAME]))
recommend,recommend_list = compute_recommends(cve)
@@ -376,7 +419,7 @@ def score_new_cves(cve_filter):
packages = ?,
score_date = ?
WHERE id = ?'''
- cur_write.execute(sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATE_FORMAT), cve[ORM.CVE_ID]))
+ SQL_EXECUTE(cur_write, sql, (recommend, recommend_list, cve_packages, time_now.strftime(ORM.DATASOURCE_DATE_FORMAT), cve[ORM.CVE_ID]))
write_count += 1
is_change = True
@@ -387,17 +430,23 @@ def score_new_cves(cve_filter):
if cve[ORM.CVE_NAME].startswith(ds_obj['filter']):
#print(" Alternate CVE source %s for %s " % (ds_obj['id'],cve[ORM.CVE_ID]))
sql = ''' SELECT * FROM orm_cvesource WHERE cve_id = ? AND datasource_id = ?'''
- if not cur_write.execute(sql, (cve[ORM.CVE_ID],ds_obj['id'],)).fetchone():
+ if not SQL_EXECUTE(cur_write, sql, (cve[ORM.CVE_ID],ds_obj['id'],)).fetchone():
### TO-DO: only add sources that have CVE matches
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id ) VALUES (?, ?)'''
- cur_write.execute(sql, (cve[ORM.CVE_ID],ds_obj['id']))
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id ) VALUES (?, ?)'''
+ SQL_EXECUTE(cur_write, sql, (cve[ORM.CVE_ID],ds_obj['id']))
ds_count += 1
- print("%30sADDED [%4d]: %20s <- %20s\r" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='')
+ if verbose: print("%30sADDED [%4d]: %20s <- %20s" % ('',ds_count,ds_obj['key'],cve[ORM.CVE_NAME]),end='\r')
if is_change:
- conn.commit()
+ SQL_COMMIT(conn)
print("COMMIT")
print("\nUpdated CVEs=%d, Added alternate sources=%d" % (write_count,ds_count))
+ sys.stdout.flush()
+ # End progress
+ progress_done('Done')
+ # Dump the SQL transaction data
+ if debug_sql:
+ SQL_DUMP()
#################################
# init_notify_categories
@@ -407,25 +456,25 @@ def init_notify_categories(filename):
with open(filename) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
Category_Items = dct['Notify_Categories']
for i,category in enumerate(Category_Items):
if verbose: print("%s" % category['name'])
category_name = str(category['name'])
- sql = '''SELECT * FROM orm_notifycategories where category = "%s";''' % (category_name)
- nc = cur.execute(sql).fetchone()
+        sql = """SELECT * FROM orm_notifycategories where category = ?;"""
+        nc = SQL_EXECUTE(cur, sql, (category_name,)).fetchone()
if not nc:
sql = '''INSERT INTO orm_notifycategories (category) VALUES (?)'''
# REMINDER: we need the ',' else the 'category_name' will be seen as an array of chars
- cur.execute(sql, (category_name,))
+ SQL_EXECUTE(cur, sql, (category_name,))
else:
if verbose: print("FOUND_CATEGORY:%s" % category['name'])
pass
- conn.commit()
- cur.close()
- conn.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#################################
# Update cumulative Cve/Vulnerability/Investigation status
@@ -454,10 +503,10 @@ def _update_cve_status(cur,cve,srtool_today,update_skip_history):
cve_priority = cve[ORM.CVE_PRIORITY]
cve_status = None
vote_count = 0
- cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ cve2vuls = SQL_EXECUTE(cur, "SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
for cve2vul in cve2vuls:
vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
- vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ vulnerability = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
# Compute Status
status = vulnerability[ORM.VULNERABILITY_STATUS]
if verbose: print(" %s,%s" % (vulnerability[ORM.VULNERABILITY_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
@@ -504,12 +553,12 @@ def _update_cve_status(cur,cve,srtool_today,update_skip_history):
if verbose: print(" Change CVE:%s" % ';'.join(history_update))
if not cmd_test:
sql = "UPDATE orm_cve SET status=?, priority=?, srt_updated=? WHERE id=?"
- cur.execute(sql, (cve_status,cve_priority,srtool_today,cve[ORM.CVE_ID],) )
+ SQL_EXECUTE(cur, sql, (cve_status,cve_priority,srtool_today,cve[ORM.CVE_ID],) )
if not update_skip_history:
# Add status update in history
update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from vulnerabilities')
sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (cve[ORM.CVE_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (cve[ORM.CVE_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
# Create notification
### TO-DO
@@ -518,16 +567,16 @@ def _update_cve_status(cur,cve,srtool_today,update_skip_history):
if verbose: print(" No status change needed!")
def update_cve_status(cve_list,update_skip_history):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
srtool_today = datetime.today()
if 'all' == cve_list:
- cves = cur.execute("SELECT * FROM orm_cve").fetchall()
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve").fetchall()
else:
cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
- cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
if verbose: print("ACTION:update_cve_status:count=%d" % (len(cves)))
@@ -540,18 +589,20 @@ def update_cve_status(cve_list,update_skip_history):
_update_cve_status(cur,cve,srtool_today,update_skip_history)
i += 1
- if (0 == i % 100):
- print("%5d: %-10s\r" % (i,cve[ORM.CVE_NAME]),end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,cve[ORM.CVE_NAME]),end='\r')
if (0 == i % 200):
- conn.commit()
+ SQL_COMMIT(conn)
# Development/debug support
if cmd_skip and (i < cmd_skip): continue
if cmd_count and ((i - cmd_skip) > cmd_count): break
print("%5d:" % (i))
- cur.close()
- conn.commit()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
# Indexes into the product table cache
PRODUCT_DICT_KEY = 0
@@ -567,10 +618,10 @@ def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,upd
vulnerability_priority = vulnerability[ORM.VULNERABILITY_PRIORITY]
vulnerability_status = None
vote_count = 0
- vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability[ORM.VULNERABILITY_ID]).fetchall()
+ vul2invs = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability[ORM.VULNERABILITY_ID]).fetchall()
for vul2inv in vul2invs:
investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
- investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+ investigation = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
# For now, only calculate the "Public Status", so skip non-supported products
product_mode = get_tag_key(product_dict[investigation[ORM.INVESTIGATION_PRODUCT_ID]][PRODUCT_DICT_TAG],'mode')
@@ -624,12 +675,12 @@ def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,upd
if verbose: print(" Change Vulnerability:%s" % ';'.join(history_update))
if not cmd_test:
sql = "UPDATE orm_vulnerability SET status=?, priority=?, srt_updated=? WHERE id=?"
- cur.execute(sql, (vulnerability_status,vulnerability_priority,srtool_today,vulnerability[ORM.VULNERABILITY_ID],) )
+ SQL_EXECUTE(cur, sql, (vulnerability_status,vulnerability_priority,srtool_today,vulnerability[ORM.VULNERABILITY_ID],) )
if not update_skip_history:
# Add status update in history
update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from investigations')
sql = '''INSERT INTO orm_vulnerabilityhistory (vulnerability_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (vulnerability[ORM.VULNERABILITY_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (vulnerability[ORM.VULNERABILITY_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
# Create notification
### TO-DO
@@ -638,39 +689,41 @@ def _update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,upd
if verbose: print(" No status change needed!")
def update_vulnerability_status(vulnerability_list,update_skip_history):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
srtool_today = datetime.today()
# Pre-gather and cache the product information
product_dict = {}
- products = cur.execute("SELECT * FROM orm_product").fetchall()
+ products = SQL_EXECUTE(cur, "SELECT * FROM orm_product").fetchall()
for product in products:
product_dict[ product[ORM.PRODUCT_ID] ] = [product[ORM.PRODUCT_KEY],product[ORM.PRODUCT_PRODUCT_TAGS]]
if 'all' == vulnerability_list:
- vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability").fetchall()
+ vulnerabilities = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability").fetchall()
else:
vulnerability_paren_list = str(vulnerability_list.split(',')).replace('[','(').replace(']',')')
if verbose: print("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list)
- vulnerabilities = cur.execute("SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list).fetchall()
+ vulnerabilities = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability WHERE name IN %s" % vulnerability_paren_list).fetchall()
i = 0
for vulnerability in vulnerabilities:
_update_vulnerability_status(cur,vulnerability,srtool_today,product_dict,update_skip_history)
i += 1
- if (0 == i % 100):
- print("%5d: %-10s\r" % (i,vulnerability[ORM.VULNERABILITY_NAME]),end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,vulnerability[ORM.VULNERABILITY_NAME]),end='\r')
if (0 == i % 200):
- conn.commit()
+ SQL_COMMIT(conn)
# Development/debug support
if cmd_skip and (i < cmd_skip): continue
if cmd_count and ((i - cmd_skip) > cmd_count): break
print("%5d:" % (i))
- cur.close()
- conn.commit()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
def _update_investigation_status(cur,investigation,srtool_today,update_skip_history):
@@ -683,10 +736,10 @@ def _update_investigation_status(cur,investigation,srtool_today,update_skip_hist
investigation_priority = investigation[ORM.INVESTIGATION_PRIORITY]
investigation_status = None
vote_count = 0
- inv2defs = cur.execute("SELECT * FROM orm_investigationtodefect where investigation_id = '%s'" % investigation[ORM.INVESTIGATION_ID]).fetchall()
+ inv2defs = SQL_EXECUTE(cur, "SELECT * FROM orm_investigationtodefect where investigation_id = '%s'" % investigation[ORM.INVESTIGATION_ID]).fetchall()
for inv2def in inv2defs:
defect_id = inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
- defect = cur.execute("SELECT * FROM orm_defect where id = '%s'" % defect_id).fetchone()
+ defect = SQL_EXECUTE(cur, "SELECT * FROM orm_defect where id = '%s'" % defect_id).fetchone()
# Compute Status
status = defect[ORM.DEFECT_SRT_STATUS]
if verbose: print(" %s,%s" % (defect[ORM.DEFECT_NAME],ORM.get_orm_string(status,ORM.STATUS_STR)))
@@ -763,12 +816,12 @@ def _update_investigation_status(cur,investigation,srtool_today,update_skip_hist
if verbose: print(" Change Investigation:%s" % ';'.join(history_update))
if not cmd_test:
sql = "UPDATE orm_investigation SET status=?, outcome=?, priority=?, srt_updated=? WHERE id=?"
- cur.execute(sql, (investigation_status,investigation_outcome,investigation_priority,srtool_today,investigation[ORM.INVESTIGATION_ID],) )
+ SQL_EXECUTE(cur, sql, (investigation_status,investigation_outcome,investigation_priority,srtool_today,investigation[ORM.INVESTIGATION_ID],) )
if not update_skip_history:
# Add status update in history
update_comment = "%s%s {%s}" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_DEFECT,';'.join(history_update),'Cumulative update from defects')
sql = '''INSERT INTO orm_investigationhistory (investigation_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (investigation[ORM.INVESTIGATION_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (investigation[ORM.INVESTIGATION_ID],update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
# Create notification
### TO-DO
@@ -777,37 +830,39 @@ def _update_investigation_status(cur,investigation,srtool_today,update_skip_hist
if verbose: print(" No status change needed!")
def update_investigation_status(investigation_list,update_skip_history):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
srtool_today = datetime.today()
if 'all' == investigation_list:
- investigations = cur.execute("SELECT * FROM orm_investigation").fetchall()
+ investigations = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation").fetchall()
else:
investigation_paren_list = str(investigation_list.split(',')).replace('[','(').replace(']',')')
if verbose: print("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list)
- investigations = cur.execute("SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list).fetchall()
+ investigations = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation WHERE name IN %s" % investigation_paren_list).fetchall()
i = 0
for investigation in investigations:
_update_investigation_status(cur,investigation,srtool_today,update_skip_history)
i += 1
- if (0 == i % 100):
- print("%5d: %-10s\r" % (i,investigation[ORM.INVESTIGATION_NAME]),end='')
+ if not is_progress:
+ # Do not block on '\r' if displaying progress
+ if (0 == i % 100):
+ print("%5d: %-10s" % (i,investigation[ORM.INVESTIGATION_NAME]),end='\r')
if (0 == i % 200):
- conn.commit()
+ SQL_COMMIT(conn)
# Development/debug support
if cmd_skip and (i < cmd_skip): continue
if cmd_count and ((i - cmd_skip) > cmd_count): break
- cur.close()
- conn.commit()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
# This routine is intended for incremental cumulative status updates
def update_cve_status_tree(cve_list,update_skip_history):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
if 'all' == cve_list:
# global cumulative update
@@ -819,7 +874,7 @@ def update_cve_status_tree(cve_list,update_skip_history):
# Perform a deep update on the CVEs, their vunerabilities, and their investigations
cve_paren_list = str(cve_list.split(',')).replace('[','(').replace(']',')')
if verbose: print("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list)
- cves = cur.execute("SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
+ cves = SQL_EXECUTE(cur, "SELECT * FROM orm_cve WHERE name IN %s" % cve_paren_list).fetchall()
if verbose: print("ACTION:update_cve_status_tree:count=%d" % (len(cves)))
@@ -830,16 +885,16 @@ def update_cve_status_tree(cve_list,update_skip_history):
vulnerability_list = []
investigation_list = []
- cve2vuls = cur.execute("SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
+ cve2vuls = SQL_EXECUTE(cur, "SELECT * FROM orm_cvetovulnerablility where cve_id = '%s'" % cve[ORM.CVE_ID]).fetchall()
for cve2vul in cve2vuls:
vulnerability_id = cve2vul[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
- vulnerability = cur.execute("SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
+ vulnerability = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerability where id = '%s'" % vulnerability_id).fetchone()
vulnerability_list.append(vulnerability[ORM.VULNERABILITY_NAME])
- vul2invs = cur.execute("SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability_id).fetchall()
+ vul2invs = SQL_EXECUTE(cur, "SELECT * FROM orm_vulnerabilitytoinvestigation where vulnerability_id = '%s'" % vulnerability_id).fetchall()
for vul2inv in vul2invs:
investigation_id = vul2inv[ORM.VULNERABILITYTOINVESTIGATION_INVESTIGATION_ID]
- investigation = cur.execute("SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
+ investigation = SQL_EXECUTE(cur, "SELECT * FROM orm_investigation where id = '%s'" % investigation_id).fetchone()
investigation_list.append(investigation[ORM.INVESTIGATION_NAME])
# Update the CVE's children status
@@ -848,8 +903,36 @@ def update_cve_status_tree(cve_list,update_skip_history):
# Childred are updated, now update the CVEs
update_cve_status(','.join(cve_list), update_skip_history)
- cur.close()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# import_recipe_names
+#
+# Import the common recipe names from text file
+# Use for CVE triage affected component typeahead
+#
+
+def import_recipe_names():
+
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ # Preclear the recipe table
+ sql = 'DELETE FROM orm_recipetable'
+ SQL_EXECUTE(cur, sql)
+
+ # Load the recipe list (derived from Layer Index)
+ with open('data/recipe_names_from_layer_index.txt') as f:
+ for line in f:
+            recipe = line.rstrip('\n')
+ sql = "SELECT * FROM orm_recipetable WHERE recipe_name = '%s'" % (recipe)
+ cvi = SQL_EXECUTE(cur, sql).fetchone()
+ if not cvi:
+ sql = '''INSERT INTO orm_recipetable (recipe_name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (recipe,))
+
+ SQL_COMMIT(conn)
#################################
# Generate database schema offsets
@@ -865,6 +948,14 @@ def gen_schema_header(database_dir,schema_dir):
database_file = os.path.join(database_dir, 'srt.sqlite')
schema_file = os.path.join(schema_dir, 'srt_schema.py')
+ # Fetch USER_SRTOOL_ID
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ USER_SRTOOL_NAME = 'SRTool'
+ user = SQL_EXECUTE(cur, "SELECT * FROM users_srtuser where username = '%s'" % USER_SRTOOL_NAME).fetchone()
+ USER_SRTOOL_ID = user[0] # Hardcoded 'ORM.USERS_SRTUSER_ID'
+ SQL_CLOSE_CONN(conn)
+
create_re = re.compile(r"CREATE TABLE[A-Z ]* \"(\w+)\" \((.+)\);")
try:
cmd = ('sqlite3', database_file, '.schema')
@@ -873,13 +964,7 @@ def gen_schema_header(database_dir,schema_dir):
print("ERROR(%d): %s" % (e.returncode, e.output))
return
- # Fetch USER_SRTOOL_ID
- conn = sqlite3.connect(database_file)
- cur = conn.cursor()
- USER_SRTOOL_NAME = 'SRTool'
- user = cur.execute("SELECT * FROM users_srtuser where username = '%s'" % USER_SRTOOL_NAME).fetchone()
- USER_SRTOOL_ID = user[0] # Hardcoded 'ORM.USERS_SRTUSER_ID'
- conn.close()
+
with open(schema_file, 'w') as fd:
fd.write("# SRTool database table schema indexes\n")
@@ -902,10 +987,19 @@ def gen_schema_header(database_dir,schema_dir):
for i, col in enumerate(columns.split(',')):
col = col.strip()
name = col[1:]
- name = name[:name.index('"')]
+ #
+ try:
+ name = name[:name.index('"')]
+ except Exception as e:
+ print("ERROR:%s:%s:" % (e,col))
+ name = col[:col.index(' ')]
+
#print("%s_%s = %d" % (table.upper(),name.upper(),i))
fd.write(" %s_%s = %d\n" % (table.upper(),name.upper(),i))
+ schema_indices = gen_schema_indices()
+ fd.write(schema_indices)
+
#
# Common SRTool Status Mappings
#
@@ -1008,13 +1102,27 @@ def gen_schema_header(database_dir,schema_dir):
fd.write(" %s_%s = %d\n" % ('DATASOURCE','MONTHLY' ,4))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','ONDEMAND' ,5))
fd.write(" %s_%s = %d\n" % ('DATASOURCE','ONSTARTUP' ,6))
+ fd.write(" %s_%s = %d\n" % ('DATASOURCE','PREINIT' ,7))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','FREQUENCY_STR', \
- 'Minute,Hourly,Daily,Weekly,Monthly,OnDemand,OnStartup' \
+ 'Minute,Hourly,Daily,Weekly,Monthly,OnDemand,OnStartup,PreInit' \
))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATE_FORMAT','%Y-%m-%d'))
fd.write(" %s_%s = '%s'\n" % ('DATASOURCE','DATETIME_FORMAT','%Y-%m-%d %H:%M:%S'))
#
+ # Job Status Mappings
+ #
+
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','NOTSTARTED' ,0))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','INPROGRESS' ,1))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','SUCCESS' ,2))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','ERRORS' ,3))
+ fd.write(" %s_%s = %d\n" % ('JOB_STATUS','CANCELLED' ,4))
+ fd.write(" %s_%s = '%s'\n" % ('JOB_STATUS','STR', \
+ 'NotStarted,InProgress,Success,Errors,Cancelled' \
+ ))
+
+ #
# Update class Mappings
#
@@ -1060,6 +1168,15 @@ def gen_schema_header(database_dir,schema_dir):
fd.write(" %s_%s = '%s'\n" % ('UPDATE','MARK_UPDATED','Mark_Updated()'))
#
+ # ErrorLog class Mappings
+ #
+
+ fd.write("\n\n")
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','INFO',0))
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','WARNING',1))
+ fd.write(" %s_%s = '%s'\n" % ('ERRORLOG','ERROR',2))
+
+ #
# Helper routine to map values to string names
#
@@ -1089,6 +1206,8 @@ def main(argv):
global cmd_skip
global cmd_count
global cmd_test
+ global debug_sql
+ global is_progress
# setup
parser = argparse.ArgumentParser(description='srtool_common.py: manage SRTool common source data')
@@ -1097,7 +1216,7 @@ def main(argv):
parser.add_argument('--score-new-cves', '-s', dest='score_new_cves', help='Score CVEs for triage [NEW|CVE-1234]')
parser.add_argument('--generate-schema-header', '-g', action='store_const', const='gen_schema_header', dest='command', help='Generate database schema header')
parser.add_argument('--generate-schema-header-dir', dest='gen_schema_header_dir', help='Generate database schema header for a give database directory')
-
+ parser.add_argument('--import-recipe-names', action='store_const', const='import_recipe_names', dest='command', help='Import recipe names table into database')
parser.add_argument('--update-cve-status-tree', '-S', dest='update_cve_status_tree', help="Update CVEs and their children's cumulative status")
parser.add_argument('--update-investigation-status', '-I', dest='update_investigation_status', help='Update Investigation cumulative status')
@@ -1105,11 +1224,13 @@ def main(argv):
parser.add_argument('--update-cve-status', '-C', dest='update_cve_status', help='Update CVE cumulative status')
parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
verbose = args.verbose
@@ -1123,6 +1244,9 @@ def main(argv):
cmd_count = int(args.count)
if get_override('SRTDBG_MINIMAL_DB'):
cmd_count = 40
+ debug_sql = args.debug_sql
+ is_progress = args.do_progress
+ progress_set_on(is_progress)
if verbose:
print('srtool_common %s' % args)
@@ -1137,6 +1261,8 @@ def main(argv):
gen_schema_header(srtool_basepath,os.path.join(srtool_basepath,'bin/common'))
elif args.gen_schema_header_dir:
gen_schema_header(args.gen_schema_header_dir,args.gen_schema_header_dir)
+ elif 'import_recipe_names' == args.command:
+ import_recipe_names()
elif args.update_cve_status_tree:
update_cve_status_tree(args.update_cve_status_tree, update_skip_history)
@@ -1150,6 +1276,35 @@ def main(argv):
else:
print("Command not found")
+### generate schema indices
+def gen_schema_indices():
+ conn=SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+
+ if srt_dbtype == "mysql":
+ sql = f"""SELECT * FROM information_schema.columns where table_name like '%%' and table_schema = '{srt_dbconfig['name']}' order by table_name,ordinal_position"""
+ elif srt_dbtype == "postgres":
+ sql = """SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position;"""
+ else:
+ sql = """SELECT m.name as table_name, p.name as column_name FROM sqlite_master AS m JOIN pragma_table_info(m.name) AS p where table_name != 'sqlite_sequence' ORDER BY m.name, p.cid"""
+    SQL_EXECUTE(cur, sql)
+
+ columns = cur.description
+ results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+ if srt_dbtype =="mysql":
+ results = [{'column_name': col['COLUMN_NAME'], 'table_name': col['TABLE_NAME']}for col in results]
+ columns = [SimpleNamespace(**col) for col in results]
+ current_table = None
+ count = 0
+ schema = ""
+ for col in columns:
+ if current_table != col.table_name:
+ count = 0
+ current_table = col.table_name
+ schema += f" {col.table_name.replace('orm_', '').upper()}_{col.column_name.upper()} = {count}\n"
+ count += 1
+ return schema
+
if __name__ == '__main__':
srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
diff --git a/bin/common/srtool_email.py b/bin/common/srtool_email.py
index c57fa9ab..54c65eab 100755
--- a/bin/common/srtool_email.py
+++ b/bin/common/srtool_email.py
@@ -132,7 +132,7 @@ def main(argv):
parser.add_argument('--subject', '-s', dest='subject', help='Subject for email address')
parser.add_argument('--server', dest='smtpserver', help='SMTP server address')
parser.add_argument('--user', dest='user', help='User name for Jira access')
- parser.add_argument('--passwd', dest='passwd', help='User password for Jira access')
+ parser.add_argument('--passwd', dest='passwd', help='User password for access')
parser.add_argument('--tls', '-t', action='store_true', dest='tls', help='Use TLS encryption')
parser.add_argument('--message', '-m', dest='message', help='Message to send')
parser.add_argument('--file', '-f', dest='file', help='File to send')
diff --git a/bin/common/srtool_jira_template.py b/bin/common/srtool_jira_template.py
index 82f2dc94..c059de17 100644
--- a/bin/common/srtool_jira_template.py
+++ b/bin/common/srtool_jira_template.py
@@ -41,7 +41,7 @@ import os
import sys
import re
import argparse
-import sqlite3
+from common.srtool_sql import *
import json
from datetime import datetime, date
@@ -172,7 +172,7 @@ def do_update_jira():
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
today = datetime.today()
@@ -228,7 +228,7 @@ def do_update_jira():
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#############################################################################3
###
@@ -444,7 +444,7 @@ def update_project_issues(project, issues, conn, log):
except:
cve_name_sort = cve.name
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, packages, srt_updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
c.execute(sql, (cve_name, cve_name_sort, d.priority, cve_status, '', '', '', '', '', 1, 0, '', 'Created from defect %s' % d.name, '', '', 0, '', '', '', '', '', '', datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
@@ -538,7 +538,7 @@ def jira_update_list(jira_list):
print("CONNECTION TO JIRA FAILED")
return 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
products = c.execute('''SELECT * FROM orm_product''').fetchall()
@@ -605,7 +605,7 @@ def jira_add_to_defect_db(jira_name):
#try connecting to jira
try:
jira = JIRA(JIRA_PRODUCTION_LINK, auth=(srt_user, srt_passwd))
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
except Exception as e:
print("xhr_investigation_commit:CONNECTION TO JIRA FAILED:(%s)\n" % e, file=sys.stderr)
@@ -655,7 +655,7 @@ def jira_add_to_defect_db(jira_name):
c.execute(sql, (d.name, d.summary, d.url, d.priority, d.status, d.resolution, str(d.publish), d.release_version, d.product_id, d.date_created, d.date_updated))
conn.commit()
c.close()
- conn.close()
+ SQL_CLOSE_CONN(conn)
except Exception as e:
print("ERROR:could not find/import defect(%s)" % e, file=sys.stderr)
return 1
@@ -677,7 +677,7 @@ JIRA_IS_TEST = True
JIRA_IS_SIMULATE = True
def simulate_new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -691,7 +691,7 @@ def simulate_new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "%s-%05d" % (product_prefix,index)
return defect_name
@@ -722,7 +722,7 @@ def jira_new_defect(product_defect_tags,summary,cve_list,description,reason,prio
return 1
#srt_error_log("Jira connection made")
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# append the jira link to description
diff --git a/bin/common/srtool_job.py b/bin/common/srtool_job.py
new file mode 100755
index 00000000..4c9214b0
--- /dev/null
+++ b/bin/common/srtool_job.py
@@ -0,0 +1,791 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# SRTool Implementation
+#
+# Copyright (C) 2020-2021 Wind River Systems
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+#
+# Usage:
+#
+# https://kevinmccarthy.org/2016/07/25/streaming-subprocess-stdin-and-stdout-with-asyncio-in-python/
+# "python How do I get real time output from my commands"
+
+import os
+import sys
+import re
+import argparse
+from datetime import datetime, date
+import subprocess
+import asyncio
+import time
+import traceback
+
+# load the srttool.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
+from common.srtool_common import log_error
+
+# Setup:
+job_errors = 0
+job_warnings = 0
+verbose = False
+debug_sql = False
+DBName = 'srt.sqlite'
+
+#################################
+# Helper methods
+#
+
+def debugMsg(msg):
+ if verbose:
+ print(msg)
+
+srtErrorLog = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srtool_dbg.log'
+def _log(msg):
+ f1=open(srtErrorLog, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Sub Process calls
+def execute_process(*args):
+ cmd_list = []
+ for arg in args:
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ cmd_list.append(a)
+ else:
+ cmd_list.append(arg)
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+#################################
+# SQL as dict support
+#
+# https://stackoverflow.com/questions/3300464/how-can-i-get-dict-from-sqlite-query
+#
+
+def connectDatabase():
+ return SQL_CONNECT()
+
+def dict_factory(cursor, row):
+ d = {}
+ for idx, col in enumerate(cursor.description):
+ d[col[0]] = row[idx]
+ return d
+
+#################################
+#
+#
+
+async def do_read_stream(stream, logfd, cur, conn, job_id, mode):
+ global job_errors
+ global job_warnings
+ while True:
+ line = await stream.readline()
+ if line:
+ line = line.decode("utf-8").strip()
+
+ # Is this an error line?
+ line_strip = line.strip()
+ line_strip_lower = line_strip.strip().lower()
+ if line_strip_lower.startswith('traceback') or \
+ line_strip_lower.startswith('syntaxerror') or \
+ (line_strip_lower.startswith('error') and (not '"ok"' in line)):
+ job_errors += 1
+ if verbose:
+ print("[CUR]:[ERROR DETECTED:%d]<%s>" % (job_errors,line))
+ _log("FOO:JOB:DO_READ_STREAM|%s|" % line_strip)
+ sys.stdout.flush()
+
+ # Is this an warning line?
+ line_strip = line.strip()
+ if line_strip_lower.startswith('warning'):
+ job_warnings += 1
+ if verbose:
+ print("[CUR]:[WARNING DEFECTED:%d]<%s>" % (job_warnings,line))
+ sys.stdout.flush()
+
+ # Is this a progress line?
+ if line.startswith('[PROGRESS'):
+ # Parse the progress line
+ #[PROGRESS:0,3,foobar1]
+ cnt = -1
+ max = -1
+ msg = line
+ now = datetime.now().strftime('%H:%M:%S')
+
+ m = re.search(r'\[PROGRESS:(\d+),(\d+),(.*)\]', line)
+ try:
+ if m:
+ cnt = int(m.group(1))
+ max = int(m.group(2))
+ msg = m.group(3)
+ except:
+ if verbose:
+ print("[CUR]:[PARSE_ERROR]<%s>" % line)
+ sys.stdout.flush()
+ continue
+
+ # Update database
+ if cur:
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max=?, message=?
+ WHERE id=?'''
+ if cnt > max:
+ cnt = max
+ ret = SQL_EXECUTE(cur, sql, (ORM.JOB_STATUS_INPROGRESS, cnt, max, msg[:49], job_id, ))
+ SQL_COMMIT(conn)
+ if verbose:
+ print("[CUR][%s]:Cnt=%s,Max=%s,Msg='%s',Now=%s" % (job_id,cnt,max,msg,now))
+ sys.stdout.flush()
+
+ # Is this an refresh line?
+ elif line_strip.startswith('[REFRESH'):
+ if cur:
+ sql = ''' UPDATE orm_job
+ SET refresh=?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, ('1', job_id, ))
+ SQL_COMMIT(conn)
+ if verbose:
+ print("[CUR]:[REFRESH_REQUESTED]")
+ sys.stdout.flush()
+ else:
+ # Update log file
+ if verbose and ('stderr' == mode):
+ line = "[STDERR]" + line
+ if logfd:
+ logfd.write(line + '\n')
+ logfd.flush()
+ else:
+ print(line)
+ sys.stdout.flush()
+ else:
+ break
+
+async def do_stream_subprocess(cmd, logfd, cur, conn, job_id):
+ process = await asyncio.create_subprocess_exec(*cmd,
+ stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)
+
+ await asyncio.wait([
+ do_read_stream(process.stdout, logfd, cur, conn, job_id, 'stdout'),
+ do_read_stream(process.stderr, logfd, cur, conn, job_id, 'stderr')
+ ])
+ return await process.wait()
+
+
+def do_execute(cmd,job_name,job_desc,log_file,job_id,options='',parent_name=''):
+ conn = None
+ cur = None
+ logfd = None
+
+ # Log file
+ if log_file:
+ logfd=open(log_file, 'w')
+
+ if verbose: print("DO_EXECUTE(%s,%s,%s)" % (cmd, log_file,job_id),file=logfd)
+
+ # Database cursor
+ if job_id:
+ if verbose: print("...DO_EXECUTE(job_id=%s)" % (job_id),file=logfd)
+ conn = connectDatabase()
+ cur = conn.cursor()
+ # Prepare/validate job id, create new job record on request
+ job_id = prepare_job_record(job_id,job_name,job_desc,' '.join(cmd),log_file,options,parent_name)
+ # Initialize the generated values
+ status = ORM.JOB_STATUS_INPROGRESS
+ started_on = datetime.now()
+ completed_on = None
+ pid = os.getpid()
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max = ?, errors=?, started_on = ?, completed_on=?, message = ?, pid=?
+ WHERE id=?'''
+ ret = SQL_EXECUTE(cur, sql, (status, 0, 0, 0, started_on, completed_on, '', pid, job_id, ))
+ if verbose: print("...DO_EXECUTE(PRESET=%s)" % (ret),file=logfd)
+ SQL_COMMIT(conn)
+
+ loop = asyncio.get_event_loop()
+ rc = loop.run_until_complete(
+ do_stream_subprocess(
+ cmd,
+ logfd,
+ cur,
+ conn,
+ job_id,
+ ))
+
+ #
+ # Finish up
+ #
+
+ # Close handles
+ if cur:
+ # (Re)set the status in case the job died before final progress update
+ ### TODO set error if job returns an error code
+ if job_errors or job_warnings:
+ status = ORM.JOB_STATUS_ERRORS
+ else:
+ status = ORM.JOB_STATUS_SUCCESS
+ completed_on = datetime.now() #datetime.today()
+#
+ # Give the user a moment to savor the 100% display,
+ # and time enough for the javascript loop to catch it
+ time.sleep(2)
+#
+ sql = ''' UPDATE orm_job
+ SET status = ?, completed_on=?, errors = ?, warnings = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (status, completed_on, job_errors, job_warnings, job_id, ))
+ SQL_COMMIT(conn)
+
+ # Close handles
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ loop.close()
+ if logfd:
+ logfd.close()
+ return rc
+
+def execute_command(command,job_name,job_desc,log_file,job_id,options,parent_name):
+ rc = do_execute(
+ ["bash", "-c", command],job_name,job_desc,log_file,job_id,options,parent_name,
+ )
+
+#################################
+# Unit tests
+#
+
+def test_async(job_id,log_file,delay):
+ do_execute(
+ ["bash", "-c", "echo stdout && sleep %s && echo stderr 1>&2 && sleep %s && echo done" % (delay,delay)],
+ 'Test_Async','Test_Async',log_file,job_id
+ )
+
+def do_test_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,3,foobar2]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,3,foobar3]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ testfd=open('.test4.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:3,3,foobar4]\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt " % (delay,delay,delay)],
+ 'Test_Unit','Do_Test_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+ os.remove('.test4.txt')
+
+
+def do_test_error1_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("Error: bad data in structure\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt" % (delay)],
+ 'Do_Test_Error1_Unit','Do_Test_Error1_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+
+def do_test_error2_unit(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write('Traceback (most recent call last):\n')
+ testfd.write('File "/home/test/1.txt", line 994, in <module>\n')
+ testfd.write('main(sys.argv[1:])\n')
+ testfd.write('File "/home/test/2.txt", line 970, in main\n')
+ testfd.write('read_db(work_dir)\n')
+ testfd.write('File "/home/test/3.txt", line 373, in read_db\n')
+ testfd.write('write_db()\n')
+ testfd.write("TypeError: write_db() missing 1 required positional argument: 'work_dir'\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt" % (delay)],
+ 'Do_Test_Error2_Unit','Do_Test_Error2_Unit',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+
+def do_test_hang(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,3,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,3,foobar2]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,3,NOTE:HANG_40_SEC]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt " % (delay,delay,40)],
+ 'Do_Test_Hang','Do_Test_Hang',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+
+def do_test_refresh(job_id,log_file,delay=1):
+ if not log_file:
+ log_file = 'unit_test.log'
+ testfd=open('.test1.txt', 'w')
+ testfd.write("foo1\n")
+ testfd.write("[PROGRESS:0,4,foobar1]\n")
+ testfd.write("foo2\n")
+ testfd.close()
+ testfd=open('.test2.txt', 'w')
+ testfd.write("foo3\n")
+ testfd.write("[PROGRESS:1,4,foobar2]\n")
+ testfd.write("[REFRESH:foobar3]\n")
+ testfd.write("foo4\n")
+ testfd.close()
+ testfd=open('.test3.txt', 'w')
+ testfd.write("foo5\n")
+ testfd.write("[PROGRESS:2,4,foobar3]\n")
+ testfd.write("foo6\n")
+ testfd.close()
+ testfd=open('.test4.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:3,4,foobar4]\n")
+ testfd.close()
+ testfd=open('.test5.txt', 'w')
+ testfd.write("foo7\n")
+ testfd.write("[PROGRESS:4,4,foobar4]\n")
+ testfd.close()
+ do_execute(
+ ["bash", "-c", "cat .test1.txt && sleep %s && cat .test2.txt && sleep %s && cat .test3.txt && sleep %s && cat .test4.txt && sleep %s && cat .test5.txt " % (delay,delay,delay,delay)],
+ 'Do_Test_Refresh','Do_Test_Refresh',log_file,job_id
+ )
+ os.remove('.test1.txt')
+ os.remove('.test2.txt')
+ os.remove('.test3.txt')
+ os.remove('.test4.txt')
+ os.remove('.test5.txt')
+
+# Run a parent job that calls sub-jobs
+# Example: bin/common/srtool_job.py -c "./bin/common/srtool_job.py --test-parent-job" --job-id 9 --log logs/run_job_parent.log --verbose --name "ParentTest" --description "Run a parent job test"
+def do_test_parent_job(parent_job_id,parent_log_file):
+ child_job_id = 8
+ child_log_file = 'logs/run_job_child.log'
+ progress_set_on(True)
+ progress_set_max(3)
+ print("Test_Parent_Job:Start")
+ child_command = 'bin/common/srtool_job.py -c SELFTEST --job-id %s --log %s ' % (child_job_id,child_log_file)
+ print("CHILD:%s" % child_command)
+ progress_show("Child pass #1")
+ print("Test_Parent_Job:Child pass #1")
+ ret = os.system(child_command)
+ progress_show("Child pass #2")
+ print("Test_Parent_Job:Child pass #2")
+ ret = os.system(child_command)
+ progress_show("Child pass #3")
+ print("Test_Parent_Job:Child pass #3")
+ ret = os.system(child_command)
+ print("Test_Parent_Job:Done")
+ progress_done('Done')
+
+#################################
+# Job record creation control
+#
+# Prepare the Job record, create new one if needed/requested
+# Job_id of '0' means create a new record
+
+def prepare_job_record(job_id=0,name='AutoJob',description = 'AutoJob',command = '',log_file='logs/run_job_auto.log',options='',parent_name=''):
+ conn = connectDatabase()
+ cur = conn.cursor()
+
+ if verbose: print("DO_CREATE_JOB(%s,%s,%s,%s,%s)" % (job_id,name,description,command,log_file))
+
+ # Generated values
+ message = ''
+ status = ORM.JOB_STATUS_NOTSTARTED
+ started_on = None
+ completed_on = None
+ pid = 0
+ count = 0
+ max = 0
+ errors = 0
+ warnings = 0
+ refresh = 0
+
+ # Validate requested job number as a positive integer
+ try:
+ job_id_number = int(job_id)
+ if 0 > job_id_number:
+ job_id_number = 0
+ except:
+ job_id_number = 0
+
+ # Create / Update Job
+ if 0 != job_id_number:
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %d;" % job_id_number)
+ job = cur.fetchone()
+ # If that job_id is in progress, force a new job record
+ if job and (ORM.JOB_STATUS_INPROGRESS == job[ORM.JOB_STATUS]):
+ if verbose: print("FOUND JOB BUSY %s, force new job" % job_id_number)
+ job = None
+ job_id_number = 0
+ else:
+ # Autocreate new job record
+ job = None
+
+ if not job:
+ if verbose: print("CREATE JOB %s" % job_id_number)
+ # Create the new job
+ # Offset ... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+ sql = ''' INSERT INTO orm_job (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh))
+ new_job_id_number = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+
+ # If a specific job_id was requested (e.g. external tracking purposes),
+ # force that id (we know here that it is unique)
+ if 0 != job_id_number:
+ if verbose: print("SET JOB ID %d" % job_id_number)
+ sql = ''' UPDATE orm_job
+ SET id = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (job_id_number, new_job_id_number, ))
+ SQL_COMMIT(conn)
+ else:
+ job_id_number = new_job_id_number
+ else:
+ if verbose: print("FOUND JOB %d" % job_id_number)
+ sql = ''' UPDATE orm_job
+ SET name = ?, description = ?, command = ?, parent_name = ?, log_file = ?, status = ?, pid = ?, count = ?, max = ?, errors = ?, warnings = ?, message = ?, started_on = ?, completed_on = ?, options = ?, refresh = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (name, description, command, parent_name, log_file, status, pid, count, max, errors, warnings, message, started_on, completed_on, options, refresh, job_id_number, ))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+ return job_id_number
+
+def progress_step(job_id,count):
+ conn = connectDatabase()
+ cur = conn.cursor()
+ # Use job #90 as default job
+ if 0 == job_id:
+ job_id = 90
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %s;" % job_id)
+ job=cur.fetchone()
+ # Create any missing job record on-the-fly
+ if not job:
+ job_id = prepare_job_record(job_id)
+ SQL_EXECUTE(cur, "SELECT * FROM orm_job where id = %s;" % job_id)
+ job=cur.fetchone()
+
+ status = job[ORM.JOB_STATUS]
+# started_on = job[ORM.JOB_STARTED_ON]
+# completed_on = job[ORM.JOB_COMPLETED_ON]
+ if count == 0:
+ started_on = datetime.now()
+ completed_on = None
+ status = ORM.JOB_STATUS_INPROGRESS
+ elif count == 4:
+ completed_on = datetime.now()
+ status = ORM.JOB_STATUS_SUCCESS
+
+ max = 4
+ message = 'STEP%d' % count
+ command = 'COMMAND%d' % count
+
+ sql = ''' UPDATE orm_job
+ SET status = ?, count = ?, max=?, message=?, command = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (status, count, max, message[:49], command, job_id, ))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# List the status of the PIDs in the Job table
+#
+
+def job_pid_status():
+ conn = connectDatabase()
+ conn.row_factory = dict_factory # sqlite3.Row
+ cur = conn.cursor()
+
+ # Job Status
+ SUCCESS = 2
+ # PID Status
+ RUNNING = 0
+ MISSING = 1
+
+ pid_table = {}
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job"):
+ if job['pid']:
+ pid_table[int(job['pid'])] = MISSING
+
+ # Fetch pid data
+ result_returncode,result_stdout,result_stderr = execute_process(['ps','-a','-x'])
+ if 0 != result_returncode:
+ result_stdout = str(result_stdout)
+ print("ERROR(%s):%s" % (result_returncode,result_stderr))
+ exit(1)
+ for line in result_stdout.splitlines():
+ try:
+ pid = int(line[:line.index(' ')])
+ except:
+ continue
+ #value = line[line.index('=')+1:]
+ if pid in pid_table:
+ pid_table[pid] = RUNNING
+
+ for pid in pid_table:
+ print('%s:%s' % (pid,pid_table[pid]))
+
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# External job monitor support
+#
+def dump():
+ conn = connectDatabase()
+ cur = conn.cursor()
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job ORDER BY id;"):
+ print("Job=%03d,Cnt=%04s,Max=%04s,Status=%-10s,Err=%s,Pid=%5s,Start=%s,Stop=%s,Re:%s,Name=%10s,Msg='%s'" % (
+ job[ORM.JOB_ID],job[ORM.JOB_COUNT],job[ORM.JOB_MAX],
+ ORM.get_orm_string(job[ORM.JOB_STATUS],ORM.JOB_STATUS_STR),job[ORM.JOB_ERRORS],job[ORM.JOB_PID],
+ job[ORM.JOB_STARTED_ON][5:19] if job[ORM.JOB_STARTED_ON] else ' ',
+ job[ORM.JOB_COMPLETED_ON][11:19] if job[ORM.JOB_COMPLETED_ON] else ' ',
+ job[ORM.JOB_REFRESH],job[ORM.JOB_NAME],job[ORM.JOB_MESSAGE],
+ ))
+ sys.stdout.flush()
+
+def monitor_job():
+ print("Monitor Job:: Now=%s" % (datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
+ conn = connectDatabase()
+ cur = conn.cursor()
+ job_prev = {}
+ i=0
+ # Common date_time string expansion
+ def datetime_text(dt):
+ # 2021-04-01 12:10:09
+ return dt.strftime('%Y-%m-%d %H:%M:%S')
+
+ while True:
+ i += 1
+ msg = []
+ progress = []
+ for job in SQL_EXECUTE(cur, "SELECT * FROM orm_job ORDER BY id;"):
+ job_id = job[ORM.JOB_ID]
+ is_change = False or (0 == (i % 40))
+ status = ' '
+ if job_id in job_prev:
+ # Marker if new job started
+ if (job_prev[job_id][ORM.JOB_STATUS] in (ORM.JOB_STATUS_SUCCESS,ORM.JOB_STATUS_ERRORS,ORM.JOB_STATUS_CANCELLED)) and \
+ (job_prev[job_id][ORM.JOB_STATUS] != job[ORM.JOB_STATUS]):
+ status = '*'
+ # Line if job status change
+ for i in range(len(job)):
+ if job_prev[job_id][i] != job[i]:
+ is_change = True
+ else:
+ is_change = True
+ job_prev[job_id] = job
+ if is_change:
+ msg.append("%s Job=%2s,Name=%10s,Cnt=%04s,Max=%04s,Status=%-10s,Err=%s,Pid=%5s,Start=%s,Stop=%s,Re:%s,Msg=%s| " % (
+ status,job_id,job[ORM.JOB_NAME],job[ORM.JOB_COUNT],job[ORM.JOB_MAX],
+ ORM.get_orm_string(job[ORM.JOB_STATUS],ORM.JOB_STATUS_STR),job[ORM.JOB_ERRORS],job[ORM.JOB_PID],
+ datetime_text(job[ORM.JOB_STARTED_ON])[11:19] if job[ORM.JOB_STARTED_ON] else ' ',
+ datetime_text(job[ORM.JOB_COMPLETED_ON])[11:19] if job[ORM.JOB_COMPLETED_ON] else ' ',
+ job[ORM.JOB_REFRESH],job[ORM.JOB_MESSAGE],
+ ))
+ if (ORM.JOB_STATUS_INPROGRESS == job[ORM.JOB_STATUS]):
+ progress.append("%d=%d%%" % (job_id,((job[ORM.JOB_COUNT] * 100)/job[ORM.JOB_MAX]) if job[ORM.JOB_MAX] else 0))
+
+ if msg:
+ print("=== %s (%s) ===" % (datetime.now().strftime('%Y-%m-%d %H:%M:%S'),','.join(progress)))
+ for str in msg:
+ print(str)
+ sys.stdout.flush()
+ time.sleep(0.1)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global DBName
+ global debug_sql
+
+ parser = argparse.ArgumentParser(description='xxx_job.py: Run command line jobs, update progress in GUI')
+
+ # Main options
+ parser.add_argument('--command', '-c', dest='command', help='Command to execute')
+ parser.add_argument('--name', '-n', dest='job_name', help='Job Name')
+ parser.add_argument('--description', '-d', dest='job_desc', help='Job Desciption')
+ parser.add_argument('--log', '-l', dest='log_file', help='Log file')
+ parser.add_argument('--options', '-o', dest='options', help='Job options')
+ parser.add_argument('--job-id', '-j', dest='job_id', help='Select a specific job record ID')
+ parser.add_argument('--parent-name', '-p', dest='parent_name', help='Parent record name, if any')
+ # UI helpers
+ parser.add_argument('--job-pid-status', action='store_const', const='job_pid_status', dest='command', help='Dump the PID status of the open jobs')
+
+ # Unit tests
+ parser.add_argument('--test-async', '-T', action='store_true', dest='do_test_async', help='Test Aync')
+ parser.add_argument('--test-unit', '-U', action='store_true', dest='do_test_unit', help='Unit Test')
+ parser.add_argument('--test-hang', '-H', action='store_true', dest='do_test_hang', help='Simulate app hang')
+ parser.add_argument('--test-refresh', '-R', action='store_true', dest='do_test_refresh', help='Insert a refresh request')
+ parser.add_argument('--test-parent-job', '-P', action='store_true', dest='do_test_parent_job', help='Insert a refresh request')
+
+ # Step tests
+ parser.add_argument('-0', action='store_true', dest='progress_step_0', help='Manual progress step 0 (init)')
+ parser.add_argument('-1', action='store_true', dest='progress_step_1', help='Manual progress step 1 (1/4)')
+ parser.add_argument('-2', action='store_true', dest='progress_step_2', help='Manual progress step 2 (2/4)')
+ parser.add_argument('-3', action='store_true', dest='progress_step_3', help='Manual progress step 3 (3/4)')
+ parser.add_argument('-4', action='store_true', dest='progress_step_4', help='Manual progress step 4 (done)')
+
+ # Debugging support
+ parser.add_argument('--dump', '-D', action='store_true', dest='dump', help='Dump jobs in the database')
+ parser.add_argument('--monitor-job', '-M', action='store_true', dest='monitor_job', help='Monitor a job in the database')
+ parser.add_argument('--delay', '-L', dest='delay', help='Test delays')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+ parser.add_argument('--database', '-B', dest='database', help='Set the database path')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
+ parser.add_argument('--debug-sql-compare', dest='debug_sql_compare', help='Compare Debug SQL logs (e.g. "JOB,SCR")')
+
+ # Be flexible with arguments to support sub-parse trees
+ args, argv = parser.parse_known_args()
+
+ _log("Job:args:%s" % args)
+
+ try:
+ # Basic parameters
+ verbose = args.verbose
+ debug_sql = args.debug_sql
+ if args.log_file:
+ log_file = args.log_file
+ else:
+ log_file = ''
+ # Default is to create a new job record
+ if args.job_id:
+ job_id = args.job_id
+ else:
+ job_id = 0
+ if args.database:
+ DBName = args.database
+ if args.delay:
+ delay = args.delay
+ else:
+ delay = 1
+ job_name = args.job_name if args.job_name else ''
+ job_desc = args.job_desc if args.job_desc else ''
+ job_options = args.options if args.options else ''
+ job_parent_name = args.parent_name if args.parent_name else ''
+
+ # Enable SQL tracing
+ if debug_sql:
+ SQL_DEBUG(True,'JOB')
+
+ # Unit tests
+ ret = 0
+ if args.command == 'SELFTEST':
+ do_test_unit(job_id,log_file,'3')
+ elif args.command == 'SELFERROR1':
+ do_test_error1_unit(job_id,log_file,'3')
+ elif args.command == 'SELFERROR2':
+ do_test_error2_unit(job_id,log_file,'3')
+
+ elif args.do_test_async:
+ test_async(job_id,log_file,delay)
+ elif args.do_test_unit:
+ do_test_unit(job_id,log_file,delay)
+ elif args.do_test_hang:
+ do_test_hang(job_id,log_file,delay)
+ elif args.do_test_refresh:
+ do_test_refresh(job_id,log_file,delay)
+ elif args.do_test_parent_job:
+ do_test_parent_job(job_id,log_file)
+
+ elif args.monitor_job:
+ monitor_job()
+ elif args.dump:
+ dump()
+ elif args.debug_sql_compare:
+ SQL_DUMP_COMPARE(args.debug_sql_compare)
+
+ elif args.progress_step_0: progress_step(job_id,0);
+ elif args.progress_step_1: progress_step(job_id,1);
+ elif args.progress_step_2: progress_step(job_id,2);
+ elif args.progress_step_3: progress_step(job_id,3);
+ elif args.progress_step_4: progress_step(job_id,4);
+
+ # UI Helpers
+ elif 'job_pid_status' == args.command:
+ job_pid_status();
+
+ # Test the parameters
+ elif not args.command:
+ print("ERROR: Job: Missing command")
+ return(1)
+
+ # Execute the main command
+ else:
+ ret = execute_command(args.command,job_name,job_desc,log_file,job_id,job_options,job_parent_name)
+
+ # Dump the SQL transaction data
+ if debug_sql:
+ SQL_DUMP()
+
+ if 0 != ret:
+ exit(ret)
+
+ except Exception as e:
+ print("ERROR:%s" % e)
+ print("%s" % traceback.print_exc())
+ log_error("ERROR:SRTOOL_JOB:'%s'" % (e))
+ exit(1)
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/bin/common/srtool_progress.py b/bin/common/srtool_progress.py
new file mode 100755
index 00000000..2d87811f
--- /dev/null
+++ b/bin/common/srtool_progress.py
@@ -0,0 +1,75 @@
+#################################
+# Progress helper methods
+#
+
+import sys
+
+progress_count = -1
+progress_max = 0
+progress_percent_prev = -1
+progress_enabled = False
+
+PROGRESS_STATUS_ENABLE = 0
+PROGRESS_STATUS_COUNT = 1
+PROGRESS_STATUS_MAX = 2
+
+# Debugging support
+progress_debug = False
+
+def progress_set_on(value = True):
+ global progress_enabled
+ progress_enabled = value
+ if progress_debug: print("PROGRESS_SET_ON=%s" % value)
+
+def progress_status():
+ return progress_enabled, progress_count, progress_max
+
+def progress_set_max(max):
+ global progress_max
+ progress_max = max
+ progress_show('Start',0)
+ if progress_debug: print("PROGRESS_SET_MAX=%s" % max)
+
+def progress_set_current(current):
+ global progress_count
+ progress_count = current
+ if progress_debug: print("PROGRESS_SET_CURRENT=%s" % current)
+
+def progress_get_current(current):
+ global progress_count
+ progress_count = current
+
+def progress_show(msg,add_cnt=1,force_newline=False):
+ global progress_count
+ global progress_percent_prev
+ if not progress_enabled:
+ return
+ progress_count += add_cnt
+ if progress_max:
+ progress_percent_new = (progress_count * 100) // progress_max
+ else:
+ progress_percent_new = 0
+ if progress_debug: print("PROGRESS_SHOW=%s (%s:%s)(%s:%s)" % (msg,progress_percent_prev,progress_percent_new,progress_count,progress_max))
+ if progress_percent_prev < progress_percent_new:
+ progress_percent_prev = progress_percent_new
+ else:
+ return
+ # Force a new line to unblock STDIO if application uses prints with <end="">
+ if force_newline:
+ print("\n")
+ print("[PROGRESS:%d,%d,%s]" % (progress_count,progress_max,msg))
+ sys.stdout.flush()
+
+def progress_done(msg):
+ if not progress_enabled:
+ return
+ print("[PROGRESS:%d,%d,%s]" % (progress_max,progress_max,msg))
+
+# Formally post an error message for the Job Control to catch
+def progress_error(msg):
+ if not progress_enabled:
+ return
+ print("ERROR:%s" % (msg))
+
+
+
diff --git a/bin/common/srtool_sanity_test.py b/bin/common/srtool_sanity_test.py
index 4bd116a7..4a0a91d0 100755
--- a/bin/common/srtool_sanity_test.py
+++ b/bin/common/srtool_sanity_test.py
@@ -33,18 +33,15 @@
import os
import sys
import argparse
-import sqlite3
import subprocess
from django import VERSION as DJANGO_VERSION
-# Load the srt.sqlite schema index file
-# Since it is generated from this script
-# it may not exist on the first pass
-try:
- from srt_schema import ORM
-except ImportError:
- pass
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
@@ -81,7 +78,7 @@ def get_override(key):
#
def get_host_statistics():
-
+ print("* Host statistics ...")
try:
cmd = ('uname', '-vm')
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
@@ -127,8 +124,9 @@ def get_host_statistics():
def get_database_statistics():
global table_counts
+ print("* Database statistics ... (use '-v' for details)")
# Get List of Tables:
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
tableListQuery = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY Name"
cur.execute(tableListQuery)
@@ -139,9 +137,9 @@ def get_database_statistics():
numberOfRows = cur.fetchone()[0]
table_counts[table] = numberOfRows
if verbose:
- print("%d\t%s" % (numberOfRows,table, ))
- cur.close()
- conn.close()
+ print("%11d\t%s" % (numberOfRows,table, ))
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#################################
# init test
@@ -172,13 +170,14 @@ def init_test():
get_host_statistics()
get_database_statistics()
+ print("* Table checks ...")
ret = 0
for table in init_table_list:
if (not table in table_counts) or (0 == table_counts[table]):
- print("ERROR: Table '%s' is empty" % table)
- ret = 1
+ print("Note: Table '%s' is empty" % table)
if not ret:
+ print("* Summary ...")
print("CVEs = %s" % table_counts['orm_cve'])
print("Users = %s" % table_counts['users_srtuser'])
print("Data sources= %s" % table_counts['orm_datasource'])
@@ -206,7 +205,7 @@ def main(argv):
ret = init_test()
exit(ret)
else:
- print("Command not found")
+ print("Run: './srtool_sanity_test.py -i'")
exit(1)
if __name__ == '__main__':
diff --git a/bin/common/srtool_sql.py b/bin/common/srtool_sql.py
new file mode 100755
index 00000000..673793d6
--- /dev/null
+++ b/bin/common/srtool_sql.py
@@ -0,0 +1,492 @@
+#################################
+# Python SQL helper methods
+#
+# Provide SQL extended support via wrappers
+# * Enable retry for errors, specifically database locks
+# * Capture start/stop second+millisecond timestamps
+# * Provide post-dump of time tracking and retry counts
+#
+# Solution source:
+# https://stackoverflow.com/questions/15143871/simplest-way-to-retry-sqlite-query-if-db-is-locked
+# Quote: "Python will retry regularly if the table is locked. It will not retry if the Database is locked."
+#
+
+import sys
+import time
+import subprocess
+from datetime import datetime, date
+from collections import OrderedDict
+import sqlite3
+import re
+import os
+import yaml
+from types import SimpleNamespace
+
+# Globals
+SQL_TRACE = False
+SQL_VERBOSE = False
+SQL_CONTEXT = "NN"
+SQL_TIMEOUT_MAX = 10
+SQL_TIMEOUT_TIME = 0.0001
+SQL_LOG_DIR = 'logs'
+
# Load the database configuration (srt_dbconfig.yml selects the active DB section)
SRT_BASE_DIR = os.getenv('SRT_BASE_DIR', '.')
srt_dbconfig = None
srt_dbtype = None
with open(f"{SRT_BASE_DIR}/srt_dbconfig.yml", "r") as ymlfile:
    SRT_DBCONFIG = yaml.safe_load(ymlfile)
    SRT_DBSELECT = SRT_DBCONFIG['dbselect']
    srt_dbconfig = SRT_DBCONFIG[SRT_DBSELECT]
    srt_dbtype = srt_dbconfig.get('dbtype')
if not srt_dbtype:
    # The file was present and readable (open() above would have raised
    # otherwise), so the actual problem is a missing/empty 'dbtype' key
    # in the selected section; say so instead of "missing file"
    print(f"ERROR: Missing 'dbtype' in {SRT_BASE_DIR}/srt_dbconfig.yml section '{SRT_DBSELECT}'")
    exit(1)
# Import the heavyweight DB drivers only when configured (or forced via env)
if ("mysql" == srt_dbtype) or ('1' == os.getenv('SRT_MYSQL', '0')):
    import MySQLdb
if ("postgres" == srt_dbtype) or ('1' == os.getenv('SRT_POSTGRES', '0')):
    import psycopg2
    from psycopg2.extras import RealDictCursor
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+#with open(f"{SRT_BASE_DIR}/db_migration_config.yml", "r") as migfile:
+# DB_MIG_CONFIG = yaml.safe_load(migfile)
+
+#################################
+# Debug Support
+#
+
+SQL_TRACE_log = []
+SQL_VERBOSE_log = []
+
# Enable debug tracking, optional context
def SQL_DEBUG(is_trace,context=None,is_verbose=False):
    """Enable/disable SQL trace timing and (optionally) verbose SQL logging.

    :param is_trace: enable per-action timing records (SQL_TRACE)
    :param context: optional short tag used in log file names/records
    :param is_verbose: also record each executed SQL statement
    """
    global SQL_TRACE
    global SQL_VERBOSE
    global SQL_CONTEXT
    SQL_TRACE = is_trace
    if context:
        SQL_CONTEXT = context
    if is_verbose:
        # Bug fix: this previously assigned 'context' (which may be None,
        # i.e. falsy) instead of the verbose flag itself
        SQL_VERBOSE = is_verbose
    if SQL_TRACE:
        print("SRTSQL_DEBUG:Trace=%s,Context=%s,Verbose=%s)" % (SQL_TRACE,context,is_verbose))
        sys.stdout.flush()
+
def _SQL_GET_MS():
    """Return a packed minute/second/microsecond timestamp (0 when tracing is off)."""
    if not SQL_TRACE:
        return 0
    now = datetime.now()
    # Pack as minute*1e8 + second*1e6 + microsecond, same layout as before
    return ((now.minute * 100) + now.second) * 1000000 + now.microsecond
+
def _SQL_TRACE_LOG_ADD(start,stop,loop):
    """Record one timed SQL action: [context, start, stop, retry count]."""
    global SQL_TRACE_log
    if SQL_TRACE:
        SQL_TRACE_log.append([SQL_CONTEXT,start,stop,loop])
+
def SQL_DUMP():
    """Write the accumulated trace records to logs/SQL_TRACE_<context>.log."""
    if not SQL_TRACE:
        return
    # Create the log directory on demand
    os.makedirs(SQL_LOG_DIR, exist_ok=True)
    log_file = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,SQL_CONTEXT)
    with open(log_file, 'w') as fd:
        fd.write(" (Context) Start Stop (Retries)\n")
        fd.write("===============================================\n")
        for context,start,stop,loop in SQL_TRACE_log:
            fd.write("sql_dump:(%3s) %d to %d (%d)\n" % (context[:3],start,stop,loop))
    print("SQL debug trace log:%s" % log_file)
+
def SQL_DUMP_COMPARE(param,is_csv=False):
    # Merge two trace logs (param = "tag1,tag2") into one table ordered by
    # start time, annotating each record with the idle gaps to its
    # neighbors and its own execution time; prints a table or CSV
    tag1,tag2 = param.split(',')
    if not os.path.isdir(SQL_LOG_DIR):
        os.makedirs(SQL_LOG_DIR)
    log1 = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,tag1)
    log2 = '%s/SQL_TRACE_%s.log' % (SQL_LOG_DIR,tag2)

    log = []

    # Parse "sql_dump:(TAG) <start> to <stop> (<retries>)" lines into 'log'
    def load_log(logfile):
        p = re.compile(r'sql_dump:\((\w+)\) (\d+) to (\d+) \((\d+)\)')
        with open(logfile, 'r') as fs:
            for line in fs.readlines():
                # sql_dump:(JOB) 39290879 to 39293849 (0)
                m = p.match(line)
                if not m:
                    continue
                tag,start,stop,retry = m.groups()
                # NOTE: 'retry' stays a string; it is only re-printed with
                # '%s' below, so no int() conversion is required
                log.append([tag,int(start),int(stop),retry])
    # Load the logs
    load_log(log1)
    load_log(log2)
    # Sort the log
    def sortOnStart(e):
        return e[1]
    log.sort(key=sortOnStart)

    # Display log table with diffs
    if not is_csv:
        print(" # |Tag|Start uSec|Stop uSec |Re|(diff prev )|(diff next )|(diff write)")
        print("======|===|==========|==========|==|============|============|============")
    else:
        print("Index,Tag,Start,Stop,Retries,Diff_prev,Diff_next,Diff write")
    logmax = len(log)
    i = -1
    for tag,start,stop,retry in log:
        i += 1
        # Gap between this record's start and the previous record's stop
        if i == 0:
            pre_diff = 0
        else:
            pre_diff = log[i][1] - log[i-1][2]
        # Gap between the next record's start and this record's stop
        if i == (logmax - 1):
            post_diff = 0
        else:
            post_diff = log[i+1][1] - log[i][2]
        # Time spent inside this SQL action itself
        write_diff = log[i][2] - log[i][1]
        if not is_csv:
            print("[%4d]:%s,%010d,%010d,%s (^ %8d) (v %8d) (~ %8d)" % (i,tag,start,stop,retry,pre_diff,post_diff,write_diff))
        else:
            print("%d,%s,%010d,%010d,%s,%8d,%8d,%8d" % (i,tag,start,stop,retry,pre_diff,post_diff,write_diff))
    if SQL_VERBOSE:
        print('')
        print('Executed SQL commands:')
        for line in SQL_VERBOSE_log:
            print(line)
        print('')
+
def SQL_FETCH_INDEXES(conn, dbconfig=None):
    # Build the list of (TABLE_COLUMN, ordinal) pairs used to generate the
    # srt_schema ORM index file; returns [("ERROR", msg)] on failure
    # Define the database type connection
    if not dbconfig:
        dbconfig = srt_dbconfig
    dbtype = dbconfig['dbtype']

    #Goal: Create a data structure that has:
    #   ordered list of tables
    #   table column names
    #   index(ordinal) of table
    # columns: (table_name, column_name, ordinal_postion) -> Should be list[list]
    #   Formatting should not be done in the subroutine...do it in srtool_common.py
    # (name, value) -> name is table_name,_column name and value is the index
    #   Should be returned as tuples rather than preformatted strings
    # Returns should be consistent (also for error)

    cur = conn.cursor()
    if 'postgres' == dbtype:
        # Read the column catalog for all public tables in ordinal order
        sql = "SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position;"
        print("cursor stat: {}".format(cur))
        cur.execute(sql)
        columns = cur.description
        # Re-key each fetched row as {column_header: value}
        results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
        tables = {}
        # TODO last line of for is hardcoded in postgres format
        for i in results:
            if i['table_name'] not in tables:
                # {'table_name' : 'column_name', 'ordinal_position'}
                tables[i['table_name']] = {'ordinal_position' : 'column_name'}
            # information_schema ordinals are 1-based; store 0-based
            tables[i['table_name']][i['ordinal_position']-1] = i['column_name']
        for val_d in tables:
            tables[val_d].pop('ordinal_position')
        ret_list = []
        for table in tables:
            table_items = tables[table].items()
            sorted_tabl = sorted(table_items)
            for offset,i in enumerate(sorted_tabl):
                # Strip the Django 'orm_' prefix for the schema constants
                table = table.replace('orm_','')
                ret_list.append(("{}_{}".format(table.upper(), i[1].upper()), offset))
        return(ret_list)
    elif 'sqlite' == dbtype:
        # Parse 'sqlite3 <db> .schema' output; the CREATE TABLE text
        # carries the column order directly
        database_file = dbconfig['path']
        create_re = re.compile(r"CREATE TABLE[A-Z ]* \"(\w+)\" \((.+)\);")
        try:
            cmd = ('sqlite3', database_file, '.schema')     # must be abstracted
            output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            return([("ERROR","(%d) %s" % (e.returncode, e.output))])
        ret_list = []
        # NOTE(review): the print() calls below look like leftover debug
        # instrumentation from the investigation described in the next
        # comments - consider removing them once the parsing is confirmed
        # print('RET LIST: {}'.format(ret_list))
        # print('cmd OUTPUT: {}'.format(output))
        # problem -> for loop is not executing (nothing returned from command 'sqlite3 srt-backup.sqlite .schema')
        for line in output.decode("utf-8").splitlines():
            print(line)
            print('In for loop - retlist: {}'.format(ret_list))
            match = create_re.match(line)
            if not match:
                print('ERROR: no match')
                continue

            table = match.group(1).upper()
            table = table.replace('ORM_','')

            columns = match.group(2)
            for i, col in enumerate(columns.split(',')):
                col = col.strip()
                name = col[1:]
                #
                try:
                    name = name[:name.index('"')]
                    print('NOTE: passed try #2: {}'.format(name))
                except Exception as e:
                    return([("ERROR","%s:%s:" % (e,col))])
                # NOTE(review): this assignment overwrites the quoted-name
                # parse above and keeps the surrounding quotes in 'name' -
                # confirm which of the two parses is actually intended
                name = col[:col.index(' ')]
                ret_list.append(("%s_%s" % (table.upper(),name.upper()), i))
        return(ret_list)
    else:
        return([("ERROR","No support for MySQL or MariahDB. Update coming..."),])
+
+#################################
+# SQL wrapper methods
+#
+
def _SQL_ACTION(action,cur_conn,sql=None,params=None,dbconfig=None):
    """Run one SQL action with retry support for transient failures
    (e.g. a locked SQLite database).

    :param action: 'exec' (cur_conn is a cursor) or 'commit' (a connection)
    :param sql: statement for 'exec', written with SQLite '?' placeholders
    :param params: optional bind parameters
    :param dbconfig: overrides the module-level srt_dbconfig

    Fixes over the prior version:
    * the unconditional 'break' inside 'finally' exited the loop after the
      first attempt, so no retry ever happened and the for/else give-up
      path was unreachable
    * the give-up path called undefined names (SQL_TRACE_log_add,
      sql_dump) instead of _SQL_TRACE_LOG_ADD()/SQL_DUMP()
    * SQL normalization ran on every retry, so 'RETURNING *' could be
      appended repeatedly to the same INSERT statement
    """
    # Define the database type connection
    if not dbconfig:
        dbconfig = srt_dbconfig
    dbtype = dbconfig['dbtype']

    ret = None
    timeout_count = 0
    if SQL_VERBOSE:
        SQL_VERBOSE_log.append("SQL_ACTION:%s:%s:%s:%s:" % (action,cur_conn,sql,params))
    sleep_time = SQL_TIMEOUT_TIME

    # Normalize the SQLite-flavored SQL once, before the retry loop
    if ('exec' == action) and sql:
        if not dbtype == "sqlite":
            # MySQL/PostgreSQL use '%s' placeholders instead of '?'
            sql = sql.replace("?", "%s")
        if dbtype == "postgres":
            sql = sql.replace('`', '"')     # backticks -> standard quoting
            if "INSERT INTO" in sql:
                sql += " RETURNING *"       # expose the inserted row to the caller
            # PostgreSQL folds unquoted identifiers to lower case, so quote
            # the known camelCase column names
            camel_case_columns = ["lastModifiedDate", "publishedDate", "cvssV3_baseScore", "cvssV3_baseSeverity", "cvssV2_baseScore", "cvssV2_severity"]
            for col in camel_case_columns:
                if col in sql and f'"{col}"' not in sql:
                    sql = sql.replace(f'{col}', f'"{col}"')

    start = _SQL_GET_MS()
    for x in range(0, SQL_TIMEOUT_MAX):
        try:
            if 'exec' == action:
                if params:
                    ret = cur_conn.execute(sql, params)
                else:
                    ret = cur_conn.execute(sql)
            elif 'commit' == action:
                ret = cur_conn.commit()
        except Exception as e:
            # Transient failure: report, back off briefly, and retry
            print(f"Error occured while running\nsql: {sql}\nparams:{params}\naction:{action}")
            print(e)
            time.sleep(sleep_time)
            timeout_count += 1
            continue
        # Success: record the timing and stop retrying
        _SQL_TRACE_LOG_ADD(start,_SQL_GET_MS(),timeout_count)
        break
    else:
        # Give up, dump what we had, and re-run once unguarded so the real
        # exception propagates to the caller
        _SQL_TRACE_LOG_ADD(start,_SQL_GET_MS(),timeout_count)
        SQL_DUMP()
        if 'exec' == action:
            if params:
                ret = cur_conn.execute(sql, params)
            else:
                ret = cur_conn.execute(sql)
        elif 'commit' == action:
            ret = cur_conn.commit()
    if not dbtype == "sqlite":
        # Non-SQLite cursors do not chain from execute(); return the
        # cursor/connection itself so callers can fetch from it
        ret = cur_conn
    return ret
+
def SQL_CONNECT(column_names=False,dbconfig=None):
    """Open a connection to the configured database.

    :param column_names: request name-addressable rows (sqlite3.Row for
        SQLite, RealDictCursor for PostgreSQL)
    :param dbconfig: overrides the module-level srt_dbconfig
    :return: a DB-API connection object
    """
    cfg = dbconfig if dbconfig else srt_dbconfig
    dbtype = cfg['dbtype']

    if "mysql" == dbtype:
        return MySQLdb.connect(
            passwd=cfg["passwd"],
            db=cfg["name"],
            host=cfg["host"],
            user=cfg["user"],
            port=cfg["port"]
        )

    if "postgres" == dbtype:
        kwargs = dict(
            password=cfg["passwd"],
            database=cfg["name"],
            host=cfg["host"],
            user=cfg["user"],
            port=cfg["port"],
        )
        if column_names:
            kwargs['cursor_factory'] = RealDictCursor
        return psycopg2.connect(**kwargs)

    # Default: SQLite file database
    connection = sqlite3.connect(cfg["path"])
    if column_names:
        connection.row_factory = sqlite3.Row
    return connection
+
def SQL_CURSOR(conn,dbconfig=None):
    """Return a cursor for 'conn' ('dbconfig' accepted for API symmetry)."""
    return conn.cursor()
+
def SQL_EXECUTE(cur,sql,params=None,dbconfig=None):
    """Execute 'sql' on cursor 'cur' through the retrying action wrapper."""
    return _SQL_ACTION('exec', cur, sql, params, dbconfig)
+
def SQL_COMMIT(conn,dbconfig=None):
    """Commit 'conn' through the retrying action wrapper.

    Bug fix: 'dbconfig' was previously passed positionally, landing in
    _SQL_ACTION's 'sql' parameter; pass it by keyword so a caller-supplied
    database configuration is actually honored.
    """
    return _SQL_ACTION('commit', conn, dbconfig=dbconfig)
+
def SQL_CLOSE_CUR(cur,dbconfig=None):
    """Close a cursor ('dbconfig' accepted for API symmetry)."""
    return cur.close()
+
def SQL_CLOSE_CONN(conn,dbconfig=None):
    """Close a connection ('dbconfig' accepted for API symmetry)."""
    return conn.close()
+
def SQL_GET_LAST_ROW_INSERTED_ID(cur,dbconfig=None):
    """Return the primary key of the most recently inserted row.

    For PostgreSQL this reads the row produced by the 'RETURNING *'
    clause that _SQL_ACTION appends to INSERTs; otherwise it uses the
    DB-API 'lastrowid' attribute.
    """
    cfg = dbconfig if dbconfig else srt_dbconfig
    if 'postgres' == cfg['dbtype']:
        return SQL_FETCH_ONE(cur).id
    return cur.lastrowid
+
def SQL_FETCH_ONE(cur,dbconfig=None):
    """Fetch one row as an attribute-addressable namespace keyed by column name."""
    names = [desc[0] for desc in cur.description]
    return SimpleNamespace(**dict(zip(names, cur.fetchone())))
+
def SQL_FETCH_ALL(cur,dbconfig=None):
    """Fetch all rows as attribute-addressable namespaces keyed by column name."""
    names = [desc[0] for desc in cur.description]
    return [SimpleNamespace(**dict(zip(names, row))) for row in cur.fetchall()]
+
def GET_DB_TYPE(dbconfig=None):
    """Return the configured database engine name (e.g. 'sqlite', 'postgres')."""
    cfg = dbconfig if dbconfig else srt_dbconfig
    return cfg['dbtype']
+
def SQL_BATCH_WRITE(cur_conn, table, records, dbconfig=None, fields=None, override_values=None):
    '''
    Batch write wrapper function
    - Records must contain tuples of the same length

    :param cur_conn: SQL connection's cursor
    :param table: target table name
    :param records: list of tuples containing records to be inserted
    :param dbconfig: dbconfig['dbtype'] contains DB type
    :param fields: list of specified fields to insert into
    :param override_values: list of specified values (or a raw VALUES string)

    :return: SQL DB connection's cursor

    Fixes over the prior version: an empty 'records' list no longer raises
    IndexError, an unsupported override_values type raises a clear error
    instead of a NameError on '_ov', 'raise BaseException' became Exception,
    and an unsupported dbtype raises instead of silently dropping the insert.
    '''

    # Define the database type connection
    if not dbconfig:
        dbconfig = srt_dbconfig
    dbtype = dbconfig['dbtype']

    # invalid parameters check
    if cur_conn is None or table is None or records is None:
        raise Exception("SQL Batch Write Failed: invalid parameters provided")
    if not isinstance(records, (list, tuple)):
        raise Exception("SQL Batch Write Failed: records must be of type 'list' or 'tuple'")
    if not records:
        # Nothing to insert; succeed without emitting an empty INSERT
        return cur_conn

    # invalid number of fields supplied check
    std_record_ct = len(records[0])
    for record in records:
        if len(record) != std_record_ct:
            raise Exception("SQL Batch Write Failed: incorrect number of fields supplied")

    # optional explicit column list
    if fields is not None:
        _fields = "(" + ','.join([str(field) for field in fields]) + ")"
    else:
        _fields = ''

    # Build the VALUES clause: default placeholders, an explicit value
    # list, or a caller-supplied raw string
    def _values_clause(placeholder):
        if override_values is None:
            return "(" + ','.join([placeholder] * std_record_ct) + ")"
        if isinstance(override_values, list):
            return "(" + ','.join([str(ov) for ov in override_values]) + ")"
        if isinstance(override_values, str):
            return override_values
        raise Exception("SQL Batch Write Failed: unsupported override_values type '%s'" % type(override_values).__name__)

    # bulk insert
    if dbtype == 'sqlite':
        cur_conn.executemany(f"INSERT INTO {table}{_fields} VALUES{_values_clause('?')};", records)
    elif dbtype == 'postgres':
        psycopg2.extras.execute_batch(cur_conn, f"INSERT INTO {table}{_fields} VALUES{_values_clause('%s')};", records)
    else:
        raise Exception("SQL Batch Write Failed: unsupported dbtype '%s'" % dbtype)

    # commit is left to the caller
    return cur_conn
+
+
def SQL_BATCH_UPDATE(cur_conn, table, values_list, set_field, where_field, dbconfig=None):
    '''
    Batch update wrapper function
    - Records must contain tuples of the same length

    :param cur_conn: SQL connection's cursor
    :param table: target table name
    :param values_list: parameter values provided to the SQL query
    :param set_field: list containing the 'SET' parameterized fields in the SQL query
    :param where_field: list containing the 'WHERE' parameterized fields in the SQL query
    :param dbconfig: dbconfig['dbtype'] contains DB type

    :return: SQL DB connection's cursor

    Bug fix: multiple WHERE conditions are now joined with ' AND ' - the
    previous ', ' join produced invalid SQL ("WHERE a = ?, b = ?") for
    more than one field. Also, an empty values_list no longer raises
    IndexError.
    '''

    # Define the database type connection
    if not dbconfig:
        dbconfig = srt_dbconfig
    dbtype = dbconfig['dbtype']

    # invalid parameters check
    if (cur_conn is None) or (table is None) or (set_field is None) or (where_field is None) or (values_list is None):
        raise Exception("SQL Batch Update Failed: invalid parameters provided")
    if not values_list:
        # Nothing to update
        return cur_conn

    # invalid number of fields supplied check
    if (len(set_field) + len(where_field)) != len(values_list[0]):
        raise Exception(f"SQL Batch Update Failed: number of fields and values supplied mismatches ({len(set_field)},{len(where_field)},{len(values_list)})")

    # generate the parameterized UPDATE statement for the engine's placeholder
    placeholder = '?' if dbtype == 'sqlite' else '%s'
    set_comm = ", ".join([f"{s_field} = {placeholder}" for s_field in set_field])
    where_comm = " AND ".join([f"{w_field} = {placeholder}" for w_field in where_field])
    sql = f"UPDATE {table} SET {set_comm} WHERE {where_comm};"

    if dbtype == 'sqlite':
        cur_conn.executemany(sql, values_list)
    elif dbtype == 'postgres':
        psycopg2.extras.execute_batch(cur_conn, sql, values_list)

    # commit is left to the caller
    return cur_conn
+
diff --git a/bin/common/srtool_test.py b/bin/common/srtool_test.py
new file mode 100755
index 00000000..b3af8033
--- /dev/null
+++ b/bin/common/srtool_test.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the common SRTool data source files
+#
+
+import os
+import sys
+import re
+import csv
+import json
+import argparse
+from common.srtool_sql import *
+import subprocess
+from time import sleep
+from datetime import datetime
+
+# Load the srt.sqlite schema index file
+# Since it is generated from this script
+# it may not exist on the first pass
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+try:
+ from common.srtool_progress import *
+ from common.srt_schema import ORM
+except:
+ # Do a pass so that '--generate-schema-header' can fix it
+ print("Warning: srt_schema not yet created or bad format")
+ pass
+
+# Setup:
+verbose = False
+cmd_skip = 0
+cmd_count = 0
+cmd_test = False
+
+srtDbName = 'srt.sqlite'
+packageKeywordsFile = 'data/package_keywords.csv'
+notifyCategoriesFile = 'data/notify-categories.json'
+
+#################################
+# Helper methods
+#
+
# Per-run override flags, keyed by name
overrides = {}

def set_override(key,value=None):
    """Record an override flag, either explicitly or from the environment.

    With no explicit value, an environment variable starting with '1'
    maps to 'yes', any other value to 'no', and an unset variable to ''.
    Non-empty overrides are echoed for visibility.
    """
    if value is not None:
        overrides[key] = value
    elif key in os.environ:
        overrides[key] = 'yes' if os.environ[key].startswith('1') else 'no'
    else:
        overrides[key] = ''
    if overrides[key]:
        print("OVERRIDE: %s = %s" % (key,overrides[key]))
+
def get_override(key):
    """Return the recorded override for 'key', or '' when unset."""
    return overrides.get(key, '')
+
def get_name_sort(cve_name):
    """Return a sortable form of a CVE name, zero-padding the number to 7 digits.

    Names not in 'CVE-YYYY-NNNN' form are returned unchanged.
    """
    try:
        prefix, year, number = cve_name.split('-')[:3]
        return '%s-%s-%07d' % (prefix, year, int(number))
    except Exception:
        # Malformed name: fall back to the raw string as the sort key
        return cve_name
+
def get_tag_key(tag,key,default=None):
    """Look up 'key' in a JSON-encoded tag string, returning 'default' when absent."""
    return json.loads(tag).get(key, default)
+
+###############################################################
+#
+#
+
def reset_new():
    # Clear 'score_date' on all NEW-status CVEs so the scoring pass will
    # re-examine them; work is committed in batches of 200 with a pause to
    # let other database clients proceed
    global recommends
    global cmd_skip
    global cmd_count

    conn = SQL_CONNECT()
    cur = conn.cursor()
    cur_write = conn.cursor()   # separate cursor: writes while 'cur' iterates
    cur_ds = conn.cursor()
    is_change = False
    write_count = 0

    # Cap this
    if cmd_count == 0:
        cmd_count = 201
    progress_set_max(cmd_count)

    # Scan the open CVEs
    sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NOT NULL;" % (ORM.STATUS_NEW)
    cur.execute(sql)
    for i,cve in enumerate(cur):
        cve_name = cve[ORM.CVE_NAME]
        progress_show(cve_name)

        # Progress indicator support
        if 0 == i % 10:
            print('%04d: %20s\r' % (i,cve_name), end='')
        if (0 == i % 200) and (not cmd_skip) and is_change:
            # Batch commit, then breathe so other clients are not starved
            conn.commit()
            print("%4d: COMMIT" % i)
            sleep(2)
            is_change = False
        # Development/debug support
        if cmd_skip:
            if i < cmd_skip:
                continue
            else:
                cmd_skip = 0
        if cmd_count:
            if (i - cmd_skip) > cmd_count:
                print("Count return: %s,%s" % (i,cmd_count))
                break

        # NULL the score_date so this CVE gets rescored
        sql = ''' UPDATE orm_cve
                  SET score_date = ?
                  WHERE id = ?'''
        cur_write.execute(sql, (None, cve[ORM.CVE_ID]))
        write_count += 1
        is_change = True

    # Flush any uncommitted tail batch
    if is_change:
        conn.commit()
        print("COMMIT")
    print("\nUpdated CVEs=%d" % (write_count))
+
+#################################
+# main loop
+#
+
def main(argv):
    """Command-line entry point for the SRTool test helper.

    Fixes: help-text typo ('Rese new'), argparse description copy-pasted
    from srtool_common.py, and the 'argv' parameter being silently ignored
    (parse_args() re-read sys.argv; behavior is unchanged for the normal
    'main(sys.argv[1:])' call path).
    """
    global verbose
    global update_skip_history
    global cmd_skip
    global cmd_count
    global cmd_test

    # setup
    parser = argparse.ArgumentParser(description='srtool_test.py: test SRTool common source data')
    parser.add_argument('--reset-new', action='store_const', const='reset_new', dest='command', help='Reset new CVEs for score test')

    parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
    parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test run')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
    args = parser.parse_args(argv)

    verbose = args.verbose
    cmd_test = args.test
    cmd_skip = int(args.skip) if args.skip is not None else 0
    cmd_count = int(args.count) if args.count is not None else 0
    progress_set_on(args.do_progress)

    if verbose:
        print('srtool_test %s' % args)

    if 'reset_new' == args.command:
        reset_new()

    else:
        print("Command not found")
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ main(sys.argv[1:])
+
+
+
diff --git a/bin/common/srtool_update.py b/bin/common/srtool_update.py
index 92f4479d..3227534b 100755
--- a/bin/common/srtool_update.py
+++ b/bin/common/srtool_update.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,17 +23,17 @@
import os
import sys
import argparse
-import sqlite3
import json
import time
from datetime import datetime, timedelta
+import pytz
import traceback
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
-
+from common.srtool_sql import *
# Setup:
verbose = False
master_log = ''
@@ -86,24 +86,107 @@ def get_tag_key(tag,key,default=''):
# ONDEMAND = 5 "{}" # only on demand
# ONSTARTUP = 6 "{}" # on every SRTool start up
-def run_updates(force_all,name_filter,update_skip_history,is_trial):
def next_refresh_date(update_frequency,update_time_keys,last_updated_date,display=False):
    # Compute the next scheduled refresh datetime for a data source
    #
    # update_frequency: one of the ORM.DATASOURCE_* frequency constants
    # update_time_keys: JSON string of schedule keys, e.g. '{"hour":"2"}'
    # last_updated_date: datetime of the previous update
    # display: when True, also print the computed date for debugging

    # Get the update keys (each falls back to a default when absent)
    delta_minutes = int(get_tag_key(update_time_keys,'minutes','10'))
    delta_months = int(get_tag_key(update_time_keys,'months','1'))
    minute_of_day = int(get_tag_key(update_time_keys,'minute','10'))
    hour_of_day = int(get_tag_key(update_time_keys,'hour','12'))
    weekday_of_week = int(get_tag_key(update_time_keys,'weekday','4'))
    day_of_week = int(get_tag_key(update_time_keys,'day','4'))

    # Calulate the next update datetime
    if ORM.DATASOURCE_MINUTELY == update_frequency:
        # Time relative to last
        test_date = last_updated_date + timedelta(minutes=delta_minutes)
    else:
        # Time relative to a time_of_day
        test_date = last_updated_date
        if ORM.DATASOURCE_HOURLY == update_frequency:
            test_date = test_date.replace(minute = minute_of_day)
            if test_date < last_updated_date:
                # Target minute already passed this hour: roll to next hour
                test_date += timedelta(hours=1)
        elif ORM.DATASOURCE_DAILY == update_frequency:
            test_date = test_date.replace(hour = hour_of_day)
            test_date = test_date.replace(minute = minute_of_day)
            if test_date < last_updated_date:
                test_date += timedelta(days=1)
        elif ORM.DATASOURCE_WEEKLY == update_frequency:
            test_date = test_date.replace(hour = hour_of_day)
            test_date = test_date.replace(minute = minute_of_day)
            weekday = test_date.weekday()
            # NOTE(review): when today IS the target weekday this always
            # schedules a full week ahead, even if today's target time has
            # not yet passed - confirm this is the intended behavior
            if weekday >= weekday_of_week:
                test_date += timedelta(days=(7 + weekday_of_week - weekday))
            elif weekday < weekday_of_week:
                test_date += timedelta(days=(weekday_of_week - weekday))
        elif ORM.DATASOURCE_MONTHLY == update_frequency:
            test_date = test_date.replace(day = day_of_week)
            test_date = test_date.replace(hour = hour_of_day)
            test_date = test_date.replace(minute = minute_of_day)
            if test_date < last_updated_date:
                # NOTE(review): a fixed 31-day jump can overshoot the target
                # day-of-month in shorter months - confirm acceptable drift
                test_date += timedelta(days=31)
        else:
            print("ERROR:unknown update '%s'" % update_frequency)
            exit(1)

    if display:
        # ORM.DATASOURCE_DATETIME_FORMAT
        print("%s <= %s,%s,%s" % (test_date.strftime("%c"),last_updated_date.strftime("%c"),update_frequency,update_time_keys))
    return(test_date)
+
def update_unit_test():
    # Manual sanity check: print next_refresh_date() results for each
    # frequency class against crafted 'last updated' times (no assertions;
    # the output is inspected by eye)
    # datetime(year, month, day, hour=0, minute=0, tzinfo=None)
    date_now = datetime.now(pytz.utc)
    print("Unit test the update differentials and modes")

    next_refresh_date(ORM.DATASOURCE_MINUTELY,"{\"minutes\":\"10\"}",date_now,True)
    next_refresh_date(ORM.DATASOURCE_MINUTELY,"{\"minutes\":\"10\"}",date_now.replace(minute=59),True)

    next_refresh_date(ORM.DATASOURCE_HOURLY,"{\"minute\":\"10\"}",date_now.replace(minute=11),True)
    # NOTE(review): key 'minutes' below looks like a typo for 'minute' -
    # the HOURLY branch reads 'minute', so this case exercises the default
    next_refresh_date(ORM.DATASOURCE_HOURLY,"{\"minutes\":\"10\"}",date_now.replace(minute=9),True)

    next_refresh_date(ORM.DATASOURCE_DAILY,"{\"hour\":\"2\"}",date_now.replace(hour=1),True)
    next_refresh_date(ORM.DATASOURCE_DAILY,"{\"hour\":\"2\"}",date_now.replace(hour=3),True)

    # May need to adjust this relative to today's test day of week
    next_refresh_date(ORM.DATASOURCE_WEEKLY,"{\"weekday\":\"5\",\"hour\":\"2\"}",date_now.replace(day=3,hour=1),True)
    next_refresh_date(ORM.DATASOURCE_WEEKLY,"{\"weekday\":\"5\",\"hour\":\"2\"}",date_now.replace(day=4,hour=1),True)
    next_refresh_date(ORM.DATASOURCE_WEEKLY,"{\"weekday\":\"5\",\"hour\":\"2\"}",date_now.replace(day=6,hour=3),True)

    next_refresh_date(ORM.DATASOURCE_MONTHLY,"{\"day\":\"5\",\"hour\":\"2\"}",date_now.replace(day=4,hour=1),True)
    next_refresh_date(ORM.DATASOURCE_MONTHLY,"{\"day\":\"25\",\"hour\":\"2\"}",date_now.replace(day=24,hour=1),True)
    next_refresh_date(ORM.DATASOURCE_MONTHLY,"{\"day\":\"25\",\"hour\":\"2\"}",date_now.replace(day=26,hour=1),True)
+
def time_delta_to_dhms(time_to_go):
    """Split a timedelta into [days, hours, minutes, seconds]."""
    # timedelta.seconds is the intra-day remainder (0..86399)
    minutes, seconds = divmod(time_to_go.seconds, 60)
    hours, minutes = divmod(minutes, 60)
    return [time_to_go.days, hours, minutes, seconds]
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
+def run_updates(force_all,name_filter,update_skip_history,is_trial):
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
+ # get local timezone
+# local_tz = datetime.now().astimezone().tzinfo
+# time_now = datetime.now(local_tz) #datetime.now(pytz.utc)
time_now = datetime.now() #datetime.now(pytz.utc)
- if verbose:
- print("SRTool Update: time_now = %s" % time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
status_str = "============================================================\n"
status_str += "Update: Date=%s,Filter='%s',Force=%s,Skip_History=%s\n" % (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),name_filter,force_all,update_skip_history)
#get sources that have update command
- sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
+ sources = SQL_EXECUTE(cur, '''SELECT * FROM orm_datasource''').fetchall()
for source in sources:
# Only process datasoures with update command
if not source[ORM.DATASOURCE_UPDATE]:
continue
+ elif 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
+ # Data source disabled
+ continue
# Test filter
if 'all' != name_filter:
@@ -121,96 +204,76 @@ def run_updates(force_all,name_filter,update_skip_history,is_trial):
# testdate = datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *,
# testdiff = timedelta(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0)
- #print("Update datasource:'%s'" % source[ORM.DATASOURCE_DESCRIPTION])
-
# Get the datasource values
update_frequency = source[ORM.DATASOURCE_UPDATE_FREQUENCY]
+ if update_frequency in (ORM.DATASOURCE_ONDEMAND,ORM.DATASOURCE_ONSTARTUP,ORM.DATASOURCE_PREINIT):
+ continue
if not source[ORM.DATASOURCE_LASTUPDATEDDATE]:
- # Force update if no registed updated date for datasource (i.e. at Init phase)
- last_updated_date = time_now - timedelta(days=365)
+ if ORM.DATASOURCE_MINUTELY == update_frequency:
+ # Force MINUTELY to the current time)
+ last_updated_date = time_now
+ else:
+ # Force update if no registed updated date for datasource (i.e. at Init phase)
+ last_updated_date = time_now - timedelta(days=365)
+ sql = "UPDATE orm_datasource SET lastUpdatedDate=? WHERE id=?"
+ ret = SQL_EXECUTE(cur, sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
+ SQL_COMMIT(conn)
else:
last_updated_date = datetime.strptime(source[ORM.DATASOURCE_LASTUPDATEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
- # Get the update presets
- update_time = source[ORM.DATASOURCE_UPDATE_TIME]
- delta_minutes = get_tag_key(update_time,'minutes',None)
- delta_minute = get_tag_key(update_time,'minute',None)
- delta_hour = get_tag_key(update_time,'hour',None)
- delta_weekday = get_tag_key(update_time,'weekday',None)
- delta_day = get_tag_key(update_time,'day',None)
- # Calulate the next update datetime
- if ORM.DATASOURCE_MINUTELY == update_frequency:
- if not delta_minutes:
- print("ERROR:Missing minutes in '%s' for '%s'" % (source[ORM.DATASOURCE_DESCRIPTION],update_time))
- delta_minutes = 10
- testdiff = timedelta(minutes=int(delta_minutes))
- elif ORM.DATASOURCE_HOURLY == update_frequency:
- testdiff = timedelta(hours=1)
- elif ORM.DATASOURCE_DAILY == update_frequency:
- testdiff = timedelta(days=1)
- elif ORM.DATASOURCE_WEEKLY == update_frequency:
- testdiff = timedelta(weeks=1)
- elif ORM.DATASOURCE_MONTHLY == update_frequency:
- testdiff = timedelta(months=1)
- elif ORM.DATASOURCE_ONDEMAND == update_frequency:
- continue
- elif ORM.DATASOURCE_ONSTARTUP == update_frequency:
- continue
- testdate = last_updated_date + testdiff
-
- # Adjust for update presets
- if None != delta_minute:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, testdate.day, testdate.hour, int(delta_minute), testdate.second)
- if None != delta_day:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, testdate.day, int(delta_hour), testdate.minute, testdate.second)
- if None != delta_day:
- # Force to selected day of month
- testdate = datetime(testdate.year, testdate.month, int(delta_day), testdate.hour, testdate.minute, testdate.second)
- if None != delta_weekday:
- # Force to selected day of week
- testdiff = timedelta( days=(int(delta_weekday) - testdate.weekday()) )
- testdate += testdiff
+
+ # Get the calculated next update datetime
+ update_time = source[ORM.DATASOURCE_UPDATE_TIME]
+ testdate = next_refresh_date(update_frequency,update_time,last_updated_date)
# Not yet?
+ frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
if testdate > time_now:
time_to_go = testdate - time_now
- frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
- status_str += " Skip (next=%s in days=%2s minutes=%4s,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),time_to_go.days,time_to_go.seconds//60,frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
+ dhms = time_delta_to_dhms(time_to_go)
+ if ORM.DATASOURCE_MINUTELY == update_frequency:
+ status_str += " Pend (next<%s in days=%2d hours=%2d mins=%02d:%02d,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),dhms[0],dhms[1],dhms[2],dhms[3],frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
+ else:
+ status_str += " Pend (next<%s in days=%2d hours=%2d minutes=%2d,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT),dhms[0],dhms[1],dhms[2],frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
continue
else:
- status_str += " UPDATE '%s': Time reached (%s)\n" % (source[ORM.DATASOURCE_DESCRIPTION],testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT))
+ status_str += " GO (GO >%s' (%s) ,%7s):%s\n" % (testdate.strftime(ORM.DATASOURCE_DATETIME_FORMAT), last_updated_date , frequency_str,source[ORM.DATASOURCE_DESCRIPTION])
# Execute the update
if is_trial:
print("TRIAL: Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
status_str += " > TRIAL: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
else:
- # First update the datasource's last_updated_date so avoid dual triggers
+ # First update the datasource's last_updated_date to avoid dual triggers
# (e.g. a manual test run on top of an automatic run)
sql = "UPDATE orm_datasource SET lastUpdatedDate=? WHERE id=?"
- cur_write.execute(sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
- conn.commit()
+ ret = SQL_EXECUTE(cur, sql, (time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_ID],) )
+ SQL_COMMIT(conn)
- print("Update required\t...\texecuting '%s'" % (source[ORM.DATASOURCE_UPDATE]))
+ print("Update required\t...\texecuting '%s' (%s)" % (source[ORM.DATASOURCE_UPDATE],time_now.strftime(ORM.DATASOURCE_DATETIME_FORMAT)))
status_str += " > EXECUTE: execute '%s'\n" % (source[ORM.DATASOURCE_UPDATE])
- master_write("SRTOOL_UPDATE:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
+ master_write("SRTOOL_UPDATE_STRT:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
update_command = source[ORM.DATASOURCE_UPDATE]
if force_all:
update_command += " --force"
if update_skip_history:
update_command += " --update-skip-history"
- if update_command.startswith('./'):
+ if update_command.startswith('!'):
+ update_command = update_command[1:]
+ elif not update_command.startswith('/'):
update_command = os.path.join(script_pathname, update_command)
os.system("echo 'Update:%s,%s' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
- os.system(update_command)
+
+ #
+ # bin/common/srtool_job.py -c "<cmnd>" -j 1 -l update_logs/run_job.log
+ os.system("bin/common/srtool_job.py --name %s --command \"%s\" --job-id 1 --log update_logs/run_job.log" % (source[ORM.DATASOURCE_NAME],update_command))
+ #
os.system("echo 'Done:%s,%s' >> %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),update_command,os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
master_write("SRTOOL_UPDATE_DONE:%s:%s:%s\n" %(datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),source[ORM.DATASOURCE_DESCRIPTION],source[ORM.DATASOURCE_UPDATE]))
# Take a breath, let any commits settle
time.sleep(10)
- conn.close()
+ SQL_CLOSE_CONN(conn)
# Status summary
with open(os.path.join(script_pathname,UPDATE_STATUS_LOG), 'w') as status_file:
@@ -218,31 +281,73 @@ def run_updates(force_all,name_filter,update_skip_history,is_trial):
if verbose:
print(status_str)
+def fetch_updates_dhm():
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
+ time_now = datetime.now() #datetime.now(pytz.utc)
+ # Get sources
+ sources = SQL_EXECUTE(cur, '''SELECT * FROM orm_datasource ORDER BY id ASC''').fetchall()
+ for source in sources:
+ update_time = source[ORM.DATASOURCE_UPDATE_TIME]
+ update_frequency = source[ORM.DATASOURCE_UPDATE_FREQUENCY]
+ frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
+ # Non-update states
+ if update_frequency in (ORM.DATASOURCE_ONDEMAND,ORM.DATASOURCE_ONSTARTUP,ORM.DATASOURCE_PREINIT):
+ print("%s,(%s)" % (source[ORM.DATASOURCE_ID],frequency_str))
+ continue
+ elif not source[ORM.DATASOURCE_UPDATE]:
+ print("%s,(NoUpdate)" % source[ORM.DATASOURCE_ID])
+ continue
+# elif 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
+# print("%s,(Disabled)" % source[ORM.DATASOURCE_ID])
+# continue
+ # Get the datasource values
+ if not source[ORM.DATASOURCE_LASTUPDATEDDATE]:
+ last_updated_date = time_now
+ else:
+ last_updated_date = datetime.strptime(source[ORM.DATASOURCE_LASTUPDATEDDATE], ORM.DATASOURCE_DATETIME_FORMAT)
+ # Get the calculated next update datetime
+ testdate = next_refresh_date(update_frequency,update_time,last_updated_date)
+ if testdate > time_now:
+ time_to_go = testdate - time_now
+ dhms = time_delta_to_dhms(time_to_go)
+ if ORM.DATASOURCE_MINUTELY == update_frequency:
+ print("%s,%02d|%02d:%02d:%02d" % (source[ORM.DATASOURCE_ID],dhms[0],dhms[1],dhms[2],dhms[3]))
+ else:
+ print("%s,%02d|%02d:%02d:00" % (source[ORM.DATASOURCE_ID],dhms[0],dhms[1],dhms[2]))
+ else:
+ print("%s,Next!" % source[ORM.DATASOURCE_ID])
+ SQL_CLOSE_CONN(conn)
+
+####################################################################
+###
+
#time must be in '%H:%M:%S' format
def configure_ds_update(datasource_description, frequency, time):
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
sql = "UPDATE orm_datasource SET update_frequency=?, update_time=? WHERE description=?"
cur.execute(sql, (frequency, time, datasource_description))
conn.commit()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# List update data sources
#
def list():
- conn = sqlite3.connect(srtDbName)
- cur = conn.cursor()
- cur_write = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = SQL_CURSOR(conn)
+ cur_write = SQL_CURSOR(conn)
- format_str = "%16s %7s %14s %10s %28s '%s'"
+ format_str = "%16s %9s %14s %10s %28s '%s'"
print("SRTool Update List:")
- print(format_str % ('Data','Source','Name','Frequency','Offset','Description'))
- print("================ ======= ============== ========== ============================ ===========================================")
+ print(format_str % ('Data',' Source','Name','Frequency','Offset','Description'))
+ print("================ ========= ============== ========== ============================ ===========================================")
#get sources that have update command
sources = cur.execute("SELECT * FROM orm_datasource").fetchall()
for source in sources:
@@ -250,20 +355,57 @@ def list():
if not source[ORM.DATASOURCE_UPDATE]:
continue
frequency_str = ORM.get_orm_string(source[ORM.DATASOURCE_UPDATE_FREQUENCY],ORM.DATASOURCE_FREQUENCY_STR)
+ if 'DISABLE ' in source[ORM.DATASOURCE_ATTRIBUTES]:
+ frequency_str = 'DISABLED'
print(format_str % (source[ORM.DATASOURCE_DATA],source[ORM.DATASOURCE_SOURCE],source[ORM.DATASOURCE_NAME],frequency_str,source[ORM.DATASOURCE_UPDATE_TIME],source[ORM.DATASOURCE_DESCRIPTION]))
if verbose:
print('')
run_updates(False,'all',True,True)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CUR(cur_write)
+ SQL_CLOSE_CONN(conn)
+
#################################
# Start 'cron' job for updates
#
-def cron_start():
+def check_updates_enabled(follow_pid_file):
+ if verbose: print(f"CHECK_UPDATES_ENABLED...")
+ # First check any follow PID file
+ if follow_pid_file:
+ if not os.path.isfile(follow_pid_file):
+ if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:NOT_EXIST:{follow_pid_file}")
+ return(False)
+ with open(follow_pid_file) as f:
+ lines = f.readlines()
+ pid = lines[0].strip()
+ ret = os.system(f"ps -p {pid} > /dev/null 2>&1")
+ if ret:
+ if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:NOT_RUNNING:{pid}")
+ return(False)
+ else:
+ if verbose: print(f"CHECK_UPDATES_ENABLED:FOLLOW_PID_FILE:RUNNING:{pid}")
+
+ # Check if master disable
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+ sql = 'SELECT * FROM orm_srtsetting WHERE "name" = ?'
+ enable_update_setting = SQL_EXECUTE(cur, sql, ('SRT_DISABLE_UPDATES',)).fetchone()
+ ret = (not enable_update_setting) or ('yes' != enable_update_setting['value'])
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ if verbose: print(f"CHECK_UPDATES_ENABLED:SRT_DISABLE_UPDATES:{'GO' if ret else 'SKIP'}:")
+ return(ret)
+
+def cron_start(follow_pid_file):
pid = os.getpid()
master_write("SRTOOL_UPDATE:%s:Starting -v update cron job, pid=%s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),pid))
- os.system("echo 'Start:%s,<cron_start>' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+ os.system("echo 'Start:%s,<cron_start>!' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+
+#
+ print("echo 'Start:%s,<cron_start>!' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
# Preserve this app's pid
srt_update_pid_file = os.path.join(script_pathname,SRT_UPDATE_PID_FILE)
@@ -276,16 +418,25 @@ def cron_start():
# Loop until app is killed
extra_line = False
while True:
- # Run the updates
- run_updates(False,'all',False,False)
- # Toggle an extra line in the log to make updates obvious
- if extra_line:
- extra_line = False
- os.system("echo '' >> %s" % os.path.join(script_pathname,UPDATE_STATUS_LOG))
- else:
- extra_line = True
- # Default to 5 minute loop
- time.sleep(5 * 60)
+ try:
+ if check_updates_enabled(follow_pid_file):
+ # Run the updates
+ run_updates(False,'all',False,False)
+ # Toggle an extra line in the log to make updates obvious
+ if extra_line:
+ extra_line = False
+ os.system("echo '' >> %s" % os.path.join(script_pathname,UPDATE_STATUS_LOG))
+ else:
+ extra_line = True
+ # Default to 5 minute loop
+
+ os.system("echo 'Sleep:%s,update in 5 minutes (%s)' > %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),check_updates_enabled(follow_pid_file),os.path.join(script_pathname,SRT_UPDATE_TASK_FILE)))
+
+ run_updates(False,'all',True,True)
+
+ time.sleep(5 * 60)
+ except Exception as e:
+ master_write("SRTOOL_UPDATE:ERROR:%s:%s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT),e))
def cron_stop():
# Fetch the stored update app's pid
@@ -311,19 +462,29 @@ def main(argv):
global master_log
# setup
- parser = argparse.ArgumentParser(description='srtool.py: manage the SRTool database')
+    parser = argparse.ArgumentParser(description='srtool_update.py: manage the SRTool background tasks')
+ # Commands
parser.add_argument('--cron-start', action='store_const', const='cron_start', dest='command', help='Start the SRTool background updater')
parser.add_argument('--cron-stop', action='store_const', const='cron_stop', dest='command', help='Stop the SRTool background updater')
+ parser.add_argument('--follow-pid-file', dest='follow_pid_file', help='Only update when PID in this file is running')
+ # Status
parser.add_argument('--list', '-l', action='store_const', const='list', dest='command', help='List data sources')
parser.add_argument('--run-updates', '-u', action='store_const', const='run-updates', dest='command', help='Update scheduled data sources')
parser.add_argument('--name-filter', '-n', dest='name_filter', help='Filter for datasource name')
+ parser.add_argument('--status', '-s', action='store_const', const='status', dest='command', help='Current status of the run queue')
+ parser.add_argument('--fetch-updates-dhm', action='store_const', const='fetch_updates_dhm', dest='command', help='Fetch next updates for all sources')
+ parser.add_argument('--check-updates-enabled', action='store_const', const='check_updates_enabled', dest='command', help='Unit test the update offsets')
- parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
- parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
- parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
- parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Debugging: trial run')
+ # Test
+ parser.add_argument('--update-unit-test', '-U', action='store_const', const='update_unit_test', dest='command', help='Unit test the update offsets')
+
+ # Debugging support
+ parser.add_argument('--force', '-f', action='store_true', dest='force', help='Flag: Force the update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Flag: Skip history updates')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Flag: debug verbose output')
+ parser.add_argument('--trial', '-t', action='store_true', dest='is_trial', help='Flag: Debugging: trial run')
parser.add_argument('--configure_ds_update', '-T', nargs=3, help='Set update frequency and time for specified datasource. Check bin/README.txt for more info')
@@ -348,6 +509,11 @@ def main(argv):
print("FAILED UPDATING ALL DATASOURCES (%s)" % e)
master_log.write("SRTOOL:%s:UPDATING DATASOURCES\t\t\t...\t\t\tFAILED ... %s\n" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), e))
traceback.print_exc(file=sys.stdout)
+ elif 'fetch_updates_dhm' == args.command:
+ fetch_updates_dhm()
+ elif 'check_updates_enabled' == args.command:
+ verbose = True
+ check_updates_enabled(args.follow_pid_file)
elif args.configure_ds_update:
try:
print("CHANGING UPDATE CONFIGURATION FOR %s" % args.configure_ds_update[0])
@@ -356,12 +522,21 @@ def main(argv):
except Exception as e:
print("FAILED TO CONFIGURE UPDATE SETTINGS FOR %s" % args.configure_ds_update[0])
master_log.write("SRTOOL:%s:%s\t\t\t...\t\t\tFAILED ... %s" % (datetime.now().strftime(ORM.DATASOURCE_DATETIME_FORMAT), args.configure_ds_update[0], e))
+ elif 'status' == args.command:
+ verbose = True
+ run_updates(False,'all',True,True)
+
elif 'cron_start' == args.command:
- cron_start()
+ cron_start(args.follow_pid_file)
elif 'cron_stop' == args.command:
cron_stop()
+
+ elif 'update_unit_test' == args.command:
+ verbose = True
+ update_unit_test()
+
else:
- print("Command not found")
+ print("srtool_update: Command not found")
master_log.close()
if __name__ == '__main__':
diff --git a/bin/common/srtool_utils.py b/bin/common/srtool_utils.py
index 4d9c27cf..cd0305a1 100755
--- a/bin/common/srtool_utils.py
+++ b/bin/common/srtool_utils.py
@@ -24,7 +24,6 @@
import os
import sys
import argparse
-import sqlite3
from datetime import datetime, date
import time
import re
@@ -35,13 +34,16 @@ import xml.etree.ElementTree as ET
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
+from common.srtool_progress import *
#from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
verbose = False
cmd_skip = 0
cmd_count = 0
force = False
+debug_sql = False
srtDbName = 'srt.sqlite'
srtSchemaName = 'srt_schema.py'
@@ -151,10 +153,10 @@ def commit_to_source(conn, source_data):
WHERE id = ?'''
cur = conn.cursor()
print("UPDATE_SCORE:%s" % str(source_data))
- cur.execute(sql, source_data)
+ SQL_EXECUTE(cur, sql, source_data)
def sources(cmnd):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
print('Sources(%s)' % cmnd)
@@ -188,7 +190,7 @@ def sources(cmnd):
def settings():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
c = conn.cursor()
# Scan the SRTool Settings
@@ -203,7 +205,7 @@ def settings():
#
def remove_app_sources(master_app):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -220,7 +222,7 @@ def remove_app_sources(master_app):
if is_change:
conn.commit()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# fix_new_reserved
@@ -228,7 +230,7 @@ def remove_app_sources(master_app):
# Is this reserved by Mitre? Is '** RESERVED **' within the first 20 char positions?
def fix_new_reserved():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -270,7 +272,7 @@ def fix_new_reserved():
# Fix the None "cve.tags" fields
def fix_new_tags():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -310,7 +312,7 @@ def fix_new_tags():
# Recompute all of the CVE name_sort fields
def fix_name_sort():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -337,7 +339,7 @@ def fix_name_sort():
# Reset empty CVE recommend fields to the proper integer zero
def fix_cve_recommend():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -381,7 +383,7 @@ def fix_cve_recommend():
print("CVE RECOMMEND FIX COUNT=%d of %d" % (fix_count,i))
if fix_count:
conn.commit()
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# fixup fix_srt_dates
@@ -409,7 +411,7 @@ def _fix_date(value,default):
def fix_srt_datetime(scope):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -721,7 +723,7 @@ def fix_srt_datetime(scope):
# Reset CVE srt_create to NIST release dates
def fix_reset_nist_to_create(cve_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -767,7 +769,7 @@ def fix_reset_nist_to_create(cve_prefix):
# Reset CVE None creation dates to 2019-01-01, out of the way of reports
def fix_missing_create_dates():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -824,7 +826,7 @@ def fix_missing_create_dates():
# Reset CVE 'New-Reserved' if now public from NIST
def fix_public_reserved():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
@@ -866,7 +868,7 @@ def fix_public_reserved():
# Remove a specific/accidental set of bulk CVE history updates intended to be background
def fix_remove_bulk_cve_history():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_cve = conn.cursor()
cur_del = conn.cursor()
@@ -917,7 +919,7 @@ def fix_remove_bulk_cve_history():
#
def report_defects_to_products():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_cve = conn.cursor()
cur_del = conn.cursor()
@@ -967,7 +969,7 @@ def report_defects_to_products():
# "New-Reserved" due to column ordering issue in the MITRE "Init" routine.
#
def fix_bad_mitre_init():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_ds = conn.cursor()
cur_cve = conn.cursor()
@@ -1037,7 +1039,7 @@ def fix_bad_mitre_init():
# records due to column ordering issue in the MITRE "Init" routine.
#
def fix_bad_mitre_descr(datasource_list):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_ds = conn.cursor()
cur_cs = conn.cursor()
cur_cve = conn.cursor()
@@ -1171,13 +1173,13 @@ def fix_bad_mitre_descr(datasource_list):
# We are good to go
found_mapping = True
- # Disable this feature for now, since we do want to add unneeded pasive MITRE records
+ # Disable this feature for now, since we do want to add unneeded passive MITRE records
if False:
# Add if missing or deleted as obsolete
if not found_mapping:
print("Insert new mapping %s,%s" % (cve_name,mitre_ds_list[ds[ORM.DATASOURCE_ID]]))
if force:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))
# If this CVE has any NIST data sources, then skip description checking (e.g. NIST Modified preempt)
@@ -1221,7 +1223,7 @@ def fix_bad_mitre_descr(datasource_list):
if not found_mapping:
print("Insert new mapping %s,%s" % (cve_name,mitre_ds_list[ds[ORM.DATASOURCE_ID]]))
if force:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))
@@ -1249,7 +1251,7 @@ def fix_bad_mitre_descr(datasource_list):
#
def fix_severity(datasource_list):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_ds = conn.cursor()
cur_cs = conn.cursor()
cur_cve = conn.cursor()
@@ -1410,7 +1412,7 @@ def fix_severity(datasource_list):
if not found_mapping:
print("Insert new mapping %s,%s" % (cve_name,nist_ds_list[ds[ORM.DATASOURCE_ID]]))
if force:
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
cur_cs.execute(sql, (cve[ORM.CVE_ID],ds[ORM.DATASOURCE_ID],))
@@ -1434,7 +1436,7 @@ def fix_severity(datasource_list):
#
def fix_trim_cve_scores():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_wr = conn.cursor()
@@ -1519,7 +1521,7 @@ def fix_trim_cve_scores():
def report_multiple_defects():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_i2d = conn.cursor()
cur_inv = conn.cursor()
cur_def = conn.cursor()
@@ -1540,7 +1542,7 @@ def report_multiple_defects():
if 0 == k:
print("[%02d] Multiple defects for investigation '%s':" % (count,investigation[ORM.INVESTIGATION_NAME]))
print(" [%02d] %s: %s (%s)" % (k+1,defect[ORM.DEFECT_NAME],defect[ORM.DEFECT_SUMMARY],ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)))
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# report_duplicate_names
@@ -1551,7 +1553,7 @@ def report_multiple_defects():
def report_duplicate_names():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
@@ -1615,7 +1617,7 @@ def report_duplicate_names():
dev_dict = {}
print('')
- conn.close()
+ SQL_CLOSE_CONN(conn)
#################################
# fix_bad_links
@@ -1623,9 +1625,10 @@ def report_duplicate_names():
def fix_bad_links():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_del = conn.cursor()
+ error_count = 0
#
print('\n=== CVE Source Check ===\n')
@@ -1650,7 +1653,8 @@ def fix_bad_links():
if (1 > srcid): error = True
if error:
- print("ERROR: [%4d] CVE=%6d,SRC=%6d (%s)" % (cs[ORM.CVESOURCE_ID],cveid,srcid,datasource_map[srcid]))
+ print("ERROR: [%4d] CVE=%6d,SRC=%6d (%s)\n" % (cs[ORM.CVESOURCE_ID],cveid,srcid,datasource_map[srcid]))
+ error_count += 1
if force:
sql = 'DELETE FROM orm_cvesource WHERE id=?'
cur_del.execute(sql, (cs[ORM.CVESOURCE_ID],))
@@ -1694,8 +1698,10 @@ def fix_bad_links():
product_id = defect[ORM.DEFECT_PRODUCT_ID]
if not product_id in products:
print("ERROR:[%5d] %-20s => %s" % (defect[ORM.DEFECT_ID],defect[ORM.DEFECT_NAME],product_id))
+ error_count += 1
- conn.close()
+ print("\nError count = %d " % error_count)
+ SQL_CLOSE_CONN(conn)
#################################
# fix_bad_score_date
@@ -1708,7 +1714,7 @@ def fix_bad_links():
def fix_bad_score_date():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_fix = conn.cursor()
@@ -1716,19 +1722,35 @@ def fix_bad_score_date():
print('\n=== CVE fix_bad_score_date Check ===\n')
#
- cur.execute('SELECT * FROM orm_cve WHERE status = %d' % ORM.STATUS_NEW)
+ cur.execute('SELECT * FROM orm_cve')
+ error_count = 0
+ last_error = ''
for i,cve in enumerate(cur):
- for j,item in enumerate(cve):
- print("%s\t" % (item), end='')
- if force:
- sql = ''' UPDATE orm_cve
- SET score_date = ?
- WHERE id = ?'''
- cur_fix.execute(sql, (None, cve[ORM.CVE_ID],))
+ if 999 == (i % 1000) :
+ print("%7d: %-20s %20s, %d\r" % (i+1,cve[ORM.CVE_NAME],last_error,error_count),end='')
+ if force: conn.commit()
- print("")
+ score_date = cve[ORM.CVE_SCORE_DATE]
+ if not score_date:
+ continue
- conn.commit()
+ try:
+ dt = datetime.strptime(score_date,ORM.DATASOURCE_DATE_FORMAT)
+ except:
+ error_count += 1
+ last_error = score_date
+ print("DATE_ERROR:%s,%s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_SCORE_DATE]))
+# if 10 < error_count:
+# break
+ if force:
+ sql = ''' UPDATE orm_cve
+ SET score_date = ?
+ WHERE id = ?'''
+ cur_fix.execute(sql, (None, cve[ORM.CVE_ID],))
+
+ print("Error count = %d (e.g. %s,%s)" % (error_count,cve[ORM.CVE_NAME],last_error))
+ if force:
+ conn.commit()
#################################
# fix_inherit_affected_components()
@@ -1738,7 +1760,7 @@ def fix_bad_score_date():
def fix_inherit_affected_components():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_cve = conn.cursor()
cur_cve2vul = conn.cursor()
cur_vul = conn.cursor()
@@ -1824,6 +1846,159 @@ def fix_inherit_affected_components():
if updates and force: conn.commit()
print("Affected Component Updates = %d" % updates)
+################################3
+# fix_notify_access
+#
+# Remove notify-access that do not point to existing notify records
+
+def fix_notify_access():
+
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_notify = conn.cursor()
+
+ bad_notifyaccess = []
+ cur.execute('SELECT * FROM orm_notifyaccess')
+ i = 0
+ for notifyaccess in cur:
+ i += 1
+ notify_id = notifyaccess[ORM.NOTIFYACCESS_NOTIFY_ID]
+ notify = cur_notify.execute('SELECT * FROM orm_notify WHERE id = %d' % notify_id).fetchone()
+ if not notify:
+ bad_notifyaccess.append(notifyaccess[ORM.NOTIFYACCESS_ID])
+
+ if verbose:
+ print("%3d] %d,%d,%d (%d)" % (i,notifyaccess[ORM.NOTIFYACCESS_ID],notifyaccess[ORM.NOTIFYACCESS_NOTIFY_ID],notifyaccess[ORM.NOTIFYACCESS_USER_ID],notify_id))
+ if 999 == (i % 1000) :
+ print("%7d: \r" % (i+1), end='')
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if force:
+ for notifyaccess_id in bad_notifyaccess:
+ cur.execute('DELETE FROM orm_notifyaccess WHERE id = %d' % notifyaccess_id)
+ conn.commit()
+
+ print("Affected Notify Access Updates = %d/%d" % (len(bad_notifyaccess),i))
+
+
+################################3
+# fix_cvelocal
+#
+# Find and fix CveLocal duplicate/detached records
+
+def fix_cvelocal():
+
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+
+ found_list = []
+ duplicates_list = []
+ unattached_list = []
+ cur.execute('SELECT * FROM orm_cvelocal')
+ i = 0
+ for cvelocal in cur:
+ i += 1
+
+ # Test duplicates
+ cvelocal_name = cvelocal[ORM.CVELOCAL_NAME]
+ cvelocal_id = cvelocal[ORM.CVELOCAL_ID]
+ if cvelocal_name in found_list:
+ duplicates_list.append(cvelocal_id)
+ print("ERROR:DUPLICATE:%s" % cvelocal_name)
+ continue
+ else:
+ found_list.append(cvelocal_name)
+
+        # Test detached
+ found = False
+ try:
+ cve = cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cvelocal_name).fetchone()
+ if cve:
+ found = True
+ if cvelocal_name.startswith('SRTCVE'):
+ print("NOTE:ATTACHED:%s" % cvelocal_name)
+ except Exception as e:
+ print("ERROR:LOOKUP:%s" % e)
+ if not found:
+ unattached_list.append(cvelocal_id)
+ print("ERROR:DETTACHED:'%s'" % cvelocal_name)
+
+ # Development/debug support
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if force:
+ for duplicate in duplicates_list:
+ cvelocal = cur.execute('SELECT * FROM orm_cvelocal WHERE id = "%s"' % duplicate).fetchone()
+ print('DELETE_DUPLICATE(%s):%s' % (duplicate,cvelocal[ORM.CVELOCAL_NAME]))
+ cur.execute('DELETE FROM orm_cvelocal WHERE id = %d' % duplicate)
+ for unattached in unattached_list:
+ cvelocal = cur.execute('SELECT * FROM orm_cvelocal WHERE id = "%s"' % unattached).fetchone()
+ print('DELETE_UNATTACHED(%s):%s' % (unattached,cvelocal[ORM.CVELOCAL_NAME]))
+ cur.execute('DELETE FROM orm_cvelocal WHERE id = %d' % unattached)
+ conn.commit()
+
+ print("Errors found: %d" % (len(duplicates_list) + len(unattached_list)))
+
+################################3
+# fix_cvesource
+#
+# Find and fix CveSource duplicate/detached records
+
+def fix_cvesource():
+
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_cve = conn.cursor()
+
+ found_list = []
+ duplicates_list = []
+ cur.execute('SELECT * FROM orm_cvesource')
+ i = 0
+ for cvesource in cur:
+ i += 1
+
+ # Test duplicates
+ cve_id = cvesource[ORM.CVESOURCE_CVE_ID]
+ datasource_id = cvesource[ORM.CVESOURCE_DATASOURCE_ID]
+ key = '%d_%d' % (cve_id,datasource_id)
+
+ if key in found_list:
+ duplicates_list.append(cvesource[ORM.CVESOURCE_ID])
+ print("ERROR:DUPLICATE:%s" % key)
+ else:
+ found_list.append(key)
+
+ # Development/debug support
+ if 999 == (i % 1000) :
+ print("%7d: \r" % (i+1), end='')
+ if cmd_skip:
+ if i < cmd_skip:
+ continue
+ if cmd_count:
+ if (i - cmd_skip) > cmd_count:
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ if force:
+ for duplicate in duplicates_list:
+ cur.execute('DELETE FROM orm_cvesource WHERE id = %d' % duplicate)
+ conn.commit()
+
+ print("Errors found: %d" % len(duplicates_list))
#################################
# report_cve_status_summary()
@@ -1887,7 +2062,7 @@ def report_cve_status_summary():
table['total1'] = blank_row()
table['total2'] = blank_row()
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur.execute('SELECT * FROM orm_cve')
i = 0
@@ -2114,7 +2289,7 @@ def report_db_status_summary():
else: # priority == ORM.PRIORITY_UNDEFINED:
table_priority[year][I_CVE_UNDEFINED+offset] += 1
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_cve = conn.cursor()
cur_cve2vul = conn.cursor()
cur_vul = conn.cursor()
@@ -2310,7 +2485,7 @@ def report_db_status_summary():
def report_unattached_records():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_cve = conn.cursor()
cur_cve2vul = conn.cursor()
@@ -2375,9 +2550,12 @@ def report_unattached_records():
count = 0
for inv2def in cur:
count += 1
- del unattached_records[inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]]
+ if not inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID] in unattached_records:
+ print("INV2DEF: INV(%d) no such DEF(%d)" % (inv2def[ORM.INVESTIGATIONTODEFECT_INVESTIGATION_ID],inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]))
+ else:
+ del unattached_records[inv2def[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]]
print("Count = %d" % count)
- print("Unattached DEV to INV = %d" % len(unattached_records))
+ print("Unattached DEF to INV = %d" % len(unattached_records))
defect_list = []
for i,key in enumerate(unattached_records):
defect = cur.execute('SELECT * FROM orm_defect WHERE id = %d' % key).fetchone()
@@ -2470,7 +2648,7 @@ def report_unattached_records():
def fix_duplicate_notifications():
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_del = conn.cursor()
@@ -2507,7 +2685,251 @@ def fix_duplicate_notifications():
print("")
print('Delete count = %d of %d, Unique = %d' % (delete_count,i,len(notify_descriptions)))
#print(notify_descriptions)
- conn.close()
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# cve_trace
+# $ ./bin/common/srtool_utils.py --cve-trace CVE-2024-23180
+#
+
+def cve_trace(cve_name):
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_check = conn.cursor()
+ cur_del = conn.cursor()
+
+ # Preload the data source table
+ datasource_table = {}
+ for ds in cur.execute('SELECT * FROM orm_datasource').fetchall():
+ datasource_table[ds[ORM.DATASOURCE_ID]] = ds[ORM.DATASOURCE_KEY]
+
+ cve = cur.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve_name).fetchone()
+ if not cve:
+ print("ERROR: could not find CVE '%s'" % cve_name)
+ exit(1)
+ print("CVE found:\n Cve[%s]='%s'" % (cve[ORM.CVE_ID],cve[ORM.CVE_NAME]))
+ cve_id = cve[ORM.CVE_ID]
+
+ cve_local = cur.execute('SELECT * FROM orm_cvelocal WHERE name = "%s"' % cve_name).fetchone()
+ if cve_local:
+ print("CVE LOCAL found:\n '%s' at %s" % (cve_local[ORM.CVELOCAL_NAME],cve_local[ORM.CVELOCAL_ID]))
+
+ print("CveSource found:")
+ cvesource_list = cur.execute('SELECT * FROM orm_cvesource WHERE cve_id = "%s"' % cve_id)
+ for cvesource in cvesource_list:
+ print(" CveSource=[%s] CVE=%s DataSource=%s (%s)" % (cvesource[ORM.CVESOURCE_ID],cvesource[ORM.CVESOURCE_CVE_ID],cvesource[ORM.CVESOURCE_DATASOURCE_ID],datasource_table[cvesource[ORM.CVESOURCE_DATASOURCE_ID]]))
+ # Check the other foreign key
+ check = cur_check.execute('SELECT * FROM orm_datasource WHERE id = "%s"' % cvesource[ORM.CVESOURCE_DATASOURCE_ID]).fetchone()
+ if not check:
+ print("ERROR: DataSource not found at %s" % cvesource[ORM.CVESOURCE_DATASOURCE_ID])
+
+
+ print("CveAccess found:")
+ cveaccess_list = cur.execute('SELECT * FROM orm_cveaccess WHERE cve_id = "%s"' % cve_id)
+ for cveaccess in cveaccess_list:
+ print(" CVE=%s User=%s" % (cveaccess[ORM.CVEACCESS_CVE_ID],cveaccess[ORM.CVEACCESS_USER_ID]))
+ # Check the other foreign key
+ check = cur_check.execute('SELECT * FROM orm_cveaccess WHERE id = "%s"' % cveaccess[ORM.CVEACCESS_USER_ID]).fetchone()
+ if not check:
+ print("ERROR: User not found at %s" % cveaccess[ORM.CVEACCESS_USER_ID])
+
+ print("CveHistory found:")
+ cvehistory_list = cur.execute('SELECT * FROM orm_cvehistory WHERE cve_id = "%s"' % cve_id)
+ for cvehistory in cvehistory_list:
+ print(" CVE=%s History='%s...'" % (cvehistory[ORM.CVEHISTORY_CVE_ID],cvehistory[ORM.CVEHISTORY_COMMENT][:60]))
+
+ print("PackageToCve found:")
+ packagetocve_list = cur.execute('SELECT * FROM orm_packagetocve WHERE cve_id = "%s"' % cve_id)
+ for packagetocve in packagetocve_list:
+ print(" CVE=%s Package=%s" % (packagetocve[ORM.PACKAGETOCVE_CVE_ID],packagetocve[ORM.CVEHISTORY_PACKAGE_ID]))
+
+ print("CveReference found:")
+ cvereference_list = cur.execute('SELECT * FROM orm_cvereference WHERE cve_id = "%s"' % cve_id)
+ for cvereference in cvereference_list:
+ print(" CVE=%s DataSource=%s" % (cvereference[ORM.CVEREFERENCE_CVE_ID],cvereference[ORM.CVEREFERENCE_DATASOURCE_ID]))
+
+ print("CveToVulnerablility found:")
+ cvetovulnerablility_list = cur.execute('SELECT * FROM orm_cvetovulnerablility WHERE cve_id = "%s"' % cve_id)
+ for cvetovulnerablility in cvetovulnerablility_list:
+ print(" CVE=%s Vulnerability=%s" % (cvetovulnerablility[ORM.CVETOVULNERABLILITY_CVE_ID],cvetovulnerablility[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]))
+
+ # Trial delete
+ if not force:
+ return
+
+ if False:
+ cvehistory_list = cur.execute('SELECT * FROM orm_cvehistory WHERE cve_id = "%s"' % cve_id)
+ for cvehistory in cvehistory_list:
+ print("DELETE CveHistory: CVE=%s History='%s...'" % (cvehistory[ORM.CVEHISTORY_CVE_ID],cvehistory[ORM.CVEHISTORY_COMMENT][:60]))
+ sql = 'DELETE FROM orm_cvehistory WHERE cve_id=?'
+ cur_del.execute(sql, (cve_id,))
+
+ if True:
+ cvesource_list = cur.execute('SELECT * FROM orm_cvesource WHERE cve_id = "%s"' % cve_id)
+ for cvesource in cvesource_list:
+ print("DELETE Cvesource: CVE=%s DataSource=%s" % (cvesource[ORM.CVESOURCE_CVE_ID],cvesource[ORM.CVESOURCE_DATASOURCE_ID]))
+ sql = 'DELETE FROM orm_cvesource WHERE cve_id=?'
+ cur_del.execute(sql, (cve_id,))
+
+ if True:
+ if cve_local:
+ print("DELETE CVE LOCAL: '%s' at %s" % (cve_local[ORM.CVELOCAL_NAME],cve_local[ORM.CVELOCAL_ID]))
+ sql = 'DELETE FROM orm_cvelocal WHERE name=?'
+ cur_del.execute(sql, (cve_name,))
+
+ if False:
+ print("DELETE CVE: '%s' at %s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_ID]))
+ sql = 'DELETE FROM orm_cve WHERE id=?'
+ cur_del.execute(sql, (cve_id,))
+
+ conn.commit()
+ SQL_CLOSE_CONN(conn)
+
+
+#################################
+# report_cve_comments_to_recipes
+#
+# Try to merge comments and packages in orm_cve table.
+#
+
+def report_cve_comments_to_recipes():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+
+ """
+ comments:
+ Exclude all containing '%wr%' or '%do not include%' or "%n't%":
+ not (comments like "" or comments like '%wr%' or comments like '%do not include%' or comments like "%n't%")
+ Starting from 6 words, it's not purely product names any more: length(comments) - length(replace(comments, ' ', '')) + 1 = 6
+ packages:
+ Exclude all containing '%wr%' and this should have covered all cases; all these cases have empty comments
+ """
+ cur.execute("SELECT id, comments, packages FROM orm_cve WHERE NOT (comments = '')")
+
+ total = 0
+ review_needed = 0
+ # key: ORM.CVE_ID
+ # value: [curr_comments, curr_packages, proposed_comments, proposed_packages, review_needed]
+ dct_cve_obj = {}
+ # all cve_obj has non-empty comments
+ for cve_obj in cur:
+ id = cve_obj[0]
+ comments = cve_obj[1].lower()
+ packages = cve_obj[2].lower()
+ lst_comments = comments.strip('. ').split(' ')
+ lst_packages = packages.split(' ')
+ if 'wr' in packages:
+ dct_cve_obj[id] = ['', cve_obj[2], cve_obj[2], '', 0]
+ total += 1
+ continue
+ elif 'wr' in comments or 'do not include' in comments or "n't" in comments:
+ if packages != '' and packages != 'reject':
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], cve_obj[1] + ' | packages:' + packages, 'reject:' + packages, 0]
+ total += 1
+ continue
+ comments_strict_in_packages = True
+ for comment in lst_comments:
+ if comment not in packages:
+ comments_strict_in_packages = False
+ break
+ if comments_strict_in_packages:
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages, 0]
+ total += 1
+ continue
+ # if there is only a single word in comments
+ elif ' ' not in comments:
+ if packages == '':
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', comments, 0]
+ total += 1
+ continue
+ if comments in packages:
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages, 0]
+ total += 1
+ continue
+ merged = False
+ for i, package in enumerate(lst_packages):
+ if package in comments:
+ lst_packages[i] = comments
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', ' '.join(lst_packages), 0]
+ merged = True
+ break
+ if not merged:
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', packages + ' ' + comments, 0]
+ total += 1
+ continue
+ elif len(lst_comments) < 6 and packages == '':
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', comments, 0]
+ total += 1
+ continue
+ else:
+ dct_cve_obj[id] = [cve_obj[1], cve_obj[2], '', '', 1]
+ review_needed += 1
+ total += 1
+
+ print("Automatically merged", str(round((total - review_needed)/total * 100, 2)) + '%', 'of data')
+
+ with open('report_cve_comments_to_recipes.csv', 'x', newline='') as f:
+ csvwriter = csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ csvwriter.writerow(['id', 'curr_comments', 'curr_packages', 'proposed_comments', 'proposed_packages', 'review_needed'])
+ for id in dct_cve_obj:
+ csvwriter.writerow([id] + dct_cve_obj[id])
+
+###############################################################
+#
+# reset scores: reset new CVEs so that they can be rescored
+#
+
+def reset_scores():
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
+ cur_write = conn.cursor()
+ cur_ds = conn.cursor()
+ is_change = False
+ write_count = 0
+ progress_set_max(cmd_count if cmd_count else 100)
+
+ if debug_sql:
+ SQL_DEBUG(True,'RST')
+
+ # Scan the open CVEs
+ sql = "SELECT * FROM orm_cve WHERE status='%s' AND score_date IS NOT NULL;" % (ORM.STATUS_NEW)
+ cur.execute(sql)
+ for i,cve in enumerate(cur):
+ cve_name = cve[ORM.CVE_NAME]
+ progress_show(cve_name)
+
+ # Progress indicator support
+ if 0 == i % 10:
+ print('%04d: %20s\r' % (i,cve_name), end='')
+ if (0 == i % 200) and (not cmd_skip) and is_change:
+ SQL_COMMIT(conn)
+ print("%4d: COMMIT" % i)
+ sleep(2)
+ is_change = False
+ # Development/debug support
+ if cmd_skip > i:
+ continue
+ # Test short count, cap at 100
+ if (cmd_count and (cmd_count < (i - cmd_skip))) or (100 < (i - cmd_skip)):
+ print("Count return: %s,%s" % (i,cmd_count))
+ break
+
+ sql = ''' UPDATE orm_cve
+ SET score_date = ?
+ WHERE id = ?'''
+ SQL_EXECUTE(cur_write,sql, (None, cve[ORM.CVE_ID]))
+ write_count += 1
+ is_change = True
+
+ if is_change:
+ print("COMMIT")
+ SQL_COMMIT(conn)
+ print("\nUpdated CVEs=%d" % (write_count))
+ # End progress
+ progress_done('Done')
+ # Dump the SQL transaction data
+ if debug_sql:
+ SQL_DUMP()
#################################
# main loop
@@ -2519,6 +2941,7 @@ def main(argv):
global cmd_count
global force
global srtDbName
+ global debug_sql
# setup
parser = argparse.ArgumentParser(description='srtool_utils.py: manage/repair the SRTool database')
@@ -2543,6 +2966,9 @@ def main(argv):
parser.add_argument('--fix-bad-score-date', action='store_const', const='fix_bad_score_date', dest='command', help='Clear score dates to fix obsolete formats')
parser.add_argument('--fix-trim-cve-scores', action='store_const', const='fix_trim_cve_scores', dest='command', help='Trim V3/V2 scores to one decimal place standard')
parser.add_argument('--fix-inherit-affected-components', action='store_const', const='fix_inherit_affected_components', dest='command', help='Inherit the affected components field from CVE to its children')
+ parser.add_argument('--fix-notify-access', action='store_const', const='fix_notify_access', dest='command', help='Remove notify-access that do not point to existing notify records')
+ parser.add_argument('--fix-cvelocal', action='store_const', const='fix_cvelocal', dest='command', help='Find and fix CveLocal duplicate/detached records')
+ parser.add_argument('--fix-cvesource', action='store_const', const='fix_cvesource', dest='command', help='Find and fix CveSource duplicate/detached records')
# Continuous maintenance validation and repair routines
@@ -2556,15 +2982,23 @@ def main(argv):
parser.add_argument('--report-cve-status-summary', action='store_const', const='report_cve_status_summary', dest='command', help='Report the CVE status summary')
parser.add_argument('--report-db-status-summary', action='store_const', const='report_db_status_summary', dest='command', help='Report the database status summary')
parser.add_argument('--report-unattached-records', action='store_const', const='report_unattached_records', dest='command', help='Report VUL/INV/DEF unattached to parent CVE/VUL/INV')
+ parser.add_argument('--report-cve-comments-to-recipes', action='store_const', const='report_cve_comments_to_recipes', dest='command', help='Report the "comments" field in the CVE table')
+
+ # Other
+
+ parser.add_argument('--cve-trace', dest='cve_trace', help='Trace a CVE (FOREIGN KEY constraint failed)')
+ parser.add_argument('--reset-scores', action='store_const', const='reset_scores', dest='command', help='Reset new CVEs for score test')
# Options
parser.add_argument('--database', '-D', dest='database', help='Select specific alternate database file (e.g. a backup)')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force the update')
parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
@@ -2576,6 +3010,8 @@ def main(argv):
if None != args.count:
cmd_count = int(args.count)
force = args.force
+ debug_sql = args.debug_sql
+ progress_set_on(args.do_progress)
# Test for example the backup databases
if args.database:
@@ -2629,6 +3065,12 @@ def main(argv):
fix_bad_score_date()
elif 'fix_inherit_affected_components' == args.command:
fix_inherit_affected_components()
+ elif 'fix_notify_access' == args.command:
+ fix_notify_access()
+ elif 'fix_cvelocal' == args.command:
+ fix_cvelocal()
+ elif 'fix_cvesource' == args.command:
+ fix_cvesource()
elif args.fix_severity:
fix_severity(args.fix_severity)
@@ -2649,7 +3091,13 @@ def main(argv):
report_db_status_summary()
elif 'report_unattached_records' == args.command:
report_unattached_records()
+ elif 'report_cve_comments_to_recipes' == args.command:
+ report_cve_comments_to_recipes()
+ elif args.cve_trace:
+ cve_trace(args.cve_trace)
+ elif 'reset_scores' == args.command:
+ reset_scores()
else:
print("Command not found")
diff --git a/bin/cve_checker/datasource.json b/bin/cve_checker/datasource.json
new file mode 100755
index 00000000..5fc14da8
--- /dev/null
+++ b/bin/cve_checker/datasource.json
@@ -0,0 +1,25 @@
+{
+ "srtsetting" : [
+ {
+ "name" : "_SRTOOL_CVE_MESSAGE",
+ "helptext" : "CVE Checker MOD",
+ "value" : "CVe Checker MOD"
+ }
+ ],
+ "datasource" : [
+ {
+ "key" : "0201-cvechecker-update",
+ "data" : "cvechecker",
+ "source" : "cvechecker",
+ "name" : "cvechecker_Updates",
+ "description" : "Datasource for scheduled updates",
+ "cve_filter" : "",
+ "init" : "",
+ "update" : "",
+ "lookup" : "",
+ "_comment_" : "2 = Daily, 5 = OnDemand (disabled), Update on Saturdays at 2:00 am",
+ "update_frequency" : "2",
+ "update_time" : "{\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/cve_checker/patcher.json b/bin/cve_checker/patcher.json
new file mode 100755
index 00000000..a0c96733
--- /dev/null
+++ b/bin/cve_checker/patcher.json
@@ -0,0 +1,41 @@
+{
+ "_comments_" : "Blank values indicate defaults",
+ "label" : "ACME",
+ "patcher_dir" : "bin/acme/patcher",
+ "patch_set" : [
+ {
+ "_comments_" : "The ACME custom version of the Jira integration script",
+ "original" : "bin/common/srtool_jira_template.py",
+ "custom" : "bin/acme/srtool_jira_acme.py",
+ "patch" : "",
+ "options" : ""
+ },
+ {
+ "original" : "bin/srt",
+ "custom" : "bin/acme/patcher/inplace/bin/srt",
+ "patch" : "",
+ "options" : "INPLACE DISABLE"
+ }
+ ],
+ "custom_directories" : [
+ {
+ "dir" : "bin/acme"
+ },
+ {
+ "dir" : "lib/acme"
+ }
+ ],
+ "documentation" : [
+ {
+ "help_original" : "the location of the original mainline file",
+ "help_custom" : "the location of the derived and customized file",
+ "help_original_INPLACE" : "In the INPLACE mode, this is the location of the mainline file that has been customized",
+ "help_custom_INPLACE" : "In the INPLACE mode, this is the stash location of the customized file",
+ "help_patch" : "optional location of extracted patch file, default is '$patcher_dir/$filename.patch'",
+ "help_options" : "When empty, indicates the default workflow of a custom file in custom app directory derived from a mainline template file (e.g. bin/common/srtool_jira_template.py)",
+ "help_options_INPLACE" : "Add the 'INPLACE' key if the file is patched in place in the mainline code",
+ "help_options_DISABLE" : "Add the 'DISABLE' key to make this mapping inactive",
+ "help_custom_directories" : "These are the custom directories that are overlaid on the SRTool code, and are ignored for in-place scanning"
+ }
+ ]
+}
diff --git a/bin/cve_checker/srtool_cvechecker.py b/bin/cve_checker/srtool_cvechecker.py
new file mode 100755
index 00000000..6144cb5a
--- /dev/null
+++ b/bin/cve_checker/srtool_cvechecker.py
@@ -0,0 +1,950 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import argparse
+import json
+import subprocess
+import logging
+import pytz
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+# data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+REMOTE_PATH = ''
+LOCAL_DIR = 'data/cve_checker/yocto-metrics'
+BRANCH = ''
+
+# Import Channel support
+CK_LOCAL_DIR = 'data/cve_checker'
+
+
+# From lib/cve_check/views.py
+CK_UNDEFINED = 0
+CK_UNPATCHED = 1
+CK_IGNORED = 2
+CK_PATCHED = 3
+
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = int(os.environ['SRTDBG_LVL']) if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Fetch the key for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ try:
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if srtsetting:
+ if is_dict:
+ return(srtsetting['value'])
+ else:
+ return(srtsetting[ORM.SRTSETTING_VALUE])
+ except Exception as e:
+ print(f"ERROR:{e}")
+ return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Set the key value for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if not srtsetting:
+ sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+ SQL_EXECUTE(cur, sql, (key,'',value))
+ if verbose: print(f"INSERT:{key}:{value}:")
+ else:
+ if verbose: print(f"UPDATE[{srtsetting[ORM.SRTSETTING_ID]}]:{key}:{value}:")
+ sql = ''' UPDATE orm_srtsetting
+ SET value=?
+ WHERE id=?'''
+ if is_dict:
+ SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+ else:
+ SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+ SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+ os.chdir(newdir)
+ # WARNING: we need a pause else the chdir will break
+ # subsequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+ try:
+ os.makedirs(newdir)
+ except:
+ # dir already exists
+ pass
+ # WARNING: we need a pause else the makedirs could break
+ # subsequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*args):
+ # Only string-type parameters allowed
+ cmd_list = []
+ for arg in args:
+ if not arg: continue
+ if isinstance(arg, (list, tuple)):
+ # Flatten all the way down
+ for a in arg:
+ if not a: continue
+ cmd_list.append(str(a))
+ else:
+ cmd_list.append(str(arg))
+
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def execute_commmand(cmnd,path=''):
+ print(f"FOO1:EXECUTE_COMMMAND:{cmnd}:{path}:")
+ cwd = os.getcwd()
+ if path:
+ do_chdir(path,0.4)
+ print(f"FOO2:EXECUTE_COMMMAND:{os.getcwd()}:")
+ result_returncode,result_stdout,result_stderr = execute_process(cmnd)
+ if 0 != result_returncode:
+     print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+     print(f"ERROR({result_returncode}):{result_stderr}")
+     # Restore the caller's working directory even on the error path
+     if path:
+         do_chdir(cwd)
+     return(1)
+ if verbose:
+     print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ if path:
+     do_chdir(cwd)
+
+# Insure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+ if True or verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+ if not os.path.isdir(repo_dir):
+ repo_parent_dir = os.path.dirname(repo_dir)
+ do_makedirs(repo_parent_dir)
+ print(f"= Clone '{REMOTE_URL}' ... =")
+ cmnd=['git','clone',REMOTE_URL]
+ execute_commmand(cmnd,repo_parent_dir)
+ else:
+ print(f"= Clone '{REMOTE_URL}' skip ... =")
+
+ if branch:
+ print("= Checkout branch '{BRANCH}' ... =")
+ cmnd=['git','-C',repo_dir,'checkout',branch]
+ execute_commmand(cmnd)
+
+ # Get the latest data with a safety pull
+ print("= Pull ... =")
+ cmnd=['git','-C',repo_dir,'pull']
+ execute_commmand(cmnd)
+
+# Compute a sortable CVE name
+def get_name_sort(cve_name):
+ try:
+ a = cve_name.split('-')
+ cve_name_sort = '%s-%s-%07d' % (a[0],a[1],int(a[2]))
+ except:
+ cve_name_sort = cve_name
+ return cve_name_sort
+
+def score2cve_score(score):
+ try:
+ return(float(score))
+ except:
+ return(0.0)
+
+def cve_score2severity(score):
+ score_num = score2cve_score(score)
+ if score_num < 2.5:
+ severity = "Low"
+ elif score_num < 5.0:
+ severity = "Medium"
+ elif score_num < 7.5:
+ severity = "High"
+ else :
+ severity = "Critical"
+ return(severity)
+
+def cve_scores2priority(score_v2,score_v3):
+ score_num = max(score2cve_score(score_v2),score2cve_score(score_v3))
+ if score_num < 2.5:
+ priority = ORM.PRIORITY_LOW
+ elif score_num < 5.0:
+ priority = ORM.PRIORITY_MEDIUM
+ elif score_num < 7.5:
+ priority = ORM.PRIORITY_HIGH
+ else :
+ priority = ORM.PRIORITY_CRITICAL
+ return(priority)
+
+def status2orm_ck(status):
+ if 'Unpatched' == status:
+ return (CK_UNPATCHED,ORM.STATUS_VULNERABLE)
+ elif 'Patched' == status:
+ return (CK_PATCHED,ORM.STATUS_NOT_VULNERABLE)
+ elif 'Ignored' == status:
+ return (CK_IGNORED,ORM.STATUS_NOT_VULNERABLE)
+ else:
+ return (CK_UNDEFINED,ORM.STATUS_NEW)
+
+def count_ck_records(cur):
+ def count_rows(table_name):
+ cur.execute(f"SELECT COUNT(*) FROM {table_name}")
+ return(cur.fetchone()[0])
+ Ck_Audit_cnt = count_rows('cve_checker_Ck_Audit')
+ Ck_Package_cnt = count_rows('cve_checker_Ck_Package')
+ Ck_Product_cnt = count_rows('cve_checker_Ck_Product')
+ Ck_Layer_cnt = count_rows('cve_checker_Ck_Layer')
+ CkPackage2Cve_cnt = count_rows('cve_checker_CkPackage2Cve')
+ CkPackage2CkProduct_cnt = count_rows('cve_checker_CkPackage2CkProduct')
+ return(Ck_Audit_cnt, Ck_Package_cnt, Ck_Product_cnt, Ck_Layer_cnt, CkPackage2Cve_cnt, CkPackage2CkProduct_cnt)
+
+#################################
+# Check Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+# Unit tests:
+# bin/cve_checker/srtool_cvechecker.py --validate-cvechk-ab master -v
+# bin/cve_checker/srtool_cvechecker.py --validate-cvechk-ab dunfell -v
+#
+
+def validate_cvechk_ab(release):
+ repo_dir = os.path.join(srtool_basepath,LOCAL_DIR)
+ LOCAL_PATH = f'cve-check/{release}'
+
+ # Insure that the repo is present and updated
+ prepare_git(repo_dir,REMOTE_URL,BRANCH)
+
+ # Find the JSON file
+ json_dir = os.path.join(repo_dir,LOCAL_PATH)
+ file_list = []
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ file_list.append(file)
+ print(f"CVKCHK JSON file count = {len(file_list)}")
+
+ progress_set_max(len(file_list))
+ # Scan the JSON files
+ print(f"Release = {release}")
+ for i,json_file in enumerate(file_list):
+
+ # Debugging support
+ if cmd_skip and (i < cmd_skip):
+ continue
+ if cmd_count and (i > (cmd_skip + cmd_count)):
+ continue
+
+ with open(os.path.join(json_dir,json_file)) as json_data:
+ progress_show(json_file)
+ try:
+ dct = json.load(json_data)
+ except Exception as e:
+ print(f"ERROR:JSON_FILE_LOAD:{json_file}:{e}", file=sys.stderr)
+ continue
+
+ if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+
+ for elem in dct:
+ print(f"TOP ELEM:{elem}")
+
+ multiple_products = []
+ mismatch_products = []
+ mismatch_iscves = []
+
+ elem_packages = dct['package']
+ print(f"PACKAGE COUNT:{len(elem_packages)}")
+ for package in elem_packages:
+ name = package['name']
+ short_name = name.replace('-native','')
+
+ package_products = package['products']
+ if 1 != len(package_products):
+ s = f"{name}={len(package_products)}"
+ for product in package_products:
+ s += f":{product['product']}"
+ multiple_products.append(s)
+
+ is_cves = ''
+ for product in package_products:
+ if not is_cves:
+ is_cves = product['cvesInRecord']
+ if short_name != product['product']:
+ mismatch_products.append(f"{name}!={product['product']}")
+ if is_cves != product['cvesInRecord']:
+ mismatch_iscves.append(f"{name}:{is_cves} != {product['cvesInRecord']}")
+
+ print(f"multiple_products:{len(multiple_products)}")
+ for i,mp in enumerate(multiple_products):
+ print(f" {mp}")
+ if i > 5: break
+ print(f"mixed_products:{len(mismatch_products)}")
+ for i,mp in enumerate(mismatch_products):
+ print(f" {mp}")
+ if i > 5: break
+ print(f"mixed_iscves:{len(mismatch_iscves)}")
+ for i,mp in enumerate(mismatch_iscves):
+ print(f" {mp}")
+ if i > 5: break
+ progress_done('Done')
+
+#################################
+# Import Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+# Unit tests:
+# bin/cve_checker/srtool_cvechecker.py --import-cvechk 7,nanbield,nanbield -v (7 = AB repo)
+# bin/cve_checker/srtool_cvechecker.py --import-cvechk 6,master,<none> -v (6 = SSH import)
+#
+
+def import_cvechk(key,audit_name):
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+
+ ck_import_id,ck_audit_key,ck_import_select = key.split(',')
+
+ _log("Prepare Import channel")
+ sql = """SELECT * FROM cve_checker_CkUploadManager WHERE id = ?"""
+ ck_import = SQL_EXECUTE(cur, sql, (ck_import_id,)).fetchone()
+ if not ck_import:
+ print(f"ERROR: ck_import not found '{ck_import_id}'")
+ exit(1)
+
+ ck_json_list = []
+ if 'Repo' == ck_import['import_mode']:
+ # Isolate the repo's directory name from the local path (first dir)
+ repo_dir_name = ck_import['path']
+ pos = repo_dir_name.find('/')
+ if pos > 0:
+ repo_dir_name = repo_dir_name[0:pos]
+ repo_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,repo_dir_name)
+ repo_url = ck_import['repo']
+ repo_branch = ck_import['branch']
+
+ # Insure that the repo is present and updated
+ _log("Prepare repo")
+ print(f"FOO:prepare_git({repo_dir},{repo_url},{repo_branch})")
+ prepare_git(repo_dir,repo_url,repo_branch)
+
+ # Is the selector a file?
+ if ck_import_select.endswith('.json'):
+ ck_json_list.append(os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'],ck_import_select))
+ else:
+ # Gather files from this sub-directory
+ json_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'],ck_import_select)
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ ck_json_list.append(os.path.join(json_dir,file))
+ if not ck_json_list:
+ print(f"ERROR: no JSON files found in '{json_dir}'")
+ exit(1)
+ else:
+ print(f"CVKCHK JSON file count = {len(ck_json_list)}")
+ elif 'SSL' == ck_import['import_mode']:
+ host,path = ck_import['path'].split(':')
+ path = os.path.join(path,ck_import_select)
+ ck_ssl_cp_list = []
+ if path.endswith('.json'):
+ ck_ssl_cp_list.append(path)
+ else:
+ cmnd = ['ssh','-i', ck_import['pem'], host, 'ls', path+'/*.json']
+ exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
+ for i,line in enumerate(exec_stdout.splitlines()):
+ line = line.strip()
+ ck_ssl_cp_list.append(line)
+ print(f"FOUND_SSL_JSON={ck_ssl_cp_list}:")
+ local_import_dir = os.path.join(srtool_basepath,'data/cve_checker/ssl')
+ do_makedirs(local_import_dir)
+ cmnd = ['scp','-i', ck_import['pem'], f"{host}:{path}"+"/*", local_import_dir]
+ exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
+ for file in ck_ssl_cp_list:
+ ck_json_list.append(os.path.join(local_import_dir,os.path.basename(file)))
+
+ elif 'Upload' == ck_import['import_mode']:
+ print(f"FOO:UPLOAD:{ck_import_select}:")
+ # Is the selector a file?
+ if ck_import_select.endswith('.json'):
+ print(f"FOO1:{ck_import_select}")
+ ck_json_list.append(ck_import_select)
+ else:
+ print(f"ERROR: Upload: not a JSON file '{ck_import_select}'")
+ exit(1)
+
+ elif 'File' == ck_import['import_mode']:
+ print(f"FOO:{ck_import['path']}:{ck_import_select}:")
+ # Is the selector a file?
+ if ck_import['path'].endswith('.json'):
+ print(f"FOO1:{ck_import['path']}")
+ ck_json_list.append(ck_import['path'])
+ else:
+ # Gather files from this sub-directory
+ json_dir = os.path.join(ck_import['path'],ck_import_select)
+ print(f"FOO2:CHECK:{json_dir}")
+ for root, dirs, files in os.walk(json_dir):
+ for i,file in enumerate(files):
+ if not file.endswith('.json'):
+ continue
+ ck_json_list.append(os.path.join(json_dir,file))
+ if not ck_json_list:
+ print(f"ERROR: no JSON files found in '{json_dir}'")
+ exit(1)
+ else:
+ print(f"CVKCHK JSON file count = {len(ck_json_list)}")
+ else:
+ print(f"ERROR: import mode not recognized '{ck_import['import_mode']}'")
+ exit(1)
+ print(f"FOUND_JSON={ck_json_list}:")
+
+ _log("Prepare ORM Products")
+ sql = """SELECT * FROM orm_product WHERE `key` = ?"""
+ orm_product = SQL_EXECUTE(cur, sql, (ck_audit_key,)).fetchone()
+ if not orm_product:
+ print(f"ERROR: release not found '{ck_audit_key}'")
+ exit(1)
+
+ # Find or create audit, just one per day per release
+ _log("Prepare Audit record")
+ audit_date = datetime.now()
+ if not audit_name:
+ audit_name = f"audit_{audit_date.strftime('%Y%m%d')}_{orm_product['key']}_"
+ sql = f"""SELECT * FROM cve_checker_ck_audit WHERE `name` = ?"""
+ found_audit = SQL_EXECUTE(cur, sql, (audit_name,)).fetchone()
+ if found_audit:
+ ck_audit_id = int(found_audit['id'])
+ # Preclear audit's packages and their indexes
+ sql = f"""SELECT * FROM cve_checker_ck_package WHERE ck_audit_id = ?"""
+ for ck_package in SQL_EXECUTE(cur, sql, params=(ck_audit_id,)).fetchall():
+ sql = f"""DELETE FROM cve_checker_ckpackage2cve WHERE ck_package_id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ sql = f"""DELETE FROM cve_checker_ckpackage2ckproduct WHERE ck_package_id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ sql = f"""DELETE FROM cve_checker_ck_package WHERE id = ?"""
+ SQL_EXECUTE(cur, sql, params=(ck_package['id'],))
+ SQL_COMMIT(conn)
+ else:
+ # Create a parent audit record
+ sql = ''' INSERT INTO cve_checker_ck_audit (name, orm_product_id,create_time) VALUES (?, ?, ?)'''
+ SQL_EXECUTE(cur, sql, (audit_name,orm_product['id'],audit_date,))
+ ck_audit_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ if verbose: print(f"ck_audit_id={ck_audit_id}")
+
+ # Scan the JSON files
+ print(f"Release = {ck_audit_key}")
+ if verbose:
+ Ck_Audit_org, Ck_Package_org, Ck_Product_org, Ck_Layer_org, CkPackage2Cve_org, CkPackage2CkProduct_org = count_ck_records(cur)
+
+ layer_id_cache = {}
+ product_id_cache = {}
+ cve_id_cache = {}
+ layer_id_cache_hit = 0
+ product_id_cache_hit = 0
+ cve_id_cache_hit = 0
+ added_cve = 0
+ issue_cnt = 0
+
+ # Prefetch the existing CVE IDs
+ _log("Prepare CVE pre-fetch")
+ print(f"Prefetch CVE IDs ...")
+ sql = f"""SELECT id,name FROM orm_cve"""
+ orm_cves = SQL_EXECUTE(cur, sql, ).fetchall()
+ for orm_cve in orm_cves:
+ layer_id_cache[orm_cve['name']] = orm_cve['id']
+
+ for json_file in ck_json_list:
+ with open(json_file) as json_data:
+ try:
+ dct = json.load(json_data)
+ except Exception as e:
+ print(f"ERROR:JSON_FILE_LOAD:{json_file}:{e}", file=sys.stderr)
+ continue
+
+ elem_packages = dct['package']
+ print(f"PACKAGE COUNT:{len(elem_packages)}")
+ progress_set_max(len(elem_packages))
+ for i,package in enumerate(elem_packages):
+ # Debugging support
+ if cmd_skip and (i < cmd_skip):
+ continue
+ if cmd_count and (i > (cmd_skip + cmd_count)):
+ continue
+ if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
+
+ #
+ # Extract the ck_package records
+ #
+
+ package_name = package['name']
+ package_version = package['version']
+ ck_layer_name = package['layer']
+ progress_show(package_name)
+
+ # Fetch or create the ck_layer
+ ck_layer_id = 0
+ if ck_layer_name in layer_id_cache:
+ ck_layer_id = layer_id_cache[ck_layer_name]
+ layer_id_cache_hit += 1
+ if not ck_layer_id:
+ sql = f"""SELECT * FROM cve_checker_ck_layer WHERE "name" = ?"""
+ ck_layer = SQL_EXECUTE(cur, sql, params=(ck_layer_name,)).fetchone()
+ if ck_layer:
+ ck_layer_id = ck_layer['id']
+ if not ck_layer_id:
+ # Create layer record
+ sql = ''' INSERT INTO cve_checker_ck_layer (name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (ck_layer_name,))
+ ck_layer_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ layer_id_cache[ck_layer_name] = ck_layer_id
+
+ # Create ck_package record
+ sql = ''' INSERT INTO cve_checker_ck_package (name,version,ck_layer_id,unpatched_cnt,ignored_cnt,patched_cnt,ck_audit_id) VALUES (?, ?, ?, ?, ?, ?, ?)'''
+ params = (package_name,package_version,ck_layer_id,0,0,0,ck_audit_id)
+ SQL_EXECUTE(cur, sql, params)
+ ck_package_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+
+ # Fetch or create the ck_products
+ for product in package['products']:
+ ck_product_name = product['product']
+ ck_cvesInRecord = product['cvesInRecord']
+
+ ck_product_id = 0
+ if ck_product_name in product_id_cache:
+ ck_product_id = product_id_cache[ck_product_name]
+ product_id_cache_hit += 1
+ if not ck_product_id:
+ sql = f"""SELECT * FROM cve_checker_ck_product WHERE "name" = ?"""
+ ck_product = SQL_EXECUTE(cur, sql, params=(ck_product_name,)).fetchone()
+ if ck_product:
+ ck_product_id = ck_product['id']
+ if not ck_product_id:
+ # Create layer record
+ sql = ''' INSERT INTO cve_checker_ck_product (name) VALUES (?)'''
+ SQL_EXECUTE(cur, sql, (ck_product_name,))
+ ck_product_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_COMMIT(conn)
+ sql = f"""SELECT * FROM cve_checker_ck_product WHERE "name" = ?"""
+ ck_product = SQL_EXECUTE(cur, sql, params=(ck_product_name,)).fetchone()
+ product_id_cache[ck_product_name] = ck_product_id
+
+ # Create CkPackage2CkProduct
+ sql = ''' INSERT INTO cve_checker_ckpackage2ckproduct (ck_package_id,ck_product_id,cvesInRecord) VALUES (?, ?, ?)'''
+ params = (ck_package_id,ck_product_id,('Yes'==ck_cvesInRecord))
+ SQL_EXECUTE(cur, sql, params)
+ SQL_COMMIT(conn)
+
+ # Fetch or create CVE records for issues
+ unpatched_cnt = 0
+ ignored_cnt = 0
+ patched_cnt = 0
+ for issue in package['issue']:
+ issue_cnt += 1
+ issue_id = issue['id']
+ ck_status,orm_status = status2orm_ck(issue['status'])
+ orm_comments = ''
+ orm_packages = ''
+ srtool_today = datetime.now()
+ print(f"CVE={issue_id}:Package={package_name}")
+
+ # increment status sums
+ if CK_UNPATCHED == ck_status:
+ unpatched_cnt += 1
+ elif CK_IGNORED == ck_status:
+ ignored_cnt += 1
+ elif CK_UNPATCHED == ck_status:
+ patched_cnt += 1
+
+ orm_cve_id = 0
+ if issue_id in cve_id_cache:
+ orm_cve_id = cve_id_cache[issue_id]
+ cve_id_cache_hit += 1
+ if not orm_cve_id:
+ sql = f"""SELECT * FROM orm_cve WHERE "name" = ?"""
+ orm_cve = SQL_EXECUTE(cur, sql, params=(issue_id,)).fetchone()
+ if orm_cve:
+ orm_cve_id = orm_cve['id']
+ if not orm_cve_id:
+ # Create a placehold CVE record until is it published and imported from NVD
+ sql_elements = [
+ 'name',
+ 'name_sort',
+ 'priority',
+ 'status',
+ 'comments',
+ 'comments_private',
+ 'tags',
+ 'cve_data_type',
+ 'cve_data_format',
+ 'cve_data_version',
+ 'public',
+ 'publish_state',
+ 'publish_date',
+ 'acknowledge_date',
+ 'description',
+ 'publishedDate',
+ 'lastModifiedDate',
+ 'recommend',
+ 'recommend_list',
+ 'cvssV3_baseScore',
+ 'cvssV3_baseSeverity',
+ 'cvssV2_baseScore',
+ 'cvssV2_severity',
+ 'packages',
+ 'srt_updated',
+ 'srt_created',
+ ]
+ sql_qmarks = []
+ for i in range(len(sql_elements)):
+ sql_qmarks.append('?')
+ sql_values = (
+ issue_id,
+ get_name_sort(issue_id),
+ cve_scores2priority(issue['scorev2'],issue['scorev3']),
+ orm_status,
+ orm_comments,
+ '',
+ '',
+ '',
+ '',
+ '',
+ True,
+ ORM.PUBLISH_UNPUBLISHED,
+ '',
+ None,
+ issue['summary'],
+ '',
+ '',
+ '',
+ '',
+ issue['scorev3'],
+ cve_score2severity(issue['scorev3']),
+ issue['scorev2'],
+ cve_score2severity(issue['scorev2']),
+ orm_packages,
+ srtool_today,
+ srtool_today
+ )
+ sql, params = 'INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values
+ SQL_EXECUTE(cur, sql, params)
+ orm_cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ added_cve += 1
+ # Commit the new CVE and history
+ SQL_COMMIT(conn)
+
+ # Update package status sums
+ update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from CVE Checker')
+ sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (orm_cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_COMMIT(conn)
+
+ cve_id_cache[issue_id] = orm_cve_id
+
+ # Create CkPackage2Cve
+ sql = ''' INSERT INTO cve_checker_ckpackage2cve (ck_package_id,orm_cve_id,ck_status,ck_audit_id) VALUES (?,?,?,?)'''
+ SQL_EXECUTE(cur, sql, (ck_package_id,orm_cve_id,ck_status,ck_audit_id,))
+
+ # Update counts in the CK_Package
+ sql = ''' UPDATE cve_checker_ck_package
+ SET unpatched_cnt = ?, ignored_cnt = ?, patched_cnt = ?
+ WHERE id=?'''
+ SQL_EXECUTE(cur, sql, (unpatched_cnt,ignored_cnt,patched_cnt,ck_package_id))
+
+ # Commit these records
+ SQL_COMMIT(conn)
+
+
+ if verbose:
+ Ck_Audit_cnt, Ck_Package_cnt, Ck_Product_cnt, Ck_Layer_cnt, CkPackage2Cve_cnt, CkPackage2CkProduct_cnt = count_ck_records(cur)
+ print(f"Packages = {len(elem_packages)}")
+ print(f"Ck_Audit diff = {Ck_Audit_cnt - Ck_Audit_org}")
+ print(f"Ck_Package diff = {Ck_Package_cnt - Ck_Package_org}")
+ print(f"Ck_Product diff = {Ck_Product_cnt - Ck_Product_org}")
+ print(f"Ck_Layer diff = {Ck_Layer_cnt - Ck_Layer_org}")
+ print(f"CkPackage2Cve diff = {Ck_Audit_cnt - Ck_Layer_org}")
+ print(f"CkPackage2CkProduct diff = {Ck_Audit_cnt - Ck_Audit_org}")
+ print(f"Issue count = {issue_cnt}")
+ print(f"Added Orm_CVE records = {added_cve}")
+ print(f"layer_id_cache_hit = {layer_id_cache_hit}")
+ print(f"product_id_cache_hit = {product_id_cache_hit}")
+ print(f"cve_id_cache_hit = {cve_id_cache_hit}")
+
+ progress_done('Done')
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# update_imports
+#
+
def update_imports():
    """Refresh the 'select_list' of available audit JSON sources for each
    CkUploadManager channel (Repo, SSL, or File import modes).

    A channel is skipped when it was refreshed within the last 10 minutes,
    unless the global 'force_update' flag is set.  Fixed: removed the
    leftover FOO1/FOO2/FOO/BAR debugging prints.
    """
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)
    now = datetime.now(pytz.utc)

    _log("Update Import channel lists")
    sql = """SELECT * FROM cve_checker_CkUploadManager"""
    for ck_import in SQL_EXECUTE(cur, sql, ).fetchall():
        # Stored example: '2023-11-20T07:19:47.033Z'; truncate to microsecond
        # precision before parsing.
        # NOTE(review): the parse format uses a space separator while the
        # example above shows 'T' -- presumably the stored value is in the
        # space form after the first refresh; confirm against the writer.
        select_refresh = ck_import['select_refresh'][:26]
        select_refresh = datetime.strptime(select_refresh,'%Y-%m-%d %H:%M:%S.%f')
        select_refresh = select_refresh.replace(tzinfo=pytz.utc)
        # Throttle: update no more than every 10 minutes
        delta = now - select_refresh
        if (1 > delta.days) and ((10 * 60) > delta.seconds) and (not force_update):
            continue

        ck_list = []
        if 'Repo' == ck_import['import_mode']:
            # Isolate the repo's directory name from the local path (first dir)
            repo_dir_name = ck_import['path']
            pos = repo_dir_name.find('/')
            if pos > 0:
                repo_dir_name = repo_dir_name[0:pos]
            repo_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,repo_dir_name)
            repo_url = ck_import['repo']
            repo_branch = ck_import['branch']

            # Insure that the repo is present and updated
            _log("Prepare repo")
            prepare_git(repo_dir,repo_url,repo_branch)

            # Is the selector a file?
            if ck_import['path'].endswith('.json'):
                pass
            else:
                # Gather files from this sub-directory (first level only)
                json_dir = os.path.join(srtool_basepath,CK_LOCAL_DIR,ck_import['path'])
                for root, dirs, files in os.walk(json_dir,topdown=True):
                    for dir in dirs:
                        ck_list.append(dir)
                    for file in files:
                        if file.endswith('.json'):
                            ck_list.append(file)
                    # Only the first level
                    break

        elif 'SSL' == ck_import['import_mode']:
            # Remote listing over ssh, e.g. path 'host:/dir'
            host,path = ck_import['path'].split(':')
            cmnd = ['ssh','-i', ck_import['pem'], host, 'ls', path]
            exec_returncode,exec_stdout,exec_stderr = execute_process(*cmnd)
            for line in exec_stdout.splitlines():
                ck_list.append(line.strip())

        elif 'File' == ck_import['import_mode']:
            # Is the selector a file?
            if ck_import['path'].endswith('.json'):
                # Put the file's name in the list
                ck_list.append(os.path.basename(ck_import['path']))
            else:
                # Gather files from this sub-directory (first level only)
                json_dir = ck_import['path']
                for root, dirs, files in os.walk(json_dir,topdown=True):
                    for dir in dirs:
                        ck_list.append(dir)
                    for file in files:
                        if file.endswith('.json'):
                            ck_list.append(file)
                    # Only the first level
                    break

        if ck_list:
            ck_list.sort()
            print(f"FOUND_SELECTS[{ck_import['id']}]={ck_list}:")
            sql = ''' UPDATE cve_checker_CkUploadManager
                SET select_list=?, select_refresh = ?
                WHERE id=?'''
            SQL_EXECUTE(cur, sql, ('|'.join(ck_list),now,ck_import['id']))

    SQL_COMMIT(conn)
    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# new_to_historical
+#
+# For boot strapping an installation,
+# set triage CVE set going forward
+#
+# Range is either 'all', or all CVEs
+# before a given end published date
+#
+
def new_to_historical(end_date):
    """Bootstrap helper: flip 'New' CVEs to 'Historical'.

    end_date: 'all' to convert every New CVE, or 'YYYY-MM-DD' to convert
    only CVEs published on or before that date (string comparison on the
    ISO date, which sorts chronologically).  Honors the global 'test' flag
    (dry-run: report but do not commit).
    """
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)

    is_all = True
    if 'all' != end_date:
        is_all = False
        # Validate the date format up front.
        # Fixed: catch only the parse failure instead of a bare 'except:'
        try:
            datetime.strptime(end_date,'%Y-%m-%d')
        except ValueError:
            print(f"ERROR: pub date not in YYYY-MM-DD: '{end_date}'")
            exit(1)

    # SRTool Status values
    HISTORICAL = 0
    NEW = 1
    # per-year counters: [found, changed, kept]
    status_changes = {}

    sql = """SELECT name,publishedDate,id FROM orm_cve where status = ?"""
    for cve in SQL_EXECUTE(cur, sql, (NEW,) ).fetchall():
        name = cve['name']

        # Year bucket, e.g. 'CVE-2023' from 'CVE-2023-1234'
        cve_year = name[:name.find('-',5)]
        if not cve_year in status_changes:
            status_changes[cve_year] = [0,0,0]
        status_changes[cve_year][0] += 1

        # Keep (skip) CVEs published after the cutoff
        if (not is_all) and (cve['publishedDate'] > end_date):
            status_changes[cve_year][2] += 1
            continue
        status_changes[cve_year][1] += 1

        if not test:
            sql = ''' UPDATE orm_cve
                SET status=?
                WHERE id=?'''
            SQL_EXECUTE(cur, sql, (HISTORICAL,cve['id']))

    print("\n Results")
    print("Year Found Changed Kept")
    for cve_year in sorted(status_changes.keys()):
        print(f"{cve_year}: {status_changes[cve_year][0]:7} {status_changes[cve_year][1]:7} {status_changes[cve_year][2]:7}")

    if not test:
        SQL_COMMIT(conn)
    else:
        print(f"NOTE: changes not committed due to 'test' flag")

    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# main loop
+#
+
def main(argv):
    """Command-line entry point for the CVE Checker results importer.

    Parses options, publishes the debugging globals, and dispatches to the
    selected command.  Returns 0 on success, 1 on unknown command.
    """
    global verbose
    global test
    global force_update
    global cmd_count
    global cmd_skip

    parser = argparse.ArgumentParser(description='srtool_cve_checker.py: CVE Checker results import')

    parser.add_argument('--import-cvechk', '-i', dest='import_cvechk', help='Import an audit channel')
    parser.add_argument('--audit-name', '-n', dest='audit_name', help='Name for audit')
    parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')

    parser.add_argument('--update-imports', '-u', action='store_true', dest='update_imports', help='Update the import lists')
    parser.add_argument('--new-to-historical', dest='new_to_historical', help="Change 'new' cves to 'historical' for 'all' or since pub date [all|yyyy-mm-dd]")

    # Test
    parser.add_argument('--validate-cvechk-ab', '-V', dest='validate_cvechk_ab', help='Validate the AB cve-checker JSON file')

    # Debugging support
    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
    parser.add_argument('--local-job', action='store_true', dest='local_job', help='Use local job')
    args = parser.parse_args()

    ret = 0
    verbose = args.verbose
    test = args.test
    force_update = args.force_update
    cmd_count = int(args.count) if args.count else 0
    cmd_skip = int(args.skip) if args.skip else 0
    progress_set_on(args.do_progress)

    if args.validate_cvechk_ab:
        validate_cvechk_ab(args.validate_cvechk_ab)
    elif args.import_cvechk:
        import_cvechk(args.import_cvechk,args.audit_name)
    elif args.update_imports:
        update_imports()
    elif args.new_to_historical:
        new_to_historical(args.new_to_historical)
    # Fixed: removed the 'args.drop_ck_tables' branch -- no such argument is
    # defined by this parser (the command lives in srtool_cvechecker_util.py),
    # so the attribute access raised AttributeError whenever no other
    # command matched.
    else:
        print("srtool_cve_checker.py:Command not found")
        ret = 1

    progress_done('Done')
    return(ret)
+
+
if __name__ == '__main__':
    # SRTool root = three directories above this script (bin/cve_checker/<this file>)
    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
    exit( main(sys.argv[1:]) )
+
diff --git a/bin/cve_checker/srtool_cvechecker_util.py b/bin/cve_checker/srtool_cvechecker_util.py
new file mode 100755
index 00000000..68498653
--- /dev/null
+++ b/bin/cve_checker/srtool_cvechecker_util.py
@@ -0,0 +1,465 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# NOTE: this is a fix-it file, to fix broken tables and late global
+# changes. The fix-its are preserved for reference and re-use.
+#
+
import argparse
import json
import logging
import os
import subprocess
import sys
import time
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+# data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+REMOTE_PATH = ''
+LOCAL_DIR = 'data/cve_checker/yocto-metrics'
+BRANCH = ''
+
+# From lib/cve_check/views.py
+CK_UNDEFINED = 0
+CK_UNPATCHED = 1
+CK_IGNORED = 2
+CK_PATCHED = 3
+
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+def _log(msg):
+ DBG_LVL = os.environ['SRTDBG_LVL'] if ('SRTDBG_LVL' in os.environ) else 2
+ DBG_LOG = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srt_dbg.log'
+ if 1 == DBG_LVL:
+ print(msg)
+ elif 2 == DBG_LVL:
+ f1=open(DBG_LOG, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
def execute_process(*args):
    """Run a subprocess built from string-convertible arguments.

    List/tuple arguments are flattened one level; falsy entries are dropped.
    Enforces that all scripts run from the SRT_BASE_DIR context (re:WSGI).
    Returns (returncode, stdout_text, stderr_text).
    """
    # Normalize everything to a flat list of non-empty strings
    command = []
    for arg in args:
        items = arg if isinstance(arg, (list, tuple)) else [arg]
        for item in items:
            if item:
                command.append(str(item))

    base_dir = os.environ.get('SRT_BASE_DIR')
    if base_dir and (base_dir != os.getcwd()):
        os.chdir(base_dir)
        # Re-root relative script paths against the SRT base directory
        if command[0].startswith('bin/') or command[0].startswith('./bin'):
            command[0] = os.path.join(base_dir, command[0])

    proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return (proc.returncode,
            proc.stdout.decode('utf-8'),
            proc.stderr.decode('utf-8'))
+
def srtsetting_get(conn,key,default_value,is_dict=True):
    """Look up an orm_srtsetting value by name.

    Returns default_value when the setting is missing or the query fails.
    is_dict selects dict-style row access vs positional (ORM index) access.
    """
    cur = SQL_CURSOR(conn)
    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
    try:
        row = SQL_EXECUTE(cur, sql,(key,)).fetchone()
        if row:
            return row['value'] if is_dict else row[ORM.SRTSETTING_VALUE]
    except Exception as e:
        # Best-effort: report and fall through to the default
        print(f"ERROR:{e}")
    return default_value
+
def srtsetting_set(conn,key,value,is_dict=True):
    """Insert or update the orm_srtsetting record named 'key' with 'value'.

    is_dict selects dict-style row access vs positional (ORM index) access.
    Commits the change before returning.
    """
    cur = SQL_CURSOR(conn)
    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
    srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
    if not srtsetting:
        sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
        SQL_EXECUTE(cur, sql, (key,'',value))
        # Fixed: status messages were missing their f-string prefix and
        # printed the literal '{key}' text
        print(f"INSERT:{key}:{value}:")
    else:
        # Fixed: honor is_dict for the id access (the print previously
        # always used the positional ORM index)
        srtsetting_id = srtsetting['id'] if is_dict else srtsetting[ORM.SRTSETTING_ID]
        print(f"UPDATE[{srtsetting_id}]:{key}:{value}:")
        sql = ''' UPDATE orm_srtsetting
            SET value=?
            WHERE id=?'''
        SQL_EXECUTE(cur, sql, (value,srtsetting_id))
    SQL_COMMIT(conn)
+
def do_chdir(newdir,delay=0.200):
    """Change directory, then pause briefly.

    WARNING: without the pause the chdir can break subsequent commands
    (e.g. 'git clone' and 'git checkout').
    """
    import time  # fix: 'time' was used without being imported at module level
    os.chdir(newdir)
    time.sleep(delay)
+
def do_makedirs(newdir,delay=0.200):
    """Create a directory tree (ignoring 'already exists'), then pause briefly.

    WARNING: without the pause the makedirs could break subsequent commands
    (e.g. 'git clone' and 'git checkout').
    """
    import time  # fix: 'time' was used without being imported at module level
    # exist_ok matches the original intent ("dir already exists") without
    # the bare except that also hid real errors (e.g. permissions)
    os.makedirs(newdir, exist_ok=True)
    time.sleep(delay)
+
def execute_commmand(cmnd,path=''):
    """Run 'cmnd' (optionally from directory 'path'), reporting failures.

    Returns 1 on command failure, 0 on success.  The caller's working
    directory is always restored when 'path' was given.
    """
    cwd = os.getcwd()
    if path:
        do_chdir(path)
    try:
        result_returncode,result_stdout,result_stderr = execute_process(cmnd)
        if 0 != result_returncode:
            print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
            # Fixed: this message was missing its f-string prefix
            print(f"ERROR({result_returncode}):{result_stderr}")
            return(1)
        if verbose:
            print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
        return(0)
    finally:
        # Fixed: the original early error return skipped this restore
        if path:
            do_chdir(cwd)
+
+# Insure the git repo is cloned and available
def prepare_git(repo_dir,repo_url,branch):
    """Insure the git repo is cloned locally, on 'branch', and up to date."""
    if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
    if not os.path.isdir(repo_dir):
        repo_parent_dir = os.path.dirname(repo_dir)
        do_makedirs(repo_parent_dir)
        # Fixed: messages reported the module REMOTE_URL/BRANCH constants
        # instead of the actual repo_url/branch parameters (the branch
        # message was also missing its f-string prefix)
        print(f"= Clone '{repo_url}' ... =")
        cmnd=['git','clone',repo_url]
        execute_commmand(cmnd,repo_parent_dir)
    else:
        print(f"= Clone '{repo_url}' skip ... =")

    if branch:
        print(f"= Checkout branch '{branch}' ... =")
        cmnd=['git','-C',repo_dir,'checkout',branch]
        execute_commmand(cmnd)

    # Get the latest data with a safety pull
    print("= Pull ... =")
    cmnd=['git','-C',repo_dir,'pull']
    execute_commmand(cmnd)
+
+# Compute a sortable CVE name
def get_name_sort(cve_name):
    """Return a sortable CVE name with the sequence zero-padded to 7 digits.

    e.g. 'CVE-2023-1234' -> 'CVE-2023-0001234'.  Names that do not parse
    are returned unchanged.
    """
    try:
        parts = cve_name.split('-')
        return '%s-%s-%07d' % (parts[0], parts[1], int(parts[2]))
    except Exception:
        return cve_name
+
def score2cve_score(score):
    """Coerce a CVSS score to float; empty/invalid/None values become 0.0."""
    # Fixed: catch only conversion failures instead of a bare 'except:'
    try:
        return float(score)
    except (TypeError, ValueError):
        return 0.0
+
def cve_score2severity(score):
    """Map a CVSS score to a severity label.

    Bands: <2.5 Low, <5.0 Medium, <7.5 High, else Critical.
    NOTE(review): these bands differ from the official CVSS v3 ranges --
    presumably an intentional project simplification.
    """
    value = score2cve_score(score)
    for limit, label in ((2.5, "Low"), (5.0, "Medium"), (7.5, "High")):
        if value < limit:
            return(label)
    return("Critical")
+
def cve_scores2priority(score_v2,score_v3):
    """Map the larger of the CVSS v2/v3 scores to an ORM priority constant.

    Bands: <2.5 Low, <5.0 Medium, <7.5 High, else Critical.
    """
    value = max(score2cve_score(score_v2), score2cve_score(score_v3))
    if value < 2.5:
        return(ORM.PRIORITY_LOW)
    if value < 5.0:
        return(ORM.PRIORITY_MEDIUM)
    if value < 7.5:
        return(ORM.PRIORITY_HIGH)
    return(ORM.PRIORITY_CRITICAL)
+
def status2orm_ck(status):
    """Translate a cve-checker status string to a (CK status, ORM status)
    pair; unknown strings map to (CK_UNDEFINED, STATUS_NEW)."""
    mapping = {
        'Unpatched': (CK_UNPATCHED, ORM.STATUS_VULNERABLE),
        'Patched':   (CK_PATCHED,   ORM.STATUS_NOT_VULNERABLE),
        'Ignored':   (CK_IGNORED,   ORM.STATUS_NOT_VULNERABLE),
    }
    return mapping.get(status, (CK_UNDEFINED, ORM.STATUS_NEW))
+
def count_ck_records(cur):
    """Return the row counts of the six cve_checker tables, in a fixed order:
    (Ck_Audit, Ck_Package, Ck_Product, Ck_Layer, CkPackage2Cve,
    CkPackage2CkProduct)."""
    tables = (
        'cve_checker_Ck_Audit',
        'cve_checker_Ck_Package',
        'cve_checker_Ck_Product',
        'cve_checker_Ck_Layer',
        'cve_checker_CkPackage2Cve',
        'cve_checker_CkPackage2CkProduct',
    )
    counts = []
    for table in tables:
        cur.execute(f"SELECT COUNT(*) FROM {table}")
        counts.append(cur.fetchone()[0])
    return tuple(counts)
+
+#################################
+# drop_ck_tables
+#
+# Drop it all and start over
+#
+
def drop_ck_tables():
    """Drop all six cve_checker tables and forget their Django migrations,
    so the app's schema can be rebuilt from scratch."""
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)

    for table in ('cve_checker_Ck_Audit',
                  'cve_checker_Ck_Package',
                  'cve_checker_Ck_Product',
                  'cve_checker_Ck_Layer',
                  'cve_checker_CkPackage2Cve',
                  'cve_checker_CkPackage2CkProduct'):
        SQL_EXECUTE(cur, f"DROP TABLE {table}")

    # Remove the migration bookkeeping so Django will re-create the tables
    SQL_EXECUTE(cur, f"DELETE FROM django_migrations WHERE app = ?",("cve_checker",))

    SQL_COMMIT(conn)
    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# fix_orm_cvehistory
+#
+# Some orm_cvehistory records were created with the CVE name instead of
+# the CVE's record ID
+#
+
def fix_orm_cvehistory():
    """Fix-it: some orm_cvehistory records were created with the CVE name
    (e.g. 'CVE-...') in cve_id instead of the CVE record's integer ID.

    Rewrites those records to use the real ID, appending each CVE name to
    'ck_add_cve.txt'.  Commits only when the global 'force_update' is set.
    """
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)

    # Fixed: manage the log file with a context manager so it is closed
    # on every exit path (it was previously left open on exceptions)
    with open("ck_add_cve.txt", "a") as ck_add_cve:
        sql = f"""SELECT id,cve_id FROM orm_cvehistory WHERE cve_id LIKE ? || '%'"""
        cvehistories = SQL_EXECUTE(cur, sql, params=('CVE',)).fetchall()
        for i,cvehistory in enumerate(cvehistories):
            # Lightweight progress indicator every 20 records
            if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)

            cve_name = cvehistory['cve_id']
            ck_add_cve.write(cve_name+'\n')

            sql = f"""SELECT id,name FROM orm_cve WHERE name = ?"""
            cve = SQL_EXECUTE(cur, sql, params=(cve_name,)).fetchone()
            if cve:
                sql = ''' UPDATE orm_cvehistory
                    SET cve_id=?
                    WHERE id=?'''
                SQL_EXECUTE(cur, sql, (cve['id'],cvehistory['id']))
            else:
                print(f"ERROR: could not find CVE '{cve_name}'")

        print(f"Bad cvehistories = {len(cvehistories)}")

    # Dry-run by default: commit is gated on the --force flag
    if force_update:
        SQL_COMMIT(conn)
    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# fix_issue_to_audit
+#
+# The audit link for CkPackage2Cve records was added late. Catch up the existing records.
+#
+
def fix_issue_to_audit():
    """Fix-it: the ck_audit link on CkPackage2Cve records was added late.

    Walks every audit -> package -> issue link and back-fills the missing
    ck_audit_id.  Commits only when the global 'force_update' is set
    (dry-run by default); counts are reported either way.
    """
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)

    i = 0                # packages processed (for the progress indicator)
    issue_cnt = 0        # total issue links examined
    issues_to_fix = 0    # links that were missing their audit id
    sql = f"""SELECT id FROM cve_checker_ck_audit"""
    for ck_audit in SQL_EXECUTE(cur, sql, ).fetchall():
        ck_audit_id = ck_audit['id']
        print(f"\nAUDIT #{ck_audit_id} ...")

        sql = f"""SELECT id FROM cve_checker_ck_package WHERE ck_audit_id = ?"""
        for ck_package in SQL_EXECUTE(cur, sql, (ck_audit_id,)).fetchall():
            ck_package_id = ck_package['id']

            # Lightweight progress indicator every 20 packages
            if 0 == (i % 20): print(f"{i:4}\r",end='',flush=True)
            i += 1

            sql = f"""SELECT id,ck_audit_id FROM cve_checker_ckpackage2cve WHERE ck_package_id = ?"""
            for ckpackage2cve in SQL_EXECUTE(cur, sql, (ck_package_id,)).fetchall():
                issue_cnt += 1

                # Already linked to an audit: nothing to do
                if ckpackage2cve['ck_audit_id']:
                    continue

                issues_to_fix += 1
                sql = ''' UPDATE cve_checker_ckpackage2cve
                    SET ck_audit_id=?
                    WHERE id=?'''
                SQL_EXECUTE(cur, sql, (ck_audit_id,ckpackage2cve['id'],))

    # NOTE(review): commit is gated on the --force flag (dry-run by default)
    if force_update:
        SQL_COMMIT(conn)

    print(f"issue_cnt = {issue_cnt}")
    print(f"issues_to_fix = {issues_to_fix}")

    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# add_status_counts_to_packages
+#
+# Populate the added status counts to package records
+#
+
def add_status_counts_to_packages():
    """Fix-it: back-fill the unpatched/ignored/patched counts on existing
    cve_checker_ck_package records from their CkPackage2Cve issue rows.

    Commits only when the global 'force_update' is set.  The global
    'cmd_count' limits how many packages are processed (debugging).
    """
    conn = SQL_CONNECT(column_names=True)
    cur = SQL_CURSOR(conn)

    sql = f"""SELECT id,name FROM cve_checker_ck_package"""
    for index, package in enumerate(SQL_EXECUTE(cur, sql, ).fetchall()):
        # Debugging support: optional short run
        if cmd_count and (index > cmd_count):
            break

        def status_total(ck_status):
            # Count this package's issue links carrying the given status
            cur.execute(f"SELECT COUNT(*) FROM cve_checker_ckpackage2cve WHERE ck_package_id = ? AND ck_status = ?", (package['id'],ck_status,))
            return cur.fetchone()[0]

        unpatched_cnt = status_total(CK_UNPATCHED)
        ignored_cnt = status_total(CK_IGNORED)
        patched_cnt = status_total(CK_PATCHED)

        if verbose:
            print(f"PACKAGE:{package['name']}:{unpatched_cnt}:{ignored_cnt}:{patched_cnt}:")

        sql_update = ''' UPDATE cve_checker_ck_package
            SET unpatched_cnt=?, ignored_cnt=?, patched_cnt=?
            WHERE id=?'''
        SQL_EXECUTE(cur, sql_update, (unpatched_cnt,ignored_cnt,patched_cnt,package['id'],))

    # Dry-run by default: commit is gated on the --force flag
    if force_update:
        SQL_COMMIT(conn)

    print("Done.")

    SQL_CLOSE_CUR(cur)
    SQL_CLOSE_CONN(conn)
+
+#################################
+# main loop
+#
+
def main(argv):
    """Command-line entry point for the CVE Checker fix-it utilities.

    Parses options, publishes the debugging globals, and dispatches to the
    selected fix-it.  Returns 0 on success, 1 on unknown command.
    """
    global verbose
    global test
    global force_update
    global cmd_count
    global cmd_skip

    parser = argparse.ArgumentParser(description='srtool_cve_checker_util.py: Fix CVE Checker results import')

    parser.add_argument('--drop-ck-tables', action='store_true', dest='drop_ck_tables', help='Drop Cve_Check tables and start again')
    parser.add_argument('--fix-orm-cvehistory', action='store_true', dest='fix_orm_cvehistory', help='Fix cvehistory created with CVE name instead of id')
    parser.add_argument('--fix-issue-to-audit', action='store_true', dest='fix_issue_to_audit', help='Populate added audit ID to issue records')
    parser.add_argument('--add-counts-to-packages', action='store_true', dest='counts_to_packages', help='Populate status counts in package records')

    # Debugging support
    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
    args = parser.parse_args()

    # Publish the option flags as module globals for the fix-it routines
    verbose = args.verbose
    test = args.test
    force_update = args.force_update
    if args.count is not None:
        cmd_count = int(args.count)
    if args.skip is not None:
        cmd_skip = int(args.skip)

    ret = 0
    if args.drop_ck_tables:
        drop_ck_tables()
    elif args.fix_orm_cvehistory:
        fix_orm_cvehistory()
    elif args.fix_issue_to_audit:
        fix_issue_to_audit()
    elif args.counts_to_packages:
        add_status_counts_to_packages()
    else:
        print("srtool_cve_checker_util.py:Command not found")
        ret = 1

    progress_done('Done')
    return(ret)
+
+
if __name__ == '__main__':
    # SRTool root = three directories above this script (bin/cve_checker/<this file>)
    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
    exit( main(sys.argv[1:]) )
+
diff --git a/bin/cve_checker/srtool_env.sh b/bin/cve_checker/srtool_env.sh
new file mode 100755
index 00000000..e8cb3ffa
--- /dev/null
+++ b/bin/cve_checker/srtool_env.sh
@@ -0,0 +1,4 @@
+# Main application shell settings
+
+
+
diff --git a/bin/dev_tools/db_migration_config_sample.yml b/bin/dev_tools/db_migration_config_sample.yml
new file mode 100755
index 00000000..5d5c9e37
--- /dev/null
+++ b/bin/dev_tools/db_migration_config_sample.yml
@@ -0,0 +1,42 @@
+source:
+ name: sqlite_production
  type: sqlite
+destination:
+ name: postgres_production
+ type: postgres
+
+sqlite_production:
+ path: srt.sqlite
+
+sqlite_development:
+ path: srt_dev.sqlite
+
+postgres_production:
+ host: localhost
+ user: admin
+ password: password
+ database: srtool
+ port: 5432
+
+postgres_development:
+ host: localhost
+ user: admin
+ password: password
+ database: srtool_dev
+ port: 5432
+
+mysql_production:
+ host: localhost
+ user: admin
+ password: password
+ db: srtool
+ port: 3306
+
+mysql_development:
+ host: localhost
+ user: admin
+ password: password
+ db: srtool_dev
+ port: 3306
+
+
diff --git a/bin/dev_tools/db_migrations.py b/bin/dev_tools/db_migrations.py
new file mode 100755
index 00000000..1fb65562
--- /dev/null
+++ b/bin/dev_tools/db_migrations.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+
+# Module Imports
+import sys
+import sqlite3
+try:
+ import MySQLdb
+except:
+ print("NOTE: 'MySQLdb' not currently installed")
+try:
+ import psycopg2
+except:
+ print("NOTE: 'psycopg2' not currently installed")
+##from tqdm import tqdm
+import time
+from progress.bar import Bar
+from pick import pick
+import yaml
+import os
+import argparse
+
+# Global variables
+verbose = False
+cmd_skip = 0
+cmd_count = 0
+
+def get_connection(config, db_type):
+ if db_type == "sqlite":
+ return sqlite3.connect(config['path'])
+ elif db_type == "mysql":
+ return MySQLdb.connect(**config)
+ else:
+ return psycopg2.connect(**config)
+
+def get_connections(config):
+ source_conn = get_connection(config[config['source']['name']], config['source']['type'])
+ dest_conn = get_connection(config[config['destination']['name']], config['destination']['type'])
+ return source_conn, dest_conn
+
+# Returns foreign key list for a given table
+def get_foreign_key_list(conn, table, source_type="sqlite"):
+ cur = conn.cursor()
+ if verbose: print("TABLE:%s" % table)
+ if source_type == "sqlite":
+ sql = f"""PRAGMA foreign_key_list({table});"""
+ else:
+ print(f"""ERROR: foreign key search for '{source_type}' databases not yet supported""")
+ exit(1)
+ cur.execute(sql)
+ foreign_keys = []
+ for foreign_key in cur:
+ #Example sqlite: (0, 0, 'users_srtuser', 'user_id', 'id', 'NO ACTION', 'NO ACTION', 'NONE')
+ if verbose: print(" KEY:%s" % str(foreign_key))
+ foreign_keys.append(foreign_key[2])
+ return foreign_keys
+
+# Returns a dict keyed by table name; each value holds column names/types plus row counts from the source (and, if given, destination) connections
+def get_db_info(conn, dest_conn=None, source_type="sqlite", mysql_db=None):
+ sqlite_sql = """SELECT m.name as table_name, p.name as column_name, p.type as type FROM sqlite_master AS m JOIN pragma_table_info(m.name) AS p where table_name != 'sqlite_sequence' ORDER BY m.name, p.cid"""
+ mysql_sql = f"""SELECT * FROM information_schema.columns where table_name like '%%' and table_schema = '{mysql_db}' order by table_name,ordinal_position"""
+ pg_sql = """SELECT * FROM information_schema.columns where table_schema = 'public' order by table_name,ordinal_position"""
+ cur = conn.cursor()
+ sql = sqlite_sql if source_type == "sqlite" else mysql_sql if source_type == "mysql" else pg_sql
+ cur.execute(sql)
+ columns = cur.description
+ results = [{columns[index][0]:column for index, column in enumerate(value)} for value in cur.fetchall()]
+ if source_type != "sqlite":
+ results = [{'column_name': col['COLUMN_NAME'], 'table_name': col['TABLE_NAME'], 'type': col['DATA_TYPE']}for col in results]
+ tables = {}
+ for i in results:
+ if i['table_name'] not in tables:
+ tables[i['table_name']] = {'columns': [], 'types': []}
+ tables[i['table_name']]['columns'].append(i['column_name'])
+ tables[i['table_name']]['types'].append(i['type'])
+ for table in tables:
+ cur = conn.cursor()
+ sql = f"SELECT count(*) from {table}"
+ cur.execute(sql)
+ results = cur.fetchone()[0]
+ tables[table]['source_count'] = results
+ tables[table]['foreign_keys'] = get_foreign_key_list(conn, table, source_type)
+ if dest_conn is not None:
+ for table in tables:
+ cur = dest_conn.cursor()
+ sql = f"SELECT count(*) from {table}"
+ cur.execute(sql)
+ results = cur.fetchone()[0]
+ tables[table]['dest_count'] = results
+ return tables
+
+# Orders the table list from no foreign key dependencies to all satisfied
+def gen_table_order_sql(source_conn, tables):
+ # Ordered table names: goal state
+ table_names_ordered = []
+ # As yet un-ordered table names: initial state
+ table_names_unordered = []
+ for table in tables:
+ # Never overwrite the migrations table
+ if 'django_migrations' == table:
+ continue
+ table_names_unordered.append([table,tables[table]['foreign_keys'].copy()])
+
+ if verbose: print("Len(table_names_unordered) = %d" % len(table_names_unordered))
+ interation = 0
+ while len(table_names_unordered):
+ change = False
+ interation += 1
+ for i in range(len(table_names_unordered),0,-1):
+ i_index = i-1
+ if verbose: print("Pass %s:(%s)=%s" % (interation,i_index,str(table_names_unordered[i_index])))
+ table_name = table_names_unordered[i_index][0]
+ foreign_keys = table_names_unordered[i_index][1]
+ # If newly satisfied dependency, remove dependency
+ if foreign_keys:
+ for j in range(len(foreign_keys),0,-1):
+ j_index = j-1
+ # Found in resolved ordered list
+ if foreign_keys[j_index] in table_names_ordered:
+ del table_names_unordered[i_index][1][j_index]
+ change = True
+ # If no pending dependencies, promote
+ if not foreign_keys:
+ # No pending dependencies, so move
+ table_names_ordered.append(table_name)
+ # Remove old name from unordered list
+ del table_names_unordered[i_index]
+ change = True
+ if verbose: print(" * Promote:%s" % table_name)
+ # Sanity Check for unresolvable loops
+ if not change:
+ print("ERROR: Unresolvable table dependency loop")
+ for t in table_names_ordered:
+ print(" Resolved:%s" % t)
+ for t in table_names_unordered:
+ print(" Unresolved:%s" % str(t))
+ exit(1)
+ return table_names_ordered
+
+# Pre-clear the destination tables, in reverse dependency order (most to least)
+def clear_dest_tables(dest_conn, table_names_ordered, tables, destination_type):
+ bar = Bar('Pre-clearing destination tables', max=len(table_names_ordered))
+ success = True
+ cur = dest_conn.cursor()
+ for i in range(len(table_names_ordered),0,-1):
+ i_index = i-1
+ sql = "DELETE from %s;" % table_names_ordered[i_index]
+ try:
+ cur.execute(sql, None)
+ bar.next()
+ except Exception as e:
+ success = False
+ print(f"\n\nException:\n{e}\n\nSQL: {sql}\nparams: None")
+ break
+ bar.finish()
+ if success:
+ dest_conn.commit()
+
+# Transfer the tables, one by one
+def transfer_sql(source_conn, dest_conn, table_names_ordered, tables, source_type, destination_type):
+ source_cur = source_conn.cursor()
+ dest_cur = dest_conn.cursor()
+
+ print("Transfer_sql...")
+
+ if verbose:
+ bar_max = 0
+ for table in tables:
+ bar_max += int(tables[table]['source_count'])
+ else:
+ bar_max = len(table_names_ordered)
+ bar = Bar('Transfering data by table', max=bar_max)
+
+ for table in table_names_ordered:
+ success = True
+ count = 0
+
+ q = '`' if destination_type != "postgres" else '"'
+ tables[table]['columns'] = [f'{q}{i}{q}' for i in tables[table]['columns']]
+ sql = f"""SELECT {','.join(tables[table]['columns'])} from {table};"""
+ source_cur.execute(sql)
+ for entry_count,entry in enumerate(source_cur):
+ # Development/debug support
+ if cmd_skip and (entry_count < cmd_skip): continue
+ if cmd_count and ((entry_count - cmd_skip) > cmd_count): break
+
+ entry = list(entry)
+ if table == "orm_cve":
+ if entry[-2] == '' or (entry[-2] is not None and 'RESERVED' in entry[-2]):
+ entry[-2] = None # set acknowledge date to None if currently value is invalid
+ if destination_type == "postgres":
+ for i in range(len(entry)): # handle lack of booleans in non postgres
+ if "bool" in tables[table]['types'][i]:
+ entry[i] = entry[i] != 0
+
+ sql = f"""INSERT INTO {table} ({','.join(tables[table]['columns'])}) VALUES ({','.join(['%s'] * len(entry))});"""
+ try:
+ dest_cur.execute(sql, entry)
+ if verbose: bar.next()
+ # Commit batches as we go
+ count += 1
+ if 0 == (count % 100):
+ dest_conn.commit()
+ except Exception as e:
+ success = False
+ print(f"\n\nException:\n{e}\n\nSQL: {sql}\nparams: {entry}")
+ break
+
+ # Commit the balance of this table
+ if not verbose:
+ bar.next()
+ if success:
+ dest_conn.commit()
+ bar.finish()
+
+def run_tests(tables, source_conn, dest_conn):
+ print('running tests!')
+ matching_counts = 0
+ mismatched_tables = []
+ for table in tables:
+ table_info = tables[table]
+ if table_info['source_count'] == table_info['dest_count']:
+ matching_counts += 1
+ else:
+ mismatched_tables.append(table)
+
+ print(f'Matching Tables Counts between source and destination out of total tables:{matching_counts}/{len(tables)}')
+ print(f'Mismatched tables: {mismatched_tables}')
+ source_count = tables['orm_cve']['source_count']
+ dest_count = tables['orm_cve']['dest_count']
+ if source_count != dest_count:
+ print('orm_cve count does not match between source and destination, not checkin description lengths')
+ source_conn.close()
+ dest_conn.close()
+ return
+
+ source_curr = source_conn.cursor()
+ dest_curr = dest_conn.cursor()
+ query = 'select length(description) as dl, length(comments) as cl from orm_cve order by NAME LIMIT 1000 OFFSET '
+ mismatch = False
+ bars = source_count // 1000 + 1
+ print(f"Numbers of rows in orm_cve: {source_count}")
+ bar = Bar('Checking description lengths in batches of 1000', max=bars)
+ for i in range(bars):
+ offset_query = f'{query}{i * 1000}'
+ source_curr.execute(offset_query)
+ dest_curr.execute(offset_query)
+ columns = source_curr.description
+
+ source = [{columns[index][0]:column for index, column in enumerate(value)} for value in source_curr.fetchall()]
+ dest = [{columns[index][0]:column for index, column in enumerate(value)} for value in dest_curr.fetchall()]
+ mismatch = False
+ for i in range(len(source)):
+ if source[i]['dl'] != dest[i]['dl'] or source[i]['cl'] != dest[i]['cl']:
+ print(f'source:\n{source[i]}\n\ndestination: {dest[i]}\n\n')
+ mismatch = True
+ break
+ bar.next()
+ if mismatch:
+ break
+ bar.finish()
+ if mismatch:
+ print("Error: mismatched length of description in orm_cve")
+ else:
+ print("Success: Description and comment length matches for every row in orm_cve")
+ source_conn.close()
+ dest_conn.close()
+
+def repair_sequences_postgres(tables, dest_conn):
+ bar = Bar('Repairing table sequences', max=len(tables))
+ for table in tables:
+ id = 'id'
+ if table in ['django_session']:
+ bar.next()
+ continue
+ sql = f"SELECT setval(pg_get_serial_sequence('{table}', '{id}'), (SELECT MAX({id}) FROM {table})+1);"
+ cur = dest_conn.cursor()
+ try:
+ cur.execute(sql)
+ bar.next()
+ except Exception as e:
+ print(f"\n\nException:\n{e}\n\nSQL: {sql}\n")
+ break
+ bar.finish()
+
+def main(config, test=False, repair=False, show_order=False):
+ source_conn, dest_conn = get_connections(config)
+ mysql_db_name = config[config['source']['name']]['db'] if config['source']['type'] == "mysql" else None
+ tables = get_db_info(source_conn, dest_conn, config['source']['type'], mysql_db_name)
+ if repair:
+ repair_sequences_postgres(tables, dest_conn)
+ source_conn.close()
+ dest_conn.close()
+ return
+ if test:
+ run_tests(tables, source_conn, dest_conn)
+ return
+ _, select_table = pick(('all tables', 'select tables'), "Would you like to copy all tables, or specific tables for transfer?")
+ if select_table: # filter tables
+ selection = pick(list(tables.items()), f"Please Select which of {len(tables)} tables to copy (use space key to select).\nFormat: Table Name(Current Source Count:Current Destination Count)", multiselect=True, min_selection_count=1, options_map_func= lambda option: f"{option[0]}({option[1]['source_count']}:{option[1]['dest_count']})")
+ selection = [value[0] for value in selection ]
+ tables = {item[0]:item[1] for item in selection}
+
+    # Order the table names by foreign key dependencies
+ table_names_ordered = gen_table_order_sql(source_conn, tables)
+ if show_order:
+ print("Ordered Data Tables: %s" % len(table_names_ordered))
+ for i,table_name in enumerate(table_names_ordered):
+ print("%2d) %-30s %s" % (i+1, table_name, str(tables[table_name]['foreign_keys'])))
+ return
+ # Pre-clear the destination tables to remove obsolete data
+ clear_dest_tables(dest_conn, table_names_ordered, tables, config['destination']['type'])
+ # Transfer the tables, one by one
+ transfer_sql(source_conn, dest_conn, table_names_ordered, tables, config['source']['type'], config['destination']['type'])
+ # Fix up the table sequences
+ repair_sequences_postgres(tables, dest_conn)
+ source_conn.close()
+ dest_conn.close()
+
+if __name__ == "__main__":
+ my_parser = argparse.ArgumentParser(description='DB Migration Script (Postgres/Sqlite/MySql)')
+ my_parser.add_argument('--path',default="db_migration_config.yml", type=str,help='the path to configuration file, default is ./db_migration_config.yml')
+ my_parser.add_argument('--test',default=False, action="store_true", help='Whether to test migration')
+ my_parser.add_argument('--repair', default=False, action="store_true", help="Whether to repair postgres sequences if destination is postgres database")
+ my_parser.add_argument('--show-order', '-o', default=False, action="store_true", dest="show_order", help="Show tables in least to most dependency order")
+ my_parser.add_argument('--verbose', '-v', default=False, action="store_true", dest="verbose", help="Verbose information")
+ my_parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ my_parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ args = my_parser.parse_args()
+
+ verbose = args.verbose
+ if args.skip:
+ cmd_skip = int(args.skip)
+ if args.count:
+ cmd_count = int(args.count)
+
+ with open(args.path, "r") as ymlfile:
+ config = yaml.safe_load(ymlfile)
+ main(config, test=args.test, repair=args.repair, show_order=args.show_order)
+
diff --git a/bin/dev_tools/dump_jason.py b/bin/dev_tools/dump_jason.py
new file mode 100755
index 00000000..979dce67
--- /dev/null
+++ b/bin/dev_tools/dump_jason.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+
+with open(sys.argv[1], 'r') as handle:
+ parsed = json.load(handle)
+ print(json.dumps(parsed, indent=4, sort_keys=True)) \ No newline at end of file
diff --git a/bin/dev_tools/dump_jason.sh b/bin/dev_tools/dump_jason.sh
new file mode 100755
index 00000000..5e57b088
--- /dev/null
+++ b/bin/dev_tools/dump_jason.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2018 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import json
+import os
+import sys
+
+with open('filename.txt', 'r') as handle:
+ parsed = json.load(handle)
+ print(json.dumps(parsed, indent=4, sort_keys=True)) \ No newline at end of file
diff --git a/bin/dev_tools/history.py b/bin/dev_tools/history.py
index 90798747..654555ff 100755
--- a/bin/dev_tools/history.py
+++ b/bin/dev_tools/history.py
@@ -25,7 +25,7 @@ import os
import sys
import argparse
from datetime import datetime, date, timedelta
-import sqlite3
+from common.srtool_sql import *
import re
import subprocess
@@ -123,7 +123,7 @@ def trace(item):
stamp_day = re.sub(',.*','',stamp_day)
stamp_text = '%s,%-9s %8s' % (stamp[0],stamp_day,stamp_date)
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
if item.startswith('CVE-'):
@@ -132,7 +132,7 @@ def trace(item):
status = ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR)
print("%s] %-16s, %s, %s %s , %s %s " % (stamp_text, cve[ORM.CVE_NAME], status, cve[ORM.CVE_CVSSV3_BASESCORE],cve[ORM.CVE_CVSSV3_BASESEVERITY],cve[ORM.CVE_CVSSV2_BASESCORE],cve[ORM.CVE_CVSSV2_SEVERITY]))
- conn.close()
+ SQL_CLOSE_CONN(conn)
#######################################################################
# replay_nist
diff --git a/bin/dev_tools/lssrt.sh b/bin/dev_tools/lssrt.sh
index f9404d06..5a8259ee 100755
--- a/bin/dev_tools/lssrt.sh
+++ b/bin/dev_tools/lssrt.sh
@@ -4,7 +4,7 @@
declare -A srts
IFS=$'\n' # make newlines the only separator
-for p in $(ps -e -o pid,cmd | grep "manage.py runserver" | grep 'srt' ) ; do
+for p in $(ps -e -o pid,cmd | grep "\(manage.py runserver\|srtool_update.py\)" | grep 'srt' ) ; do
if [ "${p}" != "${p/grep/}" ] ; then
continue
fi
@@ -14,4 +14,3 @@ for p in $(ps -e -o pid,cmd | grep "manage.py runserver" | grep 'srt' ) ; do
pid=${p%% *} # strip python path
echo "[$pid]($d)"
done
-
diff --git a/bin/dev_tools/master_app.sh b/bin/dev_tools/master_app.sh
index bf11d7f1..6abbdfd4 100755
--- a/bin/dev_tools/master_app.sh
+++ b/bin/dev_tools/master_app.sh
@@ -138,14 +138,19 @@ for p in $(find bin -name srtool_env.sh -exec grep -l "SRT_MAIN_APP" {} \;) ; do
else
echo "DISABLE_MASTER:$p"
mv -f $ds_dir/datasource.json $ds_dir/datasource.json_sample 2> /dev/null
- # Remove old app's datasources
- prev_app=$(basename $ds_dir)
- bin/common/srtool_utils.py --remove-app-sources $prev_app
+ # Remove old app's datasources, if database
+ if [ -f "bin/common/srt_schema.py" ] ; then
+ prev_app=$(basename $ds_dir)
+ bin/common/srtool_utils.py --remove-app-sources $prev_app
+ fi
fi
done
if [ "yp" = "$master_app" ] ; then
echo "SET_MASTER:./bin/$master_app"
else
- echo bin/common/srtool_utils.py --remove-app-sources yp
+ # Remove YP's datasources, if database
+ if [ -f "bin/common/srt_schema.py" ] ; then
+ echo bin/common/srtool_utils.py --remove-app-sources yp
+ fi
fi
diff --git a/bin/dev_tools/migrate.sh b/bin/dev_tools/migrate.sh
new file mode 100755
index 00000000..49e923c0
--- /dev/null
+++ b/bin/dev_tools/migrate.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# Update the model migration file(s)
+# Run generally, or select a specific model.py
+./bin/srt manage makemigrations $1
diff --git a/bin/dev_tools/nohup_start.sh b/bin/dev_tools/nohup_start.sh
new file mode 100755
index 00000000..71a77232
--- /dev/null
+++ b/bin/dev_tools/nohup_start.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# SRTool helper script to start the instance with nohup
+
+if [ -z "$SRT_PORT" ] ; then
+ SRT_PORT=9000
+fi
+
+# Accept parameters (like 'noautoupdate')
+mv -f nohup.out nohup_prev.out
+nohup ./bin/srt start webport=0.0.0.0:$SRT_PORT $*
+cat nohup.out
diff --git a/bin/dev_tools/prepare_environment.sh b/bin/dev_tools/prepare_environment.sh
new file mode 100755
index 00000000..5536c3e1
--- /dev/null
+++ b/bin/dev_tools/prepare_environment.sh
@@ -0,0 +1,64 @@
+#!/bin/echo ERROR: This script needs to be sourced. Please run as .
+
+#
+# Prepare virtual environment for SRTool
+#
+# $ . prepare_environment.sh
+#
+
+# Prepare local venv for Python
+echo "* Python venv ..."
+if [ ! -d ".venv" ] ; then
+ # sudo apt install python3.10-venv
+ python3 -m venv .venv
+fi
+
+# Start Venv
+source .venv/bin/activate
+
+# Source the standard SRT environment settings
+echo "* srt_env.sh ..."
+if [ ! -f "./srt_env.sh" ] ; then
+ cp bin/dev_tools/* .
+fi
+
+# Set the fundamental environment settings
+# Specify environment (even if cwd is in the "lib" directory due to Apache)
+echo "* SRTool basic environment ..."
+export SRT_BASE_DIR="$(pwd)"
+export SRT_BASE_DIR="$(echo $SRT_BASE_DIR | sed -e 's|lib/||')"
+export SRT_MODE="Studio_Prodution"
+# Python 3 support
+if [ -z "$TZ" ] ; then
+ export TZ="America/Los_Angeles"
+fi
+
+# HTTPS support (Apache)
+#export SRT_CSRF_TRUSTED_ORIGINS="https://hostname.company.com"
+
+# Quick development log
+export SRTDBG_LOG="$SRT_BASE_DIR/srt_dbg.log"
+
+# Override the standard settings
+echo "* SRTool custom development environment ..."
+export SRT_PORT="9000"
+export SRT_SKIP_AUTOUPDATE=0
+export SRTDBG_MINIMAL_DB=0
+export SRTDBG_SKIP_DEFECT_IMPORT=1
+export SRTDBG_SKIP_CVE_IMPORT=0
+export SRTDBG_SKIP_CPE_IMPORT=1
+
+# Email credentials
+export SRT_EMAIL_SMTP="smtp.org.com"
+export SRT_EMAIL_PASSWD="temp_password"
+export SRT_EMAIL_USER="temp_user"
+export SRT_EMAIL_FROM="temp_user@org.com"
+
+# Defect (e.g. Jira) credentials
+export SRT_DEFECT_PASSWD="temp_password"
+export SRT_DEFECT_USER="temp_user"
+
+echo "* SRTool Python requirements ..."
+pip3 install -r bin/srtool-requirements.txt
+
+echo "* SRTool running on port $SRT_PORT"
diff --git a/bin/dev_tools/quick_find.sh b/bin/dev_tools/quick_find.sh
index cf200106..c900f2ec 100755
--- a/bin/dev_tools/quick_find.sh
+++ b/bin/dev_tools/quick_find.sh
@@ -1,7 +1,24 @@
#!/bin/bash
+#
+# Helper script to quickly find strings in the source
-# SRTool helper script to quickly find strings in the source
+show="-l"
+if [ "show" = "$1" ] ; then
+ show=""
+ shift
+fi
-find bin -exec grep -l "$1" {} \; 2> /dev/null
-find lib -exec grep -l "$1" {} \; 2> /dev/null
+dir=''
+if [ -d "$1" ] ; then
+ dir="$1"
+ shift
+fi
+if [ -z "$dir" ] ; then
+ echo "find bin -exec grep $show \"$1\" {} \; 2> /dev/null | grep -v __pycache__"
+ find bin -exec grep $show "$1" {} \; 2> /dev/null | grep -v __pycache__
+ find lib -exec grep $show "$1" {} \; 2> /dev/null | grep -v __pycache__
+else
+ echo "find $dir -exec grep $show \"$1\" {} \; 2> /dev/null| grep -v __pycache__"
+ find $dir -exec grep $show "$1" {} \; 2> /dev/null| grep -v __pycache__
+fi
diff --git a/bin/dev_tools/restart.sh b/bin/dev_tools/restart.sh
index f8a00a30..fc7cdbac 100755
--- a/bin/dev_tools/restart.sh
+++ b/bin/dev_tools/restart.sh
@@ -1,3 +1,3 @@
-./stop.sh
-./start.sh
+./stop.sh $*
+./start.sh $*
diff --git a/bin/dev_tools/srt_env.sh b/bin/dev_tools/srt_env.sh
index f36a2329..07bfdf6e 100755
--- a/bin/dev_tools/srt_env.sh
+++ b/bin/dev_tools/srt_env.sh
@@ -7,50 +7,55 @@ if [ "$0" = "$BASH_SOURCE" ]; then
fi
mode="$1"
-echo "mode=|$mode|"
-if [ "debug" != "$mode" ] ; then
- # Standard Environment
- echo "=== SRTool STANDARD MODE ==="
- export SRT_PORT=9000
- # Quick development log
- export SRTDBG_LOG=`pwd`/srt_dbg.log
- # Development/debugging flags
- export SRTDBG_MINIMAL_DB=0
- export SRTDBG_SKIP_DEFECT_IMPORT=0
- export SRTDBG_SKIP_CVE_IMPORT=0
- export SRTDBG_SKIP_CPE_IMPORT=0
- export SRT_SKIP_AUTOUPDATE=0
- # Email credentials
- export SRT_EMAIL_SMTP=smtp.org.com
- export SRT_EMAIL_PASSWD=temp_password
- export SRT_EMAIL_USER=temp_user
- export SRT_EMAIL_FROM=temp_user@org.com
- # Defect (e.g. Jira) credentials
- export SRT_DEFECT_PASSWD=temp_password
- export SRT_DEFECT_USER=temp_user
-else
- # Minimal Development Environment
- # Standard Environment
- echo "=== SRTool DEBUG MODE ==="
+# Standard Base Environment
+if [ -z "$mode" ] ; then
+ msg="=== SRTool STANDARD MODE ==="
+fi
+export SRT_PORT=9000
+export SRT_MODE=""
+# Quick development log
+export SRTDBG_LOG=`pwd`/srt_dbg.log
+# Development/debugging flags
+export SRTDBG_MINIMAL_DB=0
+export SRTDBG_SKIP_DEFECT_IMPORT=0
+export SRTDBG_SKIP_CVE_IMPORT=0
+export SRTDBG_SKIP_CPE_IMPORT=0
+export SRT_SKIP_AUTOUPDATE=0
+# Email credentials
+export SRT_EMAIL_SMTP=smtp.org.com
+export SRT_EMAIL_PASSWD=temp_password
+export SRT_EMAIL_USER=temp_user
+export SRT_EMAIL_FROM=temp_user@org.com
+# Defect (e.g. Jira) credentials
+export SRT_DEFECT_PASSWD=temp_password
+export SRT_DEFECT_USER=temp_user
+
+if [ "secure" == "$mode" ] ; then
+ msg="=== SRTool SECURE MODE ==="
+ # Lock out 'other' permissions
+ umask 007
+ chmod -R o-rwx .
+ export SRT_MODE="SECURE"
+elif [ "devel" == "$mode" ] ; then
+ msg="=== SRTool DEVELOPMENT MODE ==="
+ # Alternate port from main
+ export SRT_PORT=9020
+ export SRT_MODE="DEVEL"
+ # Disable defect system queries (except on demand)
+ export SRTDBG_SKIP_DEFECT_IMPORT=1
+elif [ "debug" == "$mode" ] ; then
+ msg="=== SRTool DEBUG MODE ==="
+ # Minimal debug bootstrap environment
+ # with development and debugging flags
export SRT_PORT=9990
- # Quick development log
- export SRTDBG_LOG=`pwd`/srt_dbg.log
- # Development/debugging flags
+ export SRT_MODE="DEBUG"
export SRTDBG_MINIMAL_DB=1
export SRTDBG_SKIP_DEFECT_IMPORT=1
export SRTDBG_SKIP_CVE_IMPORT=0
export SRTDBG_SKIP_CPE_IMPORT=0
export SRT_SKIP_AUTOUPDATE=1
- # Email credentials
- export SRT_EMAIL_SMTP=smtp.org.com
- export SRT_EMAIL_PASSWD=temp_password
- export SRT_EMAIL_USER=temp_user
- export SRT_EMAIL_FROM=temp_user@org.com
- # Defect (e.g. Jira) credentials
- export SRT_DEFECT_PASSWD=temp_password
- export SRT_DEFECT_USER=temp_user
fi
+echo $msg
echo "SRT_PORT=$SRT_PORT ; Change it with: export SRT_PORT=9123"
-
diff --git a/bin/dev_tools/start.sh b/bin/dev_tools/start.sh
index f73dc7bc..f7997ea7 100755
--- a/bin/dev_tools/start.sh
+++ b/bin/dev_tools/start.sh
@@ -7,5 +7,7 @@ if [ -z "$SRT_PORT" ] ; then
fi
# Accept parameters (like 'noautoupdate')
-./bin/srt start webport=0.0.0.0:$SRT_PORT $1
+./bin/srt start webport=0.0.0.0:$SRT_PORT $*
+# Show external access link
+echo "External access: $(hostname -i):$SRT_PORT"
diff --git a/bin/dev_tools/stop.sh b/bin/dev_tools/stop.sh
index 37722fed..cf5a62c3 100755
--- a/bin/dev_tools/stop.sh
+++ b/bin/dev_tools/stop.sh
@@ -2,5 +2,4 @@
# SRTool helper script to stop the instance
-./bin/srt stop
-
+./bin/srt stop $*
diff --git a/bin/dev_tools/tail.sh b/bin/dev_tools/tail.sh
index 4c44cd47..8b18325a 100755
--- a/bin/dev_tools/tail.sh
+++ b/bin/dev_tools/tail.sh
@@ -2,6 +2,12 @@
# SRTool helper script to quickly dump the log files
+tag=0
+if [ "tag" == "$1" ] ; then
+ tag=1
+ shift
+fi
+
CONTEXT=$1
if [ -n "$CONTEXT" ] ; then
CONTEXT="-n $CONTEXT"
@@ -13,6 +19,12 @@ if [ -z "$SRTDBG_LOG" ] ; then
SRTDBG_LOG=/tmp/srt_dbg.log
fi
+if [ 1 -eq $tag ] ; then
+ echo "===TAG `date` TAG===" >> srt_web.log
+ echo "===TAG `date` TAG===" >> $SRTDBG_ERR_LOG
+ echo "===TAG `date` TAG===" >> $SRTDBG_LOG
+fi
+
echo "--- srt_web.log --------------------"
tail srt_web.log $CONTEXT
echo "--- $SRTDBG_ERR_LOG --------------------"
diff --git a/bin/dev_tools/update_status.sh b/bin/dev_tools/update_status.sh
index 243626a4..5cca0769 100755
--- a/bin/dev_tools/update_status.sh
+++ b/bin/dev_tools/update_status.sh
@@ -27,9 +27,14 @@ if [ -z "$updater" ] ; then
exit 1
else
echo "UPDATER:$updater"
+ echo ""
fi
-# Test if their is an open update in progress
+# Display status log
+tail -n 20 "update_logs/update_status.log"
+echo ""
+
+# Test if there is an open update in progress
cat .srtupdate.task
is_start=`grep "^Update" .srtupdate.task | grep -v "<cron_start>"`
is_stop=`grep "^Done" .srtupdate.task`
diff --git a/bin/mitre/datasource_2010.json b/bin/mitre/datasource_2010.json
index 547de7a8..89d82041 100755
--- a/bin/mitre/datasource_2010.json
+++ b/bin/mitre/datasource_2010.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2010",
"cve_filter" : "CVE-2010",
- "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
- "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2010' --file=data/allitems-cvrf-year-2010.xml --url-file=allitems-cvrf-year-2010.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2010.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2011.json b/bin/mitre/datasource_2011.json
index 2138154a..14a41e0b 100755
--- a/bin/mitre/datasource_2011.json
+++ b/bin/mitre/datasource_2011.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2011",
"cve_filter" : "CVE-2011",
- "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
- "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2011' --file=data/allitems-cvrf-year-2011.xml --url-file=allitems-cvrf-year-2011.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2011.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2012.json b/bin/mitre/datasource_2012.json
index 49f32562..de42723f 100755
--- a/bin/mitre/datasource_2012.json
+++ b/bin/mitre/datasource_2012.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2012",
"cve_filter" : "CVE-2012",
- "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
- "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2012' --file=data/allitems-cvrf-year-2012.xml --url-file=allitems-cvrf-year-2012.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2012.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2013.json b/bin/mitre/datasource_2013.json
index d18fe739..1995fa6d 100755
--- a/bin/mitre/datasource_2013.json
+++ b/bin/mitre/datasource_2013.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2013",
"cve_filter" : "CVE-2013",
- "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
- "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2013' --file=data/allitems-cvrf-year-2013.xml --url-file=allitems-cvrf-year-2013.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2013.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2014.json b/bin/mitre/datasource_2014.json
index fc469f99..d2cba168 100755
--- a/bin/mitre/datasource_2014.json
+++ b/bin/mitre/datasource_2014.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2014",
"cve_filter" : "CVE-2014",
- "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
- "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --download-only --source='Mitre 2014' --file=data/allitems-cvrf-year-2014.xml --url-file=allitems-cvrf-year-2014.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2014.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2015.json b/bin/mitre/datasource_2015.json
index e91f7bd0..5e5a24c6 100755
--- a/bin/mitre/datasource_2015.json
+++ b/bin/mitre/datasource_2015.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2015' --file=data/allitems-cvrf-year-2015.xml --url-file=allitems-cvrf-year-2015.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2015.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2016.json b/bin/mitre/datasource_2016.json
index 5fba94b6..c2bc1906 100755
--- a/bin/mitre/datasource_2016.json
+++ b/bin/mitre/datasource_2016.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2016' --file=data/allitems-cvrf-year-2016.xml --url-file=allitems-cvrf-year-2016.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2016.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2017.json b/bin/mitre/datasource_2017.json
index 9047fd5e..f3cfdf54 100755
--- a/bin/mitre/datasource_2017.json
+++ b/bin/mitre/datasource_2017.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2017' --file=data/allitems-cvrf-year-2017.xml --url-file=allitems-cvrf-year-2017.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2017.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2018.json b/bin/mitre/datasource_2018.json
index 567c46bd..d8b28c0a 100755
--- a/bin/mitre/datasource_2018.json
+++ b/bin/mitre/datasource_2018.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2018' --file=data/allitems-cvrf-year-2018.xml --url-file=allitems-cvrf-year-2018.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2018.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2019.json b/bin/mitre/datasource_2019.json
index f106f88f..e07cf377 100755
--- a/bin/mitre/datasource_2019.json
+++ b/bin/mitre/datasource_2019.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2019' --file=data/allitems-cvrf-year-2019.xml --url-file=allitems-cvrf-year-2019.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2019.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2020.json b/bin/mitre/datasource_2020.json
index b66d43a1..f26f3b2f 100755
--- a/bin/mitre/datasource_2020.json
+++ b/bin/mitre/datasource_2020.json
@@ -7,8 +7,9 @@
"name" : "MITRE",
"description" : "MITRE 2020",
"cve_filter" : "CVE-2020",
- "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml",
- "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2020' --file=data/allitems-cvrf-year-2020.xml --url-file=allitems-cvrf-year-2020.xml --progress",
"lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2020.xml %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/mitre/datasource_2021.json b/bin/mitre/datasource_2021.json
new file mode 100755
index 00000000..72379b01
--- /dev/null
+++ b/bin/mitre/datasource_2021.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2021",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2021",
+ "cve_filter" : "CVE-2021",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2021' --file=data/allitems-cvrf-year-2021.xml --url-file=allitems-cvrf-year-2021.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2021' --file=data/allitems-cvrf-year-2021.xml --url-file=allitems-cvrf-year-2021.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2021.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2022.json b/bin/mitre/datasource_2022.json
new file mode 100755
index 00000000..608acecc
--- /dev/null
+++ b/bin/mitre/datasource_2022.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2022",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2022",
+ "cve_filter" : "CVE-2022",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2022' --file=data/allitems-cvrf-year-2022.xml --url-file=allitems-cvrf-year-2022.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2022' --file=data/allitems-cvrf-year-2022.xml --url-file=allitems-cvrf-year-2022.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2022.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/datasource_2023.json b/bin/mitre/datasource_2023.json
new file mode 100755
index 00000000..ca3cdaba
--- /dev/null
+++ b/bin/mitre/datasource_2023.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0020-mitre-2023",
+ "data" : "cve",
+ "source" : "mitre",
+ "name" : "MITRE",
+ "description" : "MITRE 2023",
+ "cve_filter" : "CVE-2023",
+ "attributes" : "DISABLE ",
+ "init" : "bin/mitre/srtool_mitre.py --initialize --source='Mitre 2023' --file=data/allitems-cvrf-year-2023.xml --url-file=allitems-cvrf-year-2023.xml --progress",
+ "update" : "bin/mitre/srtool_mitre.py --update --source='Mitre 2023' --file=data/allitems-cvrf-year-2023.xml --url-file=allitems-cvrf-year-2023.xml --progress",
+ "lookup" : "bin/mitre/srtool_mitre.py --file=data/allitems-cvrf-year-2023.xml %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/mitre/srtool_mitre.py b/bin/mitre/srtool_mitre.py
index 75789b7a..cdf6ff6e 100755
--- a/bin/mitre/srtool_mitre.py
+++ b/bin/mitre/srtool_mitre.py
@@ -31,8 +31,7 @@ import sys
import xml.etree.ElementTree as ET
import argparse
import shutil
-import sqlite3
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, date
import pytz
from urllib.request import urlopen
@@ -41,10 +40,13 @@ from urllib.request import urlopen
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
# Setup:
srtDbName = 'srt.sqlite'
srtErrorLog = 'srt_errors.txt'
+COMMIT_DELAY = 64
mitre_cvrf_url = 'https://cve.mitre.org/data/downloads'
mitre_cvrf_xml = 'data/allitems-cvrf-year-2018.xml'
@@ -97,10 +99,10 @@ def get_cve_default_status(is_init,publishedDate,description):
if None == init_new_date:
# Precalculate and cache the relative 'new' date for efficiency
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
- CVE_INIT_NEW_DELTA = cur.execute(sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ CVE_INIT_NEW_DELTA = SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
if CVE_INIT_NEW_DELTA is None:
cve_init_new_delta = 30
else:
@@ -166,6 +168,11 @@ def fetch_cve(cve_name,cvrf_xml_file):
datasource_xml = os.path.join(srtool_basepath,cvrf_xml_file)
cache_file = os.path.join(srtool_basepath,mitre_cache_dir,"%s.txt" % cve_name)
+ # Insure that the original data file exists
+ if not os.path.isfile(datasource_xml):
+ print("description=There is no loaded Mitre data.")
+ return
+
# Insure the cache dir exists
cache_dir = os.path.join(srtool_basepath,mitre_cache_dir)
if not os.path.isdir(cache_dir):
@@ -261,34 +268,52 @@ def append_cve_database(is_init,file_xml):
tree = ET.parse(file_xml)
root = tree.getroot()
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
cur_write = conn.cursor()
cur_ds = conn.cursor()
datasource_id = 0
- srtool_today = datetime.today()
+ srtool_today = date.today()
+ version_date = ''
+
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+ progress_set_max(len(root))
i = 0
for child in root:
i += 1
+
+ # Extract document date record
+ # <DocumentTracking>
+ # <Version>2020.01.14.22</Version>
+ if 'DocumentTracking' in child.tag:
+ for child_d in child:
+ if 'Version' in child_d.tag:
+ version_date = datetime.strptime(child_d.text, '%Y.%m.%d.%H')
+
+ # Find the Vulnerability records
if not 'Vulnerability' in child.tag:
continue
+
summary = _extract_text(child)
cve_name = summary['CVE']
+ progress_show(cve_name,force_newline=True)
# Progress indicator support
- if 0 == i % 10:
- print('%04d: %20s \r' % (i,cve_name), end='')
- if (0 == i % 200):
- conn.commit()
+ if 0 == (i % COMMIT_DELAY):
+ SQL_COMMIT(conn)
print('')
+ sys.stdout.flush()
if cmd_count and (i > cmd_count):
break
# Find the datasource matching these CVE prefixes
if 0 == datasource_id:
sql = "SELECT * FROM orm_datasource WHERE data = ? AND source = ?"
- cur_ds.execute(sql, ('cve','mitre',))
+ SQL_EXECUTE(cur_ds, sql, ('cve','mitre',))
for ds in cur_ds:
if ds[ORM.DATASOURCE_CVE_FILTER] and cve_name.startswith(ds[ORM.DATASOURCE_CVE_FILTER]):
datasource_id = ds[ORM.DATASOURCE_ID]
@@ -300,35 +325,45 @@ def append_cve_database(is_init,file_xml):
# Define the CVE (if not already there - e.g. not defined by NIST)
sql = ''' SELECT * FROM orm_cve WHERE name = ?'''
- cve = cur_write.execute(sql, (cve_name,)).fetchone()
+ cve = SQL_EXECUTE(cur_write, sql, (cve_name,)).fetchone()
if cve:
cve_id = cve[ORM.CVE_ID]
- print("MITRE:FOUND %20s\r" % cve_name, end='')
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print("MITRE:FOUND %20s" % cve_name, end='\r')
else:
# Get the default CVE status
status = get_cve_default_status(is_init,summary['Published'],summary['Description'])
- # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
- sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, srt_created, packages)
+ # 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ sql = ''' INSERT INTO orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, "publishedDate", "lastModifiedDate", recommend, recommend_list, "cvssV3_baseScore", "cvssV3_baseSeverity", "cvssV2_baseScore", "cvssV2_severity", srt_updated, srt_created, packages)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
- cur.execute(sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', 'CVE', 'MITRE', '', 1, ORM.PUBLISH_UNPUBLISHED, '', '', summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(), datetime.now(),''))
+ SQL_EXECUTE(cur, sql, (cve_name, get_name_sort(cve_name), ORM.PRIORITY_UNDEFINED, status, '', '', '', 'CVE', 'MITRE', '', True, ORM.PUBLISH_UNPUBLISHED, '', None, summary['Description'], summary['Published'], summary['Modified'],0, '', '', '', '', '', datetime.now(), datetime.now(),''))
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
- cve_id = cur.lastrowid
- print("MITRE:ADDED %20s\r" % cve_name)
+ cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print("MITRE:ADDED %20s" % cve_name, end='\r')
# Also create CVE history entry
update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from MITRE')
sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (cve_id,update_comment,srtool_today,ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today,ORM.USER_SRTOOL_NAME,) )
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- if not cur_ds.execute(sql, (cve_id,datasource_id)).fetchone():
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- cur_ds.execute(sql, (cve_id,datasource_id))
+ if not SQL_EXECUTE(cur_ds, sql, (cve_id,datasource_id)).fetchone():
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur_ds, sql, (cve_id,datasource_id))
- conn.commit()
+    # Update the datasource's lastModifiedDate after successfully updating it
+ if datasource_id:
+ print("\nVersion Date=%s" % str(version_date))
+ sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % datasource_id
+ SQL_EXECUTE(cur, sql, (str(version_date),))
+
+ SQL_COMMIT(conn)
print("\nTotal = %5d\n" % i)
+ # End progress
+ progress_done('Done')
#################################
# test dump
@@ -389,8 +424,9 @@ def dump(file_xml):
print("OTHER TOP TAG=%s" % child.tag)
i += 1
- if (0 == (i % 20)):
- print("%5d\r" % i,end = '')
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ if (0 == (i % 20)):
+ print("%5d" % i,end = '\r')
print("\nTotal = %5d\n" % i)
#################################
@@ -413,16 +449,21 @@ def main(argv):
parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
parser.add_argument('--dump', '-D', action='store_const', const='dump', dest='command', help='test dump data')
parser.add_argument('--dump2', '-2', action='store_const', const='dump2', dest='command', help='test dump data')
args = parser.parse_args()
+ # fetch any environment overrides
+ set_override('SRTDBG_MINIMAL_DB')
+
if args.is_verbose:
verbose = True
if None != args.skip:
@@ -431,6 +472,9 @@ def main(argv):
cmd_count = int(args.count)
elif get_override('SRTDBG_MINIMAL_DB'):
cmd_count = 20
+ if args.debug_sql:
+ SQL_DEBUG(True,'MTR')
+ progress_set_on(args.do_progress)
if 'dump' == args.command:
dump(mitre_cvrf_xml)
@@ -449,8 +493,6 @@ def main(argv):
fetch_cve(args.cve_detail,args.cve_file)
return
- # fetch any environment overrides
- set_override('SRTDBG_MINIMAL_DB')
# Required parameters to continue
if not args.source:
@@ -472,6 +514,10 @@ def main(argv):
else:
print("Command not found")
+ # Dump the SQL transaction data
+ if args.debug_sql:
+ SQL_DUMP()
+
if __name__ == '__main__':
srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
main(sys.argv[1:])
diff --git a/bin/nist/datasource.json b/bin/nist/datasource.json
index 28633b61..8bc33f8c 100644
--- a/bin/nist/datasource.json
+++ b/bin/nist/datasource.json
@@ -23,11 +23,11 @@
"attributes" : "PREVIEW-SOURCE",
"cve_filter" : "",
"init" : "",
- "update" : "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=data/nvdcve-1.1-modified.json --url-file=nvdcve-1.1-modified.json.gz --url-meta=nvdcve-1.1-modified.meta",
+ "update" : "bin/nist/srtool_nist.py --update_nist_incremental --source='NIST Modified Data' --file=data/nvdcve-1.1-modified.json --url-file=nvdcve-1.1-modified.json.gz --url-meta=nvdcve-1.1-modified.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-modified.json %command%",
"update_frequency" : "2",
- "_comment_" : "Update at 7:00 am",
- "update_time" : "{\"hour\":\"7\"}"
+ "_comment_" : "Update at 1:00 pm",
+ "update_time" : "{\"hour\":\"13\"}"
}
]
}
diff --git a/bin/nist/datasource_2002.json b/bin/nist/datasource_2002.json
index f4e62d34..6b29436a 100755
--- a/bin/nist/datasource_2002.json
+++ b/bin/nist/datasource_2002.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2002",
"cve_filter" : "CVE-2002",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2002' --file=data/nvdcve-1.1-2002.json --url-file=nvdcve-1.1-2002.json.gz --url-meta=nvdcve-1.1-2002.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2002.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2003.json b/bin/nist/datasource_2003.json
index 8bcf620a..ad301b57 100755
--- a/bin/nist/datasource_2003.json
+++ b/bin/nist/datasource_2003.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2003",
"cve_filter" : "CVE-2003",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2003' --file=data/nvdcve-1.1-2003.json --url-file=nvdcve-1.1-2003.json.gz --url-meta=nvdcve-1.1-2003.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2003.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2004.json b/bin/nist/datasource_2004.json
index 3839e125..c4e4f838 100755
--- a/bin/nist/datasource_2004.json
+++ b/bin/nist/datasource_2004.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2004",
"cve_filter" : "CVE-2004",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2004' --file=data/nvdcve-1.1-2004.json --url-file=nvdcve-1.1-2004.json.gz --url-meta=nvdcve-1.1-2004.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2004.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2005.json b/bin/nist/datasource_2005.json
index a3acfaa7..fad1bbac 100755
--- a/bin/nist/datasource_2005.json
+++ b/bin/nist/datasource_2005.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2005",
"cve_filter" : "CVE-2005",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2005' --file=data/nvdcve-1.1-2005.json --url-file=nvdcve-1.1-2005.json.gz --url-meta=nvdcve-1.1-2005.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2005.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2006.json b/bin/nist/datasource_2006.json
index 6f3c508e..af8fbc72 100755
--- a/bin/nist/datasource_2006.json
+++ b/bin/nist/datasource_2006.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2006",
"cve_filter" : "CVE-2006",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2006' --file=data/nvdcve-1.1-2006.json --url-file=nvdcve-1.1-2006.json.gz --url-meta=nvdcve-1.1-2006.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2006.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2007.json b/bin/nist/datasource_2007.json
index 5ea00944..5f46571d 100755
--- a/bin/nist/datasource_2007.json
+++ b/bin/nist/datasource_2007.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2007",
"cve_filter" : "CVE-2007",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2007' --file=data/nvdcve-1.1-2007.json --url-file=nvdcve-1.1-2007.json.gz --url-meta=nvdcve-1.1-2007.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2007.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2008.json b/bin/nist/datasource_2008.json
index 891d3046..8923a4b6 100755
--- a/bin/nist/datasource_2008.json
+++ b/bin/nist/datasource_2008.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2008",
"cve_filter" : "CVE-2008",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2008' --file=data/nvdcve-1.1-2008.json --url-file=nvdcve-1.1-2008.json.gz --url-meta=nvdcve-1.1-2008.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2008.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2009.json b/bin/nist/datasource_2009.json
index 2bebc343..edca168b 100755
--- a/bin/nist/datasource_2009.json
+++ b/bin/nist/datasource_2009.json
@@ -8,8 +8,8 @@
"name" : "NIST",
"description" : "NIST 2009",
"cve_filter" : "CVE-2009",
- "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
- "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta",
+ "init" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --download-only --source='NIST 2009' --file=data/nvdcve-1.1-2009.json --url-file=nvdcve-1.1-2009.json.gz --url-meta=nvdcve-1.1-2009.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2009.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2010.json b/bin/nist/datasource_2010.json
index 21030e45..562fd8c5 100755
--- a/bin/nist/datasource_2010.json
+++ b/bin/nist/datasource_2010.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2010",
"cve_filter" : "CVE-2010",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2010' --file=data/nvdcve-1.1-2010.json --url-file=nvdcve-1.1-2010.json.gz --url-meta=nvdcve-1.1-2010.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2010.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2011.json b/bin/nist/datasource_2011.json
index 5b0bb052..7f50b3e0 100755
--- a/bin/nist/datasource_2011.json
+++ b/bin/nist/datasource_2011.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2011",
"cve_filter" : "CVE-2011",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2011' --file=data/nvdcve-1.1-2011.json --url-file=nvdcve-1.1-2011.json.gz --url-meta=nvdcve-1.1-2011.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2011.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2012.json b/bin/nist/datasource_2012.json
index 69d40bad..6505a244 100755
--- a/bin/nist/datasource_2012.json
+++ b/bin/nist/datasource_2012.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2012",
"cve_filter" : "CVE-2012",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2012' --file=data/nvdcve-1.1-2012.json --url-file=nvdcve-1.1-2012.json.gz --url-meta=nvdcve-1.1-2012.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2012.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2013.json b/bin/nist/datasource_2013.json
index 2f2d313a..b7768906 100755
--- a/bin/nist/datasource_2013.json
+++ b/bin/nist/datasource_2013.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2013",
"cve_filter" : "CVE-2013",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2013' --file=data/nvdcve-1.1-2013.json --url-file=nvdcve-1.1-2013.json.gz --url-meta=nvdcve-1.1-2013.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2013.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2014.json b/bin/nist/datasource_2014.json
index 619197c1..59cd83f7 100755
--- a/bin/nist/datasource_2014.json
+++ b/bin/nist/datasource_2014.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2014",
"cve_filter" : "CVE-2014",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2014' --file=data/nvdcve-1.1-2014.json --url-file=nvdcve-1.1-2014.json.gz --url-meta=nvdcve-1.1-2014.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2014.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2015.json b/bin/nist/datasource_2015.json
index 7600aac1..49a942b2 100755
--- a/bin/nist/datasource_2015.json
+++ b/bin/nist/datasource_2015.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2015",
"cve_filter" : "CVE-2015",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2015' --file=data/nvdcve-1.1-2015.json --url-file=nvdcve-1.1-2015.json.gz --url-meta=nvdcve-1.1-2015.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2015.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2016.json b/bin/nist/datasource_2016.json
index 55244a2b..c2ce8401 100755
--- a/bin/nist/datasource_2016.json
+++ b/bin/nist/datasource_2016.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2016",
"cve_filter" : "CVE-2016",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2016' --file=data/nvdcve-1.1-2016.json --url-file=nvdcve-1.1-2016.json.gz --url-meta=nvdcve-1.1-2016.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2016.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2017.json b/bin/nist/datasource_2017.json
index 2c68ed89..38703954 100755
--- a/bin/nist/datasource_2017.json
+++ b/bin/nist/datasource_2017.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2017",
"cve_filter" : "CVE-2017",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2017' --file=data/nvdcve-1.1-2017.json --url-file=nvdcve-1.1-2017.json.gz --url-meta=nvdcve-1.1-2017.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2017.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2018.json b/bin/nist/datasource_2018.json
index 03d09612..799c9b4a 100755
--- a/bin/nist/datasource_2018.json
+++ b/bin/nist/datasource_2018.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2018",
"cve_filter" : "CVE-2018",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2018' --file=data/nvdcve-1.1-2018.json --url-file=nvdcve-1.1-2018.json.gz --url-meta=nvdcve-1.1-2018.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2018.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2019.json b/bin/nist/datasource_2019.json
index 269f77bc..9e5ba11d 100755
--- a/bin/nist/datasource_2019.json
+++ b/bin/nist/datasource_2019.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2019",
"cve_filter" : "CVE-2019",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2019' --file=data/nvdcve-1.1-2019.json --url-file=nvdcve-1.1-2019.json.gz --url-meta=nvdcve-1.1-2019.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2019.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2020.json b/bin/nist/datasource_2020.json
index e4bb63dc..3f88e2bf 100755
--- a/bin/nist/datasource_2020.json
+++ b/bin/nist/datasource_2020.json
@@ -7,8 +7,8 @@
"name" : "NIST",
"description" : "NIST 2020",
"cve_filter" : "CVE-2020",
- "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
- "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2020' --file=data/nvdcve-1.1-2020.json --url-file=nvdcve-1.1-2020.json.gz --url-meta=nvdcve-1.1-2020.meta --progress",
"lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2020.json %command%",
"update_frequency" : "3",
"_comment_" : "Update on Saturdays at 2:00 am",
diff --git a/bin/nist/datasource_2021.json b/bin/nist/datasource_2021.json
new file mode 100755
index 00000000..1fea6d0f
--- /dev/null
+++ b/bin/nist/datasource_2021.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2021",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2021",
+ "cve_filter" : "CVE-2021",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2021' --file=data/nvdcve-1.1-2021.json --url-file=nvdcve-1.1-2021.json.gz --url-meta=nvdcve-1.1-2021.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2021' --file=data/nvdcve-1.1-2021.json --url-file=nvdcve-1.1-2021.json.gz --url-meta=nvdcve-1.1-2021.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2021.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2022.json b/bin/nist/datasource_2022.json
new file mode 100755
index 00000000..6aae8e44
--- /dev/null
+++ b/bin/nist/datasource_2022.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2022",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2022",
+ "cve_filter" : "CVE-2022",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2022' --file=data/nvdcve-1.1-2022.json --url-file=nvdcve-1.1-2022.json.gz --url-meta=nvdcve-1.1-2022.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2022' --file=data/nvdcve-1.1-2022.json --url-file=nvdcve-1.1-2022.json.gz --url-meta=nvdcve-1.1-2022.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2022.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/datasource_2023.json b/bin/nist/datasource_2023.json
new file mode 100755
index 00000000..85b52c0c
--- /dev/null
+++ b/bin/nist/datasource_2023.json
@@ -0,0 +1,18 @@
+{
+ "datasource" : [
+ {
+ "key" : "0010-nist-2023",
+ "data" : "cve",
+ "source" : "nist",
+ "name" : "NIST",
+ "description" : "NIST 2023",
+ "cve_filter" : "CVE-2023",
+ "init" : "bin/nist/srtool_nist.py --init_nist --source='NIST 2023' --file=data/nvdcve-1.1-2023.json --url-file=nvdcve-1.1-2023.json.gz --url-meta=nvdcve-1.1-2023.meta --progress",
+ "update" : "bin/nist/srtool_nist.py --update_nist --source='NIST 2023' --file=data/nvdcve-1.1-2023.json --url-file=nvdcve-1.1-2023.json.gz --url-meta=nvdcve-1.1-2023.meta --progress",
+ "lookup" : "bin/nist/srtool_nist.py --file=data/nvdcve-1.1-2023.json %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 2:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/nist/srtool_nist.py b/bin/nist/srtool_nist.py
index 9efd3d19..44de0074 100755
--- a/bin/nist/srtool_nist.py
+++ b/bin/nist/srtool_nist.py
@@ -5,7 +5,7 @@
#
# Security Response Tool Commandline Tool
#
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -28,7 +28,6 @@ import os
import sys
import re
import argparse
-import sqlite3
import json
from datetime import datetime, date, timedelta
import pytz
@@ -39,6 +38,9 @@ import traceback
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_progress import *
+from common.srtool_sql import *
+from common.srtool_common import log_error
# Setup:
lookupTable = []
@@ -55,7 +57,6 @@ ACTION_INCREMENT = 'Increment'
ACTION_DOWNLOAD = 'Download'
ACTION_UPDATE_CVE = 'Update_Cve'
-srtDbName = 'srt.sqlite'
srtErrorLog = 'srt_errors.txt'
verbose = False
force_update = False
@@ -63,6 +64,7 @@ force_cache = False
update_skip_history = False
cmd_skip = 0
cmd_count = 0
+COMMIT_DELAY = 64
nist_datasources = {}
@@ -227,7 +229,7 @@ def CVE_ItemToSummary(CVE_Item,header_only=False):
summary['recommend_list'] = ''
summary['publish_state'] = ORM.PUBLISH_UNPUBLISHED
summary['publish_date'] = ''
- summary['acknowledge_date'] = ''
+ summary['acknowledge_date'] = None
summary['packages'] = ''
# Fix score to sortable string value
@@ -258,16 +260,18 @@ def CVE_ItemToSummary(CVE_Item,header_only=False):
summary['cpe_list'] += nist_scan_configuration_or(cpe_or_node, summary['name'], j)
else:
print("ERROR CONFIGURE:OR_OP?:%s" % cpe_or_node['operator'])
+ log_error("ERROR: NIST (%s) CONFIGURE:OR_OP?:%s" % (summary['name'],cpe_or_node['operator']))
elif "OR" == config['operator']:
summary['cpe_list'] += nist_scan_configuration_or(config, summary['name'], 0)
else:
print("ERROR CONFIGURE:OP?:%s" % config['operator'])
+ log_error("ERROR: NIST (%s) CONFIGURE:OP?:%s" % (summary['name'],config['operator']))
summary['cpe_list'] += '[/and]|'
summary['cpe_list'] += '[/config]|'
summary['ref_list'] = ''
for i, ref in enumerate(CVE_Item['cve']['references']['reference_data']):
- summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'])
+ summary['ref_list'] += '%s%s\t%s\t%s' % ('|' if i>0 else '',ref['url'],','.join([tag for tag in ref['tags']]),ref['refsource'] if ref['refsource'] else '-')
return summary
@@ -282,15 +286,16 @@ def get_cve_default_status(action,publishedDate):
if None == init_new_date:
# Precalculate and cache the relative 'new' date for efficiency
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = '''SELECT * FROM orm_srtsetting WHERE name=?'''
- CVE_INIT_NEW_DELTA = cur.execute(sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ CVE_INIT_NEW_DELTA = SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',)).fetchone()
+ #SQL_EXECUTE(cur, sql, ('CVE_INIT_NEW_DELTA',))
+ #CVE_INIT_NEW_DELTA = SQL_FETCH_ONE(cur)
if CVE_INIT_NEW_DELTA is None:
cve_init_new_delta = 30
else:
cve_init_new_delta = int(CVE_INIT_NEW_DELTA[ORM.SRTSETTING_VALUE])
-
date_delta = timedelta(days=cve_init_new_delta)
init_new_date = datetime.now(pytz.utc) - date_delta
#print("\nPreset new data = %s" % init_new_date.strftime("%Y-%m-%d"))
@@ -319,20 +324,21 @@ def sql_cwe_query(conn, value):
CWE_VULNERABLE_COUNT = 6
cur = conn.cursor()
sql = '''SELECT * FROM orm_cwetable WHERE name=?'''
- cwe = cur.execute(sql, (value,)).fetchone()
+ cwe = SQL_EXECUTE(cur, sql, (value,)).fetchone()
if cwe is None:
+        # "1" is True for both Sqlite and Postgres
sql = '''INSERT INTO orm_cwetable (name, href, summary, description, vulnerable_count, found) VALUES (?,'','','',1,1)'''
- cur.execute(sql, (value,))
- cwe_id = cur.lastrowid
- cur.close()
+ SQL_EXECUTE(cur, sql, (value,))
+ cwe_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
+ SQL_CLOSE_CUR(cur)
return cwe_id
else:
sql = ''' UPDATE orm_cwetable
SET vulnerable_count = ?
WHERE id = ?'''
- cur.execute(sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
- conn.commit()
- cur.close()
+ SQL_EXECUTE(cur, sql, (cwe[CWE_VULNERABLE_COUNT] + 1,cwe[CWE_ID]))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
return cwe[CWE_ID]
#generates and executes appropriate SQLite query for new CVE to CWE relation
@@ -340,12 +346,12 @@ def sql_cwe_query(conn, value):
def sql_cve2cwe_query(conn, cve_id, cwe_id):
cur = conn.cursor()
sql = '''SELECT * FROM orm_cvetocwe WHERE cve_id=? AND cwe_id=?'''
- cve2cwe = cur.execute(sql, (cve_id, cwe_id)).fetchone()
+ cve2cwe = SQL_EXECUTE(cur, sql, (cve_id, cwe_id)).fetchone()
if cve2cwe is None:
sql = '''INSERT INTO orm_cvetocwe (cve_id, cwe_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id, cwe_id))
- conn.commit()
- cur.close()
+ SQL_EXECUTE(cur, sql, (cve_id, cwe_id))
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
#######################################################################
#
@@ -362,7 +368,7 @@ def sql_cve_query(action, conn, summary, log):
is_change = False
cur = conn.cursor()
sql = '''SELECT * FROM orm_cve WHERE name=?'''
- cve_current = cur.execute(sql, (summary['name'],)).fetchone()
+ cve_current = SQL_EXECUTE(cur, sql, (summary['name'],)).fetchone()
cve_id = -1
srtool_today = datetime.today()
if cve_current is None:
@@ -371,12 +377,6 @@ def sql_cve_query(action, conn, summary, log):
# Get the default CVE status
summary['status'] = get_cve_default_status(action,summary['publish_date'])
-# # Offsets... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
-# sql = ''' INSERT into orm_cve (name, name_sort, priority, status, comments, comments_private, tags, cve_data_type, cve_data_format, cve_data_version, public, publish_state, publish_date, acknowledge_date, description, publishedDate, lastModifiedDate, recommend, recommend_list, cvssV3_baseScore, cvssV3_baseSeverity, cvssV2_baseScore, cvssV2_severity, srt_updated, srt_created, packages)
-# VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'''
-# cur.execute(sql, (cve.name, get_name_sort(cve.name), cve.priority, status, cve.comments, cve.comments_private, cve.tags, cve.cve_data_type, cve.cve_data_format, cve.cve_data_version, 1, cve.publish_state, cve.publish_date, cve.acknowledge_date, cve.description, cve.publishedDate, cve.lastModifiedDate, cve.recommend, cve.recommend_list, cve.cvssV3_baseScore, cve.cvssV3_baseSeverity, cve.cvssV2_baseScore, cve.cvssV2_severity, srtool_today, srtool_today,''))
-# # Offsets... 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
-
sql_elements = [
'name',
'name_sort',
@@ -437,17 +437,18 @@ def sql_cve_query(action, conn, summary, log):
srtool_today
)
- #print('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
- cur.execute('INSERT into orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
+ #print('INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values)
+ sql, params = 'INSERT INTO orm_cve (%s) VALUES (%s)' % (','.join(sql_elements),','.join(sql_qmarks)),sql_values
+ SQL_EXECUTE(cur, sql, params)
is_change = True
- cve_id = cur.lastrowid
+ cve_id = SQL_GET_LAST_ROW_INSERTED_ID(cur)
if log: log.write("\tINSERTED '%s'\n" % summary['name'])
# Also create CVE history entry
update_comment = "%s {%s}" % (ORM.UPDATE_CREATE_STR % ORM.UPDATE_SOURCE_CVE,'Created from NIST')
sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
elif (cve_current[ORM.CVE_LASTMODIFIEDDATE] < summary['lastModifiedDate']) or force_update:
count_update += 1
@@ -480,12 +481,12 @@ def sql_cve_query(action, conn, summary, log):
cve_data_version = ?,
status = ?,
description = ?,
- publishedDate = ?,
- lastModifiedDate = ?,
- cvssV3_baseScore = ?,
- cvssV3_baseSeverity = ?,
- cvssV2_baseScore = ?,
- cvssV2_severity = ?,
+ "publishedDate" = ?,
+ "lastModifiedDate" = ?,
+ "cvssV3_baseScore" = ?,
+ "cvssV3_baseSeverity" = ?,
+ "cvssV2_baseScore" = ?,
+ "cvssV2_severity" = ?,
score_date = ?,
srt_updated = ?
WHERE id = ?'''
@@ -506,7 +507,7 @@ def sql_cve_query(action, conn, summary, log):
summary['score_date'],
srt_updated,
cve_id)
- cur.execute(sql, sql_values)
+ SQL_EXECUTE(cur, sql, sql_values)
is_change = True
if log: log.write("\tUPDATED '%s'\n" % summary['name'])
@@ -533,7 +534,7 @@ def sql_cve_query(action, conn, summary, log):
# Add update to history
update_comment = "%s%s" % (ORM.UPDATE_UPDATE_STR % ORM.UPDATE_SOURCE_CVE,';'.join(history_update))
sql = '''INSERT INTO orm_cvehistory (cve_id, comment, date, author) VALUES (?,?,?,?)'''
- cur.execute(sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
+ SQL_EXECUTE(cur, sql, (cve_id,update_comment,srtool_today.strftime(ORM.DATASOURCE_DATE_FORMAT),ORM.USER_SRTOOL_NAME,) )
### TO-DO
### CREATE NOTIFICATION IF SCORE/SEVERITY HAS CHANGED
@@ -544,7 +545,7 @@ def sql_cve_query(action, conn, summary, log):
cve_id = cve_current[ORM.CVE_ID]
is_change = False
if log: log.write("\tSKIPPED '%s'\n" % summary['name'])
- cur.close()
+ SQL_CLOSE_CUR(cur)
return (cve_id, is_change)
#######################################################################
@@ -563,12 +564,14 @@ def prescan_modified(cve_filter):
break
if not modify_datasource:
print("ERROR: 'NIST Modified Data' not found")
+ log_error("ERROR: 'NIST Modified Data' not found")
return cve_skip_list
nist_file = os.path.join(srtool_basepath,get_file_from_lookup(modify_datasource[ORM.DATASOURCE_LOOKUP]))
try:
if not os.path.isfile(nist_file):
print("ERROR: no such file '%s'" % nist_file)
+ log_error("ERROR: no such file '%s'" % nist_file)
exit(1)
f = open(nist_file, 'r')
source_dct = json.load(f)
@@ -585,6 +588,7 @@ def prescan_modified(cve_filter):
if verbose: print("MODSKIP:%s:1ADDMOD" % cve_name)
except Exception as e:
print("ERROR:%s" % e)
+ log_error("ERROR: NIST: prescan_modified(%s) '%s'" % (cve_filter,e))
return(cve_skip_list)
@@ -600,7 +604,7 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
import gzip
global count_read
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
# Special handling around the NIST Modified Source
@@ -614,7 +618,7 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
cve_skip_list = []
if is_modified_source:
sql = '''SELECT * FROM orm_cvesource WHERE datasource_id=? '''
- for d2c in cur.execute(sql, (datasource[ORM.DATASOURCE_ID],)):
+ for d2c in SQL_EXECUTE(cur, sql, (datasource[ORM.DATASOURCE_ID],)):
preview_dict[d2c[ORM.CVESOURCE_CVE_ID]] = d2c[ORM.CVESOURCE_ID]
if verbose: print("MODCHK:%8d:1ADDPREV" % d2c[ORM.CVESOURCE_CVE_ID])
else:
@@ -646,6 +650,12 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
CVE_Items = dct['CVE_Items']
total = len(CVE_Items)
+ # Progress expected max
+ if cmd_count:
+ progress_set_max(cmd_count)
+ else:
+ progress_set_max(total)
+
cache_path = os.path.join(srtool_basepath, nist_cache_dir)
#begin parsing each cve in the JSON data
for i, CVE_Item in enumerate(CVE_Items):
@@ -655,6 +665,10 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
if get_override('SRTDBG_MINIMAL_DB') and (i > 10):
break
+ # Development/debug support
+ if cmd_skip and (i < cmd_skip): continue
+ if cmd_count and ((i - cmd_skip) > cmd_count): break
+
#print('.', end='', flush=True)
try:
# Translate a CVE_Item JSON node
@@ -670,7 +684,9 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
pass
# Indicate progress
- print('[%4d]%30s\r' % ((i * 100)/ total, summary['name']), end='', flush=True)
+ progress_show(summary['name'],force_newline=True)
+ if not progress_status()[PROGRESS_STATUS_ENABLE]:
+ print('[%4d]%30s' % ((i * 100)/ total, summary['name']), end='\r', flush=True)
if verbose:
# Remove this progress from the verbose lines (allows sorting by cve_id)
print('')
@@ -702,36 +718,36 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
# Add this data source to the CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=? AND datasource_id=? '''
- exists = cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID])).fetchone()
+ exists = SQL_EXECUTE(cur, sql, (cve_id,datasource[ORM.DATASOURCE_ID])).fetchone()
if exists is None:
# If volatile source, first remove all existing (potentially obsolete) NIST datasources to CVE
if is_modified_source:
if verbose: print("MODCHK:%8d:3aREM_OLD_CVESOURCE %s" % (cve_id,summary['name']))
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=?'''
- for cve2ds in cur.execute(sql, (cve_id, )):
+ for cve2ds in SQL_EXECUTE(cur, sql, (cve_id, )):
if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in nist_datasources:
sql = 'DELETE FROM orm_cvesource WHERE id=?'
- cur.execute(sql, (cve2ds[ORM.CVESOURCE_ID],))
+ SQL_EXECUTE(cur, sql, (cve2ds[ORM.CVESOURCE_ID],))
if verbose: print("MODCHK:%8d:3bREM_FROM_CVESOURCE DS:%d" % (cve_id,cve2ds[ORM.CVESOURCE_DATASOURCE_ID]))
# Now, add found NIST datasource to CVE
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id,datasource[ORM.DATASOURCE_ID]))
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,datasource[ORM.DATASOURCE_ID]))
if verbose: print("MODCHK:%8d:4ADD_TO_CVESOURCE" % cve_id)
else:
if verbose: print("MODCHK:%8d:4NO_CHANGE_CVESOURCE" % cve_id)
pass
# Safety commit as we go
- if 199 == (i % 200):
- conn.commit()
+ if 0 == (i % COMMIT_DELAY):
+ SQL_COMMIT(conn)
print('')
except Exception as e:
print(traceback.format_exc())
print("UPDATE FAILED")
- cur.close()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
raise Exception("Failed to import CVEs %s: %s" % (datasource_file, e))
print()
log.write("total number of CVEs checked: %s\n" % total)
@@ -742,26 +758,29 @@ def nist_json(action, summary_json_url, datasource, datasource_file, log, date_n
for cve_id in preview_dict.keys():
# First, remove volatile and obsolete CveSource reference
sql = 'DELETE FROM orm_cvesource WHERE id=?'
- cur.execute(sql, (preview_dict[cve_id],))
+ SQL_EXECUTE(cur, sql, (preview_dict[cve_id],))
if verbose: print("MODCHK:%8d:6REMOVE DEAD LINK" % cve_id)
# Second, reattach to normal CveSource reference
- cve = cur.execute('SELECT * FROM orm_cve WHERE id = "%s"' % cve_id).fetchone()
+ cve = SQL_EXECUTE(cur, 'SELECT * FROM orm_cve WHERE id = "%s"' % cve_id).fetchone()
if cve:
for ds_id in nist_datasources:
datasource_cve_filter = nist_datasources[ds_id][ORM.DATASOURCE_CVE_FILTER]
if datasource_cve_filter and cve[ORM.CVE_NAME].startswith(datasource_cve_filter):
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id,ds_id))
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,ds_id))
if verbose: print("MODCHK:%8d:7MOVE TO NORMAL %d" % (cve_id,ds_id))
break
else:
msg = "ERROR: missing CVE record '%d' when reattaching obsolete CveSource reference" % cve_id
print(msg)
log.write(msg)
+ log_error(msg)
- conn.commit()
- cur.close()
- conn.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+ # End progress
+ progress_done('Done')
#######################################################################
# check for updates and apply if any
@@ -794,19 +813,19 @@ def update_nist(action,datasource_description, url_file, url_meta, cve_file):
pass
# Set up database connection
- conn = sqlite3.connect(srtDbName)
- c = conn.cursor()
+ conn = SQL_CONNECT()
+ cur = conn.cursor()
# Prefetch the NIST data sources to assist MODIFIED <-> NORMAL transitions
sql = "SELECT * FROM orm_datasource WHERE source = 'nist'"
- c.execute(sql)
+ SQL_EXECUTE(cur, sql)
nist_datasources = {}
- for ds in c:
+ for ds in cur:
nist_datasources[ds[ORM.DATASOURCE_ID]] = ds
sql = "SELECT * FROM orm_datasource WHERE description='%s'" % datasource_description
- c.execute(sql)
- for ds in c:
+ SQL_EXECUTE(cur, sql)
+ for ds in cur.fetchall():
try:
f = urlopen(nist_meta_url) #Note: meta files are not in json format, hence manual parse
content = f.readline().decode('UTF-8')
@@ -835,9 +854,9 @@ def update_nist(action,datasource_description, url_file, url_meta, cve_file):
log.write("\n")
#update datasource's lastModifiedDate after successsfuly updating it
- sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
- c.execute(sql, (str(date_new),))
- conn.commit()
+ sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % ds[ORM.DATASOURCE_ID]
+ SQL_EXECUTE(cur, sql, (str(date_new),))
+ SQL_COMMIT(conn)
else:
if verbose: print("NIST: NO %s NEEDED" % action)
log.write("No %s needed\n" % action)
@@ -846,25 +865,25 @@ def update_nist(action,datasource_description, url_file, url_meta, cve_file):
log.write("\n")
# Reset datasource's lastModifiedDate as today
- sql = "UPDATE orm_datasource SET lastModifiedDate = ? WHERE id='%s'" % ds[ORM.DATASOURCE_ID]
- c.execute(sql, (datetime.today().strftime(ORM.DATASOURCE_DATETIME_FORMAT),) )
- conn.commit()
+ sql = """UPDATE orm_datasource SET "lastModifiedDate" = ? WHERE id='%s'""" % ds[ORM.DATASOURCE_ID]
+ SQL_EXECUTE(cur, sql, (datetime.today().strftime(ORM.DATASOURCE_DATETIME_FORMAT),) )
+ SQL_COMMIT(conn)
#######
## TESTING PURPOSES ONLY: reset lastModifiedDate so will always need update!
#######
# sql = '''UPDATE orm_datasource
- # SET lastModifiedDate = "0001-01-01 01:01:01"
+ # SET "lastModifiedDate" = "0001-01-01 01:01:01"
# WHERE description="NIST JSON Modified Data 2017" '''
- # c.execute(sql)
- # conn.commit()
+ # SQL_EXECUTE(cur, sql)
+ # SQL_COMMIT(conn)
f.close()
except URLError as e:
raise Exception("Failed to open %s: %s" % (nist_meta_url, e))
log.close()
- c.close()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
def file_date(filename,utc=False):
t = os.path.getmtime(filename)
@@ -939,7 +958,7 @@ def fetch_cve(cve_name,cve_source_file):
else:
# Return the results
for key in summary.keys():
- print('%s=%s' % (key,summary[key]))
+ print('%s=%s' % (key,str(summary[key]).strip()))
def cve_summary(cve_name):
cve_name = cve_name.upper()
@@ -949,7 +968,7 @@ def cve_summary(cve_name):
DSMAP_MOD = 2
DSMAP_UPDATE = 3
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur_ds = conn.cursor()
cur_cve = conn.cursor()
base_id = -1
@@ -974,6 +993,7 @@ def cve_summary(cve_name):
print(" cvssV3_baseSeverity:%s" % summary['cvssV3_baseSeverity'])
print(" cvssV2_baseScore :%s" % summary['cvssV2_baseScore'])
print(" cvssV2_severity :%s" % summary['cvssV2_severity'])
+ print(" publishedDate :%s" % summary['publishedDate'])
print(" lastModifiedDate :%s" % summary['lastModifiedDate'])
else:
print(" %s: There is no CVE record for %s in %s" % (key,cve_name,data_map[DSMAP_FILE]))
@@ -983,16 +1003,16 @@ def cve_summary(cve_name):
# Support CVE record IDs in addition to CVE names
cve = None
if cve_name[0].isdigit():
- cve = cur_cve.execute('SELECT * FROM orm_cve WHERE id = %s' % cve_name).fetchone()
+ cve = SQL_EXECUTE(cur_cve, 'SELECT * FROM orm_cve WHERE id = %s' % cve_name).fetchone()
if not cve:
print("CVE Summary:")
print(" CVE : There is no CVE record for this ID %s in orm_cve" % (cve_name))
return
cve_name = cve[ORM.CVE_NAME]
else:
- cve = cur_cve.execute('SELECT * FROM orm_cve WHERE name = "%s"' % cve_name).fetchone()
+ cve = SQL_EXECUTE(cur_cve, 'SELECT * FROM orm_cve WHERE name = "%s"' % cve_name).fetchone()
- cur_ds.execute('SELECT * FROM orm_datasource;')
+ SQL_EXECUTE(cur_ds, 'SELECT * FROM orm_datasource;')
datasource_map = {}
for datasource in cur_ds:
#print("Datasource[%d]='%s'" % (datasource[ORM.DATASOURCE_ID],datasource[ORM.DATASOURCE_DESCRIPTION]))
@@ -1011,7 +1031,7 @@ def cve_summary(cve_name):
show_summary("BASE",cve_name,datasource_map,base_id)
show_summary("MOD ",cve_name,datasource_map,modified_id)
if cve:
- cur_ds.execute('SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
+ SQL_EXECUTE(cur_ds, 'SELECT * FROM orm_cvesource WHERE cve_id = %d' % cve[ORM.CVE_ID])
# Return the CVE record's current values
print("CVE Summary:")
print(" CVE [%s]: %s " % (cve[ORM.CVE_ID],cve[ORM.CVE_NAME],))
@@ -1022,6 +1042,7 @@ def cve_summary(cve_name):
print(" cvssV2_severity :%s" % cve[ORM.CVE_CVSSV2_SEVERITY])
print(" public_notes :%s" % cve[ORM.CVE_COMMENTS])
print(" status :%s" % ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR))
+ print(" publishedDate :%s" % cve[ORM.CVE_PUBLISHEDDATE])
print(" lastModifiedDate :%s" % cve[ORM.CVE_LASTMODIFIEDDATE])
# Return the DataSource mapping results
print("DataSource Summary:")
@@ -1046,7 +1067,18 @@ def cve_summary(cve_name):
#
def update_cve_list(action,cve_string_list,conn=None):
- cve_list = cve_string_list.split(',')
+ if '/' == cve_string_list[0]:
+ # Read list from file
+ cve_list = []
+ file_fd = open(cve_string_list, 'r')
+ lines = file_fd.readlines()
+ for line in lines:
+ if line:
+ cve_list.append(line.strip())
+ file_fd.close()
+ else:
+ # Read list from comma string
+ cve_list = cve_string_list.split(',')
DS_MODIFIED_SOURCE = 0
DS_CVEFILTER = 1
@@ -1057,18 +1089,18 @@ def update_cve_list(action,cve_string_list,conn=None):
# Set up database connection
do_close = False
if not conn:
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
do_close = True
cur = conn.cursor()
# Gather the CVE prefix to lookup commands
sql = "SELECT * FROM orm_datasource"
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
datasource_table = []
datasource_nist_ids = {}
for datasource in cur:
- if 'nist' != datasource[ORM.DATASOURCE_SOURCE]:
- # Only consider NIST datasources
+ if ('nist' != datasource[ORM.DATASOURCE_SOURCE]) or ('cve' != datasource[ORM.DATASOURCE_DATA]) :
+ # Only consider NIST CVE datasources
continue
# Track the IDs for NIST sources
@@ -1084,7 +1116,6 @@ def update_cve_list(action,cve_string_list,conn=None):
fd = None
source_dct = []
for datasource in datasource_table:
-
# Simple caching
if fd:
fd.close()
@@ -1136,14 +1167,14 @@ def update_cve_list(action,cve_string_list,conn=None):
# First, remove all existing (potentially obsolete) NIST datasources to CVE
sql = '''SELECT * FROM orm_cvesource WHERE cve_id=?'''
- for cve2ds in cur.execute(sql, (cve_id, )):
+ for cve2ds in SQL_EXECUTE(cur, sql, (cve_id, )):
if cve2ds[ORM.CVESOURCE_DATASOURCE_ID] in datasource_nist_ids:
sql = 'DELETE FROM orm_cvesource WHERE id=?'
- cur.execute(sql, (cve2ds[ORM.CVESOURCE_ID],))
+ SQL_EXECUTE(cur, sql, (cve2ds[ORM.CVESOURCE_ID],))
if verbose: print(" NIST_REMOVE_OLDSOURCE:%s" % (cve2ds[ORM.CVESOURCE_DATASOURCE_ID]))
# Second, add found NIST datasource to CVE
- sql = ''' INSERT into orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
- cur.execute(sql, (cve_id,datasource[DS_ID],))
+ sql = ''' INSERT INTO orm_cvesource (cve_id, datasource_id) VALUES (?, ?)'''
+ SQL_EXECUTE(cur, sql, (cve_id,datasource[DS_ID],))
# Note, CVE top record was updated with found values (NIST wins over other sources)
# when sql_cve_query() executed
@@ -1155,19 +1186,19 @@ def update_cve_list(action,cve_string_list,conn=None):
return
if update:
- conn.commit()
- cur.close()
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
if do_close:
- conn.close()
+ SQL_CLOSE_CONN(conn)
def update_existing_cves(action,cve_prefix):
# Set up database connection
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
# Gather the CVE prefix to lookup commands
sql = 'SELECT * FROM orm_cve WHERE name LIKE "'+cve_prefix+'%"'
- cur.execute(sql)
+ SQL_EXECUTE(cur, sql)
cve_table = []
i = 0
for cve in cur:
@@ -1189,8 +1220,8 @@ def update_existing_cves(action,cve_prefix):
print("SEND:%2d:%s" % (i,cve[ORM.CVE_NAME]))
update_cve_list(action,','.join(cve_table),conn)
- cur.close()
- conn.close()
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
#######################################################################
@@ -1221,12 +1252,14 @@ def main(argv):
parser.add_argument('--url-meta', dest='url_meta', help='CVE URL meta extension')
parser.add_argument('--file', dest='cve_file', help='Local CVE source file')
+ parser.add_argument('--progress', '-P', action='store_true', dest='do_progress', help='Progress output')
parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
parser.add_argument('--force-cache', action='store_true', dest='force_cache', help='Force update')
parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
parser.add_argument('--count', dest='count', help='Debugging: short run record count')
parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose output')
+ parser.add_argument('--debug-sql', action='store_true', dest='debug_sql', help='Debug SQL writes')
args = parser.parse_args()
verbose = args.verbose
@@ -1239,6 +1272,9 @@ def main(argv):
cmd_count = 0
if None != args.count:
cmd_count = int(args.count)
+ if args.debug_sql:
+ SQL_DEBUG(True,'NST')
+ progress_set_on(args.do_progress)
#srt_error_log("DEBUG:srtool_nist:%s" % args)
@@ -1303,9 +1339,10 @@ def main(argv):
print("DATABASE %s FINISHED\n" % action)
print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE %s FAILED ... %s" % (action,e))
+ print("ERROR:DATABASE %s FAILED ... %s" % (action,e))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
+ log_error("ERROR:NIST:DATABASE %s FAILED ... '%s'" % (action,e))
ret = 1
elif 'update_nist_incremental' == args.command:
try:
@@ -1315,9 +1352,11 @@ def main(argv):
print("DATABASE UPDATE FINISHED\n")
print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
except Exception as e:
- print("DATABASE INCREMENT FAILED ... %s" % e)
+ print("ERROR:DATABASE INCREMENT FAILED ... %s" % e)
+ print(" :%s" % traceback.format_exc())
print("Read=%d,Created=%d,Updated=%d" % (count_read,count_create,count_update))
master_log.write("SRTOOL:%s:%s:\t\t\t...\t\t\tFAILED ... %s\n" % (date.today(), args.source, e))
+ log_error("ERROR:NIST:DATABASE INCREMENT %s FAILED ... '%s'" % (action,e))
ret = 1
elif 'download_nist' == args.command:
print ("BEGINNING NIST UPDATES PLEASE WAIT ... this can take some time")
@@ -1329,6 +1368,9 @@ def main(argv):
ret = 1
print("Command not found")
master_log.close()
+ # Dump the SQL transaction data
+ if args.debug_sql:
+ SQL_DUMP()
if 0 != ret:
exit(ret)
diff --git a/bin/redhat/srtool_redhat.py b/bin/redhat/srtool_redhat.py
index 9b629cbe..32aa12f1 100755
--- a/bin/redhat/srtool_redhat.py
+++ b/bin/redhat/srtool_redhat.py
@@ -31,11 +31,13 @@ import sys
import re
import json
import argparse
+from datetime import datetime, date, timedelta
from urllib.request import urlopen, Request
# Setup:
srtDbName = 'srt.sqlite'
+REDHAT_STALE_DAYS = 4
redhat_cache_dir = 'data/cache/redhat'
redhat_cve_url = "https://access.redhat.com/labs/securitydataapi/cve"
@@ -64,6 +66,12 @@ def get_override(key):
return 'yes' == overrides[key]
return False
+srtErrorLog = os.environ['SRTDBG_LOG'] if ('SRTDBG_LOG' in os.environ) else '/tmp/srtool_dbg.log'
+def _log(msg):
+ f1=open(srtErrorLog, 'a')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
#################################
# Fetch a CVE record from Red Hat
# REST API, cache the results
@@ -109,9 +117,22 @@ def fetch_cve(cve_name):
except:
pass
- if not os.path.isfile(datasource_file):
- if verbose: print("REDHAT:URLOPEN:%s" % datasource_url)
+ if os.path.isfile(datasource_file):
+ # See if the cache file is stale
+ now = datetime.now()
+ file_time = datetime.fromtimestamp(os.path.getmtime(datasource_file))
+ days_diff = (now - file_time).days
+ if days_diff > REDHAT_STALE_DAYS:
+ os.remove(datasource_file)
+ _log("REDHAT_STALE:%s:%s" % (days_diff,datasource_file))
+ if os.path.isfile(datasource_file):
+ # Use cached CVE file
+ if verbose: print("REDHAT:CACHE:%s" % datasource_file)
+ with open(datasource_file) as json_data:
+ dct = json.load(json_data)
+ if verbose: print("REDHAT:URLOPEN:%s" % datasource_url)
+ else:
# Fetch and/or refresh upstream CVE file
# NOTE: Setting a known browser user agent to accomodate mod_security or some similar server security feature,
# which blocks known spider/bot user agents at the Red Hat site
@@ -133,11 +154,6 @@ def fetch_cve(cve_name):
# Cache the record
datasource_file_fd = open(datasource_file, 'w+')
datasource_file_fd.write(json.dumps(dct))
- else:
- # Use cached CVE file
- if verbose: print("REDHAT:CACHE:%s" % datasource_file)
- with open(datasource_file) as json_data:
- dct = json.load(json_data)
extract_json(dct,'',10)
# for key in summary.keys():
diff --git a/bin/srt b/bin/srt
index 4cff6fd9..ddc3b6a7 100755
--- a/bin/srt
+++ b/bin/srt
@@ -3,7 +3,7 @@
# SRTool - shell script to start "Security Response Tool"
# Copyright (C) 2013-2015 Intel Corp.
-# Copyright (C) 2018 Wind River Systems
+# Copyright (C) 2018-2023 Wind River Systems
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -23,9 +23,22 @@ Usage 1: bin/srt start|stop [webport=<address:port>]
Optional arguments:
[webport] Set the SRTool server port (default: localhost:8000)
[noautoupdate] Disable the auto update server
+ [start_update] Start only the update server (when main server is service/SSL)
Usage 2: bin/srt manage [createsuperuser|lsupdates|migrate|makemigrations|checksettings|collectstatic|...]
"
+preset_basic_directories()
+{
+ # create working directories for srtool
+ mkdir -p $SRT_BASE_DIR/backups
+ mkdir -p $SRT_BASE_DIR/data
+ mkdir -p $SRT_BASE_DIR/data/cache
+ mkdir -p $SRT_BASE_DIR/logs
+ mkdir -p $SRT_BASE_DIR/reports
+ mkdir -p $SRT_BASE_DIR/update_logs
+ touch $SRT_BASE_DIR/update_logs/master_log.txt
+}
+
databaseCheck()
{
retval=0
@@ -48,18 +61,31 @@ databaseCheck()
}
get_srt_env_settings() {
+ echo "## Inherited SRT environment settings" > $ENV_FILE
+ echo "SRT_BASE_DIR=$SRT_BASE_DIR" >> $ENV_FILE
+ echo "SRTDBG_LOG=$SRTDBG_LOG" >> $ENV_FILE
+ if [ -z "$TZ" ] ; then
+ export TZ=America/Chicago
+ echo "TZ=America/Chicago" >> $ENV_FILE
+ fi
+
mainapp="yp"
# Apply all shell settings except default app 'yp'
# Only look in directories with proper 'datasource.json' files
- for envscript in $(find ./bin -name "datasource.json") ; do
- envscript=${envscript/datasource.json/srtool_env.sh}
+ for envscript in $(find $SRT_BASE_DIR/bin -name "datasource.json") ; do
+ envscript="${envscript/datasource.json/srtool_env.sh}"
if [ -f "$envscript" -a "$envscript" = "${envscript/bin\/yp/}" ] ; then
. $envscript
+ echo "## Inherit: $envscript" >> $ENV_FILE
+ cat $envscript >> $ENV_FILE
fi
done
# if no main app, default to 'yp'
if [ -z "$SRT_MAIN_APP" ] ; then
- . ./bin/yp/srtool_env.sh
+ envscript="$SRT_BASE_DIR/bin/yp/srtool_env.sh"
+ . $envscript
+ echo "## Inherit: $envscript" >> $ENV_FILE
+ cat $envscript >> $ENV_FILE
fi
echo "SRT_MAIN_APP=$SRT_MAIN_APP"
}
@@ -79,10 +105,8 @@ webserverKillAll()
fi
done
- # Stop the Update app
- if [ 0 -eq $no_auto_update ] ; then
- ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
- fi
+ # Stop the Update app (even if start was disabled)
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
}
webserverStartAll()
@@ -99,9 +123,10 @@ webserverStartAll()
databaseCheck || return 1
echo "Starting SRTool webserver..."
+ echo "RUN: $MANAGE runserver --noreload $ADDR_PORT </dev/null >> ${SRT_BASE_DIR}/srt_web.log 2>&1 & echo \$! >${SRT_BASE_DIR}/.srtmain.pid"
$MANAGE runserver --noreload "$ADDR_PORT" \
- </dev/null >>${SRT_BASE_DIR}/srt_web.log 2>&1 \
+ </dev/null >> ${SRT_BASE_DIR}/srt_web.log 2>&1 \
& echo $! >${SRT_BASE_DIR}/.srtmain.pid
sleep 1
@@ -116,12 +141,24 @@ webserverStartAll()
# Start the Update app
if [ 0 -eq $no_auto_update ] ; then
- ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start > /dev/null 2>&1 &
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start $UPDATE_FOLLOW_PID_FILE >> ${SRT_BASE_DIR}/srt_update.log 2>&1 &
echo "SRTool update service started at PID $!"
fi
return $retval
}
+update_start_all()
+{
+ # Start the Update app
+ if [ 0 -eq $no_auto_update ] ; then
+ echo " First stop any running updater"
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-stop
+ echo " Now (re)start updater"
+ ${SRT_BASE_DIR}/bin/common/srtool_update.py --cron-start $UPDATE_FOLLOW_PID_FILE -v >> ${SRT_BASE_DIR}/srt_update.log 2>&1 &
+ echo " SRTool update service started at PID $!"
+ fi
+}
+
INSTOPSYSTEM=0
# define the stop command
@@ -216,19 +253,23 @@ export SRT_BASE_DIR=$(dirname $SRT)
SRT_BASE_DIR=$(readlink -f $SRT_BASE_DIR)
SRT_BASE_DIR=$(dirname $SRT_BASE_DIR)
MANAGE="python3 $SRT_BASE_DIR/lib/manage.py"
+ENV_FILE=$SRT_BASE_DIR/.env_vars.env
+
+# Pre-set the local database configuration file if not yet done
+if [ ! -f "$SRT_BASE_DIR/srt_dbconfig.yml" ] ; then
+ cp "$SRT_BASE_DIR/bin/srt_dbconfig.yml" "$SRT_BASE_DIR/srt_dbconfig.yml"
+fi
-# Fetch the datasource environent settings
+# Fetch the datasource environent settings and copy current environment variables to txt for wsgi.py to read
get_srt_env_settings
# insure basic directories are present
-mkdir -p $SRT_BASE_DIR/data
-mkdir -p $SRT_BASE_DIR/data/cache
-mkdir -p $SRT_BASE_DIR/update_logs
-touch $SRT_BASE_DIR/update_logs/master_log.txt
+preset_basic_directories
ADDR_PORT="localhost:8000"
unset CMD
manage_cmd=""
+UPDATE_FOLLOW_PID_FILE=""
if [ "1" = "$SRT_SKIP_AUTOUPDATE" ] ; then
no_auto_update=1
else
@@ -245,6 +286,15 @@ for param in $*; do
manage )
CMD=$param
;;
+ export_env )
+ CMD=$param
+ ;;
+ start_update )
+ CMD=$param
+ ;;
+ update_follow_pid=*)
+ UPDATE_FOLLOW_PID_FILE="--follow-pid-file=${param#*=}"
+ ;;
webport=*)
ADDR_PORT="${param#*=}"
# Split the addr:port string
@@ -307,10 +357,6 @@ case $CMD in
echo "Failed ${CMD}."
exit 4
fi
- # create working directories for srtool
- mkdir -p $SRT_BASE_DIR/update_logs
- mkdir -p $SRT_BASE_DIR/backups
- mkdir -p $SRT_BASE_DIR/reports
# set fail safe stop system on terminal exit
trap stop_system SIGHUP
echo "Successful ${CMD}."
@@ -325,6 +371,17 @@ case $CMD in
$MANAGE $manage_cmd
;;
+ export_env )
+ echo "#export variables"
+ cat $ENV_FILE
+ exit 0
+ ;;
+
+ start_update )
+ echo "start update service"
+ update_start_all
+ echo "update service started"
+ ;;
esac
diff --git a/bin/srt_dbconfig.yml b/bin/srt_dbconfig.yml
new file mode 100644
index 00000000..5d3c7c6b
--- /dev/null
+++ b/bin/srt_dbconfig.yml
@@ -0,0 +1,41 @@
+dbselect: sqlite_prodution
+
+sqlite_prodution:
+ dbtype: sqlite
+ path: srt.sqlite
+
+sqlite_development:
+ dbtype: sqlite
+ path: srt_dev.sqlite
+
+postgres_prodution:
+ dbtype: postgres
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool
+ port: 5432
+
+postgres_development:
+ dbtype: postgres
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool_dev
+ port: 5432
+
+mysql_prodution:
+ dbtype: mysql
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool
+ port: 3306
+
+mysql_development:
+ dbtype: mysql
+ host: localhost
+ user: admin
+ passwd: password
+ name: srtool_dev
+ port: 3306
diff --git a/bin/srtool-requirements.txt b/bin/srtool-requirements.txt
index ab2d4895..2dc9517b 100755
--- a/bin/srtool-requirements.txt
+++ b/bin/srtool-requirements.txt
@@ -1,3 +1,11 @@
-Django>1.11.1,<2.3
+Django==4.0
pytz
requests
+jira
+pyyaml
+progress
+pick
+openpyxl
+python-dotenv
+#psycopg2==2.8.6
+#mysqlclient
diff --git a/bin/ubuntu_trivy/datasource.json b/bin/ubuntu_trivy/datasource.json
new file mode 100755
index 00000000..9b58d88e
--- /dev/null
+++ b/bin/ubuntu_trivy/datasource.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0041-ubuntu-trivy",
+ "data" : "cve",
+ "source" : "UBUNTU_Trivy",
+ "name" : "UBUNTU_Trivy",
+ "description" : "Ubuntu Trivy Repo",
+ "attributes" : "ALT-SOURCE",
+ "cve_filter" : "CVE-",
+ "init" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py --initialize",
+ "update" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py --update",
+ "lookup" : "bin/ubuntu_trivy/srtool_ubuntu_trivy.py %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 3:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/ubuntu_trivy/license.txt b/bin/ubuntu_trivy/license.txt
new file mode 100755
index 00000000..f917c53d
--- /dev/null
+++ b/bin/ubuntu_trivy/license.txt
@@ -0,0 +1,4 @@
+[ Ubuntu ]
+
+The ubuntu-cve is PUBLIC.
+
diff --git a/bin/ubuntu_trivy/srtool_ubuntu_trivy.py b/bin/ubuntu_trivy/srtool_ubuntu_trivy.py
new file mode 100755
index 00000000..619c3bf3
--- /dev/null
+++ b/bin/ubuntu_trivy/srtool_ubuntu_trivy.py
@@ -0,0 +1,295 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2013 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the ubuntu_trivy based CVE data
+#
+
+import os
+import sys
+import argparse
+import shutil
+from urllib.request import urlopen
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+ubuntu_trivy_cve_url = 'git://git.launchpad.net/ubuntu-cve-tracker'
+ubuntu_trivy_repo_dir = 'data/ubuntu_trivy/ubuntu-cve-tracker'
+ubuntu_trivy_cve_dir = 'data/ubuntu_trivy/ubuntu-cve-tracker'
+ubuntu_trivy_cve_subdir = ('active','ignored','retired')
+
+# Globals
+verbose = False
+
+#################################
+# Helper Functions
+#
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*cmd_list):
+ result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Fetch the key for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ try:
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if srtsetting:
+ if is_dict:
+ return(srtsetting['value'])
+ else:
+ return(srtsetting[ORM.SRTSETTING_VALUE])
+ except Exception as e:
+ print(f"ERROR:{e}")
+ return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+ cur = SQL_CURSOR(conn)
+ # Set the key value for SrtSetting
+ sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+ srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+ if not srtsetting:
+ sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+ SQL_EXECUTE(cur, sql, (key,'',value))
+ if verbose: print(f"INSERT:{key}:{value}:")
+ else:
+ if verbose: print("UPDATE[{srtsetting[ORM.SRTSETTING_ID]}]:{key}:{value}:")
+ sql = ''' UPDATE orm_srtsetting
+ SET value=?
+ WHERE id=?'''
+ if is_dict:
+ SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+ else:
+ SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+ SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+ os.chdir(newdir)
+ # WARNING: we need a pause else the chdir will break
+ # susequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+ try:
+ os.makedirs(newdir)
+ except:
+ # dir already exists
+ pass
+ # WARNING: we need a pause else the makedirs could break
+ # susequent commands (e.g. 'git clone' and 'git checkout')
+ time.sleep(delay)
+
+def execute_commmand(cmnd,path=''):
+ cwd = os.getcwd()
+ if path:
+ do_chdir(path,delay=1.0)
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+ if 0 != result_returncode:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ print("ERROR({result_returncode}):{result_stderr}")
+ return(1)
+ if verbose:
+ print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+ if path:
+ do_chdir(cwd,delay=1.0)
+
+# For Jobs, with captured output
+def execute_system(cmnd):
+ srt_base_dir = os.environ.get('SRT_BASE_DIR')
+ if srt_base_dir and (srt_base_dir != os.getcwd()):
+ os.chdir(srt_base_dir)
+ return os.system(cmnd)
+
+# Insure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+ repo = os.path.basename(repo_url)
+ if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+ if not os.path.isdir(repo_dir):
+ # Clone into the repo's parent directory
+ repo_parent_dir = os.path.dirname(repo_dir)
+ do_makedirs(repo_parent_dir)
+ if verbose: print(f"= Clone '{repo}' ... =")
+ cmnd=['git','clone',repo_url]
+ execute_commmand(cmnd,repo_parent_dir)
+ else:
+ if verbose: print(f"= Clone '{repo}' skip ... =")
+
+ if branch:
+ if verbose: print("= Checkout branch '{branch}' ... =")
+ cmnd=['git','-C',repo_dir,'checkout',branch]
+ execute_commmand(cmnd)
+
+ # Get the latest data with a safety pull
+ if verbose: print("= Pull ... =")
+ cmnd=['git','-C',repo_dir,'pull']
+ execute_commmand(cmnd)
+
+#################################
+# Initialize and/or refresh the Ubuntu Trivy repo
+#
+
+def init_ubuntu_trivy():
+ conn = SQL_CONNECT(column_names=True)
+ cur = SQL_CURSOR(conn)
+ today = datetime.now().strftime("%Y-%m-%d")
+ repo_update = srtsetting_get(conn,"UBUNTU_TRIVY_UPDATE","")
+
+ if not os.path.isdir(ubuntu_trivy_cve_dir):
+ prepare_git(ubuntu_trivy_repo_dir,ubuntu_trivy_cve_url,'')
+ elif today != repo_update:
+ prepare_git(ubuntu_trivy_repo_dir,ubuntu_trivy_cve_url,'')
+ srtsetting_set(conn,"UBUNTU_TRIVY_UPDATE",today)
+
+ SQL_COMMIT(conn)
+ SQL_CLOSE_CUR(cur)
+ SQL_CLOSE_CONN(conn)
+
+#################################
+# Fetch a CVE record from ubuntu_trivy
+# REST API, cache the results
+#
+
+def fetch_cve(cve_name):
+ # Refresh the repo if needed
+ init_ubuntu_trivy()
+
+ msg = 'description='
+ found = False
+ stop_after_linux = False
+ for subdir in ubuntu_trivy_cve_subdir:
+ datasource_file = os.path.join(srtool_basepath,ubuntu_trivy_cve_dir,subdir,cve_name)
+ if os.path.isfile(datasource_file):
+ with open(datasource_file, 'r') as fp:
+ patches_found = False
+ for line_patch in fp:
+
+ line_patch = line_patch.strip()
+ if line_patch.startswith('Patches_'):
+ patches_found = True
+ if patches_found:
+ msg += f"{line_patch}[EOL]"
+ # For kernel, only accept the first section
+ if line_patch.startswith('Patches_linux:'):
+ stop_after_linux = True
+ if stop_after_linux and (not line_patch):
+ break
+ found = True
+ break
+ if not found:
+ msg += 'Ubuntu Trivy record not found.'
+
+ print(msg)
+
+#################################
+# comparibles
+#
+#
+
+def comparibles(cve_list_file):
+ if not cve_list_file.startswith('/'):
+ cve_list_file = os.path.join(srtool_basepath,cve_list_file)
+ if os.path.isfile(cve_list_file):
+ with open(cve_list_file, 'r') as fp:
+ for line in fp:
+ msg = ''
+ cve_name = line.strip()
+ found = False
+ stop_after_linux = False
+ for subdir in ubuntu_trivy_cve_subdir:
+ datasource_file = os.path.join(srtool_basepath,ubuntu_trivy_cve_dir,subdir,cve_name)
+ if os.path.isfile(datasource_file):
+ with open(datasource_file, 'r') as fp:
+ patches_found = False
+ for line_patch in fp:
+ line_patch = line_patch.strip()
+ if line_patch.startswith('Patches_'):
+ patches_found = True
+ if patches_found:
+ msg += f"{line_patch}[EOL]"
+ # For kernel, only accept the first section
+ if line_patch.startswith('Patches_linux:'):
+ stop_after_linux = True
+ if stop_after_linux and (not line_patch):
+ break
+ found = True
+ break
+ if not found:
+ msg = 'Ubuntu Trivy record not found.'
+
+ print(f"{cve_name}||{msg}")
+ else:
+ print(f"ERROR: missing CVE list file '{cve_list_file}'", file=sys.stderr)
+ return(1)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+
+ # setup
+
+ parser = argparse.ArgumentParser(description='srtool_ubuntu_trivy.py: manage ubuntu_trivy CVE data')
+ parser.add_argument('--initialize', '-i', action='store_const', const='init_ubuntu_trivy', dest='command', help='Download the Ubuntu Trivy repo')
+ parser.add_argument('--update', '-u', action='store_const', const='update_ubuntu_trivy', dest='command', help='Update the Ubuntu Trivy repo')
+ parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
+ parser.add_argument('--comparibles', dest='comparibles', help='Return ubuntu-trivy data for list of CVEs')
+
+ parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+ parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+ parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
+ args = parser.parse_args()
+
+ if args.is_verbose:
+ verbose = True
+
+ if 'init_ubuntu_trivy' == args.command:
+ init_ubuntu_trivy()
+ elif 'update_ubuntu_trivy' == args.command:
+ # No difference from init at this time
+ init_ubuntu_trivy()
+ elif args.cve_detail:
+ fetch_cve(args.cve_detail)
+ elif args.comparibles:
+ comparibles(args.comparibles)
+
+ else:
+ print("Command not found")
+
+if __name__ == '__main__':
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ main(sys.argv[1:])
diff --git a/bin/wr_trivy/datasource.json b/bin/wr_trivy/datasource.json
new file mode 100755
index 00000000..a0150e0f
--- /dev/null
+++ b/bin/wr_trivy/datasource.json
@@ -0,0 +1,19 @@
+{
+ "datasource" : [
+ {
+ "key" : "0040-wr-trivy",
+ "data" : "cve",
+ "source" : "WR_Trivy",
+ "name" : "WR_Trivy",
+ "description" : "Wind River Trivy Repo",
+ "attributes" : "ALT-SOURCE",
+ "cve_filter" : "CVE-",
+ "init" : "bin/wr_trivy/srtool_wr_trivy.py --initialize",
+ "update" : "bin/wr_trivy/srtool_wr_trivy.py --update",
+ "lookup" : "bin/wr_trivy/srtool_wr_trivy.py %command%",
+ "update_frequency" : "3",
+ "_comment_" : "Update on Saturdays at 3:00 am",
+ "update_time" : "{\"weekday\":\"5\",\"hour\":\"2\"}"
+ }
+ ]
+}
diff --git a/bin/wr_trivy/license.txt b/bin/wr_trivy/license.txt
new file mode 100755
index 00000000..4a6aaab3
--- /dev/null
+++ b/bin/wr_trivy/license.txt
@@ -0,0 +1,4 @@
+[ Wind River ]
+
+The wrlinux-cve is PUBLIC.
+
diff --git a/bin/wr_trivy/srtool_wr_trivy.py b/bin/wr_trivy/srtool_wr_trivy.py
new file mode 100755
index 00000000..64b78a65
--- /dev/null
+++ b/bin/wr_trivy/srtool_wr_trivy.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2013 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+# * This script manages the wr_trivy based CVE data
+#
+
+import os
+import sys
+import argparse
+import shutil
+import subprocess
+import time
+from datetime import datetime
+from urllib.request import urlopen
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+#wr_trivy_cve_url_file = 'https://salsa.wr_trivy.org/security-tracker-team/security-tracker/blob/master/data/CVE/list'
+wr_trivy_cve_url = 'https://distro.windriver.com/git/windriver-cve-tracker.git'
+wr_trivy_repo_dir = 'data/wr_trivy/windriver-cve-tracker'
+wr_trivy_cve_dir = 'data/wr_trivy/windriver-cve-tracker/active'
+
+# Globals
+verbose = False
+
+#################################
+# Helper Functions
+#
+
+#
+# Sub Process calls
+# Enforce that all scripts run from the SRT_BASE_DIR context (re:WSGI)
+#
+def execute_process(*cmd_list):
+    # Run an external command, capturing both output streams.
+    # Returns (returncode, stdout, stderr) with the streams decoded as UTF-8.
+    result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    # Return the value of the SrtSetting row named 'key', or 'default_value'
+    # when the row is missing or the query raises.
+    # is_dict: True when rows come back as dicts (column_names=True connect);
+    # False when they are tuples indexed via the ORM.* offsets.
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        print(f"ERROR:{e}")
+    # Fall through: not found, or the lookup failed
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+    # Insert or update the SrtSetting row named 'key' with 'value'.
+    # is_dict: True when rows come back as dicts (column_names=True connect);
+    # False when they are tuples indexed via the ORM.* offsets.
+    cur = SQL_CURSOR(conn)
+    # Set the key value for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+    if not srtsetting:
+        sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+        SQL_EXECUTE(cur, sql, (key,'',value))
+        if verbose: print(f"INSERT:{key}:{value}:")
+    else:
+        # Honor is_dict for the row id (dict key vs ORM tuple offset)
+        row_id = srtsetting['id'] if is_dict else srtsetting[ORM.SRTSETTING_ID]
+        # Fix: the message below was missing its 'f' prefix and printed the
+        # raw braces instead of the values
+        if verbose: print(f"UPDATE[{row_id}]:{key}:{value}:")
+        sql = ''' UPDATE orm_srtsetting
+            SET value=?
+            WHERE id=?'''
+        SQL_EXECUTE(cur, sql, (value,row_id))
+    SQL_COMMIT(conn)
+
+def do_chdir(newdir,delay=0.200):
+    # Change the working directory, then pause briefly.
+    os.chdir(newdir)
+    # WARNING: we need a pause else the chdir will break
+    # subsequent commands (e.g. 'git clone' and 'git checkout')
+    time.sleep(delay)
+
+def do_makedirs(newdir,delay=0.200):
+    # Create 'newdir' (and parents), then pause briefly.
+    # Fix: the original bare 'except: pass' only meant to tolerate an
+    # already-existing directory but also hid real failures (e.g. permission
+    # errors); exist_ok=True covers exactly the intended case.
+    os.makedirs(newdir, exist_ok=True)
+    # WARNING: we need a pause else the makedirs could break
+    # subsequent commands (e.g. 'git clone' and 'git checkout')
+    time.sleep(delay)
+
+def execute_commmand(cmnd,path=''):
+    # Run 'cmnd' (an argv list), optionally from directory 'path'.
+    # Returns 1 on failure, 0 on success.
+    cwd = os.getcwd()
+    if path:
+        do_chdir(path)
+    try:
+        result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+        if 0 != result_returncode:
+            print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+            # Fix: the error message was missing its 'f' prefix
+            print(f"ERROR({result_returncode}):{result_stderr}")
+            return(1)
+        if verbose:
+            print(f"execute_commmand[{os.getcwd()}]|{cmnd}|{result_stdout}|")
+    finally:
+        # Fix: the error path used to return without restoring the original
+        # working directory; always restore it
+        if path:
+            do_chdir(cwd)
+    return(0)
+
+# Ensure the git repo is cloned and available
+def prepare_git(repo_dir,repo_url,branch):
+    # Clone 'repo_url' under the parent of 'repo_dir' when not yet present,
+    # optionally check out 'branch', then pull the latest changes.
+    repo = os.path.basename(repo_url)
+    if verbose: print(f"prepare_git:({repo_dir},{repo_url})")
+    if not os.path.isdir(repo_dir):
+        # Clone into the repo's parent directory
+        repo_parent_dir = os.path.dirname(repo_dir)
+        do_makedirs(repo_parent_dir)
+        if verbose: print(f"= Clone '{repo}' ... =")
+        cmnd=['git','clone',repo_url]
+        execute_commmand(cmnd,repo_parent_dir)
+    else:
+        if verbose: print(f"= Clone '{repo}' skip ... =")
+
+    if branch:
+        # Fix: the message below was missing its 'f' prefix and printed the
+        # literal '{branch}' text
+        if verbose: print(f"= Checkout branch '{branch}' ... =")
+        cmnd=['git','-C',repo_dir,'checkout',branch]
+        execute_commmand(cmnd)
+
+    # Get the latest data with a safety pull
+    if verbose: print("= Pull ... =")
+    cmnd=['git','-C',repo_dir,'pull']
+    execute_commmand(cmnd)
+
+#################################
+# Initialize and/or refresh the Wind River Trivy repo
+#
+
+def init_wr_trivy():
+    # Clone the Wind River CVE tracker repo on first use, and refresh it
+    # (git pull) at most once per day thereafter.
+    conn = SQL_CONNECT(column_names=True)
+    cur = SQL_CURSOR(conn)
+    today = datetime.now().strftime("%Y-%m-%d")
+    repo_update = srtsetting_get(conn,"WR_TRIVY_UPDATE","")
+
+    if (not os.path.isdir(wr_trivy_cve_dir)) or (today != repo_update):
+        prepare_git(wr_trivy_repo_dir,wr_trivy_cve_url,'')
+        # Fix: also record the refresh date after the initial clone, so the
+        # first day no longer triggers a redundant second pull
+        srtsetting_set(conn,"WR_TRIVY_UPDATE",today)
+
+    SQL_COMMIT(conn)
+    SQL_CLOSE_CUR(cur)
+    SQL_CLOSE_CONN(conn)
+
+#################################
+# Fetch a CVE record from wr_trivy
+# REST API, cache the results
+#
+
+def fetch_cve(cve_name):
+    # Print the cached wr_trivy record for 'cve_name' in "description=..."
+    # form, with embedded newlines encoded as "[EOL]".
+    # Refresh the repo if needed
+    init_wr_trivy()
+
+    msg = 'description='
+    datasource_file = os.path.join(srtool_basepath,wr_trivy_cve_dir,cve_name)
+    if os.path.isfile(datasource_file):
+        with open(datasource_file, 'r') as fp:
+            for line in fp:
+                msg += "%s[EOL]" % line.replace("\n","")
+    else:
+        msg += 'WR Trivy record not found. Unless this CVE was published in the last week, it is presumed to be Not-Vulnerable'
+
+    # Fix: was 'datasource_text', an undefined name (NameError under --verbose)
+    if verbose: print("wr_trivy:FILEOPEN:%s" % datasource_file)
+
+    print(msg)
+
+#################################
+# comparibles
+#
+#
+
+def comparibles(cve_list_file):
+    # For each CVE name in 'cve_list_file' (one per line), print
+    # "<cve>||<patch info>" where the patch info is the 'Patches_*' tail of
+    # the cached wr_trivy record, '[EOL]'-joined.
+    # Returns 1 when the list file is missing.
+    if not cve_list_file.startswith('/'):
+        cve_list_file = os.path.join(srtool_basepath,cve_list_file)
+    if not os.path.isfile(cve_list_file):
+        print(f"ERROR: missing CVE list file '{cve_list_file}'", file=sys.stderr)
+        return(1)
+    with open(cve_list_file, 'r') as list_fp:
+        for list_line in list_fp:
+            msg = ''
+            cve_name = list_line.strip()
+            cve_file_name = os.path.join(srtool_basepath,wr_trivy_cve_dir,cve_name)
+            if os.path.isfile(cve_file_name):
+                # Fix: the inner file handle no longer shadows the outer
+                # 'fp'/'line' names (fragile even though it happened to work)
+                with open(cve_file_name, 'r') as cve_fp:
+                    patches_found = False
+                    for line_patch in cve_fp:
+                        line_patch = line_patch.strip()
+                        # Everything from the first 'Patches_' line onward
+                        if line_patch.startswith('Patches_'):
+                            patches_found = True
+                        if patches_found:
+                            msg += f"{line_patch}[EOL]"
+            else:
+                msg = 'Assumed Not-Vulnerable unless very recent'
+
+            print(f"{cve_name}||{msg}")
+
+#################################
+# main loop
+#
+
+def main(argv):
+    """Command-line entry point: parse the arguments and dispatch."""
+    global verbose
+
+    # setup
+
+    parser = argparse.ArgumentParser(description='srtool_wr_trivy.py: manage wr_trivy CVE data')
+    parser.add_argument('--initialize', '-i', action='store_const', const='init_wr_trivy', dest='command', help='Download the Wind River Trivy repo')
+    parser.add_argument('--update', '-u', action='store_const', const='update_wr_trivy', dest='command', help='Update the Wind River Trivy repo')
+    parser.add_argument('--cve-detail', '-d', dest='cve_detail', help='Fetch CVE detail')
+
+    parser.add_argument('--comparibles', dest='comparibles', help='Return wr-trivy data for list of CVEs')
+
+    # NOTE(review): --force and --update-skip-history are parsed but not
+    # referenced below — presumably kept for datasource API compatibility
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--update-skip-history', '-H', action='store_true', dest='update_skip_history', help='Skip history updates')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='is_verbose', help='Enable verbose debugging output')
+    args = parser.parse_args()
+
+    if args.is_verbose:
+        verbose = True
+
+    if 'init_wr_trivy' == args.command:
+        init_wr_trivy()
+    elif 'update_wr_trivy' == args.command:
+        # No difference from init at this time
+        init_wr_trivy()
+    elif args.cve_detail:
+        fetch_cve(args.cve_detail)
+    elif args.comparibles:
+        comparibles(args.comparibles)
+
+    else:
+        print("Command not found")
+
+if __name__ == '__main__':
+    # srtool_basepath: three directory levels above this script (the SRT root)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    main(sys.argv[1:])
diff --git a/bin/yp/srtool_cve_checker.py b/bin/yp/srtool_cve_checker.py
new file mode 100755
index 00000000..36edf28f
--- /dev/null
+++ b/bin/yp/srtool_cve_checker.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Commandline Tool
+#
+# Copyright (C) 2021-2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import sys
+import argparse
+import json
+import subprocess
+import logging
+
+# load the srt.sqlite schema indexes
+dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+sys.path.insert(0, dir_path)
+from common.srt_schema import ORM
+from common.srtool_sql import *
+from common.srtool_progress import *
+from common.srtool_common import log_error
+
+# Setup:
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+SRT_REPORT_DIR = f"{SRT_BASE_DIR}/reports"
+
+verbose = False
+test = False
+cmd_count = 0
+cmd_skip = 0
+force_update = False
+is_job = False
+
+#################################
+# Helper methods
+#
+
+# quick development/debugging support
+# quick development/debugging support
+def _log(msg):
+    # Debug logging: level 1 prints to stdout, level 2 (the default) appends
+    # to the SRTDBG_LOG file.
+    # Fix: os.environ values are strings; the old code compared them to the
+    # ints 1/2, so an explicitly exported SRTDBG_LVL silently disabled logging.
+    DBG_LVL = int(os.environ.get('SRTDBG_LVL', 2))
+    DBG_LOG = os.environ.get('SRTDBG_LOG', '/tmp/srt_dbg.log')
+    if 1 == DBG_LVL:
+        print(msg)
+    elif 2 == DBG_LVL:
+        # 'with' guarantees the log file is closed even on write errors
+        with open(DBG_LOG, 'a') as f1:
+            f1.write("|" + msg + "|\n" )
+
+# Sub Process calls
+# Sub Process calls
+def execute_process(*args):
+    """Run an external command and return (returncode, stdout, stderr).
+
+    Arguments may be strings or lists/tuples of strings; list/tuple
+    arguments are flattened one level, and falsy entries are dropped.
+    Both output streams are captured and decoded as UTF-8.
+    """
+    cmd_list = []
+    for arg in args:
+        if not arg:
+            continue
+        if isinstance(arg, (list, tuple)):
+            cmd_list.extend(item for item in arg if item)
+        else:
+            cmd_list.append(arg)
+
+    if verbose: print(f"EXECUTE_PROCESS:{cmd_list}:PWD={os.getcwd()}")
+    proc = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    return proc.returncode, proc.stdout.decode('utf-8'), proc.stderr.decode('utf-8')
+
+
+def srtsetting_get(conn,key,default_value,is_dict=True):
+    # Return the value of the SrtSetting row named 'key', or 'default_value'
+    # when the row is missing or the query raises.
+    # is_dict: True when rows come back as dicts (column_names=True connect);
+    # False when they are tuples indexed via the ORM.* offsets.
+    cur = SQL_CURSOR(conn)
+    # Fetch the key for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    try:
+        srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+        if srtsetting:
+            if is_dict:
+                return(srtsetting['value'])
+            else:
+                return(srtsetting[ORM.SRTSETTING_VALUE])
+    except Exception as e:
+        print("ERROR:%s" % (e))
+    # Fall through: not found, or the lookup failed
+    return(default_value)
+
+def srtsetting_set(conn,key,value,is_dict=True):
+    # Insert or update the SrtSetting row named 'key' with 'value'.
+    # is_dict: True when rows come back as dicts (column_names=True connect);
+    # False when they are tuples indexed via the ORM.* offsets.
+    cur = SQL_CURSOR(conn)
+    # Set the key value for SrtSetting
+    sql = f"""SELECT * FROM orm_srtsetting WHERE `name` = ?"""
+    srtsetting = SQL_EXECUTE(cur, sql,(key,)).fetchone()
+    if not srtsetting:
+        sql = ''' INSERT INTO orm_srtsetting (name, helptext, value) VALUES (?,?,?)'''
+        SQL_EXECUTE(cur, sql, (key,'',value))
+        print("INSERT:%s:%s:" % (key,value))
+    else:
+        # NOTE(review): this message indexes by ORM offset even in dict
+        # mode — confirm it works for column_names=True connections
+        print("UPDATE[%d]:%s:%s:" % (srtsetting[ORM.SRTSETTING_ID],key,value))
+        sql = ''' UPDATE orm_srtsetting
+            SET value=?
+            WHERE id=?'''
+        if is_dict:
+            SQL_EXECUTE(cur, sql, (value,srtsetting['id']))
+        else:
+            SQL_EXECUTE(cur, sql, (value,srtsetting[ORM.SRTSETTING_ID]))
+    SQL_COMMIT(conn)
+
+
+#################################
+# scan Auto Builder CVE_Checker output files
+#
+# e.g. https://git.yoctoproject.org/yocto-metrics/tree/cve-check/master/1697612432.json
+#
+
+def validate_cvechk_ab():
+    # Validate Auto Builder cve-check JSON result files: for every *.json
+    # under yocto-metrics/cve-check/master, report packages that have
+    # multiple products, product-name mismatches, and inconsistent
+    # 'cvesInRecord' values.
+    # NOTE(review): the database connection and the git clone/pull section
+    # are disabled scaffolding; the repo is assumed to be checked out already.
+#    conn = SQL_CONNECT(column_names=True)
+#    cur = SQL_CURSOR(conn)
+
+    # data/cve_checker/yocto-metrics/cve-check/master/1697871310.json
+    REMOTE_URL = 'git://git.yoctoproject.org/yocto-metrics'
+    REMOTE_PATH = ''
+    LOCAL_DIR = 'data/cve_checker'
+    BRANCH = 'master'
+    LOCAL_PATH = 'yocto-metrics/cve-check/master'
+
+    # git@gitlab.aws-eu-north-1.devstar.cloud:pbahrs/studio-developer-image-updater.git
+    # data/wr-studio-conductor/windshare_migration/containers/*.json
+    repo_dir = os.path.join(srtool_basepath,LOCAL_DIR)
+    if False:
+        # Disabled: references helpers (github_action, execute_commmand,
+        # time) that are not defined or imported in this file
+        if not os.path.isdir(repo_dir):
+            print(f"= Clone '{REMOTE_URL}' ... =")
+            github_action([f"clone {REMOTE_URL}"],os.path.dirname(repo_dir),True)
+
+        if BRANCH:
+            print("= Checkout branch '%s' ... =" % BRANCH)
+            cmnd=['git','-C',repo_dir,'checkout',BRANCH]
+            if verbose: print(f"CMND:{cmnd}")
+            execute_commmand(cmnd)
+            time.sleep(0.200)
+
+        # Get the latest data with a safety pull
+        print("= Pull ... =")
+        github_action(['pull'],repo_dir)
+
+    # Find the JSON file
+    json_dir = os.path.join(repo_dir,LOCAL_PATH)
+    file_list = []
+    for root, dirs, files in os.walk(json_dir):
+        for i,file in enumerate(files):
+            if not file.endswith('.json'):
+                continue
+            file_list.append(file)
+    print("CVKCHK JSON file count = %d" % len(file_list))
+
+    progress_set_max(len(file_list))
+    # Scan the JSON files
+    for i,json_file in enumerate(file_list):
+
+        # Debugging support
+        if cmd_skip and (i < cmd_skip):
+            continue
+        if cmd_count and (i > (cmd_skip + cmd_count)):
+            continue
+
+        with open(os.path.join(json_dir,json_file)) as json_data:
+            progress_show(json_file)
+            try:
+                dct = json.load(json_data)
+            except Exception as e:
+                print("ERROR:JSON_FILE_LOAD:%s:%s" % (json_file,e), file=sys.stderr)
+                continue
+
+            # Lightweight progress indicator every 20 files
+            if 0 == (i % 20): print("%4d\r" % i,end='',flush=True)
+
+            for elem in dct:
+                print(f"TOP ELEM:{elem}")
+
+            # Anomalies accumulated per JSON file
+            multiple_products = []
+            mismatch_products = []
+            mismatch_iscves = []
+
+            elem_packages = dct['package']
+            print(f"PACKAGE COUNT:{len(elem_packages)}")
+            for package in elem_packages:
+                name = package['name']
+                # '-native' variants are expected to share the base product name
+                short_name = name.replace('-native','')
+
+                package_products = package['products']
+                if 1 != len(package_products):
+                    s = f"{name}={len(package_products)}"
+                    for product in package_products:
+                        s += f":{product['product']}"
+                    multiple_products.append(s)
+
+                # Compare every product's 'cvesInRecord' against the first one
+                is_cves = ''
+                for product in package_products:
+                    if not is_cves:
+                        is_cves = product['cvesInRecord']
+                    if short_name != product['product']:
+                        mismatch_products.append(f"{name}!={product['product']}")
+                    if is_cves != product['cvesInRecord']:
+                        mismatch_iscves.append(f"{name}:{is_cves} != {product['cvesInRecord']}")
+
+            # Report the first few entries of each anomaly class
+            # NOTE(review): 'i' below shadows the file-loop index above
+            print(f"multiple_products:{len(multiple_products)}")
+            for i,mp in enumerate(multiple_products):
+                print(f"   {mp}")
+                if i > 5: break
+            print(f"mismatch_products:{len(mismatch_products)}")
+            for i,mp in enumerate(mismatch_products):
+                print(f"   {mp}")
+                if i > 5: break
+            print(f"mismatch_iscves:{len(mismatch_iscves)}")
+            for i,mp in enumerate(mismatch_iscves):
+                print(f"   {mp}")
+                if i > 5: break
+
+
+#################################
+# main loop
+#
+
+def main(argv):
+    """Command-line entry point: parse the arguments and dispatch.
+
+    Returns 0 on success, 1 for an unknown command.
+    """
+    global verbose
+    global test
+    global force_update
+    global cmd_count
+    global cmd_skip
+
+    parser = argparse.ArgumentParser(description='srtool_cve_checker.py: CVE Checker results import')
+
+    # Test
+    parser.add_argument('--validate-cvechk-ab', '-V', action='store_const', const='validate_cvechk_ab', dest='command', help='Validate the JSON file')
+
+    # Debugging support
+    parser.add_argument('--force', '-f', action='store_true', dest='force_update', help='Force update')
+    parser.add_argument('--test', '-t', action='store_true', dest='test', help='Test, dry-run')
+    parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+    parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+    parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Verbose debugging')
+    parser.add_argument('--local-job', action='store_true', dest='local_job', help='Use local job')
+    args = parser.parse_args()
+
+    ret = 0
+    verbose = args.verbose
+    test = args.test
+    force_update = args.force_update
+    # argparse delivers strings; convert the debug counters to ints
+    if None != args.count:
+        cmd_count = int(args.count)
+    if None != args.skip:
+        cmd_skip = int(args.skip)
+
+    if 'validate_cvechk_ab' == args.command:
+        validate_cvechk_ab()
+
+    else:
+        print("srtool_cve_checker.py:Command not found")
+        ret = 1
+
+    progress_done('Done')
+    return(ret)
+
+
+if __name__ == '__main__':
+    # srtool_basepath: three directory levels above this script (the SRT root)
+    srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+    exit( main(sys.argv[1:]) )
+
diff --git a/bin/yp/srtool_defect.py b/bin/yp/srtool_defect.py
index 0e189a3a..b976cf46 100755
--- a/bin/yp/srtool_defect.py
+++ b/bin/yp/srtool_defect.py
@@ -26,13 +26,13 @@
import os
import sys
import argparse
-import sqlite3
import json
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
master_log = ''
@@ -133,7 +133,7 @@ class Defect:
#
def new_defect_name(product_prefix):
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
sql = "SELECT * FROM orm_srtsetting WHERE name='current_defect_simulation_index'"
@@ -147,7 +147,7 @@ def new_defect_name(product_prefix):
sql = '''UPDATE orm_srtsetting SET value=? WHERE id = ?'''
cur.execute(sql, (index, cvi[ORM.SRTSETTING_ID]))
conn.commit() #commit to db
- conn.close()
+ SQL_CLOSE_CONN(conn)
defect_name = "DEFECT-%s-%05d" % (product_prefix,index)
return defect_name
diff --git a/bin/yp/srtool_publish.py b/bin/yp/srtool_publish.py
new file mode 100755
index 00000000..cabc7452
--- /dev/null
+++ b/bin/yp/srtool_publish.py
@@ -0,0 +1,1052 @@
+#!/usr/bin/env python3
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2020 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# Theory of operation
+#
+#
+#
+#
+
+import os
+import sys
+import re
+import csv
+import json
+import argparse
+from datetime import datetime
+import time
+import glob
+import traceback
+
+from common.srtool_sql import *
+# The Jira integration script owns the translations
+#from srtool_jira import translate_status
+from common.srtool_common import get_name_sort
+
+lookupTable = []
+cveIndex = {}
+jiraIndex = {}
+db_change = False
+
+# Setup:
+verbose = False
+cmd_truncate = False
+cmd_skip = 0
+cmd_count = 0
+force = False
+
+srtoolDBName = 'srt.sqlite'
+srtUpdateName = 'srt_update_db.csv'
+srtSchemaName = 'srt_schema.py'
+
+# Generate output files
+prev2srtUpdateName = 'data/publish/srt_diff_update_db.csv'
+prev2srtNvName = 'data/publish/srt_diff_nv_db.csv'
+prev2srtNoDefectName = 'data/publish/srt_diff_nodefect_db.csv'
+prev2srtOpenName = 'data/publish/srt_diff_open_db.csv'
+prev2srtSVNSName = 'data/publish/cve-svns-srtool-%s-%s.csv'
+
+#################################
+# Helper methods
+#
+
+overrides = {}
+
+def set_override(key,value=None):
+    # Record an override flag: an explicit 'value' wins; otherwise derive
+    # 'yes'/'no' from whether the environment variable 'key' starts with '1'.
+    # Announce any override that ends up enabled.
+    if value is not None:
+        flag = value
+    else:
+        env_val = os.environ.get(key)
+        flag = 'yes' if (env_val is not None and env_val.startswith('1')) else 'no'
+    overrides[key] = flag
+    if flag == 'yes':
+        print("OVERRIDE: %s = %s" % (key, flag))
+
+def get_override(key):
+    # True only when the override flag for 'key' is recorded as 'yes'
+    return overrides.get(key) == 'yes'
+
+# quick development/debugging support
+# quick development/debugging support
+def _log(msg):
+    # Debug logging: level 1 prints to stdout, level 2 (the default) appends
+    # to the SRTDBG_LOG file.
+    # Fix: os.environ values are strings; the old code compared them to the
+    # ints 1/2, so an explicitly exported SRTDBG_LVL silently disabled logging.
+    DBG_LVL = int(os.environ.get('SRTDBG_LVL', 2))
+    DBG_LOG = os.environ.get('SRTDBG_LOG', '/tmp/toaster.log')
+    if 1 == DBG_LVL:
+        print(msg)
+    elif 2 == DBG_LVL:
+        # 'with' guarantees the log file is closed even on write errors
+        with open(DBG_LOG, 'a') as f1:
+            f1.write("|" + msg + "|\n" )
+
+# Trigger verbose then stop for a path taken
+def trigger_verbose_stop(msg):
+    # Debug aid: on first call, print 'msg', switch on verbose output, and
+    # limit the run to one more record via cmd_count.
+    global verbose
+    global cmd_count
+    if not verbose:
+        print(msg)
+        verbose = True
+        cmd_count = 1
+
+
+
+#################################
+# ORM mapping for the given database file
+#
+
+# ORM mapping for the given database file
+class ORM_Class(object):
+    # Members will be added dynamically (see import_orm_schema)
+
+    # General routine to return string name of a constant (e.g. 'DATASOURCE_FREQUENCY_STR')
+    @staticmethod
+    def get_orm_string(value,string_set):
+        # Map an integer enum 'value' into the comma-separated 'string_set';
+        # returns 'None' for a None value, '<error>' when out of range.
+        if None == value: return('None')
+        string_list = string_set.split(',')
+        string_count = len(string_list)
+        value = int(value)
+        if (value < 0) or (value >= string_count):
+            print("ERROR: value '%d' out of range of '%s'" % (value,string_set))
+            return '<error>'
+        return string_list[value]
+
+# Instantiate the ORM class object
+ORM = ORM_Class()
+
+# Attach the specific database schema attibutes and values
+# Attach the specific database schema attributes and values
+def import_orm_schema(databaseDir):
+    # Read 'srt_schema.py' from databaseDir and attach each "NAME = value"
+    # line as an attribute on the global ORM object, generating the schema
+    # file first when it does not exist.
+    global ORM
+
+    schema = os.path.join(databaseDir,srtSchemaName)
+    # Generate the schema file if not found
+    if not os.path.isfile(schema):
+        ret = os.system("%s --generate-schema-header-dir %s" % (os.path.join(srtool_basepath,'bin/common/srtool_common.py'),databaseDir))
+
+    with open(schema) as fp:
+        for line in fp:
+            try:
+                name = line[:line.index('=')].strip()
+                value = line[line.index('=')+1:].strip()
+                # Strip the quoting for string values; otherwise parse as int
+                if '"' == value[0]:
+                    value = value[1:-1]
+                elif "'" == value[0]:
+                    value = value[1:-1]
+                else:
+                    value = int(value)
+            except:
+                # skip lines that are not simple NAME = value assignments
+                continue
+            setattr(ORM, name, value)
+
+
+#################################
+# Product list and attributes
+#
+#
+
+class ProductListClass(object):
+    """Load, filter, and sort products from the orm_product table."""
+
+    # NOTE(review): class-level mutables are shared by all instances; only
+    # the single module-level 'productList' instance is created below
+    products = []
+    custom_key_list = []
+
+    # get_product_list() sort selectors
+    SORTBY_ORDER = 0x0001
+    SORTBY_KEY = 0x0002
+    SORTBY_CUSTOM = 0x0004
+    SORTBY_REVERSE = 0x0010
+    # fetch_products() filter bits
+    INCLUDE_PUBLIC = 0x0100
+    INCLUDE_PUBLIC_NO = 0x0200
+    INCLUDE_MODE_DEVELOP = 0x1000
+    INCLUDE_MODE_SUPPORT = 0x2000
+    INCLUDE_MODE_EOL = 0x4000
+    INCLUDE_ALL = 0xff00
+
+    def fetch_products(self,conn,filter):
+        # Append products passing the 'filter' bitmask to self.products
+        def get_dict_tag(tag,dict_str,default=None):
+            # Pull 'tag' out of a JSON-encoded tag string, else 'default'
+            dict = json.loads(dict_str)
+            if tag in dict:
+                return dict[tag]
+            return default
+
+        cur = conn.cursor()
+        sql = "SELECT * FROM orm_product"
+        for product_item in cur.execute(sql):
+            # Filter the product list
+            skip = True
+            mode = get_dict_tag('mode',product_item[ORM.PRODUCT_PRODUCT_TAGS],'')
+            public_status = get_dict_tag('public_status',product_item[ORM.PRODUCT_PRODUCT_TAGS],'yes')
+            if (filter & self.INCLUDE_MODE_DEVELOP) and (mode == 'develop'): skip = False
+            if (filter & self.INCLUDE_MODE_SUPPORT) and (mode == 'support'): skip = False
+            if (filter & self.INCLUDE_MODE_EOL    ) and (mode == 'eol'    ): skip = False
+            if (filter & self.INCLUDE_PUBLIC      ) and (public_status != 'no'): skip = False
+            if (filter & self.INCLUDE_PUBLIC_NO   ) and (public_status == 'no'): skip = False
+            if skip:
+                continue
+
+            product = {}
+            product['id'] = product_item[ORM.PRODUCT_ID]
+            product['order'] = product_item[ORM.PRODUCT_ORDER]
+            product['key'] = product_item[ORM.PRODUCT_KEY]
+            product['name'] = product_item[ORM.PRODUCT_NAME]
+            if product_item[ORM.PRODUCT_VERSION]:
+                # Fix: the '%' operator was missing (syntax error)
+                product['name'] += " %s" % product_item[ORM.PRODUCT_VERSION]
+            if product_item[ORM.PRODUCT_PROFILE]:
+                # Fix: the '%' operator was missing (syntax error)
+                product['name'] += " %s" % product_item[ORM.PRODUCT_PROFILE]
+            self.products.append(product)
+
+    def set_custom_key_list(self,key_list):
+        # Define the explicit key ordering used by SORTBY_CUSTOM
+        self.custom_key_list = key_list
+
+    def get_product_list(self,sortby):
+        # Return the fetched products ordered per the 'sortby' flags
+        def sortByOrder(val):
+            return val['order']
+        def sortByKey(val):
+            return val['key']
+
+        if (self.SORTBY_ORDER & sortby):
+            plist = self.products
+            plist.sort(key = sortByOrder, reverse = (self.SORTBY_REVERSE == (self.SORTBY_REVERSE & sortby)))
+        elif (self.SORTBY_KEY & sortby):
+            plist = self.products
+            plist.sort(key = sortByKey, reverse = (self.SORTBY_REVERSE == (self.SORTBY_REVERSE & sortby)))
+        elif (self.SORTBY_CUSTOM & sortby):
+            plist = []
+            for key in self.custom_key_list:
+                # Fix: was bare 'products', an undefined name (NameError)
+                for product in self.products:
+                    if key == product['key']:
+                        plist.append(product)
+                        # NOTE(review): 'continue' here is a no-op; 'break'
+                        # was probably intended — kept to preserve behavior
+                        continue
+        else:
+            # Fix: avoid UnboundLocalError when no sort flag is given
+            plist = list(self.products)
+        return(plist)
+
+# Instantiate the class
+productList = ProductListClass()
+
+
+#################################
+# publish charts
+#
+#
+
+# Extracted update CSV file schema
+I_NAME = 0
+I_V2SEVERITY = 1
+I_V3SEVERITY = 2
+I_LIN5 = 3
+I_CGP5 = 4
+I_OVP = 5
+I_LIN6 = 6
+I_CGP6 = 7
+I_SCP6 = 8
+I_LIN7 = 9
+I_CGP7 = 10
+I_SCP7 = 11
+I_LIN8 = 12
+I_LIN9 = 13
+I_LIN10 = 14
+I_LIN18 = 15
+I_OUT_MAX = 16
+I_LIN19 = 16
+I_LINCCM = 17
+I_REJECT = 18
+I_MAX = 19
+
+COLUMN_LABELS = (
+ (I_NAME ,'NAME'),
+ (I_V2SEVERITY ,'V2SEVERITY'),
+ (I_V3SEVERITY ,'V3SEVERITY'),
+ (I_LIN5 ,'LIN5'),
+ (I_CGP5 ,'CGP5'),
+ (I_OVP ,'OVP'),
+ (I_LIN6 ,'LIN6'),
+ (I_CGP6 ,'CGP6'),
+ (I_SCP6 ,'SCP6'),
+ (I_LIN7 ,'LIN7'),
+ (I_CGP7 ,'CGP7'),
+ (I_SCP7 ,'SCP7'),
+ (I_LIN8 ,'LIN8'),
+ (I_LIN9 ,'LIN9'),
+ (I_LIN10 ,'LIN10'),
+ (I_LIN18 ,'LIN18'),
+ (I_OUT_MAX ,'OUT_MAX'),
+ (I_LIN19 ,'LIN19'),
+ (I_LINCCM ,'LINCCM'),
+ (I_REJECT ,'REJECT'),
+)
+
+
+def OBSOLETE_map_productId_productPrefix(conn):
+    # OBSOLETE: superseded by ProductListClass; kept for reference.
+    # Build {product_id: key_prefix} for a hard-coded list of product keys;
+    # exits the process when a key has no matching product row.
+    cur = conn.cursor()
+
+    sql = "SELECT * FROM orm_product"
+    for product in cur.execute(sql):
+        print("[%d]'%s'" % (product[ORM.PRODUCT_ID],product[ORM.PRODUCT_KEY]))
+
+    product_prefix_table = {}
+    for key in ('LIN5','CGP5','OVP' ,'LIN6','CGP6','SCP6','LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018','LIN1019','LINCCM'):
+#    for key in ('LIN7','CGP7','SCP7','LIN8','LIN9','LIN10','LIN1018','LIN1019','LINCCM'):
+        sql = "SELECT * FROM orm_product WHERE key='%s'" % key
+        # NOTE(review): leftover debug print
+        print("FOO11: '%s'" % sql)
+        product = cur.execute(sql).fetchone()
+        if product:
+            product_prefix_table[product[ORM.PRODUCT_ID]] = key
+        else:
+            print("ERROR: could not match key '%s' to product" % key)
+            # Developer must fix database before continuing
+            exit(1)
+    return product_prefix_table
+
+# Name;V2Severity;V3Severity;LIN5;CGP5;OVP;LIN6;CGP6;SCP6;LIN7;CGP7;SCP7;LIN8;LIN9;LIN10;LIN18;LIN19;LINCCM
+def srt2update(srtDatabasePath):
+    """Extract a semicolon-separated per-CVE update summary from the SRT
+    database under 'srtDatabasePath' into its srt_update_db.csv file."""
+
+    srt_database_file = os.path.join(srtDatabasePath,srtoolDBName)
+    if not os.path.isfile(srt_database_file) and not force:
+        print("ERROR: Missing database file '%s'" % srt_database_file)
+        return()
+    srtfile_name = os.path.join(srtDatabasePath,srtUpdateName)
+    if os.path.isfile(srtfile_name) and not force:
+        print("Note: Update file '%s' already present" % srtfile_name)
+        return()
+    print("Extracting update information from '%s' to '%s'" % (srt_database_file,srtfile_name))
+
+    conn = SQL_CONNECT()
+    cur_cve = conn.cursor()
+    cur_vul = conn.cursor()
+    cur_inv = conn.cursor()
+    cur_i2d = conn.cursor()
+    cur_def = conn.cursor()
+
+    def product_append(srt_row,map,key):
+        # Append the '/'-joined defect summaries for product 'key', else ''
+        if key in map:
+            srt_row.append('/'.join(product_map[key]))
+        else:
+            srt_row.append('')
+
+    # Prefetch product table, make fast lookup dict (ID to Key)
+    productList.fetch_products(conn,ProductListClass.INCLUDE_PUBLIC|ProductListClass.INCLUDE_MODE_SUPPORT)
+    product_list = productList.get_product_list(ProductListClass.SORTBY_ORDER|ProductListClass.SORTBY_REVERSE)
+    product_prefix_table = {}
+    for product in product_list:
+        product_prefix_table[product['id']] = product['key']
+
+    with open(srtfile_name, 'w') as srtfile:
+        # Write header
+        # NOTE(review): the first three labels are repeated by the
+        # COLUMN_LABELS loop below — confirm downstream consumers expect that
+        sustaining_row = []
+        sustaining_row.append('Name')
+        sustaining_row.append('V2Severity')
+        sustaining_row.append('V3Severity')
+        # now process the releases into investigations
+        for j_index in range(I_MAX):
+            sustaining_row.append(COLUMN_LABELS[j_index][1])
+        sustaining_row.append('Reject')
+        srtfile.write("%s\n" % ';'.join(sustaining_row))
+
+        for i,cve in enumerate(cur_cve.execute("SELECT * FROM orm_cve")):
+            srt_row = []
+            try:
+                srt_row.append('%s:%s:%s:%s:%s' % (cve[ORM.CVE_NAME],ORM.get_orm_string(cve[ORM.CVE_STATUS],ORM.STATUS_STR),cve[ORM.CVE_PUBLISHEDDATE],cve[ORM.CVE_LASTMODIFIEDDATE],cve[ORM.CVE_COMMENTS].replace(':',' - ').replace(';',' - ')))
+                srt_row.append(cve[ORM.CVE_CVSSV2_SEVERITY].upper())
+                srt_row.append(cve[ORM.CVE_CVSSV3_BASESEVERITY].upper())
+            except Exception as e:
+                # was intermittent error one day
+                _log("DATAERROR:%s:" % (e))
+                exit(1)
+
+            # product_map: product key prefix -> list of defect summary strings
+            product_map = {}
+
+            # CVE -> vulnerability -> investigation -> defect walk
+            sql = "SELECT * FROM orm_cvetovulnerablility WHERE cve_id='%s'" % cve[ORM.CVE_ID]
+            for c2v in cur_vul.execute(sql):
+                vulnerability_id = c2v[ORM.CVETOVULNERABLILITY_VULNERABILITY_ID]
+                sql = "SELECT * FROM orm_investigation WHERE vulnerability_id='%s'" % vulnerability_id
+                for investigation in cur_inv.execute(sql):
+                    investigation_id = investigation[ORM.INVESTIGATION_ID]
+                    sql = "SELECT * FROM orm_investigationtodefect WHERE investigation_id='%s'" % investigation_id
+                    is_defects = False
+                    for i2d in cur_i2d.execute(sql):
+                        is_defects = True
+                        defect_id = i2d[ORM.INVESTIGATIONTODEFECT_DEFECT_ID]
+                        sql = "SELECT * FROM orm_defect WHERE id='%s'" % defect_id
+                        defect = cur_def.execute(sql).fetchone()
+                        defect_name = defect[ORM.DEFECT_NAME].strip()
+                        defect_rcpl = defect[ORM.DEFECT_RELEASE_VERSION].strip()
+                        defect_status = ORM.get_orm_string(defect[ORM.DEFECT_SRT_STATUS],ORM.STATUS_STR)
+                        defect_resolution = ORM.get_orm_string(defect[ORM.DEFECT_RESOLUTION],ORM.DEFECT_RESOLUTION_STR)
+                        # Account for broken duplicate links
+                        if not defect_rcpl and defect[ORM.DEFECT_DUPLICATE_OF].startswith('missing'):
+                            defect_resolution = 'DUP_MISSING_PARENT'
+                        # Extract the Jira last update if present
+                        try:
+                            # 2019-01-02T18:41:00.000-0800
+                            defect_updated = re.sub('T.*','',defect[ORM.DEFECT_DATE_UPDATED])
+                        except:
+                            defect_updated = ''
+
+                        # Product key prefix comes from the defect name (e.g. 'LIN9-...')
+                        product_prefix = re.sub('-.*','',defect_name)
+                        if not product_prefix in product_map:
+                            product_map[product_prefix] = ['%s:%s:%s:%s' % (defect_name,defect_rcpl if defect_rcpl else defect_status,defect_resolution,defect_updated)]
+                        else:
+                            product_map[product_prefix].append('%s:%s:%s:%s' % (defect_name,defect_rcpl if defect_rcpl else defect_status,defect_resolution,defect_updated))
+                    if (not is_defects) and (investigation[ORM.INVESTIGATION_PRODUCT_ID] in product_prefix_table):
+                        try:
+                            product_prefix = product_prefix_table[investigation[ORM.INVESTIGATION_PRODUCT_ID]]
+                            product_map[product_prefix] = ['%s:%s:%s:%s' % ('no_defects',ORM.get_orm_string(investigation[ORM.INVESTIGATION_STATUS],ORM.STATUS_STR),'','')]
+                        except:
+                            print("\nBAR1:%s|%s|%s" % (srtfile_name,investigation[ORM.INVESTIGATION_PRODUCT_ID],product_prefix_table))
+
+            # Name;V2Severity;V3Severity;<product1>;<product2>;<product3>;...
+            # Fix: 'fir' was a syntax error for 'for'
+            for product in product_list:
+                product_append(srt_row,product_map,product['key'])
+
+            # Add reject flag
+            if ('** REJECT **' in cve[ORM.CVE_DESCRIPTION]):
+                srt_row.append('REJECT')
+            else:
+                srt_row.append('')
+
+            srtfile.write("%s\n" % ';'.join(srt_row))
+
+            # Debug support
+            # NOTE(review): the row above is already written before these
+            # checks, so --skip only affects the progress display — confirm
+            if cmd_skip and (i < cmd_skip): continue
+            if cmd_count and ((i - cmd_skip) > cmd_count): break
+            # Progress indicator support
+            if 0 == i % 100:
+                print('%04d: %20s\r' % (i,cve[ORM.CVE_NAME]), end='')
+
+    SQL_CLOSE_CONN(conn)
+
+
+
+#Name V2Severity V3Severity LIN5_RCPL CGP5_RCPL OVP_RCPL LIN6_RCPL CGP6_RCPL SCP6_RCPL LIN7_RCPL CGP7_RCPL SCP7_RCPL LIN8_RCPL LIN9_RCPL LIN10_RCPL LIN18_RCPL LIN19_RCPL LINCCM_RCPL
+def validate_update(prev_path,current_path,report_start_date,report_stop_date,do_svsn):
+ print("\n=== Generate Update Review/Validation Report, %s to %s ===\n" % (report_start_date,report_stop_date))
+
+ product_prefix = {}
+ product_prefix[I_NAME] = 'NAME'
+ product_prefix[I_V2SEVERITY] = 'V2SEVERITY'
+ product_prefix[I_V3SEVERITY] = 'V3SEVERITY'
+ product_prefix[I_LIN5] = 'LIN5'
+ product_prefix[I_CGP5] = 'CGP5'
+ product_prefix[I_OVP] = 'OVP'
+ product_prefix[I_LIN6] = 'LIN6'
+ product_prefix[I_CGP6] = 'CGP6'
+ product_prefix[I_SCP6] = 'SCP6'
+ product_prefix[I_LIN7] = 'LIN7'
+ product_prefix[I_CGP7] = 'CGP7'
+ product_prefix[I_SCP7] = 'SCP7'
+ product_prefix[I_LIN8] = 'LIN8'
+ product_prefix[I_LIN9] = 'LIN9'
+ product_prefix[I_LIN10] = 'LIN10'
+ product_prefix[I_LIN18] = 'LIN18'
+ product_prefix[I_LIN19] = 'LIN19'
+ product_prefix[I_LINCCM] = 'LINCCM'
+ product_prefix[I_REJECT] = 'REJECT'
+
+ enable_data = {}
+ enable_data[I_NAME] = True
+ enable_data[I_V2SEVERITY] = True
+ enable_data[I_V3SEVERITY] = True
+ enable_data[I_LIN5] = False
+ enable_data[I_CGP5] = False
+ enable_data[I_OVP] = False
+ enable_data[I_LIN6] = False
+ enable_data[I_CGP6] = False
+ enable_data[I_SCP6] = False
+ enable_data[I_LIN7] = True
+ enable_data[I_CGP7] = True
+ enable_data[I_SCP7] = True
+ enable_data[I_LIN8] = True
+ enable_data[I_LIN9] = True
+ enable_data[I_LIN10] = True
+ enable_data[I_LIN18] = True
+ enable_data[I_LIN19] = True
+ enable_data[I_LINCCM] = False
+ enable_data[I_REJECT] = False
+
+ # Filter start date
+# report_start_date = '2019-02-16'
+# report_stop_date = '2019-04-30'
+ srtool_today = datetime.today().strftime('%Y-%m-%d')
+
+ # Print SVNS header
+ def print_svns_header(writer):
+ if not writer:
+ return
+ header = [
+ 'CVE Number',
+ 'Priority',
+ 'Version',
+ 'CVSSv3_Severity',
+ 'CVSSv3_Score',
+ 'CVE Description',
+ 'SRT Comments',
+ 'Modifications',
+ 'Created Date',
+ 'Modified Date',
+ 'SRT Acknowledged Date',
+ ]
+ # Append the product columns
+ # Fix-up names
+ prodname = {}
+ prodname[I_LIN19] = 'WRLinux LTS 19'
+ prodname[I_LIN18] = 'WRLinux LTS 18'
+ prodname[I_LIN10] = 'WRLinux LTS 17'
+ prodname[I_LIN9] = 'WRLinux 9.0.0'
+ prodname[I_LIN8] = 'WRLinux 8.0.0'
+ prodname[I_OVP] = 'WRLinux OVP'
+ prodname[I_LIN7] = 'WRLinux 7.0.0'
+ prodname[I_SCP7] = 'WRLinux SCP 7.0.0'
+ prodname[I_CGP7] = 'WRLinux CGP 7.0.0'
+ prodname[I_LIN6] = 'WRLinux 6.0.0'
+ prodname[I_CGP6] = 'WRLinux CGP 6.0.0'
+ prodname[I_SCP6] = 'WRLinux SCP 6.0.0'
+ prodname[I_LIN5] = 'WRLinux 5.0.1'
+ prodname[I_CGP5] = 'WRLinux CGP 5.0.1'
+ for index in range(I_LIN5,I_LIN18+1):
+ header.append('Status %s' % prodname[index])
+ header.append('CQ/Jira Case')
+ writer.writerow(header)
+
+ # Print SVNS row
+ def write_svns_row_on_change(writer,row,modify,cur_cve,last_jira):
+ if not writer:
+ return
+
+ cve_name = row[I_NAME].split(':')[0]
+ sql = "SELECT * FROM orm_cve WHERE name='%s'" % cve_name
+ cve = cur_cve.execute(sql).fetchone()
+ if not cve:
+ print("ERROR_LOOKUP:|%s|" % row[I_NAME])
+ exit(1)
+ print_row = []
+ print_row.append(cve[ORM.CVE_NAME])
+ print_row.append(cve[ORM.CVE_CVSSV2_SEVERITY])
+ print_row.append(cve[ORM.CVE_CVSSV2_BASESCORE])
+ print_row.append(cve[ORM.CVE_CVSSV3_BASESEVERITY])
+ print_row.append(cve[ORM.CVE_CVSSV3_BASESCORE])
+
+ if cmd_truncate:
+ print_row.append(cve[ORM.CVE_DESCRIPTION][:20])
+ else:
+ print_row.append(cve[ORM.CVE_DESCRIPTION])
+
+ cve_comments = cve[ORM.CVE_COMMENTS]
+ if not cve_comments:
+ cve_comments = cve[ORM.CVE_PACKAGES]
+ print_row.append(cve_comments)
+
+ # Use publish date if acknowledge date not available
+ try:
+ acknowledge_date = cve[ORM.CVE_ACKNOWLEDGE_DATE]
+# if not acknowledge_date:
+# acknowledge_date = datetime.strptime(cve[ORM.CVE_PUBLISHEDDATE], '%Y-%m-%d')
+ if acknowledge_date:
+ # NO ACK_DATE:CVE-2013-2516:2019-03-27 07:18:03.982215,2019-02-15:unconverted data remains: .982215
+ acknowledge_date = re.sub('\..*','',acknowledge_date)
+ acknowledge_date = datetime.strptime(acknowledge_date, '%Y-%m-%d %H:%M:%S')
+ acknowledge_date = acknowledge_date.strftime('%Y-%m-%d')
+ else:
+ acknowledge_date = ''
+ except Exception as e:
+ acknowledge_date = ''
+ print("NO ACK_DATE:%s:%s,%s:%s" % (cve[ORM.CVE_NAME],cve[ORM.CVE_ACKNOWLEDGE_DATE],cve[ORM.CVE_PUBLISHEDDATE],e))
+
+ print_row.append(modify)
+ print_row.append(' ' + cve[ORM.CVE_PUBLISHEDDATE]) # Block automatic date conversions
+ print_row.append(' ' + cve[ORM.CVE_LASTMODIFIEDDATE])
+ print_row.append(' ' + acknowledge_date)
+
+ for index in range(I_LIN5,I_LIN18+1):
+ if not row[index] or ('Not_Vulnerable' == row[index]):
+ print_row.append('Not vulnerable')
+ else:
+ # unmark the "()" inactive status decorators
+ print_row.append(row[index].replace('(','').replace(')',''))
+
+ if 0 <= last_jira.find(';'):
+ jira = re.sub(';.*','',last_jira)
+ print_row.append(jira)
+ if verbose:
+ jiras = re.sub('.*;','',last_jira)
+ print_row.append(jiras)
+ else:
+ print_row.append(last_jira)
+
+ writer.writerow(print_row)
+ return 1
+
+ # Print the row if any item after the CVE name is filled
+ def write_row_on_change(fd,key,row,last_jira):
+ for index in range(I_V2SEVERITY,len(row)):
+ if row[index]:
+ print_row = []
+ for i,col in enumerate(row):
+ if enable_data[i]:
+ print_row.append(col)
+ if 0 <= last_jira.find(';'):
+ jira = re.sub(';.*','',last_jira)
+ print_row.append(jira)
+ jiras = re.sub('.*;','',last_jira)
+ print_row.append(jiras)
+ else:
+ print_row.append(last_jira)
+ fd.write("%s;%s\n" % (key,';'.join(print_row)))
+ return 1
+ return 0
+
+ # Compute the Jira defect from this CVE's latest release
+ def get_latest_jira(srt_row):
+ latest_jira = ''
+ foo = []
+# for index in range(I_LIN5,I_LIN18+1):
+ # Assert that base products (LIN6,LIN7) win over related profiles
+ for index in (I_CGP7,I_SCP7,I_LIN7,I_LIN8,I_LIN9,I_LIN10,I_LIN18):
+ for item in srtEntry[index].split('/'):
+ if not item or item.startswith('no_defects'):
+ continue
+ foo.append(item)
+ srt_defect,status,resolution,defect_updated = item.split(':')
+ if (status in ('Vulnerable','Investigate')) or status[0].isdigit():
+ latest_jira = srt_defect
+ return latest_jira + ";%s" % ','.join(foo)
+
+
+ # Create output files
+ prev2srtUpdateFile = open(prev2srtUpdateName, 'w')
+ prev2srtNvFile = open(prev2srtNvName, 'w')
+ prev2srtNoDefectFile = open(prev2srtNoDefectName, 'w')
+ prev2srtOpenFile = open(prev2srtOpenName, 'w')
+ prev2srtSVNSNameFull = prev2srtSVNSName % (report_start_date.replace('-',''),report_stop_date.replace('-',''))
+ if do_svsn:
+ prev2srtSVNSFile = open(prev2srtSVNSNameFull, 'w')
+ prev2srtWriter = csv.writer(prev2srtSVNSFile, delimiter=';', quotechar='"', quoting=csv.QUOTE_MINIMAL)
+ srt_database_file = os.path.join(current_path,srtoolDBName)
+ _log("FOO:validate_update:db=%s" % srt_database_file)
+ conn = SQL_CONNECT()
+ cur_cve = conn.cursor()
+ else:
+ prev2srtSVNSFile = None
+ prev2srtWriter = None
+ conn = None
+ cur_cve = None
+
+ # Create the headers
+ update_reason = []
+ for index in range(I_NAME,I_LIN19):
+ if enable_data[index]:
+ update_reason.append(product_prefix[index])
+ prev2srtUpdateFile.write("Type;%s\n" % ';'.join(update_reason))
+ prev2srtNvFile.write("Type;%s\n" % ';'.join(update_reason))
+ print_svns_header(prev2srtWriter)
+
+ # Ensure that summary files are in place
+ srt2update(prev_path)
+ srt2update(current_path)
+
+ # Load the Previous dataset in to memory
+ is_first_row = True
+ prevlookupTable = []
+ cveIndex = {}
+ index = 0
+ prev_update_file = os.path.join(prev_path,srtUpdateName)
+ print("Loading previous update information from %s" % prev_update_file)
+ with open(prev_update_file, newline='') as prevfile:
+ CVE_reader = csv.reader(prevfile, delimiter=';', quotechar='"')
+ for i,row in enumerate(CVE_reader):
+ if is_first_row or not len(row):
+ is_first_row = False
+ continue
+
+ dbEntry=[]
+ for col in row:
+ dbEntry.append(col.strip())
+ prevlookupTable.append(dbEntry)
+
+ cve_name = dbEntry[I_NAME].split(':')[0]
+ cveIndex[cve_name] = index
+ index += 1
+
+# if dbEntry[I_NAME].startswith('CVE-2018-12384'):
+# print("BOO2")
+
+# if 'CVE-2018-12384' in cveIndex:
+# print("SOO2!")
+
+ # Load the SRTool data, line by line
+ is_first_row = True
+ count = 0
+ cve_update_count = 0
+ cve_nv_count = 0
+ cve_other_count = 0
+ cve_new_count = 0
+ current_update_file = os.path.join(current_path,srtUpdateName)
+ print("Comparing current update information from %s" % current_update_file)
+
+ with open(current_update_file, newline='') as srtfile:
+ CVE_reader = csv.reader(srtfile, delimiter=';', quotechar='"')
+ for i_row,row in enumerate(CVE_reader):
+ if is_first_row or not len(row):
+ is_first_row = False
+ continue
+ count += 1
+
+ # Load record
+ srtEntry=[]
+ for col in row:
+ srtEntry.append(col)
+
+ # Extract CVE name, status
+ cve_name,cve_status,cve_published,cve_lastmodifieddate,cve_comments = srtEntry[I_NAME].split(':')
+
+ # Preset the validation rows
+ update_reason = []
+ nv_rcpl_reason = []
+ other_reason = []
+ svns_reason = []
+ for i in range(I_MAX):
+ update_reason.append('')
+ nv_rcpl_reason.append('')
+ other_reason.append('')
+ svns_reason.append('')
+ update_reason[I_NAME] = cve_name
+ nv_rcpl_reason[I_NAME] = cve_name
+ other_reason[I_NAME] = cve_name
+ svns_reason[I_NAME] = cve_name
+
+# if ('CVE-2015-1006' != cve_name):
+# continue
+# print("FOO1:%s" % cve_name)
+
+ # Find matching Prev CVE entry
+ is_new = False
+ if not cve_name in cveIndex:
+ is_new = True
+ else:
+ prevEntry = prevlookupTable[cveIndex[cve_name]]
+ # Extract previous CVE name, status
+ prev_cve_name,prev_cve_status,prev_cve_published,prev_cve_lastmodifieddate,prev_cve_comments = prevEntry[I_NAME].split(':')
+ # Sanity Test
+ if not prev_cve_name == cve_name:
+ print("Lookup mismatch:(%s,%s)" % (prevEntry[I_NAME],cve_name))
+ exit(1)
+ # Previous 'New_Reserved' placeholder CVE entries do not count in 'new' test
+ if prev_cve_status in ('New_Reserved'):
+ is_new = True
+
+ if is_new:
+## print("FOO2")
+ # New!
+
+ # In range for "New"?
+ if (report_start_date <= cve_published) and (cve_published <= report_stop_date):
+
+ # ('Historical','New','New_Reserved','Investigate','Vulnerable','Not_Vulnerable','(New)','(Investigate)','(Vulnerable)','(Not Vulnerable)')
+ if cve_status not in ('Investigate','Vulnerable','Not_Vulnerable'):
+ continue
+
+ # New CVE record
+ for index in range(I_V2SEVERITY,I_LIN5):
+ update_reason[index] = srtEntry[index]
+ for index in range(I_LIN5,I_LIN19):
+ if not srtEntry[index].strip():
+ update_reason[index] = ''
+ continue
+
+ try:
+ if srtEntry[index].startswith('REJECT'):
+ continue
+ srt_defect,status,resolution,defect_updated = srtEntry[index].split(':')
+ except Exception as e:
+ print("ERROR:%s:%s:%s" % (cve_name,srtEntry[index],e))
+ exit(1)
+
+ resolution_na = resolution in ('Withdrawn','Rejected','Not Applicable','Replaced By Requirement','Cannot Reproduce')
+ if not srt_defect:
+ update_reason[index] = ''
+ elif srt_defect.startswith('no_defects') and prevEntry[index] and prevEntry[index][0].isdigit():
+ update_reason[index] = ''
+ elif status.startswith('Invalid Version') or status.startswith('unknown'):
+ update_reason[index] = ''
+ elif prevEntry[index].startswith('Not_Vulnerable') and status and status[0].isdigit():
+ # (LIN5-16501:Not_Vulnerable,5.0), (LIN5-11686:Not_Vulnerable,5.0.1.9), (LIN5-10025:Not_Vulnerable,5.0)
+ if resolution_na:
+ update_reason[index] = ''
+ else:
+ update_reason[index] = status
+ else:
+ update_reason[index] = status
+ write_svns_row_on_change(prev2srtWriter,update_reason,'New',cur_cve,get_latest_jira(srtEntry))
+ #
+ update_reason[I_NAME] = '%s;%s;%s;%s' % (cve_name,cve_published,cve_status,cve_comments)
+ cve_new_count += write_row_on_change(prev2srtOpenFile,'~New',update_reason,get_latest_jira(srtEntry))
+ continue
+ elif cve_status in ('Investigate','Vulnerable','Not_Vulnerable'):
+ # Not "New" for selected range, but may have "updated" product entries
+ # Create empty Prev record to test for SRT updates
+ prevEntry = []
+ for i in range(I_MAX):
+ prevEntry.append('')
+ # CVE-2010-0006:Not_Vulnerable:2010-01-26:2018-11-13:Linux
+ prevEntry[I_NAME] = '%s::::' % cve_name
+ else:
+ continue
+
+ # Check unexpected SRTool status for Previous tracked CVE
+ # ('Historical','New','New_Reserved','Investigate','Vulnerable','Not_Vulnerable','(New)','(Investigate)','(Vulnerable)','(Not Vulnerable)')
+ if cve_status not in ('Investigate','Vulnerable','Not_Vulnerable'):
+ prev2srtNoDefectFile.write("ODDSTATUS;%s:%s\n" % (cve_name,cve_status))
+## print("ODDSTATUS;%s:%s" % (cve_name,cve_status))
+ continue
+
+# print("QWERTY:%s,%s" % (prevEntry[I_NAME],srtEntry[I_NAME]))
+
+ # Preset the SVNS row
+ svns_reason = []
+ for index in range(I_MAX):
+ svns_reason.append(prevEntry[index])
+
+ # Start validation
+ if prevEntry[I_V2SEVERITY] != srtEntry[I_V2SEVERITY]:
+ if not srtEntry[I_V2SEVERITY] and srtEntry[I_REJECT]:
+ # UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ pass
+ else:
+ update_reason[I_V2SEVERITY] = "(%s,%s)" % (prevEntry[I_V2SEVERITY],srtEntry[I_V2SEVERITY])
+ svns_reason[I_V2SEVERITY] = srtEntry[I_V2SEVERITY]
+
+ if prevEntry[I_V3SEVERITY] != srtEntry[I_V3SEVERITY]:
+ if not srtEntry[I_V3SEVERITY] and srtEntry[I_REJECT]:
+ # UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ pass
+ else:
+ update_reason[I_V3SEVERITY] = "(%s,%s)" % (prevEntry[I_V3SEVERITY],srtEntry[I_V3SEVERITY])
+ svns_reason[I_V3SEVERITY] = srtEntry[I_V3SEVERITY]
+
+ for index in range(I_LIN5,I_LIN19):
+ if ('Not_Vulnerable' == prevEntry[index]) and ('' == srtEntry[index]):
+ continue
+## print("FOO1:%s" % srtEntry[index])
+ # Group multple defects into one column entry
+ defect_list = []
+ defect_nv_list = []
+ svns_list = []
+##
+ try:
+ if prevEntry[index]:
+ prev_srt_defect,prev_status,prev_resolution,prev_defect_updated = prevEntry[index].split('/')[0].split(':')
+# print("FOOBAR3:%s,%s" % (prev_status,prevEntry[index]))
+ else:
+ prev_status = ''
+ except Exception as e:
+# print("FOOBAR9:%s,%s" % (e,prevEntry[index]))
+ exit(1)
+##
+# print("FEI1:%s,%s,%s" % (prevEntry[index] == srtEntry[index],prevEntry[index],srtEntry[index]))
+
+ for item in srtEntry[index].split('/'):
+## print(" 2:%s" % item)
+ # We want to see defects in Previous that are not in SRT
+ if not item:
+ item = '::FOO:%s' % report_start_date
+ srt_defect,status,resolution,defect_updated = item.split(':')
+ status = status.replace('_OBSOLETE','')
+ resolution_na = resolution in ('Withdrawn','Rejected','Not Applicable','Replaced By Requirement','Cannot Reproduce')
+
+ if (report_start_date > defect_updated) or (defect_updated > report_stop_date):
+ continue
+
+ if prev_status != status:
+ if not srt_defect:
+ #UPDATE;CVE-2011-1549;LIN5(:5.0.1,),LIN6(:6.0.0,)
+ other_reason[index] = "(%s:%s,%s)" % ('no_defects?',prevEntry[index],'release')
+ elif srt_defect.startswith('no_defects') and prevEntry[index] and prevEntry[index][0].isdigit():
+ # (no_defects:5.0.1.4,Vulnerable)
+ prev2srtNoDefectFile.write("NODEFECT;%s;%s;%s\n" % (cve_name,product_prefix[index],prevEntry[index]))
+ elif status.startswith('Invalid Version') or status.startswith('unknown'):
+ # (CGP5-1122#Fixed:5.0.1.14,Invalid Version)
+ prev2srtNoDefectFile.write("BADRCPL;%s;%s;%s\n" % (cve_name,srt_defect,prevEntry[index]))
+ elif prevEntry[index].startswith('Not_Vulnerable') and status and status[0].isdigit():
+ # (LIN5-16501:Not_Vulnerable,5.0), (LIN5-11686:Not_Vulnerable,5.0.1.9), (LIN5-10025:Not_Vulnerable,5.0)
+ if resolution_na:
+ defect_nv_list.append("(%s:%s,%s#%s)" % (srt_defect,prevEntry[index],status,resolution)) #product_prefix[index]
+ else:
+ defect_list.append("(%s:%s,%s#%s)" % (srt_defect,prevEntry[index],status,resolution)) #product_prefix[index]
+ svns_list.append(status)
+ elif (I_OVP == index) and '/' in prevEntry[index]:
+ # (OVP-2382:6.0.0.19/7.0.0.0,7.0), (OVP-2342:6.0.0.19/7.0.0.0,6.0.0.19)
+ # Skip combo OVP for now
+ pass
+ elif 'Historical' == status:
+ # (LIN8-7944:Not_Vulnerable,Historical)
+ prev2srtNoDefectFile.write("HISTORICAL;%s;%s;%s\n" % (cve_name,srt_defect,prevEntry[index]))
+ elif 'DUP_MISSING_PARENT' == resolution:
+ # (LIN6-10042:6.0.0.22,Not_Vulnerable)
+ # Review Skip duplicates with broken links for now
+ prev2srtNoDefectFile.write("NOPARENT;%s;%s;%s:%s\n" % (cve_name,srt_defect,prevEntry[index],status))
+ elif 'Replaced By Requirement' == resolution:
+ # (LIN8-7944:Not_Vulnerable,Historical)
+ prev2srtNoDefectFile.write("REPLACED;%s;%s;%s:%s\n" % (cve_name,srt_defect,prevEntry[index],status))
+ else:
+ defect_list.append("(%s:%s,%s)" % (srt_defect,prevEntry[index],status)) #product_prefix[index]
+ svns_list.append(status)
+ else:
+ svns_list.append(prev_status)
+## svns_reason[index] = prev_status
+
+ # Update the cell, empty or not
+ update_reason[index] = ', '.join(defect_list)
+ nv_rcpl_reason[index] = ', '.join(defect_nv_list)
+
+ # Update the SVNS cell if change
+ if svns_list:
+ svns_reason[index] = ', '.join(svns_list)
+# print("FOOBAR2:%s,%s" % (index,svns_list))
+ else:
+ svns_reason[index] = '%s' % prev_status
+# svns_reason[index] = '*%s' % prev_status
+
+ is_severity_change = False
+ is_release_change = False
+ for index in (I_V2SEVERITY,I_V3SEVERITY):
+ if update_reason[index]:
+ is_severity_change = True
+ for index in range(I_LIN7,I_LIN18+1):
+ if update_reason[index]:
+ is_release_change = True
+
+ # Print results
+ if (report_start_date <= cve_lastmodifieddate) and (cve_lastmodifieddate <= report_stop_date):
+ if is_release_change:
+## print("FOOBAR1")
+ cve_update_count += write_row_on_change(prev2srtUpdateFile,'UPDATE',update_reason,get_latest_jira(srtEntry))
+ write_svns_row_on_change(prev2srtWriter,svns_reason,'Updated',cur_cve,get_latest_jira(srtEntry))
+ elif is_severity_change:
+## print("FOOBAR2")
+ cve_update_count += write_row_on_change(prev2srtUpdateFile,'SEVERITY',update_reason,get_latest_jira(srtEntry))
+ write_svns_row_on_change(prev2srtWriter,svns_reason,'Updated',cur_cve,get_latest_jira(srtEntry))
+ cve_nv_count += write_row_on_change(prev2srtNvFile,'NV_RCPL',nv_rcpl_reason,'')
+ cve_other_count += write_row_on_change(prev2srtUpdateFile,'OTHER',other_reason,'')
+
+ # Debug support
+ if cmd_skip and (cve_update_count < cmd_skip): continue
+ if cmd_count and ((cve_update_count - cmd_skip) > cmd_count): break
+ # Progress indicator support
+ if 0 == i_row % 100:
+ print('%04d: %20s\r' % (i_row,cve_name), end='')
+ if 0 == i_row % 200:
+ time.sleep(0.1)
+
+ print("Count=%d,CVE_Find=%d,CVE_NV=%d,CVE_Other=%s,CVE_New=%s" % (count,cve_update_count,cve_nv_count,cve_other_count,cve_new_count))
+ prev2srtUpdateFile.close()
+ prev2srtNvFile.close()
+ prev2srtNoDefectFile.close()
+ prev2srtOpenFile.close()
+ if prev2srtSVNSFile:
+ prev2srtSVNSFile.close()
+ SQL_CLOSE_CONN(conn)
+ print("SVNS file: " + prev2srtSVNSNameFull)
+
+#################################
+# main loop
+#
+
+def main(argv):
+ global verbose
+ global cmd_skip
+ global cmd_count
+ global cmd_truncate
+ global force
+
+ # setup
+
+ parser = argparse.ArgumentParser(description='srtool_publish.py: manage SRTool publish table diffs')
+
+ parser.add_argument('--srt2update', dest='srt2update', help='Directory of SRTool database extract update data')
+ parser.add_argument('--validate-update', action='store_const', const='validate_update', dest='command', help='Compare and export update dbs')
+ parser.add_argument('--validate-update-svns', action='store_const', const='validate_update_svns', dest='command', help='Compare and export update dbs and SVNS')
+ parser.add_argument('--start', dest='report_start_date', help='Report start date (default="2019-02-16")')
+ parser.add_argument('--stop', dest='report_stop_date', help='Report stop date (default=<today>)')
+
+ parser.add_argument('--previous', '-p', dest='prev_path', help='Directory of previous update extract')
+ parser.add_argument('--current', '-c', dest='current_path', help="Directory of current update extract [default='.']")
+
+ parser.add_argument('--verbose', '-v', action='store_true', dest='verbose', help='Debugging: verbose output')
+ parser.add_argument('--truncate', action='store_true', dest='truncate', help='Truncate output')
+ parser.add_argument('--force', '-f', action='store_true', dest='force', help='Force Update')
+ parser.add_argument('--skip', dest='skip', help='Debugging: skip record count')
+ parser.add_argument('--count', dest='count', help='Debugging: short run record count')
+
+ args = parser.parse_args()
+
+ verbose = args.verbose
+ cmd_truncate = args.truncate
+ cmd_skip = 0
+ if None != args.skip:
+ cmd_skip = int(args.skip)
+ cmd_count = 0
+ if None != args.count:
+ cmd_count = int(args.count)
+ if get_override('SRTDBG_MINIMAL_DB'):
+ cmd_count = 40
+ force = args.force
+
+ # Paths to update file directories
+ prev_path = None
+ if args.prev_path:
+ prev_path = args.prev_path
+ current_path = '.'
+ if args.current_path:
+ current_path = args.current_path
+
+ report_start_date = '2019-02-16'
+ report_stop_date = datetime.today().strftime('%Y-%m-%d') # '2019-04-30'
+ if args.report_start_date:
+ report_start_date = args.report_start_date
+ if args.report_stop_date:
+ report_stop_date = args.report_stop_date
+
+ if args.srt2update:
+ import_orm_schema(args.srt2update)
+ srt2update(args.srt2update)
+ return()
+
+ if not prev_path:
+ print("ERROR: previous path required '--previous <previous_path>'")
+ exit(1)
+ if 'validate_update' == args.command:
+ import_orm_schema(current_path)
+ validate_update(prev_path,current_path,report_start_date,report_stop_date,False)
+ return()
+ elif 'validate_update_svns' == args.command:
+ import_orm_schema(current_path)
+ validate_update(prev_path,current_path,report_start_date,report_stop_date,True)
+ return()
+
+ else:
+ print("Command not found")
+
+if __name__ == '__main__':
+ # fetch any environment overrides
+
+ _log("MAIN:|%s|" % sys.argv)
+ set_override('SRTDBG_MINIMAL_DB')
+
+ srtool_basepath = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))))
+ main(sys.argv[1:])
diff --git a/bin/yp/srtool_yp.py b/bin/yp/srtool_yp.py
index 1438b59f..3703ab4e 100755
--- a/bin/yp/srtool_yp.py
+++ b/bin/yp/srtool_yp.py
@@ -27,13 +27,13 @@
import os
import sys
import argparse
-import sqlite3
import json
# load the srt.sqlite schema indexes
dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, dir_path)
from common.srt_schema import ORM
+from common.srtool_sql import *
# Setup:
srtDbName = 'srt.sqlite'
@@ -76,7 +76,7 @@ def init_products(source_file):
with open(source_doc) as json_data:
dct = json.load(json_data)
- conn = sqlite3.connect(srtDbName)
+ conn = SQL_CONNECT()
cur = conn.cursor()
Product_Items = dct['Product_Items']
@@ -94,7 +94,7 @@ def init_products(source_file):
product = cur.execute(sql).fetchone()
if product is None:
# NOTE: 'order' is a reserved SQL keyword, so we have to quote it
- sql = ''' INSERT into orm_product ("order", key, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
+ sql = ''' INSERT INTO orm_product (`order`, `key`, name, version, profile, cpe, defect_tags, product_tags) VALUES (?, ?, ?, ?, ?, ?, ?, ?)'''
cur.execute(sql, (order, key, name, version, profile, cpe, defect_tags, product_tags))
else:
sql = ''' UPDATE orm_product
diff --git a/bin/yp/yocto-project-products.json b/bin/yp/yocto-project-products.json
index ea154113..e8164ba6 100755
--- a/bin/yp/yocto-project-products.json
+++ b/bin/yp/yocto-project-products.json
@@ -4,8 +4,8 @@
{
"order" : "1",
"key" : "master",
- "name" : "Yocto Project Linux",
- "version" : "dev",
+ "name" : "Yocto Project",
+ "version" : "master",
"profile" : "",
"cpe" : "cpe:2.3:o:yoctoproject:*:*:*:*:*:*:*:*:*",
"defect_tags" : "{\"key\":\"master\"}",
@@ -15,30 +15,50 @@
{
"order" : "2",
- "key" : "Zeus",
- "name" : "Yocto Project Linux",
- "version" : "3.0",
- "profile" : "",
+ "key" : "nanbield",
+ "name" : "Yocto Project",
+ "version" : "Nanbield",
+ "profile" : "4.3",
"cpe" : "cpe:2.3:o:yoctoproject:linux:3.0:*:*:*:*:*:*:*",
"defect_tags" : "{\"key\":\"zeus\"}",
"product_tags" : "{\"key\":\"zeus\",\"mode\":\"support\"}"
},
{
"order" : "3",
- "key" : "Warrior",
- "name" : "Yocto Project Linux",
- "version" : "2.7",
- "profile" : "",
+ "key" : "mickledore",
+ "name" : "Yocto Project",
+ "version" : "Mickledore",
+ "profile" : "4.2",
"cpe" : "cpe:2.3:o:yoctoproject:linux:2.7:*:*:*:*:*:*:*",
"defect_tags" : "{\"key\":\"warrior\"}",
"product_tags" : "{\"key\":\"warrior\",\"mode\":\"support\"}"
},
{
"order" : "4",
- "key" : "Thud",
- "name" : "Yocto Project Linux",
- "version" : "2.6",
- "profile" : "",
+ "key" : "langdale",
+ "name" : "Yocto Project",
+ "version" : "Langdale",
+ "profile" : "4.1",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"langdale\"}",
+ "product_tags" : "{\"key\":\"langdale\",\"mode\":\"support\"}"
+ },
+ {
+ "order" : "5",
+ "key" : "kirkstone",
+ "name" : "Yocto Project",
+ "version" : "Kirkstone",
+ "profile" : "4.0",
+ "cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"kirkstone\"}",
+ "product_tags" : "{\"key\":\"kirkstone\",\"mode\":\"support\"}"
+ },
+ {
+ "order" : "6",
+ "key" : "dunfell",
+ "name" : "Yocto Project",
+ "version" : "Dunfell",
+ "profile" : "3.1",
"cpe" : "cpe:2.3:o:yoctoproject:linux:2.6:*:*:*:*:*:*:*",
+ "defect_tags" : "{\"key\":\"dunfell\"}",
+ "product_tags" : "{\"key\":\"dunfell\",\"mode\":\"support\"}"
diff --git a/data/recipe_names_from_layer_index.txt b/data/recipe_names_from_layer_index.txt
new file mode 100755
index 00000000..75efd629
--- /dev/null
+++ b/data/recipe_names_from_layer_index.txt
@@ -0,0 +1,3844 @@
+a2jmidid
+abseil-cpp
+accountsservice
+ace
+ace-cloud-editor
+acl
+acpica
+acpid
+acpitool
+adcli
+adduser
+ade
+adwaita-icon-theme
+aer-inject
+agent-proxy
+aircrack-ng
+alsa-equal
+alsa-lib
+alsa-oss
+alsa-plugins
+alsa-state
+alsa-tools
+alsa-topology-conf
+alsa-ucm-conf
+alsa-utils
+alsa-utils-scripts
+anaconda-init
+android-tools
+android-tools-conf
+anspass
+anthy
+aoetools
+apache-websocket
+apache2
+apmd
+apparmor
+appstream-glib
+apr
+apr-util
+apt
+argp-standalone
+arno-iptables-firewall
+arptables
+arpwatch
+asciidoc
+asio
+aspell
+assimp
+at
+at-spi2-atk
+at-spi2-core
+atftp
+atk
+atkmm
+atop
+attr
+audiofile
+audit
+aufs-util
+augeas
+autoconf
+autoconf-2.13-native
+autoconf-archive
+autofs
+automake
+avahi
+avro-c
+aws-iot-device-sdk-cpp
+azure-c-shared-utility
+azure-iot-sdk-c
+azure-macro-utils-c
+azure-uamqp-c
+azure-uhttp-c
+azure-umqtt-c
+babeld
+babeltrace
+babeltrace2
+babl
+backport-iwlwifi
+base-files
+base-passwd
+bash
+bash
+bash-completion
+bastille
+bats
+bazel-native
+bc
+bc
+bcc
+bcm2835-bootfiles
+bdftopcf
+bdwgc
+bigbuckbunny-1080p
+bigbuckbunny-480p
+bigbuckbunny-720p
+bind
+binutils
+binutils-cross-canadian-i686
+binutils-cross-i686
+binutils-cross-testsuite
+binutils-crosssdk-x86_64-oesdk-linux
+biossums
+bison
+bison
+bjam-native
+blktool
+blktrace
+blueman
+bluez5
+bmap-tools
+boinc-client
+bonnie++
+boost
+boot-config
+bootchart
+bootchart2
+botan
+bpftool
+bpftrace
+breakpad
+bridge-utils
+broadcom-bt-firmware
+brotli
+bsd-headers
+btrfs-tools
+buck-security
+build-appliance-image
+build-compare
+build-sysroots
+builder
+buildtools-extended-tarball
+buildtools-tarball
+bundler
+busybox
+busybox-inittab
+byacc
+bzip2
+c-ares
+c3-app-container
+c3-systemd-container
+ca-certificates
+cairo
+cairomm
+can-isotp
+can-utils
+cannelloni
+cantarell-fonts
+canutils
+capnproto
+caps
+catch2
+catfish
+ccache
+ccid
+ccs-tools
+cdparanoia
+cdrkit
+cdrtools-native
+celt051
+celt051
+ceph
+ceres-solver
+cfengine
+cfengine-masterfiles
+cgdb
+cgl-unittest
+cgroup-lite
+checkpolicy
+checksec
+checksecurity
+chef
+chef-zero
+cherokee
+chkrootkit
+chrony
+chrpath
+cifs-utils
+cim-schema-docs
+cim-schema-exper
+cim-schema-final
+cinematicexperience
+cirros
+civetweb
+cjson
+cjson
+ckermit
+clamav
+clang
+clang-cross-canadian-i686
+clang-cross-i686
+clang-crosssdk-x86_64
+cli11
+clinfo
+cloc
+cloud-image-compute
+cloud-image-controller
+cloud-image-guest
+cloud-init
+cluster
+cluster-glue
+clutter-1.0
+clutter-gst-3.0
+clutter-gtk-1.0
+cma-test
+cmake
+cmake-native
+cmark
+cmpi-bindings
+cni
+cni
+cockpit
+coderay
+cogl-1.0
+collectd
+colord
+colord-gtk
+colord-native
+compiler-rt
+compose-file
+con2fbmap
+concurrencykit
+concurrent-ruby
+conmon
+connman
+connman-conf
+connman-gnome
+conntrack-tools
+consolekit
+consul
+consul-migrate
+container-base
+container-shutdown-notifier
+containerd-docker
+containerd-opencontainers
+cool.io
+core-image-anaconda
+core-image-anaconda-initramfs
+core-image-base
+core-image-cgl
+core-image-cgl-initramfs
+core-image-clutter
+core-image-full-cmdline
+core-image-kernel-dev
+core-image-mingw-sdktest
+core-image-minimal
+core-image-minimal-dev
+core-image-minimal-initramfs
+core-image-minimal-mtdutils
+core-image-minimal-xfce
+core-image-rt
+core-image-rt
+core-image-rt-extended
+core-image-rt-sdk
+core-image-rt-sdk
+core-image-sato
+core-image-sato-dev
+core-image-sato-ptest-fast
+core-image-sato-sdk
+core-image-sato-sdk-ptest
+core-image-selinux
+core-image-selinux-minimal
+core-image-testmaster
+core-image-testmaster-initramfs
+core-image-tiny
+core-image-tiny-initramfs
+core-image-weston
+core-image-x11
+coreutils
+coreutils
+corosync
+cpio
+cpio
+cpprest
+cppunit
+cppzmq
+cpuburn-arm
+cpufrequtils
+cpuid
+cpupower
+cracklib
+crash
+crda
+createrepo-c
+cri-o
+criu
+crmsh
+cronie
+cross-localedef-native
+crossguid
+crun
+cryptfs-tpm2
+cryptodev-linux
+cryptodev-module
+cryptodev-tests
+cryptsetup
+cscope
+ctags
+ctapi-common
+cube-builder
+cube-builder-initramfs
+cube-cmd-server
+cube-desktop
+cube-dom0
+cube-essential
+cube-graphical-builder
+cube-install
+cube-k8s-node
+cube-server
+cube-update
+cube-vrf
+cunit
+cups
+cups-filters
+curl
+curlpp
+cve-update-db-native
+cwautomacros
+cxxtest
+cyclictest
+cyrus-sasl
+czmq
+daemonize
+daemontools
+dante
+daq
+dash
+db
+dbench
+dbus
+dbus-broker
+dbus-daemon-proxy
+dbus-glib
+dbus-test
+dbus-wait
+dcadec
+dconf
+dconf-editor
+ddrescue
+debianutils
+debootstrap
+debsums
+dejagnu
+depmodwrapper-cross
+desktop-file-utils
+dev86
+devilspie2
+devmem2
+dfu-util
+dfu-util-native
+dhcp
+dhcpcd
+dhex
+dhrystone
+dialog
+dibbler
+dietsplash
+diff-lcs
+diffoscope
+diffstat
+diffutils
+diffutils
+digitemp
+ding-libs
+diod
+directfb
+directfb-examples
+distcc
+distcc-config
+distro-feed-configs
+dldt-inference-engine
+dldt-model-optimizer
+dleyna-connector-dbus
+dleyna-core
+dleyna-renderer
+dleyna-server
+dlm
+dlt-daemon
+dm-verity-image-initramfs
+dmalloc
+dmidecode
+dnf
+dnf-plugin-tui
+dnfdragora
+dnsmasq
+dnssec-conf
+docbook-xml-dtd4
+docbook-xsl-stylesheets
+docker
+docker-ce
+docker-distribution
+docker-moby
+docopt.cpp
+dom0-init
+dos2unix
+dosfstools
+dosfstools
+dotnet
+dovecot
+doxygen
+dpdk
+dpdk
+dpkg
+dracut
+drbd
+drbd-utils
+dropbear
+dstat
+dtach
+dtc
+dumb-init
+dvb-apps
+dwarfsrcfiles
+e2fsprogs
+ebtables
+ecryptfs-utils
+ed
+ed
+edac-utils
+efibootmgr
+efitools
+efitools-native
+efivar
+eject
+elfutils
+elfutils
+ell
+emlog
+enca
+enchant2
+encodings
+enscript
+epeg
+epiphany
+erlang
+erlang
+erlang-native
+erlang-native
+erubis
+esmtp
+espeak
+essential-init
+etcd
+ethtool
+eudev
+evince
+evolution-data-server
+evolution-data-server-native
+evtest
+example
+exfat-utils
+exiv2
+exo
+expat
+expect
+ez-ipupdate
+f2fs-tools
+faac
+faad2
+facter
+faenza-icon-theme
+fatcat
+fatresize
+fb-test
+fbgrab
+fbida
+fbset
+fbset-modes
+fcgi
+fdk-aac
+fetchmail
+ffmpeg
+fftw
+figlet
+file
+file-roller
+findutils
+findutils
+fio
+fipscheck
+firewalld
+flac
+flashrom
+flatbuffers
+flex
+fltk
+fltk-native
+fluentbit
+fluentd
+fluidsynth
+fluidsynth-native
+fmt
+font-adobe-100dpi
+font-adobe-utopia-100dpi
+font-alias
+font-bh-100dpi
+font-bh-lucidatypewriter-100dpi
+font-bitstream-100dpi
+font-cursor-misc
+font-misc-misc
+font-util
+fontconfig
+fontforge
+formfactor
+fping
+frame
+freediameter
+freeglut
+freeradius
+freerdp
+freetype
+fribidi
+fscryptctl
+ftgl
+fts
+function2
+funyahoo-plusplus
+fuse
+fuse-exfat
+fuse-overlayfs
+fuse3
+fvwm
+fwknop
+fwts
+gammu
+garcon
+gateone
+gattlib
+gawk
+gawk
+gcc
+gcc-cross-canadian-i686
+gcc-cross-i686
+gcc-crosssdk-x86_64-oesdk-linux
+gcc-runtime
+gcc-sanitizers
+gcc-source-10.1.0
+gconf
+gcr
+gd
+gdb
+gdb-cross-canadian-i686
+gdb-cross-i686
+gdbm
+gdbm
+gdk-pixbuf
+gdm
+geany
+geany-plugins
+gedit
+gegl
+geis
+gen-coredump
+gengetopt
+gensio
+geoclue
+geocode-glib
+geoip
+geoip-perl
+geoipupdate
+geos
+gerbera
+gettext
+gettext
+gettext-minimal-native
+gexiv2
+gflags
+ghex
+ghostscript
+giflib
+gigolo
+gimp
+git
+gjs
+glade
+glew
+glfw
+glib-2.0
+glib-networking
+glibc
+glibc-locale
+glibc-mtrace
+glibc-scripts
+glibc-testsuite
+glibmm
+glide
+glm
+glmark2
+glog
+glusterfs
+gma500-gfx-check
+gmime
+gmmlib
+gmp
+gmp
+gnome-autoar
+gnome-backgrounds
+gnome-bluetooth
+gnome-calculator
+gnome-common
+gnome-control-center
+gnome-desktop-testing
+gnome-desktop3
+gnome-doc-utils-stub
+gnome-flashback
+gnome-font-viewer
+gnome-keyring
+gnome-menus3
+gnome-online-accounts
+gnome-panel
+gnome-session
+gnome-settings-daemon
+gnome-shell
+gnome-shell-extensions
+gnome-system-monitor
+gnome-terminal
+gnome-themes-extra
+gnome-tweaks
+gnu-config
+gnu-efi
+gnulib
+gnupg
+gnupg
+gnuplot
+gnutls
+gnutls
+go
+go-build
+go-capability
+go-cli
+go-connections
+go-context
+go-cross-canadian-i686
+go-cross-core2-32
+go-crosssdk-x86_64-oesdk-linux
+go-dbus
+go-dep
+go-digest
+go-distribution
+go-errors
+go-fsnotify
+go-helloworld
+go-libtrust
+go-logrus
+go-md2man
+go-metalinter
+go-mux
+go-native
+go-patricia
+go-pty
+go-runtime
+go-systemd
+gobject-introspection
+google-authenticator-libpam
+google-cloud-sdk
+googletest
+gparted
+gperf
+gperf
+gperftools
+gpgme
+gphoto2
+gpm
+gpsd
+gpsd-machine-conf
+gptfdisk
+gradm
+grail
+graphviz
+grep
+grep
+grilo
+groff
+groff
+grpc
+grpc-go
+grub
+grub
+grub-bootconf
+grub-efi
+grubby
+grubby
+gsettings-desktop-schemas
+gsl
+gsoap
+gsound
+gspell
+gssdp
+gst-examples
+gst-instruments
+gst-shark
+gst-validate
+gstd
+gstreamer1.0
+gstreamer1.0-libav
+gstreamer1.0-meta-base
+gstreamer1.0-omx
+gstreamer1.0-plugins-bad
+gstreamer1.0-plugins-base
+gstreamer1.0-plugins-good
+gstreamer1.0-plugins-ugly
+gstreamer1.0-python
+gstreamer1.0-rtsp-server
+gstreamer1.0-vaapi
+gtk+
+gtk+3
+gtk-doc
+gtkmm
+gtkmm3
+gtkperf
+gtksourceview-classic-light
+gtksourceview3
+gtksourceview4
+gtkwave
+guider
+gunicorn
+gupnp
+gupnp-av
+gupnp-dlna
+gupnp-igd
+gupnp-tools
+gvfs
+gyp
+gyp-py2
+gzip
+gzip
+harfbuzz
+hashicorp-serf
+hashie
+haveged
+hdcp
+hddtemp
+hdf5
+hdparm
+heartbeat
+help2man-native
+hexedit
+hiawatha
+hicolor-icon-theme
+hidapi
+hiera
+highline
+hiredis
+hostapd
+hplip
+htop
+htpdate
+http-parser
+http-parser.rb
+hunspell
+hunspell-dictionaries
+hwdata
+hwlatdetect
+hwloc
+hyperstart
+i2c-tools
+ibm-iotf-embeddedc
+ibus
+ibus-native
+iceauth
+icecc-create-env
+icewm
+icon-slicer
+icu
+icyque
+id3lib
+ifenslave
+ifmetric
+ifplugd
+iftop
+ifupdown
+ifuse
+igmpproxy
+igt-gpu-tools
+iksemel
+ima-evm-utils
+ima-inspect
+ima-policy
+imagemagick
+imapfilter
+imsettings
+indent
+inetutils
+iniparser
+init-ifupdown
+init-system-helpers
+initramfs-boot
+initramfs-cgl-boot
+initramfs-cube-builder
+initramfs-debug
+initramfs-debug-image
+initramfs-dm-verity
+initramfs-framework
+initramfs-kexecboot-image
+initramfs-kexecboot-klibc-image
+initramfs-live-boot
+initramfs-live-boot-tiny
+initramfs-live-install
+initramfs-live-install-efi
+initramfs-live-install-efi-testfs
+initramfs-live-install-testfs
+initramfs-module-install
+initramfs-module-install-efi
+initramfs-module-resizefs
+initramfs-module-setup-live
+initramfs-ostree
+initramfs-ostree-image
+initramfs-tools
+initrdscripts-ima
+initrdscripts-secure-core
+initscripts
+inotify-tools
+intel-compute-runtime
+intel-graphics-compiler
+intel-media-driver
+intel-mediasdk
+intel-microcode
+intel-pcm
+intel-vaapi-driver
+intltool
+iotop
+iozone3
+ipaddress
+ipc-run
+ipcalc
+iperf2
+iperf3
+ipmitool
+ipmiutil
+ippool
+iproute2
+ipsec-test
+iptables
+iptraf-ng
+iputils
+ipvsadm
+ipxe
+irda-utils
+irqbalance
+irssi
+isa-l
+iscsi-initiator-utils
+isic
+iso-codes
+isomd5sum
+itstool
+itt
+iucode-tool
+iw
+iwd
+ixgbe
+ixgbevf
+jack
+jansson
+jasper
+jhi
+joe
+jpnevulator
+jq
+jquery
+json
+json-c
+json-glib
+json-spirit
+jsoncpp
+jsonrpc
+kata-agent
+kata-proxy
+kata-runtime
+kata-shim
+kbd
+kconfig-frontends
+kea
+keepalived
+kern-tools-native
+kernel-devsrc
+kernel-initramfs
+kernel-initramfs-image
+kernel-module-emlog
+kernel-module-mali
+kernel-selftest
+kexec-tools
+kexec-tools-klibc
+kexecboot
+kexecboot-cfg
+key-store
+keybinder
+keymaps
+keyutils
+klcc-cross
+klibc
+klibc-static-utils
+klibc-utils
+kmod
+kmod-native
+kmscube
+konkretcmpi
+kpatch
+krb5
+kronosnet
+kubernetes
+kubernetes
+kura
+kvm-image-minimal
+kvmtool
+l3afpad
+lame
+lapack
+latencytop
+lcdproc
+lcms
+lcov
+ldconfig-native
+ldns
+ledmon
+lemon
+leptonica
+less
+leveldb
+lftp
+lib-perl
+liba52
+libacpi
+libaio
+libalgorithm-diff-perl
+libao
+libarchive
+libass
+libassuan
+libatasmart
+libatomic-ops
+libauthen-radius-perl
+libauthen-sasl-perl
+libavc1394
+libblockdev
+libbsd
+libburn
+libbytesize
+libc-bench
+libcamera
+libcanberra
+libcap
+libcap-ng
+libcap-ng-python
+libcapture-tiny-perl
+libcdio
+libcdio-paranoia
+libcec
+libcereal
+libcgi-perl
+libcgroup
+libchamplain
+libcheck
+libclass-method-modifiers-perl
+libcomps
+libconfig
+libconfig-autoconf-perl
+libconfig-general-perl
+libconnman-qt5
+libconvert-asn1-perl
+libcroco
+libcrypt-openssl-guess-perl
+libcrypt-openssl-random-perl
+libcrypt-openssl-rsa-perl
+libcurses-perl
+libcxx
+libcyusbserial
+libdaemon
+libdata-hexdump-perl
+libdazzle
+libdbd-mysql-perl
+libdbd-sqlite-perl
+libdbi
+libdbi-perl
+libdbus-c++
+libdc1394
+libde265
+libdev-checklib-perl
+libdevel-globaldestruction-perl
+libdevmapper
+libdigest-hmac-perl
+libdigest-sha1-perl
+libdivecomputer
+libdmx
+libdnet
+libdnf
+libdrm
+libdvbcsa
+libdvbpsi
+libdvdcss
+libdvdnav
+libdvdread
+libebml
+libedit
+libee
+libeigen
+libencode-locale-perl
+libencode-perl
+libenv-perl
+libepoxy
+liberation-fonts
+liberror-perl
+libesmtp
+libestr
+libev
+libevdev
+libevent
+libexecinfo
+libexif
+libextutils-config-perl
+libextutils-cppguess-perl
+libextutils-helpers-perl
+libextutils-installpaths-perl
+libextutils-parsexs-perl
+libfakekey
+libfann
+libfastjson
+libffi
+libfile-fnmatch-perl
+libfile-slurp-perl
+libfile-slurper-perl
+libfm
+libfm-extra
+libfontenc
+libforms
+libftdi
+libgcc
+libgcc-initial
+libgcrypt
+libgdata
+libgee
+libgfortran
+libgit2
+libgloss
+libglu
+libgnomekbd
+libgpg-error
+libgphoto2
+libgpiod
+libgpiod
+libgsf
+libgssglue
+libgtkstylus
+libgtop
+libgudev
+libgusb
+libgweather
+libgxim
+libhandy
+libharu
+libhtml-parser-perl
+libhtml-tagset-perl
+libhtml-tree-perl
+libhtp
+libhugetlbfs
+libibverbs
+libical
+libice
+libiconv
+libiconv
+libid3tag
+libidn
+libidn
+libidn2
+libiec61883
+libiio
+libimobiledevice
+libimport-into-perl
+libinih
+libinput
+libio-pty-perl
+libio-socket-ssl-perl
+libio-stringy-perl
+libipc-signal-perl
+libipt
+libjitterentropy
+libjpeg-turbo
+libjs-jquery
+libjs-sizzle
+libjson-perl
+libkcapi
+libksba
+liblbxutil
+libldb
+liblightmodbus
+liblinebreak
+liblocale-gettext-perl
+liblockfile
+liblogging
+liblognorm
+libmad
+libmailtools-perl
+libmali-xlnx
+libmatchbox
+libmatroska
+libmbim
+libmcrypt
+libmediaart
+libmediaart-2.0
+libmemcached
+libmemcached
+libmhash
+libmicrohttpd
+libmikmod
+libmime-charset-perl
+libmime-types-perl
+libmimetic
+libmms
+libmng
+libmnl
+libmodbus
+libmodbus
+libmodplug
+libmodule-build-perl
+libmodule-build-tiny-perl
+libmodule-pluggable-perl
+libmodule-runtime-perl
+libmodulemd
+libmoo-perl
+libmpc
+libmpd
+libmpdclient
+libmspack
+libmtp
+libmusicbrainz
+libmxml
+libmypaint
+libndp
+libnet
+libnet-dns-perl
+libnet-dns-sec-perl
+libnet-ldap-perl
+libnet-libidn-perl
+libnet-ssleay-perl
+libnet-telnet-perl
+libnetfilter-acct
+libnetfilter-conntrack
+libnetfilter-cthelper
+libnetfilter-cttimeout
+libnetfilter-log
+libnetfilter-queue
+libnewt
+libnfc
+libnfnetlink
+libnftnl
+libnice
+libnl
+libnma
+libnotify
+libnsl2
+libnss-mdns
+libnss-nis
+libnss-nisplus
+liboauth
+libogg
+libol
+libomxil
+liboop
+libopenmpt
+libopus
+libotr
+libowfat
+libp11
+libpam
+libpcap
+libpciaccess
+libpcre
+libpcre2
+libpeas
+libperlio-gzip-perl
+libpfm4
+libpipeline
+libplist
+libpng
+libproc-waitstat-perl
+libproxy
+libpsl
+libpthread-stubs
+libpwquality
+libqb
+libqmi
+libqofono
+libraw1394
+librdmacm
+librealsense
+librelp
+librepo
+libreport
+librole-tiny-perl
+librsvg
+librsync
+libsamplerate0
+libsass
+libsdl
+libsdl-gfx
+libsdl-image
+libsdl-mixer
+libsdl-net
+libsdl-ttf
+libsdl2
+libsdl2-image
+libsdl2-mixer
+libsdl2-net
+libsdl2-ttf
+libseccomp
+libsecret
+libselinux
+libselinux-python
+libsemanage
+libsepol
+libserialport
+libsigc++-2.0
+libsigc++-3
+libsign
+libsigrok
+libsigrokdecode
+libsm
+libsmi
+libsndfile1
+libsoc
+libsocket6-perl
+libsocketcan
+libsodium
+libsolv
+libsombok3
+libsoup-2.4
+libspatialite
+libsquish
+libsrtp
+libssh
+libssh2
+libssp-nonshared
+libstatgrab
+libstemmer
+libstrictures-perl
+libsub-exporter-progressive-perl
+libsub-uplevel-perl
+libtalloc
+libtar
+libtasn1
+libtdb
+libteam
+libterm-readkey-perl
+libtest-deep-perl
+libtest-harness-perl
+libtest-needs-perl
+libtest-nowarnings-perl
+libtest-pod-perl
+libtest-warn-perl
+libtest-warnings-perl
+libtevent
+libtext-charwidth-perl
+libtext-diff-perl
+libtext-iconv-perl
+libtext-wrapi18n-perl
+libtheora
+libtimedate-perl
+libtimezonemap
+libtinyxml
+libtinyxml2
+libtirpc
+libtool
+libtool-cross
+libtool-native
+libtorrent
+libubootenv
+libubox
+libucontext
+libuio
+libunicode-linebreak-perl
+libunique
+libunistring
+libunix-statgrab
+libunwind
+libupnp
+liburcu
+liburi-perl
+libusb-compat
+libusb1
+libusbg
+libusbgx
+libusbgx-config
+libusbmuxd
+libuser
+libutempter
+libuv
+libva
+libva-initial
+libva-utils
+libvcard
+libvdpau
+libvirt
+libvmi
+libvncserver
+libvorbis
+libvpx
+libwacom
+libwebp
+libwebsockets
+libwhisker2-perl
+libwmf
+libwnck
+libwnck3
+libwpe
+libwww-perl
+libx11
+libx11-compose-data
+libx86-1
+libxau
+libxaw
+libxcam
+libxcb
+libxcomposite
+libxcrypt
+libxcrypt-compat
+libxcursor
+libxdamage
+libxdmcp
+libxext
+libxfce4ui
+libxfce4util
+libxfixes
+libxfont
+libxfont2
+libxft
+libxi
+libxinerama
+libxkbcommon
+libxkbfile
+libxkbui
+libxklavier
+libxml++
+libxml-filter-buffertext-perl
+libxml-libxml-perl
+libxml-namespacesupport-perl
+libxml-parser-perl
+libxml-perl
+libxml-sax-base-perl
+libxml-sax-perl
+libxml-sax-writer-perl
+libxml-simple-perl
+libxml2
+libxmu
+libxpm
+libxpresent
+libxrandr
+libxrender
+libxres
+libxscrnsaver
+libxshmfence
+libxslt
+libxt
+libxtst
+libxv
+libxvmc
+libxxf86vm
+libyami
+libyami-utils
+libyaml
+libyui
+libyui-ncurses
+libzip
+lighttpd
+links
+links-x11
+linpack
+linux-atm
+linux-dummy
+linux-firmware
+linux-intel
+linux-intel-dev
+linux-intel-rt
+linux-libc-headers
+linux-yocto
+linux-yocto-dev
+linux-yocto-rt
+linux-yocto-tiny
+linuxptp
+lio-utils
+lirc
+live555
+lksctp-tools
+lldpd
+llvm
+llvm-common
+llvm-project-source-10.0.1
+lmbench
+lms
+lmsensors
+lmsensors-config
+lockdev
+lockfile-progs
+log4c
+log4cplus
+log4cpp
+logcheck
+logfsprogs
+logrotate
+logwarn
+logwatch
+loudmouth
+lowpan-tools
+lprng
+lrzsz
+lsb-release
+lshw
+lsof
+lsscsi
+ltp
+ltrace
+lttng-modules
+lttng-modules
+lttng-tools
+lttng-ust
+lua
+luajit
+luaposix
+lvm2
+lxc
+lxcfs
+lxdm
+lynis
+lz4
+lzip
+lzo
+lzop
+m4
+m4
+m4-native
+macchanger
+mailcap
+mailx
+make
+make
+make-mod-scripts
+makedepend
+makedevs
+makedumpfile
+maliit-framework-qt5
+maliit-plugins-qt5
+man-db
+man-pages
+mariadb
+mariadb-native
+matchbox-config-gtk
+matchbox-desktop
+matchbox-keyboard
+matchbox-panel-2
+matchbox-session
+matchbox-session-sato
+matchbox-terminal
+matchbox-theme-sato
+matchbox-wm
+maven
+mbedtls
+mbuffer
+mc
+mc
+mce-inject
+mce-test
+mcelog
+mcpp
+mcstrans
+md5deep
+mdadm
+mdbus2
+mdns
+memcached
+memcached
+memstat
+memtester
+menu-cache
+menulibre
+mercurial
+mesa
+mesa-demos
+mesa-gl
+meson
+meta-environment-extsdk-qemux86
+meta-environment-qemux86
+meta-extsdk-toolchain
+meta-filesystems-image
+meta-filesystems-image-base
+meta-go-toolchain
+meta-ide-support
+meta-initramfs-image
+meta-multimedia-image
+meta-multimedia-image-base
+meta-networking-image
+meta-networking-image-base
+meta-oe-image
+meta-oe-image-base
+meta-oe-ptest-image
+meta-perl-base
+meta-perl-image
+meta-perl-ptest-image
+meta-python-image
+meta-python-image-base
+meta-python-ptest-image
+meta-python2-image
+meta-python2-image-base
+meta-python2-ptest-image
+meta-toolchain
+meta-toolchain-qt5
+meta-webserver-image
+meta-webserver-image-base
+meta-world-pkgdata
+metacity
+metee
+method-source
+metrics-discovery
+mg
+mime-construct
+mime-support
+mime-types
+mimic
+mingetty
+mini-iconv
+mini-x-session
+minicom
+minicoredumper
+minidlna
+minini
+miniupnpd
+mixlib-authentication
+mixlib-cli
+mixlib-config
+mixlib-log
+mixlib-shellout
+mkfontscale
+mklibs-native
+mksh
+mm-common
+mmap-smack-test
+mmc-utils
+mobile-broadband-provider-info
+mod-wsgi
+modemmanager
+modutils-initscripts
+mokutil
+mongodb
+monit
+monit
+monkey
+mosh
+mosquitto
+mousepad
+mozjs
+mpc
+mpd
+mpeg2dec
+mpfr
+mpg123
+mpich
+mpv
+mraa
+mscgen
+msgpack
+msgpack-c
+msmtp
+msmtp
+msr-tools
+mtd-utils
+mtdev
+mtools
+mtools
+mtr
+mtree
+multimedia-libcamera-image
+multipath-tools
+musl
+musl-obstack
+musl-utils
+mutt
+mutter
+mx-1.0
+mycroft
+mypaint-brushes-1.0
+mysql-python
+nagios-core
+nagios-nrpe
+nagios-nsca
+nagios-plugins
+nana
+nano
+nanoio
+nanomsg
+nanomsg
+nanopb
+nasm
+nativesdk-buildtools-perl-dummy
+nativesdk-clang-glue
+nativesdk-erlang
+nativesdk-icecc-toolchain
+nativesdk-libtool
+nativesdk-meson
+nativesdk-mingw-w64-headers
+nativesdk-mingw-w64-runtime
+nativesdk-mingw-w64-winpthreads
+nativesdk-packagegroup-qt5-toolchain-host
+nativesdk-packagegroup-sdk-host
+nativesdk-qemu-helper
+nativesdk-qtbase
+nativesdk-sdk-provides-dummy
+nativesdk-wic
+nautilus
+nbd
+nbdkit
+nbench-byte
+ncftp
+ncmpc
+ncp
+ncrack
+ncurses
+ndctl
+ndisc6
+ne10
+neard
+neon
+net-snmp
+net-ssh
+net-ssh-gateway
+net-ssh-multi
+net-tools
+netbase
+netcat
+netcat-openbsd
+netcf
+netdata
+netkit-ftp
+netkit-rpc
+netkit-rsh
+netkit-rusers
+netkit-rwho
+netkit-telnet
+netkit-tftp
+netns
+netperf
+netplan
+nettle
+nettle
+network-manager-applet
+networkd-dispatcher
+networkmanager
+networkmanager-openvpn
+newlib
+nfacct
+nfs-export-root
+nfs-utils
+nftables
+nghttp2
+nginx
+nginx
+ngraph
+nicstat
+nikto
+ninja
+nlohmann-fifo
+nlohmann-json
+nmap
+nmon
+nng
+node-iothub-explorer
+node-red
+node-red-contrib-azureiothubnode
+node-red-contrib-google-cloud
+node-red-contrib-ibm-watson-iot
+nodejs
+nopoll
+nostromo
+notary
+novnc
+npth
+nspr
+nss
+nss-myhostname
+nss-pam-ldapd
+ntfs-3g-ntfsprogs
+ntimed
+ntop
+ntp
+numactl
+numlockx
+nuttcp
+nvme-cli
+nvmetcli
+oath
+obex-data-server
+obexftp
+ocfs2-tools
+oci-image-spec
+oci-image-tools
+oci-runtime-spec
+oci-runtime-tools
+oci-systemd-hook
+octave
+oe-scap
+ofono
+ogl-runtime
+ohai
+oisp-cli
+onboard
+onednn
+onig
+open-iscsi-kernel
+open-iscsi-user
+open-isns
+open-model-zoo
+open-vm-tools
+openal-soft
+openbox
+opencl-clang
+opencl-clang
+opencl-headers
+opencl-icd-loader
+openconnect
+opencore-amr
+openct
+opencv
+openembedded-release
+openflow
+openflow
+openh264
+openhpi
+openipmi
+openjdk-8-native
+openjpeg
+openl2tp
+openldap
+openldap
+openlldp
+openlmi-tools
+openmp
+openobex
+openocd
+opensaf
+opensbi
+opensc
+openscap
+openscap
+openscap-daemon
+openssh
+openssl
+openssl
+openssl-fips
+openssl-fips-example
+openssl-tpm-engine
+openstack-image-aio
+openstack-image-compute
+openstack-image-controller
+openstack-image-network
+openvpn
+openvswitch
+openwsman
+openzone
+opkg
+opkg-arch-config
+opkg-keyrings
+opkg-utils
+oprofile
+opus-tools
+opusfile
+orage
+orc
+orrery
+os-release
+oscam
+ostree
+ostree
+ostree
+ostree-upgrade-mgr
+overc-conftools
+overc-installer
+overc-system-agent
+overc-utils
+ovmf
+ovmf-shell-image
+ovmf-shell-image-enrollkeys
+owfs
+p11-kit
+p7zip
+p8platform
+p910nd
+pacemaker
+package-index
+packagegroup-anaconda-support
+packagegroup-audio
+packagegroup-base
+packagegroup-basic
+packagegroup-boot
+packagegroup-builder
+packagegroup-busybox-replacement
+packagegroup-cgl
+packagegroup-cgl-applications
+packagegroup-cgl-kernel
+packagegroup-cgl-middleware
+packagegroup-cgl-swdevtools
+packagegroup-cloud-aws
+packagegroup-cloud-azure
+packagegroup-cloud-benchmarking
+packagegroup-cloud-compute
+packagegroup-cloud-controller
+packagegroup-cloud-debug
+packagegroup-cloud-extras
+packagegroup-cloud-google
+packagegroup-cloud-ibm
+packagegroup-cloud-network
+packagegroup-cloud-oisp
+packagegroup-container
+packagegroup-containers
+packagegroup-core-base-utils
+packagegroup-core-boot
+packagegroup-core-boot-wrs
+packagegroup-core-buildessential
+packagegroup-core-clutter
+packagegroup-core-device-devel
+packagegroup-core-eclipse-debug
+packagegroup-core-full-cmdline
+packagegroup-core-nfs
+packagegroup-core-sdk
+packagegroup-core-security
+packagegroup-core-security-ptest
+packagegroup-core-selinux
+packagegroup-core-ssh-dropbear
+packagegroup-core-ssh-openssh
+packagegroup-core-standalone-sdk-target
+packagegroup-core-tools-debug
+packagegroup-core-tools-profile
+packagegroup-core-tools-testapps
+packagegroup-core-x11
+packagegroup-core-x11-base
+packagegroup-core-x11-sato
+packagegroup-core-x11-xserver
+packagegroup-cross-canadian-qemux86
+packagegroup-dom0
+packagegroup-dummy-monitoring
+packagegroup-efi-secure-boot
+packagegroup-empty-monitoring
+packagegroup-essential
+packagegroup-fonts-truetype
+packagegroup-glusterfs
+packagegroup-gnome-apps
+packagegroup-gnome-desktop
+packagegroup-go-cross-canadian-qemux86
+packagegroup-go-sdk-target
+packagegroup-graphical-builder
+packagegroup-ids
+packagegroup-ima
+packagegroup-ima-initramfs
+packagegroup-installer-x11-anaconda
+packagegroup-k8s
+packagegroup-luks
+packagegroup-luks-initramfs
+packagegroup-meta-filesystems
+packagegroup-meta-initramfs
+packagegroup-meta-multimedia
+packagegroup-meta-networking
+packagegroup-meta-oe
+packagegroup-meta-perl
+packagegroup-meta-python
+packagegroup-meta-python2
+packagegroup-meta-webserver
+packagegroup-nagios-monitoring
+packagegroup-networkmanager
+packagegroup-ovp-criu
+packagegroup-ovp-debug
+packagegroup-ovp-default-monitoring
+packagegroup-ovp-docker
+packagegroup-ovp-lttng-toolchain
+packagegroup-ovp-trace-tools
+packagegroup-ovp-vm
+packagegroup-qt5-qtcreator-debug
+packagegroup-qt5-toolchain-target
+packagegroup-sdk-target
+packagegroup-self-hosted
+packagegroup-selinux-minimal
+packagegroup-selinux-policycoreutils
+packagegroup-service-discovery
+packagegroup-tools-bluetooth
+packagegroup-tpm
+packagegroup-tpm2
+packagegroup-tpm2-initramfs
+packagegroup-util-linux
+packagegroup-vm-sep
+packagegroup-wr-base
+packagegroup-wr-bsps
+packagegroup-wr-core-cgl
+packagegroup-wr-core-cut
+packagegroup-wr-core-db
+packagegroup-wr-core-dhcp
+packagegroup-wr-core-interactive
+packagegroup-wr-core-libs-extended
+packagegroup-wr-core-mail
+packagegroup-wr-core-net
+packagegroup-wr-core-networking
+packagegroup-wr-core-perl
+packagegroup-wr-core-python
+packagegroup-wr-core-security
+packagegroup-wr-core-sys-util
+packagegroup-wr-core-util
+packagegroup-xfce
+packagegroup-xfce-base
+packagegroup-xfce-desktop
+packagegroup-xfce-extended
+packagegroup-xfce-multimedia
+packagegroup-zabbix-monitoring
+paho-mqtt-c
+pam-plugin-ccreds
+pam-plugin-ldapdb
+pam-ssh-agent-auth
+pamela
+pango
+pangomm
+parole
+parson
+parted
+passwdqc
+patch
+patch
+patchelf
+pavucontrol
+pax-utils
+paxctl
+pbzip2
+pcimem
+pciutils
+pcmanfm
+pcmciautils
+pcr-extend
+pcsc-lite
+pegtl
+perf
+perl
+pflask
+phonet-utils
+phoronix-test-suite
+php
+phpmyadmin
+physfs
+phytool
+picocom
+pidgin
+pidgin-otr
+pidgin-sipe
+piglit
+pigz
+pimd
+pinentry
+pipewire
+pipewire-0.2
+pixman
+pkcs11-helper
+pkgconf
+pkgconfig
+ply
+plymouth
+pm-graph
+pm-qa
+pm-utils
+pmdk
+pmtools
+pngcheck
+po4a
+poco
+podman
+podman-compose
+pointercal
+pointercal-xinput
+policycoreutils
+polkit
+polkit-group-rule-datetime
+polkit-group-rule-network
+pong-clock
+poppler
+poppler-data
+popt
+portaudio-v19
+postfix
+postgresql
+powertop
+ppp
+ppp-dialin
+pps-tools
+pptp-linux
+prelink
+procmail
+procps
+proftpd
+proj
+projucer
+protobuf
+protobuf-c
+proxy-libintl
+pry
+pseudo
+psmisc
+psplash
+psqlodbc
+ptest-runner
+ptpd
+pty-forward-native
+pugixml
+pulseaudio
+pulseaudio-client-conf-sato
+puppet
+puppet-vswitch
+puppetlabs-stdlib
+pure-ftpd
+purple-skypeweb
+puzzles
+pv
+pxaregs
+pyrtm
+python
+python-aioeventlet
+python-alembic
+python-anyjson
+python-appdirs
+python-asn1crypto
+python-astroid
+python-atomicwrites
+python-attr
+python-attrs
+python-automat
+python-automaton
+python-aws-iot-device-sdk-python
+python-babel
+python-backports-abc
+python-backports-functools-lru-cache
+python-backports-init
+python-backports-ssl
+python-barbican
+python-barbicanclient
+python-bcrypt
+python-beautifulsoup4
+python-beautifulsoup4
+python-behave
+python-bitarray
+python-blinker
+python-booleanpy
+python-boto
+python-cachetools
+python-can
+python-castellan
+python-ceilometer
+python-ceilometerclient
+python-certifi
+python-cffi
+python-chardet
+python-cheetah
+python-cinder
+python-cinderclient
+python-click
+python-cmd2
+python-coloredlogs
+python-configargparse
+python-configparser
+python-constantly
+python-contextlib2
+python-cpuset
+python-crcmod
+python-cryptography
+python-cryptography-vectors
+python-cson
+python-cursive
+python-cython
+python-daemon
+python-daemonize
+python-dateutil
+python-dbus
+python-dbusmock
+python-decorator
+python-deprecated
+python-designateclient
+python-distutils-extra
+python-django
+python-django
+python-django-appconf
+python-django-babel
+python-django-compressor
+python-django-nose
+python-django-openstack-auth
+python-django-pyscss
+python-django-south
+python-djangorestframework
+python-dnspython
+python-docker
+python-docker-pycreds
+python-docutils
+python-dominate
+python-editor
+python-engineio
+python-enum
+python-enum-compat
+python-enum34
+python-epydoc
+python-evdev
+python-falcon
+python-feedformatter
+python-feedparser
+python-fixtures
+python-flake8
+python-flask
+python-flask-babel
+python-flask-bcrypt
+python-flask-bootstrap
+python-flask-login
+python-flask-mail
+python-flask-migrate
+python-flask-nav
+python-flask-navigation
+python-flask-pymongo
+python-flask-restful
+python-flask-script
+python-flask-sijax
+python-flask-socketio
+python-flask-sqlalchemy
+python-flask-uploads
+python-flask-user
+python-flask-wtf
+python-flask-xstatic
+python-funcsigs
+python-functools32
+python-functools32
+python-future
+python-futures
+python-futures
+python-futurist
+python-gdata
+python-gevent
+python-gevent-websocket
+python-glance
+python-glance-store
+python-glanceclient
+python-glancestore
+python-google-api-python-client
+python-greenlet
+python-grpcio
+python-grpcio-tools
+python-gsocketpool
+python-h2
+python-heat
+python-heat-cfntools
+python-heatclient
+python-horizon
+python-hp3parclient
+python-hpack
+python-html5lib
+python-humanfriendly
+python-humanize
+python-hyperframe
+python-hyperlink
+python-hypothesis
+python-idna
+python-imaging
+python-importlib-metadata
+python-incremental
+python-inflection
+python-intervals
+python-ipaddr
+python-ipaddress
+python-ipy
+python-iso8601
+python-isodate
+python-isort
+python-itsdangerous
+python-javaobj-py3
+python-jinja2
+python-jsmin
+python-jsonext
+python-jsonpatch
+python-jsonpath-rw
+python-jsonpath-rw-ext
+python-jsonpointer
+python-jsonref
+python-jsonschema
+python-kazoo
+python-keyring
+python-keystone
+python-keystone-hybrid-backend
+python-keystoneclient
+python-keystonemiddleware
+python-lazy-object-proxy
+python-ldap
+python-license-expression
+python-linecache2
+python-lockfile
+python-lockfile
+python-lrparsing
+python-lxml
+python-m2crypto
+python-magnumclient
+python-mako
+python-manilaclient
+python-markupsafe
+python-mccabe
+python-microversion-parse
+python-mimeparse
+python-mistralclient
+python-mock
+python-monotonic
+python-more-itertools
+python-mox
+python-mox3
+python-mprpc
+python-msgpack
+python-native
+python-ndg-httpsclient
+python-netaddr
+python-netifaces
+python-networkmanager
+python-networkx
+python-neutron
+python-neutron-lib
+python-neutronclient
+python-nose-exclude
+python-nova
+python-novaclient
+python-novnc
+python-numeric
+python-oauth2
+python-oauthlib
+python-openstack-nose
+python-openstackclient
+python-openstacksdk
+python-os-brick
+python-os-client-config
+python-os-traits
+python-os-vif
+python-os-win
+python-os-xenapi
+python-osc-lib
+python-oslo.cache
+python-oslo.concurrency
+python-oslo.config
+python-oslo.context
+python-oslo.db
+python-oslo.i18n
+python-oslo.log
+python-oslo.messaging
+python-oslo.middleware
+python-oslo.policy
+python-oslo.privsep
+python-oslo.reports
+python-oslo.rootwrap
+python-oslo.serialization
+python-oslo.service
+python-oslo.versionedobjects
+python-oslo.vmware
+python-oslotest
+python-osprofiler
+python-ovsdbapp
+python-packaging
+python-paho-mqtt
+python-pam
+python-pamela
+python-parse
+python-parse-type
+python-passlib
+python-paste
+python-pathlib2
+python-pbr
+python-pep8
+python-periphery
+python-pexpect
+python-pika
+python-pika-pool
+python-pint
+python-pip
+python-pluggy
+python-ply
+python-posix-ipc
+python-pretend
+python-prettytable
+python-priority
+python-progress
+python-prompt-toolkit
+python-protobuf
+python-psutil
+python-psycopg2
+python-ptyprocess
+python-py
+python-pyalsaaudio
+python-pyasn1
+python-pyasn1-modules
+python-pybind11
+python-pybluez
+python-pycadf
+python-pycodestyle
+python-pyconnman
+python-pycparser
+python-pycrypto
+python-pycryptodomex
+python-pycurl
+python-pydbus
+python-pyelftools
+python-pyephem
+python-pyexpect
+python-pyfirmata
+python-pyflakes
+python-pyflame
+python-pygobject
+python-pygpgme
+python-pyhamcrest
+python-pyiface
+python-pyinotify
+python-pyjks
+python-pyjwt
+python-pylint
+python-pymisp
+python-pymongo
+python-pymysql
+python-pynetlinux
+python-pyopenssl
+python-pyparsing
+python-pyparted
+python-pyperclip
+python-pyperf
+python-pypowervm
+python-pyrex
+python-pyrex-native
+python-pyro4
+python-pyroute2
+python-pyrsistent
+python-pyrtm
+python-pysaml2
+python-pyscss
+python-pyserial
+python-pysmi
+python-pysnmp
+python-pysocks
+python-pysqlite
+python-pysqlite
+python-pystache
+python-pytest
+python-pytest-helpers-namespace
+python-pytest-runner
+python-pytest-salt
+python-pytest-tempdir
+python-python-editor
+python-pytoml
+python-pytun
+python-pytz
+python-pyudev
+python-pyusb
+python-pywbem
+python-pyyaml
+python-pyzmq
+python-rally
+python-rdflib
+python-redis
+python-requests
+python-requests-oauthlib
+python-rfc3339-validator
+python-rfc3986-validator
+python-rfc3987
+python-robotframework
+python-robotframework-seriallibrary
+python-rtslib-fb
+python-ryu
+python-salttesting
+python-scandir
+python-scrypt
+python-sdnotify
+python-selectors34
+python-semantic-version
+python-semver
+python-serpent
+python-setuptools
+python-setuptools-git
+python-setuptools-scm
+python-sh
+python-sijax
+python-simplejson
+python-singledispatch
+python-six
+python-slip-dbus
+python-smbus
+python-snakefood
+python-snimpy
+python-socketio
+python-soupsieve
+python-sparts
+python-speaklater
+python-sqlalchemy
+python-sqlalchemy-migrate
+python-sqlparse
+python-statistics
+python-stevedore
+python-strict-rfc3339
+python-subprocess32
+python-subunit
+python-suds
+python-suds-jurko
+python-swift
+python-swiftclient
+python-systemd
+python-sysv-ipc
+python-taskflow
+python-tenacity
+python-termcolor
+python-thrift
+python-tinyrpc
+python-toml
+python-tooz
+python-tornado
+python-tornado-redis
+python-tqdm
+python-traceback2
+python-trollius
+python-trove
+python-troveclient
+python-twisted
+python-twitter
+python-twofish
+python-txws
+python-typing
+python-tzlocal
+python-ujson
+python-unicodecsv
+python-unidiff
+python-urllib3
+python-vcversioner
+python-versiontools
+python-visitor
+python-vobject
+python-waitress
+python-wcwidth
+python-webcolors
+python-webdav
+python-webencodings
+python-websocket-client
+python-websockify
+python-werkzeug
+python-which
+python-whoosh
+python-wrapt
+python-wtforms
+python-xattr
+python-xlrd
+python-xstatic
+python-xstatic-angular
+python-xstatic-angular-bootstrap
+python-xstatic-angular-cookies
+python-xstatic-angular-fileupload
+python-xstatic-angular-gettext
+python-xstatic-angular-irdragndrop
+python-xstatic-angular-lrdragndrop
+python-xstatic-angular-mock
+python-xstatic-angular-schema-form
+python-xstatic-bootstrap-datepicker
+python-xstatic-bootstrap-scss
+python-xstatic-bootswatch
+python-xstatic-d3
+python-xstatic-font-awesome
+python-xstatic-font-awesome
+python-xstatic-hogan
+python-xstatic-jquery
+python-xstatic-jquery-migrate
+python-xstatic-jquery-ui
+python-xstatic-jquery.quicksearch
+python-xstatic-jquery.tablesorter
+python-xstatic-jsencrypt
+python-xstatic-magic-search
+python-xstatic-mdi
+python-xstatic-objectpath
+python-xstatic-qunit
+python-xstatic-rickshaw
+python-xstatic-roboto-fontface
+python-xstatic-smart-table
+python-xstatic-spin
+python-xstatic-term.js
+python-xstatic-tv4
+python-yappi
+python-zake
+python-zaqarclient
+python-zipp
+python-zopeinterface
+python3
+python3-absl
+python3-aenum
+python3-aiofiles
+python3-aiohttp
+python3-aiohttp-jinja2
+python3-alembic
+python3-amqp
+python3-amqplib
+python3-anaconda
+python3-ansi2html
+python3-ansible
+python3-ansicolors
+python3-anyjson
+python3-appdirs
+python3-apply-defaults
+python3-argcomplete
+python3-argh
+python3-arpeggio
+python3-arrow
+python3-asn1crypto
+python3-astor
+python3-astroid
+python3-async
+python3-async-timeout
+python3-atomicwrites
+python3-attr
+python3-attrs
+python3-autobahn
+python3-automat
+python3-avahi
+python3-aws-iot-device-sdk-python
+python3-awscli
+python3-azure-iot-device
+python3-babel
+python3-backports-functools-lru-cache
+python3-bandit
+python3-bcrypt
+python3-beautifulsoup4
+python3-behave
+python3-bitarray
+python3-bitstring
+python3-blinker
+python3-blivet
+python3-blivetgui
+python3-booleanpy
+python3-boto3
+python3-boto3
+python3-botocore
+python3-bugsnag
+python3-cachecontrol
+python3-cached-property
+python3-cachetools
+python3-can
+python3-cassandra-driver
+python3-cbor
+python3-cbor2
+python3-cephclient
+python3-certifi
+python3-cffi
+python3-chardet
+python3-cheetah
+python3-cheroot
+python3-cherrypy
+python3-click
+python3-cliff
+python3-cmd2
+python3-colorama
+python3-colorama
+python3-colorama
+python3-coloredlogs
+python3-colorlog
+python3-configargparse
+python3-configparser
+python3-configshell-fb
+python3-constantly
+python3-contextlib2
+python3-coverage
+python3-coverage
+python3-crcmod
+python3-croniter
+python3-cryptography
+python3-cryptography-vectors
+python3-cson
+python3-cssselect
+python3-cycler
+python3-cython
+python3-dateutil
+python3-dbus
+python3-dbus-next
+python3-dbusmock
+python3-dbussy
+python3-debtcollector
+python3-decorator
+python3-defusedxml
+python3-deprecation
+python3-dicttoxml
+python3-dill
+python3-distro
+python3-distutils-extra
+python3-django
+python3-django-appconf
+python3-django-south
+python3-djangorestframework
+python3-dnspython
+python3-docker
+python3-docker-compose
+python3-docker-pycreds
+python3-dockerpty
+python3-docopt
+python3-docutils
+python3-dogpile.cache
+python3-dogpile.core
+python3-dominate
+python3-dt-schema
+python3-ecdsa
+python3-editor
+python3-engineio
+python3-entrypoints
+python3-enum-compat
+python3-et-xmlfile
+python3-evdev
+python3-eventlet
+python3-extras
+python3-extras
+python3-fail2ban
+python3-fann2
+python3-fasteners
+python3-fasteners
+python3-fastentrypoints
+python3-feedformatter
+python3-fire
+python3-flask
+python3-flask-babel
+python3-flask-bootstrap
+python3-flask-cors
+python3-flask-jsonpify
+python3-flask-jwt
+python3-flask-login
+python3-flask-mail
+python3-flask-migrate
+python3-flask-nav
+python3-flask-pymongo
+python3-flask-restful
+python3-flask-script
+python3-flask-sijax
+python3-flask-socketio
+python3-flask-sqlalchemy
+python3-flask-uploads
+python3-flask-user
+python3-flask-wtf
+python3-flask-xstatic
+python3-funcsigs
+python3-future
+python3-gast
+python3-geojson
+python3-gevent
+python3-gevent-websocket
+python3-gg-group-setup
+python3-git
+python3-gitdb
+python3-gmqtt
+python3-google-api-core
+python3-google-api-python-client
+python3-google-auth
+python3-google-cloud-core
+python3-google-cloud-pubsub
+python3-google-cloud-storage
+python3-google-resumable-media
+python3-googleapis-common-protos
+python3-graphviz
+python3-greenlet
+python3-grpc-google-iam-v1
+python3-grpcio
+python3-grpcio-tools
+python3-gsocketpool
+python3-gunicorn
+python3-h2
+python3-h5py
+python3-happybase
+python3-haversine
+python3-hgtools
+python3-hpack
+python3-html2text
+python3-html5lib
+python3-httplib2
+python3-httplib2
+python3-httplib2
+python3-httpretty
+python3-humanfriendly
+python3-humanize
+python3-hyperframe
+python3-hyperlink
+python3-i18n
+python3-ibmiotf
+python3-idna
+python3-idna
+python3-idna-ssl
+python3-imageio
+python3-importlib-metadata
+python3-incremental
+python3-inflection
+python3-iniparse
+python3-intervals
+python3-ipaddress
+python3-ipy
+python3-iso3166
+python3-iso8601
+python3-isodate
+python3-isort
+python3-itsdangerous
+python3-janus
+python3-jaraco-functools
+python3-javaobj-py3
+python3-jdcal
+python3-jeepney
+python3-jinja2
+python3-jmespath
+python3-jsmin
+python3-jsonpatch
+python3-jsonpath-rw
+python3-jsonpointer
+python3-jsonref
+python3-jsonrpcserver
+python3-jsonschema
+python3-kafka
+python3-kconfiglib
+python3-keras-applications
+python3-keras-preprocessing
+python3-keyring
+python3-keystoneauth1
+python3-kiwisolver
+python3-knack
+python3-kombu
+python3-langtable
+python3-lazy-object-proxy
+python3-ldap
+python3-lesscpy
+python3-libarchive-c
+python3-license-expression
+python3-lockfile
+python3-logutils
+python3-lrparsing
+python3-luma-core
+python3-luma-oled
+python3-lxml
+python3-lz4
+python3-m2crypto
+python3-magic
+python3-mako
+python3-mapbox
+python3-markdown
+python3-markupsafe
+python3-matplotlib
+python3-mccabe
+python3-meh
+python3-meld3
+python3-memcache
+python3-memcached
+python3-memcached
+python3-mock
+python3-monotonic
+python3-more-itertools
+python3-mpmath
+python3-mprpc
+python3-msgpack
+python3-msk
+python3-msm
+python3-multidict
+python3-nacl
+python3-ndg-httpsclient
+python3-netaddr
+python3-netifaces
+python3-networkmanager
+python3-networkx
+python3-newrelic
+python3-nmap
+python3-nose
+python3-ntplib
+python3-numpy
+python3-oauth2client
+python3-oauthlib
+python3-obd
+python3-oisp
+python3-openpyxl
+python3-ordered-set
+python3-ordereddict
+python3-os-client-config
+python3-osc-lib
+python3-oslo.i18n
+python3-oslo.utils
+python3-ovs
+python3-packaging
+python3-padaos
+python3-padatious
+python3-paho-mqtt
+python3-pako
+python3-pam
+python3-pandas
+python3-parallax
+python3-paramiko
+python3-paramiko
+python3-paramiko
+python3-parse
+python3-parse-type
+python3-passlib
+python3-paste
+python3-pastedeploy
+python3-pathlib
+python3-pathlib2
+python3-pathtools3
+python3-pbr
+python3-pecan
+python3-pep8
+python3-periphery
+python3-petact
+python3-pexpect
+python3-pid
+python3-pika
+python3-pika-pool
+python3-pillow
+python3-pint
+python3-pip
+python3-pkgconfig
+python3-pluggy
+python3-ply
+python3-pocketsphinx
+python3-polyline
+python3-portend
+python3-positional
+python3-posix-ipc
+python3-prctl
+python3-precise-runner
+python3-pretend
+python3-prettytable
+python3-priority
+python3-productmd
+python3-progress
+python3-prompt-toolkit
+python3-protobuf
+python3-psutil
+python3-ptyprocess
+python3-pulsectl
+python3-py
+python3-py-ubjson
+python3-pyalsaaudio
+python3-pyasn1
+python3-pyasn1-modules
+python3-pyatspi
+python3-pyaudio
+python3-pybind11
+python3-pybluez
+python3-pycairo
+python3-pychromecast
+python3-pycodestyle
+python3-pyconnman
+python3-pycparser
+python3-pycrypto
+python3-pycryptodome
+python3-pycryptodomex
+python3-pycurl
+python3-pydbus
+python3-pydocumentdb
+python3-pyelftools
+python3-pyephem
+python3-pyexpect
+python3-pyfirmata
+python3-pyflakes
+python3-pyflakes
+python3-pygments
+python3-pygobject
+python3-pyhamcrest
+python3-pyiface
+python3-pyinotify
+python3-pyjks
+python3-pyjwt
+python3-pykickstart
+python3-pykwalify
+python3-pylint
+python3-pylyrics
+python3-pymisp
+python3-pymongo
+python3-pymysql
+python3-pynetlinux
+python3-pyopenssl
+python3-pyparsing
+python3-pyparted
+python3-pyperclip
+python3-pyperf
+python3-pyqt5
+python3-pyqtchart
+python3-pyro4
+python3-pyroute2
+python3-pyrsistent
+python3-pyserial
+python3-pysnmp
+python3-pysocks
+python3-pystache
+python3-pystemd
+python3-pytest
+python3-pytest-asyncio
+python3-pytest-helpers-namespace
+python3-pytest-html
+python3-pytest-metadata
+python3-pytest-runner
+python3-pytest-salt
+python3-pytest-tempdir
+python3-pytest-timeout
+python3-python-editor
+python3-python-vlc
+python3-pytoml
+python3-pytun
+python3-pytz
+python3-pyudev
+python3-pyusb
+python3-pywbem
+python3-pyyaml
+python3-pyzmq
+python3-raven
+python3-rcssmin
+python3-rdflib
+python3-redis
+python3-regex
+python3-repoze-lru
+python3-repoze.lru
+python3-repoze.who
+python3-requests
+python3-requests-file
+python3-requests-ftp
+python3-requests-futures
+python3-requests-oauthlib
+python3-requests-toolbelt
+python3-requests-unixsocket
+python3-requestsexceptions
+python3-retrying
+python3-rfc3339-validator
+python3-rfc3986
+python3-rfc3986-validator
+python3-rfc3987
+python3-rjsmin
+python3-robotframework
+python3-robotframework-seriallibrary
+python3-routes
+python3-routes
+python3-rsa
+python3-rsa
+python3-rtslib-fb
+python3-ruamel-yaml
+python3-s3transfer
+python3-saharaclient
+python3-scandir
+python3-scapy
+python3-scapy
+python3-scons
+python3-scons-native
+python3-scp
+python3-scrypt
+python3-sdnotify
+python3-secretstorage
+python3-semantic-version
+python3-semver
+python3-sentry-sdk
+python3-serpent
+python3-setuptools
+python3-setuptools-git
+python3-setuptools-scm
+python3-setuptools-scm-git-archive
+python3-sh
+python3-sijax
+python3-simpleeval
+python3-simplegeneric
+python3-simplejson
+python3-simpleline
+python3-singledispatch
+python3-six
+python3-slip-dbus
+python3-smbus
+python3-smbus2
+python3-smmap
+python3-snappy
+python3-socketio
+python3-soupsieve
+python3-speaklater
+python3-speedtest-cli
+python3-sphinx
+python3-spidev
+python3-spidev
+python3-sqlalchemy
+python3-sqlparse
+python3-sshtunnel
+python3-statsd
+python3-stevedore
+python3-strict-rfc3339
+python3-subunit
+python3-suds-jurko
+python3-supervisor
+python3-suricata-update
+python3-sympy
+python3-systemd
+python3-sysv-ipc
+python3-tabulate
+python3-tempita
+python3-tempora
+python3-term
+python3-termcolor
+python3-test-generator
+python3-testrepository
+python3-testresources
+python3-testscenarios
+python3-testtools
+python3-testtools
+python3-texttable
+python3-thrift
+python3-thrift
+python3-tinyrecord
+python3-toml
+python3-tornado
+python3-tox
+python3-tqdm
+python3-trafaret
+python3-trafaret-config
+python3-transitions
+python3-twine
+python3-twisted
+python3-twitter
+python3-twofish
+python3-txaio
+python3-txws
+python3-typeguard
+python3-typing-extensions
+python3-tzlocal
+python3-u-msgpack-python
+python3-ujson
+python3-unidiff
+python3-uritemplate
+python3-uritemplate
+python3-urllib3
+python3-vcversioner
+python3-versiontools
+python3-vine
+python3-virtualenv
+python3-visitor
+python3-voluptuous
+python3-vsts-cd-manager
+python3-waitress
+python3-warlock
+python3-watchdog
+python3-wcwidth
+python3-weakrefmethod
+python3-webcolors
+python3-webencodings
+python3-webob
+python3-webob
+python3-webrtcvad
+python3-websocket-client
+python3-websocket-client
+python3-websockets
+python3-webtest
+python3-werkzeug
+python3-werkzeug
+python3-wheel
+python3-whoosh
+python3-wrapt
+python3-wsgiref
+python3-wsme
+python3-wtforms
+python3-xlrd
+python3-xmlrunner
+python3-xmltodict
+python3-xmltodict
+python3-xmodem
+python3-xstatic
+python3-xstatic-angular-bootstrap
+python3-xstatic-angular-cookies
+python3-xstatic-angular-fileupload
+python3-xstatic-angular-gettext
+python3-xstatic-angular-irdragndrop
+python3-xstatic-angular-schema-form
+python3-xstatic-bootstrap-datepicker
+python3-xstatic-bootstrap-scss
+python3-xstatic-bootswatch
+python3-xstatic-d3
+python3-xstatic-font-awesome
+python3-xstatic-font-awesome
+python3-xstatic-hogan
+python3-xstatic-jasmine
+python3-xstatic-jquery
+python3-xstatic-jquery-migrate
+python3-xstatic-roboto-fontface
+python3-xxhash
+python3-yappi
+python3-yarl
+python3-zc-lockfile
+python3-zipp
+python3-zopeinterface
+pyxdg
+qat16
+qat17
+qemu
+qemu-helper-native
+qemu-native
+qemu-system-native
+qemuwrapper-cross
+qmllive
+qpdf
+qpid
+qpid-python
+qrencode
+qsiv
+qt-kiosk-browser
+qt3d
+qt5-creator
+qt5-demo-extrafiles
+qt5-opengles2-test
+qt5-plugin-generic-vboxtouch
+qt5everywheredemo
+qt5ledscreen
+qt5nmapcarousedemo
+qt5nmapper
+qtbase
+qtbase-native
+qtcharts
+qtchooser
+qtcoap
+qtconnectivity
+qtdatavis3d
+qtdeclarative
+qtgamepad
+qtgraphicaleffects
+qtimageformats
+qtknx
+qtlocation
+qtlottie
+qtmqtt
+qtmultimedia
+qtnetworkauth
+qtopcua
+qtpurchasing
+qtquick3d
+qtquickcontrols
+qtquickcontrols2
+qtquicktimeline
+qtremoteobjects
+qtscript
+qtscxml
+qtsensors
+qtserialbus
+qtserialport
+qtsmarthome
+qtsvg
+qtsystems
+qttools
+qttranslations
+qtvirtualkeyboard
+qtwayland
+qtwebchannel
+qtwebengine
+qtwebglplugin
+qtwebkit
+qtwebsockets
+qtwebview
+qtx11extras
+qtxmlpatterns
+quagga
+quazip
+quilt
+quilt-native
+quitbattery
+quitindicators
+quota
+qwt-qt5
+rabbitmq-c
+rabbitmq-server
+rack
+racoon2
+radiusclient-ng
+radvd
+rapidjson
+raptor2
+rarpd
+rasdaemon
+rclone
+rdate
+rdfind
+rdist
+rdma-core
+re2c
+read-edid
+readline
+readline
+redhat-security
+redis
+refpolicy-mcs
+refpolicy-mcs
+refpolicy-minimum
+refpolicy-minimum
+refpolicy-mls
+refpolicy-mls
+refpolicy-standard
+refpolicy-standard
+refpolicy-targeted
+refpolicy-targeted
+relayd
+remmina
+renderdoc
+resolvconf
+resource-agents
+rest
+rest-client
+restic
+restorecond
+rfkill
+rgb
+riddler
+ristretto
+rndmac
+rng-tools
+rocksdb
+rodent-icon-theme
+rp-pppoe
+rpcbind
+rpcsvc-proto
+rpi-u-boot-scr
+rpm
+rrdtool
+rsnapshot
+rsync
+rsync
+rsyslog
+rt-app
+rt-tests
+rtl8723bs-bt
+rtmpdump
+rtorrent
+ruby
+ruby-shadow
+ruli
+run-container
+run-postinsts
+runc-docker
+runc-opencontainers
+runv
+runx
+rwmem
+rxvt-unicode
+rxvt-unicode
+rygel
+s-suite
+safec
+saftest
+salt
+samba
+samhain-client
+samhain-server
+samhain-standalone
+sanlock
+sassc
+sato-screenshot
+satyr
+sbc
+sblim-cmpi-devel
+sblim-sfc-common
+sblim-sfcb
+sblim-sfcc
+sbsigntool
+sbsigntool-native
+scap-security-guide
+scap-security-guide
+schedtool-dl
+schroedinger
+screen
+screen-getty
+scsirastools
+sdbus-c++
+sdbus-c++-libsystemd
+sdbus-c++-tools
+sdparm
+seabios
+secilc
+secure-core-image
+secure-core-image-initramfs
+secure-core-minimal-image
+secureboot-selftest-image-signed
+secureboot-selftest-image-unsigned
+security-build-image
+security-client-image
+security-server-image
+security-test-image
+sed
+sed
+sedutil
+selinux-autorelabel
+selinux-dbus
+selinux-gui
+selinux-init
+selinux-labeldev
+selinux-python
+selinux-sandbox
+seloader
+semodule-utils
+ser2net
+serf
+serial-forward
+serialcheck
+serverengine
+sessreg
+sethdlc
+setools
+setserial
+settings-daemon
+setxkbmap
+sg3-utils
+sgpio
+shadow
+shadow-securetty
+shadow-sysroot
+shared-mime-info
+shared-mime-info
+sharutils
+shim
+shutdown-desktop
+sigdump
+signing-keys
+sigrok-cli
+simple-mtpfs
+singularity
+sip3
+skopeo
+skopeo
+slang
+slirp4netns
+sloci-image-native
+slop
+smack
+smack-test
+smartmontools
+smbnetfs
+smcroute
+smem
+smstools3
+snappy
+snort
+socat
+soci
+softhsm
+sota-tools
+sound-theme-freedesktop
+source-han-sans-cn-fonts
+source-han-sans-jp-fonts
+source-han-sans-kr-fonts
+source-han-sans-tw-fonts
+sox
+span-lite
+spawn-fcgi
+spdlog
+speedtest-cli
+speex
+speexdsp
+spf13-cobra
+spf13-pflag
+spice
+spice
+spice-html5
+spice-protocol
+spidev-test
+spirv-shader-generator
+spirv-tools
+spitools
+sqlite
+sqlite-orm
+sqlite3
+squashfs-tools
+squid
+srecord
+sshfs-fuse
+ssiapi
+ssmping
+ssmtp
+sssd
+stalonetray
+startup-notification
+sthttpd
+stm32flash
+strace
+streamripper
+stress-ng
+stressapptest
+strongswan
+strptime
+stunnel
+subversion
+sudo
+surf
+suricata
+swig
+synergy
+sysbench
+sysdig
+sysfsutils
+sysklogd
+syslinux
+syslog-ng
+sysprof
+sysstat
+system-config-keyboard
+systemd
+systemd-boot
+systemd-bootchart
+systemd-bootconf
+systemd-compat-units
+systemd-conf
+systemd-machine-units
+systemd-serialgetty
+systemd-systemctl-native
+systemtap
+systemtap-native
+systemtap-uprobes
+systemu
+sysvinit
+sysvinit-inittab
+taglib
+takao-fonts
+tar
+tar
+target-sdk-provides-dummy
+tbb
+tcf-agent
+tcl
+tclap
+tcp-smack-test
+tcp-wrappers
+tcpdump
+tcpreplay
+tcpslice
+tcsh
+tearsofsteel-1080p
+telepathy-glib
+telepathy-idle
+telepathy-python
+telepathy-python3
+tempest
+tensorboard
+tensorflow
+tensorflow-estimator
+tensorflow-for-poets
+tensorflow-native
+terminus-font
+tesseract
+tesseract-lang
+testexport-tarball
+testfloat
+texinfo
+texinfo
+texinfo-dummy-native
+tftp-hpa
+tgt
+thermald
+thin-provisioning-tools
+thrift
+thunar
+thunar-archive-plugin
+thunar-media-tags-plugin
+thunar-shares-plugin
+thunar-volman
+tiff
+tigervnc
+time
+time
+tini
+tiny-init
+tiny-init
+tinyalsa
+tinymembench
+tinyproxy
+tiobench
+tipcutils
+tiptop
+tk
+tmon
+tmux
+tnftp
+tokyocabinet
+tomoyo-tools
+toscoterm
+toybox
+tpm-quote-tools
+tpm-tools
+tpm2-abrmd
+tpm2-abrmd
+tpm2-tools
+tpm2-tools
+tpm2-tss
+tpm2-tss
+tpm2simulator-native
+traceroute
+tracker
+tracker-miners
+transmission
+tree
+tremor
+triggerhappy
+tripwire
+trousers
+tslib
+tsocks
+tss-testsuite
+ttf-abyssinica
+ttf-arphic-uming
+ttf-bitstream-vera
+ttf-dejavu
+ttf-droid
+ttf-gentium
+ttf-hunkyfonts
+ttf-inconsolata
+ttf-liberation
+ttf-liberation-sans-narrow
+ttf-lklug
+ttf-lohit
+ttf-mplus
+ttf-noto-emoji
+ttf-pt-sans
+ttf-roboto
+ttf-sazanami
+ttf-tlwg
+ttf-ubuntu-font-family
+ttf-vlgothic
+ttf-wqy-zenhei
+tufao
+tumbler
+tunctl
+turbostat
+tvheadend
+twm
+tzcode-native
+tzdata
+tzinfo
+tzinfo-data
+u-boot
+u-boot-imx
+u-boot-tools
+u-boot-uenv
+ubi-utils-klibc
+ucarp
+udev-extraconf
+udevil
+udisks2
+udocker
+udp-smack-test
+ufs-utils
+uftp
+uftrace
+ufw
+uhubctl
+uim
+umip
+uml-utilities
+umoci
+umock-c
+unbound
+unclutter-xfixes
+unfs3
+unicode-ucd
+unifdef
+uninative-tarball
+unionfs-fuse
+unixodbc
+unzip
+update-rc.d
+upm
+upower
+uriparser
+usb-modeswitch
+usb-modeswitch-data
+usbinit
+usbmuxd
+usbredir
+usbutils
+usermode
+usleep
+usrsctp
+uthash
+util-linux
+util-macros
+utouch-evemu
+utouch-frame
+utouch-mtview
+uw-imap
+uwsgi
+uxen-guest-image-minimal
+uxen-guest-tools
+v4l-utils
+v86d
+vala
+valgrind
+valijson
+vblade
+vboxguestdrivers
+vdso-test
+vgabios
+vim
+vim-tiny
+virglrenderer
+vlan
+vlc
+vlock
+vo-aacenc
+vo-amrwbenc
+volatile-binds
+volume-key
+vorbis-tools
+vpnc
+vrf
+vrf-init
+vsftpd
+vte
+vte9
+vulkan-demos
+vulkan-headers
+vulkan-loader
+vulkan-tools
+waffle
+watchdog
+watchdog-config
+wavpack
+wayland
+wayland-protocols
+wbxml2
+wdt-tool
+webkitgtk
+webmin
+webrtc-audio-processing
+websocketpp
+weechat
+weston
+weston-init
+wget
+whetstone
+which
+which
+wic-tools
+wifi-test-suite
+wiggle
+windriver-logos
+wipe
+wireguard-module
+wireguard-tools
+wireless-regdb
+wireshark
+wolfssl
+wpa-supplicant
+wpan-tools
+wpebackend-fdo
+wr-app-container
+wr-common-packages-native
+wr-init
+wr-systemd-container
+wr-themes
+wrlinux-image-cgl
+wrlinux-image-core
+wrlinux-image-initramfs
+wrlinux-image-installer
+wrlinux-image-installer-initramfs
+wrlinux-image-minimal-initramfs
+wrlinux-image-ovp-guest
+wrlinux-image-ovp-kvm
+wrlinux-image-ovp-kvm-minimal
+wrlinux-image-small
+wrlinux-image-std
+wrlinux-image-std-sato
+wrlinux-image-tiny-initramfs
+wvdial
+wvstreams
+wxwidgets
+x11perf
+x11vnc
+x264
+x265
+xarchiver
+xauth
+xbitmaps
+xcb-proto
+xcb-util
+xcb-util-cursor
+xcb-util-image
+xcb-util-keysyms
+xcb-util-renderutil
+xcb-util-wm
+xclock
+xcursor-transparent-theme
+xcursorgen
+xdebug
+xdelta3
+xdg-user-dirs
+xdg-utils
+xdotool
+xdpyinfo
+xen
+xen-guest-image-minimal
+xen-image-minimal
+xen-python2
+xen-tools
+xerces-c
+xev
+xeyes
+xf86-input-evdev
+xf86-input-keyboard
+xf86-input-libinput
+xf86-input-mouse
+xf86-input-synaptics
+xf86-input-tslib
+xf86-input-vmmouse
+xf86-input-void
+xf86-video-armsoc
+xf86-video-armsoc
+xf86-video-ast
+xf86-video-ati
+xf86-video-cirrus
+xf86-video-fbdev
+xf86-video-intel
+xf86-video-mga
+xf86-video-nouveau
+xf86-video-vesa
+xf86-video-vmware
+xfce-dusk-gtk3
+xfce-polkit
+xfce4-appfinder
+xfce4-battery-plugin
+xfce4-calculator-plugin
+xfce4-clipman-plugin
+xfce4-closebutton-plugin
+xfce4-cpufreq-plugin
+xfce4-cpugraph-plugin
+xfce4-datetime-plugin
+xfce4-datetime-setter
+xfce4-dev-tools
+xfce4-diskperf-plugin
+xfce4-embed-plugin
+xfce4-equake-plugin
+xfce4-eyes-plugin
+xfce4-fsguard-plugin
+xfce4-genmon-plugin
+xfce4-hotcorner-plugin
+xfce4-mailwatch-plugin
+xfce4-mount-plugin
+xfce4-mpc-plugin
+xfce4-netload-plugin
+xfce4-notes-plugin
+xfce4-notifyd
+xfce4-panel
+xfce4-panel-profiles
+xfce4-places-plugin
+xfce4-power-manager
+xfce4-pulseaudio-plugin
+xfce4-screensaver
+xfce4-screenshooter
+xfce4-sensors-plugin
+xfce4-session
+xfce4-settings
+xfce4-smartbookmark-plugin
+xfce4-systemload-plugin
+xfce4-taskmanager
+xfce4-terminal
+xfce4-time-out-plugin
+xfce4-timer-plugin
+xfce4-vala
+xfce4-verve-plugin
+xfce4-wavelan-plugin
+xfce4-weather-plugin
+xfce4-whiskermenu-plugin
+xfce4-xkb-plugin
+xfconf
+xfdesktop
+xfmpc
+xfontsel
+xfsdump
+xfsprogs
+xfwm4
+xfwm4-themes
+xgamma
+xhost
+xinetd
+xinit
+xinput
+xinput-calibrator
+xkbcomp
+xkbevd
+xkbprint
+xkbutils
+xkeyboard-config
+xl2tpd
+xlsatoms
+xlsclients
+xlsfonts
+xmag
+xmessage
+xmlrpc-c
+xmlsec1
+xmlstarlet
+xmlto
+xmodmap
+xorg-fonts-100dpi
+xorg-minimal-fonts
+xorg-sgml-doctools
+xorgproto
+xorgxrdp
+xorriso
+xpext
+xprop
+xrandr
+xrdb
+xrdp
+xrefresh
+xrestop
+xscreensaver
+xserver-common
+xserver-nodm-init
+xserver-xf86-config
+xserver-xorg
+xserver-xorg-cvt-native
+xset
+xsetmode
+xsetroot
+xsp
+xstdcmap
+xterm
+xtrans
+xuser-account
+xvinfo
+xvisor
+xwd
+xwininfo
+xwud
+xxhash
+xz
+yad
+yaffs2-utils
+yajl
+yajl
+yajl-ruby
+yard
+yasm
+yavta
+yelp
+yelp-tools
+yelp-xsl
+yocto-compat-logos
+yp-tools
+yp-tools
+ypbind-mt
+zabbix
+zbar
+zchunk
+zenity
+zeroconf
+zeromq
+zile
+zip
+zlib
+zlib-intel
+zlib-qat
+zlib-qat
+zlog
+znc
+zram
+zsh
+zstd
diff --git a/lib/acme/reports.py b/lib/acme/reports.py
index 682852ad..92c5693e 100755
--- a/lib/acme/reports.py
+++ b/lib/acme/reports.py
@@ -34,7 +34,7 @@ from srtgui.reports import Report, ReportManager, ProductsReport
from django.db.models import Q, F
from django.db import Error
-from srtgui.templatetags.projecttags import filtered_filesizeformat
+from srtgui.templatetags.jobtags import filtered_filesizeformat
logger = logging.getLogger("srt")
diff --git a/lib/acme/tables.py b/lib/acme/tables.py
index ee14136b..0e39dc78 100755
--- a/lib/acme/tables.py
+++ b/lib/acme/tables.py
@@ -29,7 +29,7 @@ from orm.models import Notify, NotifyAccess, NotifyCategories
from users.models import SrtUser, UserSafe
from django.db.models import Q, Max, Sum, Count, When, Case, Value, IntegerField
-from django.conf.urls import url
+from django.urls import re_path as url
from django.urls import reverse, resolve
from django.http import HttpResponse
from django.views.generic import TemplateView
diff --git a/lib/acme/templates/acme_hello.html b/lib/acme/templates/acme_hello.html
index 679f45a2..dac5c66c 100755
--- a/lib/acme/templates/acme_hello.html
+++ b/lib/acme/templates/acme_hello.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} ACME {% endblock %}
diff --git a/lib/acme/templates/acme_product.html b/lib/acme/templates/acme_product.html
index f1fb1a8b..0e519acd 100755
--- a/lib/acme/templates/acme_product.html
+++ b/lib/acme/templates/acme_product.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - ACME Style {% endblock %}
diff --git a/lib/acme/templates/base.html b/lib/acme/templates/base.html
index 5e1f847f..978f73bd 100755
--- a/lib/acme/templates/base.html
+++ b/lib/acme/templates/base.html
@@ -1,7 +1,6 @@
<!DOCTYPE html>
{% load static %}
-{% load projecttags %}
-{% load project_url_tag %}
+{% load jobtags %}
<html lang="en">
<head>
<title>
diff --git a/lib/acme/urls.py b/lib/acme/urls.py
index be10ef3e..9ce5d43a 100755
--- a/lib/acme/urls.py
+++ b/lib/acme/urls.py
@@ -1,4 +1,4 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url,include
from . import views, tables
urlpatterns = [
diff --git a/lib/cve_checker/__init__.py b/lib/cve_checker/__init__.py
new file mode 100755
index 00000000..e69de29b
--- /dev/null
+++ b/lib/cve_checker/__init__.py
diff --git a/lib/cve_checker/admin.py b/lib/cve_checker/admin.py
new file mode 100755
index 00000000..8c38f3f3
--- /dev/null
+++ b/lib/cve_checker/admin.py
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/lib/cve_checker/apps.py b/lib/cve_checker/apps.py
new file mode 100755
index 00000000..0f8bc069
--- /dev/null
+++ b/lib/cve_checker/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class Cve_CheckerConfig(AppConfig):
+ name = 'cve_checker'
diff --git a/lib/cve_checker/migrations/0001_initial.py b/lib/cve_checker/migrations/0001_initial.py
new file mode 100644
index 00000000..29cf266c
--- /dev/null
+++ b/lib/cve_checker/migrations/0001_initial.py
@@ -0,0 +1,71 @@
+# Generated by Django 4.0 on 2023-11-15 08:56
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ initial = True
+
+ dependencies = [
+ ('orm', '0014_alter_packagetocve_applicable'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='Ck_Audit',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ('create_time', models.DateTimeField(auto_now_add=True, null=True)),
+ ('orm_product', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='orm.product')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Layer',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Package',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ('version', models.CharField(max_length=80)),
+ ('unpatched_cnt', models.IntegerField(default=0)),
+ ('ignored_cnt', models.IntegerField(default=0)),
+ ('patched_cnt', models.IntegerField(default=0)),
+ ('ck_audit', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit')),
+ ('ck_layer', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_layer')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='Ck_Product',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('name', models.CharField(max_length=80)),
+ ],
+ ),
+ migrations.CreateModel(
+ name='CkPackage2Cve',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('ck_status', models.IntegerField(choices=[(0, 'Undefined'), (1, 'Unpatched'), (2, 'Ignored'), (3, 'Patched')], default=0)),
+ ('ck_audit', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit')),
+ ('ck_package', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issue2pk_package', to='cve_checker.ck_package')),
+ ('orm_cve', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='orm.cve')),
+ ],
+ ),
+ migrations.CreateModel(
+ name='CkPackage2CkProduct',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('cvesInRecord', models.BooleanField(default=True)),
+ ('ck_package', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_package')),
+ ('ck_product', models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_product')),
+ ],
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py b/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py
new file mode 100644
index 00000000..9caf7520
--- /dev/null
+++ b/lib/cve_checker/migrations/0002_ckpackage2cve_ck_audit.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.0 on 2023-11-12 18:32
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ckpackage2cve',
+ name='ck_audit',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='cve_checker.ck_audit'),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py b/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py
new file mode 100644
index 00000000..3e6fa9c2
--- /dev/null
+++ b/lib/cve_checker/migrations/0003_alter_ckpackage2cve_ck_package.py
@@ -0,0 +1,19 @@
+# Generated by Django 4.0 on 2023-11-12 20:46
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0002_ckpackage2cve_ck_audit'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='ckpackage2cve',
+ name='ck_package',
+ field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='issue2pk_package', to='cve_checker.ck_package'),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py b/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py
new file mode 100644
index 00000000..6f36579c
--- /dev/null
+++ b/lib/cve_checker/migrations/0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more.py
@@ -0,0 +1,28 @@
+# Generated by Django 4.0 on 2023-11-15 02:23
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0003_alter_ckpackage2cve_ck_package'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ck_package',
+ name='ignored_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='ck_package',
+ name='patched_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ migrations.AddField(
+ model_name='ck_package',
+ name='unpatched_cnt',
+ field=models.IntegerField(default=0),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0005_ckuploadmanager.py b/lib/cve_checker/migrations/0005_ckuploadmanager.py
new file mode 100644
index 00000000..bb211c58
--- /dev/null
+++ b/lib/cve_checker/migrations/0005_ckuploadmanager.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.0 on 2023-11-19 21:03
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0004_ck_package_ignored_cnt_ck_package_patched_cnt_and_more'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='CkUploadManager',
+ fields=[
+ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+ ('order', models.IntegerField(default=0)),
+ ('name', models.CharField(max_length=80)),
+ ('mode', models.CharField(max_length=20)),
+ ('path', models.TextField(blank=True)),
+ ('pem', models.TextField(blank=True)),
+ ('repo', models.TextField(blank=True)),
+ ('branch', models.TextField(blank=True)),
+ ('auto_refresh', models.BooleanField(default=True)),
+ ],
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py b/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py
new file mode 100644
index 00000000..46785880
--- /dev/null
+++ b/lib/cve_checker/migrations/0006_rename_mode_ckuploadmanager_import_mode.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-11-19 21:23
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0005_ckuploadmanager'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='ckuploadmanager',
+ old_name='mode',
+ new_name='import_mode',
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py b/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py
new file mode 100644
index 00000000..121dc9e6
--- /dev/null
+++ b/lib/cve_checker/migrations/0007_ckuploadmanager_select_list_and_more.py
@@ -0,0 +1,23 @@
+# Generated by Django 4.0 on 2023-11-20 07:19
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('cve_checker', '0006_rename_mode_ckuploadmanager_import_mode'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='ckuploadmanager',
+ name='select_list',
+ field=models.TextField(blank=True),
+ ),
+ migrations.AddField(
+ model_name='ckuploadmanager',
+ name='select_refresh',
+ field=models.DateTimeField(auto_now_add=True, null=True),
+ ),
+ ]
diff --git a/lib/cve_checker/migrations/__init__.py b/lib/cve_checker/migrations/__init__.py
new file mode 100755
index 00000000..e69de29b
--- /dev/null
+++ b/lib/cve_checker/migrations/__init__.py
diff --git a/lib/cve_checker/models.py b/lib/cve_checker/models.py
new file mode 100755
index 00000000..8ed61a7a
--- /dev/null
+++ b/lib/cve_checker/models.py
@@ -0,0 +1,165 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2017-2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from __future__ import unicode_literals
+
+import sys
+import os
+import re
+import itertools
+from signal import SIGUSR1
+from datetime import datetime
+
+from django.db import models, IntegrityError, DataError
+from django.db import transaction
+from django.core import validators
+from django.conf import settings
+import django.db.models.signals
+from django.db.models import F, Q, Sum, Count
+from django.contrib.auth.models import AbstractUser, Group, AnonymousUser
+
+from orm.models import Cve, Product
+from srtgui.api import execute_process, execute_process_close_fds
+
+import logging
+logger = logging.getLogger("srt")
+
+# quick development/debugging support
+from srtgui.api import _log
+
+#######################################################################
+# Models
+#
+
+# CVE Checker Audit
+class Ck_Audit(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+ orm_product = models.ForeignKey(default=None, to='orm.product', null=True, on_delete=models.CASCADE,)
+ create_time = models.DateTimeField(auto_now_add=True, null=True)
+ @property
+ def get_package_count(self):
+ return (Ck_Package.objects.filter(ck_audit=self).count())
+ @property
+ def get_issue_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).count())
+ @property
+ def get_unpatched_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.UNPATCHED).count())
+ @property
+ def get_ignored_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.IGNORED).count())
+ @property
+ def get_patched_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.PATCHED).count())
+ @property
+ def get_undefined_count(self):
+ return (CkPackage2Cve.objects.filter(ck_audit=self).filter(ck_status=CkPackage2Cve.UNDEFINED).count())
+
+# Generated YP package
+class Ck_Package(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+ version = models.CharField(max_length=80)
+ ck_layer = models.ForeignKey(default=None, to='cve_checker.ck_layer', null=True, on_delete=models.CASCADE,)
+ ck_audit = models.ForeignKey(default=None, to='cve_checker.ck_audit', null=True, on_delete=models.CASCADE,)
+ # These values are here for filtering support, given limitations of Django's distinct() and table filters
+ unpatched_cnt = models.IntegerField(default=0)
+ ignored_cnt = models.IntegerField(default=0)
+ patched_cnt = models.IntegerField(default=0)
+ @property
+ def get_issue_count(self):
+ return (CkPackage2Cve.objects.filter(ck_package=self).count())
+ @property
+ def get_product_count(self):
+ return (CkPackage2CkProduct.objects.filter(ck_package=self).count())
+ @property
+ def get_product_names(self):
+ id_list = []
+ for pk2pr in CkPackage2CkProduct.objects.filter(ck_package=self):
+ id_list.append(f"{pk2pr.ck_product.name} ({pk2pr.cvesInRecord})")
+ return(','.join(id_list))
+
+# Representation of NVD "CPE"
+class Ck_Product(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+
+# YP Layer
+class Ck_Layer(models.Model):
+ search_allowed_fields = ['name', ]
+ name = models.CharField(max_length=80)
+
+# CVEs of a Package
+# Unpatched = "Not Fixed" and is (assumed) "Vulnerable"
+# Ignored = "Not Vulnerable" or "Won't Fix" or "Fixed"
+# Patched = "Fixed" or "Not Vulnerable"
+class CkPackage2Cve(models.Model):
+ search_allowed_fields = ['orm_cve__name', 'orm_cve__description']
+ # CveCheck Issue Status
+ UNDEFINED = 0
+ UNPATCHED = 1
+ IGNORED = 2
+ PATCHED = 3
+ CK_STATUS = (
+ (UNDEFINED , 'Undefined'),
+ (UNPATCHED, 'Unpatched'),
+ (IGNORED, 'Ignored'),
+ (PATCHED, 'Patched'),
+ )
+ ck_package = models.ForeignKey(default=None, to='cve_checker.ck_package', related_name="issue2pk_package", null=True, on_delete=models.CASCADE,)
+ orm_cve = models.ForeignKey(default=None, to='orm.cve', null=True, on_delete=models.CASCADE,)
+ ck_status = models.IntegerField(choices=CK_STATUS, default=UNDEFINED)
+    # Link to grandparent audit is included for instant counts in the GUI
+ ck_audit = models.ForeignKey(default=None, to='cve_checker.ck_audit', null=True, on_delete=models.CASCADE,)
+ @property
+ def get_status_text(self):
+ if (0 > self.ck_status) or (self.ck_status >= len(CkPackage2Cve.CK_STATUS)):
+ return 'Undefined'
+ return CkPackage2Cve.CK_STATUS[self.ck_status][1]
+
+# Products of a Package
+class CkPackage2CkProduct(models.Model):
+ ck_package = models.ForeignKey(default=None, to='cve_checker.ck_package', null=True, on_delete=models.CASCADE,)
+ ck_product = models.ForeignKey(default=None, to='cve_checker.ck_product', null=True, on_delete=models.CASCADE,)
+ cvesInRecord = models.BooleanField(default=True)
+
+# Upload/import source manager for audit data (Repo|SSL|File)
+class CkUploadManager(models.Model):
+ order = models.IntegerField(default=0) # Display order
+ name = models.CharField(max_length=80) # Name of this import manager
+ import_mode = models.CharField(max_length=20) # Repo|SSL|File
+ path = models.TextField(blank=True) # Source path, path within repo
+ pem = models.TextField(blank=True) # PEM file for SSH
+ repo = models.TextField(blank=True) # Repository URL
+ branch = models.TextField(blank=True) # Branch in repo if any, for repo
+ auto_refresh = models.BooleanField(default=True) # if wild card, refresh when "Create Audit" is selected
+ select_refresh = models.DateTimeField(auto_now_add=True, null=True) # Last time select list was updated
+ select_list = models.TextField(blank=True) # List (if any) for pull down list, '|' delimited
+ @property
+ def is_select_list(self):
+ return (self.select_list and (0 < len(self.select_list)))
+ @property
+ def get_select_list(self):
+ return self.select_list.split('|')
+ @property
+ def get_path_filename(self):
+ return self.path.split('/')[-1]
diff --git a/lib/cve_checker/reports.py b/lib/cve_checker/reports.py
new file mode 100755
index 00000000..3735bcc3
--- /dev/null
+++ b/lib/cve_checker/reports.py
@@ -0,0 +1,511 @@
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+# Please run flake8 on this file before sending patches
+
+import os
+import re
+import logging
+from datetime import datetime, date
+import csv
+from openpyxl import Workbook
+from openpyxl import load_workbook
+from openpyxl.styles import Border, Side, PatternFill, Font, GradientFill, Alignment
+from openpyxl.utils import get_column_letter
+import shlex
+
+from srtgui.reports import Report, ReportManager, ProductsReport
+from cve_checker.models import Ck_Audit, Ck_Package, Ck_Product, Ck_Layer, CkPackage2CkProduct, CkPackage2Cve
+from srtgui.api import execute_process
+
+from django.db.models import Q, F
+from django.db import Error
+from srtgui.templatetags.jobtags import filtered_filesizeformat
+
+logger = logging.getLogger("srt")
+
+SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
+
+# quick development/debugging support
+from srtgui.api import _log
+
+###############################################################################
+# Helper Routines
+#
+
+def _log_args(msg, *args, **kwargs):
+ s = '%s:(' % msg
+ if args:
+ for a in args:
+ s += '%s,' % a
+ s += '),('
+ if kwargs:
+ for key, value in kwargs.items():
+ s += '(%s=%s),' % (key,value)
+ s += ')'
+ _log(s)
+
+def dict_get_value(dict,name,default):
+ return dict[name] if name in dict else default
+
+###############################################################################
+# Excel/openpyxl common look and feel formatting objects
+#
+
+#pyxl_border_all = Border(left=thin, right=thin, top=thin, bottom=thin) # , outline=True)
+pyxl_thin = Side(border_style="thin")
+pyxl_double = Side(border_style="double")
+pyxl_border_left = Border(left=pyxl_thin)
+pyxl_border_bottom = Border(bottom=pyxl_thin)
+pyxl_border_bottom_left = Border(bottom=pyxl_thin, left=pyxl_thin)
+pyxl_alignment_left = Alignment(horizontal='left')
+pyxl_alignment_right = Alignment(horizontal='right')
+pyxl_alignment_wrap = Alignment(wrap_text=True)
+pyxl_alignment_top_wrap = Alignment(vertical="top",wrap_text=True)
+pyxl_font_bold = Font(bold=True)
+pyxl_font_red = Font(color="A00000",bold=True,size = "13")
+pyxl_font_grn = Font(color="00A000",bold=True,size = "13")
+pyxl_font_blu = Font(color="0000A0",bold=True,size = "13")
+pyxl_font_orn = Font(color="FF6600",bold=True,size = "13")
+pyxl_fill_green = PatternFill(start_color="E0FFF0", end_color="E0FFF0", fill_type = "solid")
+# Warning: the form "PatternFill(bgColor="xxxxxx", fill_type = "solid")" returns black cells
+pyxl_backcolor_red = PatternFill(start_color='FCCDBA', end_color='FCCDBA', fill_type = "solid")
+pyxl_backcolor_orn = PatternFill(start_color='FBEAAB', end_color='FBEAAB', fill_type = "solid")
+pyxl_backcolor_yel = PatternFill(start_color='FCFDC7', end_color='FCFDC7', fill_type = "solid")
+pyxl_backcolor_blu = PatternFill(start_color='C5E2FF', end_color='C5E2FF', fill_type = "solid")
+pyxl_backcolor_grn = PatternFill(start_color='D6EDBD', end_color='D6EDBD', fill_type = "solid")
+pyxl_cve_fills = [pyxl_backcolor_red,pyxl_backcolor_orn,pyxl_backcolor_yel,pyxl_backcolor_blu,None,None,None]
+
+def pyxl_write_cell(ws,row_num,column_num,value,border=None,font=None,fill=None,alignment=None):
+ cell = ws.cell(row=row_num, column=column_num)
+ try:
+ cell.value = value
+ if fill:
+ cell.fill = fill
+ if alignment:
+ cell.alignment = alignment
+ if border:
+ cell.border = border
+ if font:
+ cell.font = font
+ except Exception as e:
+ print("ERROR:(%d,%d):%s" % (row_num,column_num,e))
+ # Optional next column return value
+ return(column_num+1)
+
+
+###############################################################################
+# Report Manage for cvecheckerRecord
+#
+
+def doCveCheckerAuditSummaryExcel(ck_audit,options):
+ _log_args("doCveCheckerAuditSummaryExcel", options)
+
+ report_page = dict_get_value(options,'report_page', '')
+ search = dict_get_value(options,'search', '')
+ filter = dict_get_value(options,'filter', '')
+ filter_value = dict_get_value(options,'filter_value', '')
+ orderby = dict_get_value(options,'orderby', '')
+ default_orderby = dict_get_value(options,'default_orderby', '')
+ audit_id = dict_get_value(options,'audit_id', 1)
+
+ do_local_job = False
+ job_local_cnt = 0
+
+ audit_name_fixed = ck_audit.name
+ for ch in (' ','/',':','<','>','$','(',')','\\'):
+ audit_name_fixed = audit_name_fixed.replace(ch,'_')
+
+ report_path = '.'
+ report_name = f"summary_report_{audit_name_fixed}.xlsx"
+ report_full_path = os.path.join(SRT_REPORT_DIR,report_path,report_name)
+ wb = Workbook()
+ primary_sheet_used = False
+
+ #
+ # audit-summary Critical High Medium Low P1 P2 P3 P4 Repos
+ #
+
+ if 'audit-summary' in options:
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'audit-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Audit Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Audit Summary")
+
+ ws.column_dimensions[get_column_letter(1)].width = 30
+ ws.column_dimensions[get_column_letter(2)].width = 40
+ row = 1
+
+ col = pyxl_write_cell(ws,row, 1,'CVE Checker Audit Report',font=pyxl_font_bold,border=pyxl_border_bottom)
+ row += 1
+
+ row += 1
+ col = pyxl_write_cell(ws,row, 1,'Audit name')
+ col = pyxl_write_cell(ws,row,col,ck_audit.name,font=pyxl_font_bold)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Release')
+ col = pyxl_write_cell(ws,row,col,ck_audit.orm_product.long_name)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Date')
+ col = pyxl_write_cell(ws,row,col,str(ck_audit.create_time))
+ row += 1
+
+ # Compute products and layers
+ product_count = 0
+ layers = {}
+ for ck_package in Ck_Package.objects.filter(ck_audit=ck_audit):
+ product_count += CkPackage2CkProduct.objects.filter(ck_package=ck_package).count()
+ layers[ck_package.ck_layer.name] = 1
+ layer_count = len(layers)
+
+ # Compute CVEs
+ severity_table = []
+ # Critical, High, Medium, Low, Unknown
+ severity_table.append([0,0,0,0,0]) # UNDEFINED
+ severity_table.append([0,0,0,0,0]) # UNPATCHED
+ severity_table.append([0,0,0,0,0]) # IGNORED
+ severity_table.append([0,0,0,0,0]) # PATCHED
+ s2i = {}
+ s2i['CRITICAL'] = 0
+ s2i['HIGH'] = 1
+ s2i['MEDIUM'] = 2
+ s2i['LOW'] = 3
+ s2i[''] = 4
+ unique_cves = {}
+ for issue in CkPackage2Cve.objects.filter(ck_audit=ck_audit):
+ unique_cves[issue.orm_cve.name] = 1
+ severity = issue.orm_cve.cvssV3_baseSeverity if issue.orm_cve.cvssV3_baseSeverity else issue.orm_cve.cvssV2_severity
+ severity = severity.upper()
+ try:
+ col = s2i[severity.upper()]
+ except:
+ col = 4
+ severity_table[issue.ck_status][col] += 1
+
+ row += 1
+ col = pyxl_write_cell(ws,row, 1,'Package Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_package_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Product Count')
+ col = pyxl_write_cell(ws,row,col, product_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Layer Count')
+ col = pyxl_write_cell(ws,row,col,layer_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Issue Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_issue_count)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Unique Issue Count')
+ col = pyxl_write_cell(ws,row,col,len(unique_cves))
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 4,'Critical',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'High',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Medium',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Low',border=pyxl_border_bottom)
+ col = pyxl_write_cell(ws,row,col,'Undefined',border=pyxl_border_bottom)
+ row += 1
+
+ def append_severity(status_id,row):
+ col = pyxl_write_cell(ws,row, 4,severity_table[status_id][0],fill=pyxl_backcolor_red)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][1],fill=pyxl_backcolor_orn)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][2],fill=pyxl_backcolor_blu)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][3],fill=pyxl_backcolor_grn)
+ col = pyxl_write_cell(ws,row,col,severity_table[status_id][4])
+
+ col = pyxl_write_cell(ws,row, 1,'Unpatched_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_unpatched_count,fill=pyxl_backcolor_red)
+ append_severity(CkPackage2Cve.UNPATCHED,row)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Ignored_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_ignored_count)
+ append_severity(CkPackage2Cve.IGNORED,row)
+ row += 1
+
+ col = pyxl_write_cell(ws,row, 1,'Patched_Count')
+ col = pyxl_write_cell(ws,row,col,ck_audit.get_patched_count)
+ append_severity(CkPackage2Cve.PATCHED,row)
+ row += 1
+
+ #
+ # package-summary
+ #
+
+ if 'package-summary' in options:
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'package-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Package Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Package Summary")
+
+ ws.column_dimensions[get_column_letter(1)].width = 30
+ ws.column_dimensions[get_column_letter(5)].width = 30
+ ws.column_dimensions[get_column_letter(6)].width = 60
+
+ row = 1
+ first_row = 2
+
+ col = 1
+ for header in ('Package','Version','Layer','Issues','Unpatched CVE','Products (cvesInRecord)'):
+# border = pyxl_border_bottom_left if (col in (3,7,12)) else pyxl_border_bottom
+ border = pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ if (col >= 2) and (col <= 10):
+ ws.column_dimensions[get_column_letter(col)].width = 11
+ col += 1
+ row += 1
+
+    # Sort packages alphabetically by name
+ package_list = Ck_Package.objects.filter(ck_audit=ck_audit).order_by('name')
+ for package in package_list:
+ col = pyxl_write_cell(ws,row, 1,package.name)
+ col = pyxl_write_cell(ws,row,col,package.version)
+ col = pyxl_write_cell(ws,row,col,package.ck_layer.name)
+ col = pyxl_write_cell(ws,row,col,package.get_issue_count)
+ col = pyxl_write_cell(ws,row,col,package.unpatched_cnt)
+ col = pyxl_write_cell(ws,row,col,package.get_product_names)
+ row += 1
+
+ #
+ # unpatched-summary
+ #
+
+ if ('unpatched-summary' in options) or ('unpatched-summary-compare' in options):
+ job_local_cnt += 1
+ if do_local_job: job_local.update(job_local_cnt,job_local_max,'unpatched-summary')
+
+ if not primary_sheet_used:
+ ws = wb.active
+ ws.title = "Unpatched Summary"
+ primary_sheet_used = True
+ else:
+ ws = wb.create_sheet("Unpatched Summary")
+
+ # Comparables
+ comparable_list = ['wr_trivy','ubuntu_trivy']
+
+ ws.column_dimensions[get_column_letter(1)].width = 20
+ ws.column_dimensions[get_column_letter(2)].width = 14
+ ws.column_dimensions[get_column_letter(7)].width = 14
+ ws.column_dimensions[get_column_letter(8)].width = 14
+ for i,comparable in enumerate(comparable_list):
+ ws.column_dimensions[get_column_letter(9+i)].width = 60
+
+ row = 1
+ first_row = 2
+ col = 1
+ header_list = ['Issue','Status','V3 Severity','V3 Score','V2 Severity','V2 Score','Published','Package']
+ if 'unpatched-summary-compare' in options:
+ header_list.extend(comparable_list)
+ for header in header_list:
+ border = pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ col += 1
+ row += 1
+
+    # Sort unpatched issues by CVE name
+ issues_list = CkPackage2Cve.objects.filter(ck_audit=ck_audit).filter(ck_status=CkPackage2Cve.UNPATCHED).order_by('orm_cve__name')
+
+    # Merge comparables?
+ comparibles = {}
+ if 'unpatched-summary-compare' in options:
+ issue_list = {}
+ for issue in issues_list:
+ issue_list[issue.orm_cve.name] = 1
+ filename = ('.cve_list.txt')
+ with open(filename, 'w') as outfile:
+ outfile.write('\n'.join(str(cve) for cve in issue_list))
+
+ for i,comparable in enumerate(comparable_list):
+ comparibles[comparable] = {}
+ exec_returncode,exec_stdout,exec_stderr = execute_process(f"bin/{comparable}/srtool_{comparable}.py",'--comparibles',filename)
+ for i,line in enumerate(exec_stdout.splitlines()):
+ cve,status = line.split('||')
+ comparibles[comparable][cve] = status.replace('[EOL]','\n')
+
+ # Generate output
+ for issue in issues_list:
+ col = pyxl_write_cell(ws,row, 1,issue.orm_cve.name,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.get_status_text,alignment=pyxl_alignment_top_wrap)
+ if (not issue.orm_cve.cvssV3_baseScore) or (0.1 > float(issue.orm_cve.cvssV3_baseScore)):
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ else:
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV3_baseSeverity,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV3_baseScore,alignment=pyxl_alignment_top_wrap)
+ if (not issue.orm_cve.cvssV2_baseScore) or (0.1 > float(issue.orm_cve.cvssV2_baseScore)):
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV2_severity,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.cvssV2_baseScore,alignment=pyxl_alignment_top_wrap)
+ else:
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,'',alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.orm_cve.publishedDate,alignment=pyxl_alignment_top_wrap)
+ col = pyxl_write_cell(ws,row,col,issue.ck_package.name,alignment=pyxl_alignment_top_wrap)
+
+ if 'unpatched-summary-compare' in options:
+            # Extend the height of the row to show the comparable data
+ ws.row_dimensions[row].height = 70
+ for i,comparable in enumerate(comparable_list):
+ if issue.orm_cve.name in comparibles[comparable]:
+ col = pyxl_write_cell(ws,row,col,comparibles[comparable][issue.orm_cve.name],alignment=pyxl_alignment_top_wrap)
+ row += 1
+
+ wb.save(report_name)
+ return(report_name)
+
+###############################################################################
+#
+# Audit Difference Report
+#
+# db_audit_1 is older
+# db_audit_2 is newer
+
+#
+# TBD
+#
+
+def do_audit_cvechecker_diff_report(db_audit_1, db_audit_2, options):
+ _log_args("DO_AUDIT_DIFF_REPORT", db_audit_1.name, db_audit_1.id, db_audit_2.name, db_audit_2.id, options)
+ global audit_summary
+
+ records = dict_get_value(options,'records','')
+ format = dict_get_value(options,'format', '')
+ title = dict_get_value(options,'title', '')
+ report_type = dict_get_value(options,'report_type', '')
+ record_list = dict_get_value(options,'record_list', '')
+
+ audit_scope_criticals = ('0' == dict_get_value(options,'audit_scope', '0'))
+ delimiter = ','
+
+ #
+ # Audits load
+ #
+ db_table_1 = {}
+ for db_rec in cvecheckerRecord.objects.filter(cvecheckeraudit=db_audit_1):
+ key= f"{db_rec.plugin_id}"
+ db_table_1[key] = db_rec.id
+
+ db_table_2 = {}
+ for db_rec in cvecheckerRecord.objects.filter(cvecheckeraudit=db_audit_2):
+ key= f"{db_rec.plugin_id}"
+ db_table_2[key] = db_rec.id
+ _log(f"FOO:DB_TABLE_1:{len(db_table_1)}")
+ _log(f"FOO:DB_TABLE_2:{len(db_table_2)}")
+
+
+ # Audits compare
+ #
+ db_add = []
+ db_remove = []
+ for key in db_table_1: # Is in Older
+ if not key in db_table_2: # Not in Newer (removed)
+ db_remove.append(db_table_1[key])
+ for key in db_table_2: # Is in Newer
+ if not key in db_table_1: # Not in Older (added)
+ db_add.append(db_table_2[key])
+
+ def update_ws(ws,msg,audit,table):
+ row = 1
+ col = 1
+ for header in ('name', 'port','protocol','product'):
+ col = pyxl_write_cell(ws,row,col,header,border = pyxl_border_bottom)
+ row += 1
+ ws.column_dimensions[get_column_letter(1)].width = 40
+ ws.column_dimensions[get_column_letter(2)].width = 14
+ ws.column_dimensions[get_column_letter(3)].width = 40
+ ws.column_dimensions[get_column_letter(5)].width = 40
+
+ count = 0
+ cvechecker_obj=cvecheckerRecord.objects.filter(cvecheckeraudit=audit)
+ for db_rec in cvechecker_obj :
+ if db_rec.id in table:
+ count += 1
+ col = 1
+ col = pyxl_write_cell(ws,row,col,db_rec.name)
+ col = pyxl_write_cell(ws,row,col,db_rec.port)
+ col = pyxl_write_cell(ws,row,col,db_rec.protocol)
+ col = pyxl_write_cell(ws,row,col,db_rec.cvecheckeraudit.product)
+ row += 1
+ row -= 1
+ for i in range(1,5):
+ ws.cell(row=row,column=i).border=pyxl_border_bottom
+ row += 1
+ pyxl_write_cell(ws,row,1,msg)
+ pyxl_write_cell(ws,row,2,count)
+
+ row += 2
+ pyxl_write_cell(ws,row,1,f"cvechecker ({audit.id})")
+ pyxl_write_cell(ws,row,2,audit.name)
+ #pyxl_write_cell(ws,row,4,audit.audit_date)
+
+ report_name = f"cvecheckerbench_diff_report_{db_audit_1.id}_{db_audit_2.id}.xlsx"
+ report_path = '.'
+ report_full_path = os.path.join(SRT_REPORT_DIR,report_path,report_name)
+ wb = Workbook()
+ ws = wb.active
+ ws.title = 'Added'
+ update_ws(ws,'Added',db_audit_2,db_add)
+ ws = wb.create_sheet('Removed')
+ update_ws(ws,'Removed',db_audit_1,db_remove)
+
+ wb.save(report_full_path)
+ return(report_full_path)
+
+###############################################################################
+# Report Manage for cvechecker
+#
+
+class cvecheckerRecordReportManager():
+ @staticmethod
+ def get_report_class(parent_page, *args, **kwargs):
+ _log("CVECHECKERREPORTMANAGER:%s:" % parent_page)
+ if 'gitleaks' == parent_page:
+ # Extend the Products report
+ return cvecheckerRecordSummaryReport(parent_page, *args, **kwargs)
+ else:
+ # Return the default for all other reports
+ return ReportManager.get_report_class(parent_page, *args, **kwargs)
+
+ @staticmethod
+ def get_context_data(parent_page, *args, **kwargs):
+ _log_args("CVECHECKER_REPORTMANAGER_CONTEXT", *args, **kwargs)
+ reporter = cvecheckerRecordReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.get_context_data(*args, **kwargs)
+
+ @staticmethod
+ def exec_report(parent_page, *args, **kwargs):
+ _log_args("CVECHECKER_REPORTMANAGER_EXEC", *args, **kwargs)
+ reporter = cvecheckerRecordReportManager.get_report_class(parent_page, *args, **kwargs)
+ return reporter.exec_report(*args, **kwargs)
diff --git a/lib/cve_checker/tables.py b/lib/cve_checker/tables.py
new file mode 100755
index 00000000..252d109f
--- /dev/null
+++ b/lib/cve_checker/tables.py
@@ -0,0 +1,695 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+#
+# NOTICE: Important ToasterTable implementation concepts and limitations
+#
+# 1) The order of table method execution. This implies that data added
+# to the table object in "get_context_data" is NOT persistent.
+#
+# a) __init__
+# b) get_context_data
+# c) __init__ (second call reason unknown)
+# d) setup_queryset
+# e) setup_filters (if present)
+# f) setup_columns
+# g) apply_row_customization (if present)
+#
+# 2) Named URL path arguments from "urls.py" are accessible via kwargs
+# WARNING: these values are NOT available in "__init__"
+#
+# Example:
+# urls.py : url(r'^foo/(?P<my_value>\d+)$',
+# tables.py: my_value = int(kwargs['my_value'])
+#
+# 3) Named URL query arguments the table's url are accessible via the request
+#
+# Example:
+# url : http://.../foo/bar/42605?my_value=25
+# tables.py: my_value = self.request.GET.get('my_value','0')
+#
+# 4) The context[] values are NOT present in the individual "setup_columns" context
+# They must be explicitly implemented into the individual column data without Django translation
+#
+# 5) The HTML page's templatetags are NOT present in the "setup_columns" context
+# They must be explicitly added into the template code
+#
+# Example:
+# static_data_template = '''
+# {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+# '''
+#
+# WARNING: because there is no context (#4), you cannot for example use dictionary lookup filters
+# use apply_row_customization() method instead, and set the self.dict_name in setup_columns()
+#
+
+import os
+import re
+import json
+from datetime import timedelta, datetime, date
+import pytz
+import traceback
+
+from django.db.models import Q, Max, Sum, Count, When, Case, Value, IntegerField
+from django.urls import re_path as url
+from django.urls import reverse, resolve
+from django.http import HttpResponse
+from django.views.generic import TemplateView
+
+from srtgui.widgets import ToasterTable
+from cve_checker.models import Ck_Audit, Ck_Package, Ck_Product, Ck_Layer, CkPackage2CkProduct, CkPackage2Cve, CkUploadManager
+from orm.models import Cve, Product
+from orm.models import Notify, NotifyAccess, NotifyCategories
+from orm.models import DataSource, SrtSetting, Job
+from users.models import SrtUser, UserSafe
+from srtgui.api import execute_process
+
+from srtgui.tablefilter import TableFilter
+from srtgui.tablefilter import TableFilterActionToggle
+from srtgui.tablefilter import TableFilterActionDateRange
+from srtgui.tablefilter import TableFilterActionDay
+
+# quick development/debugging support
+from srtgui.api import _log
+
class CveCheckerAuditsTable(ToasterTable):
    """Table of All CvecheckerRecord audits"""

    def __init__(self, *args, **kwargs):
        super(CveCheckerAuditsTable, self).__init__(*args, **kwargs)
        # Newest audits first by default
        self.default_orderby = "-id"

    def get_context_data(self, **kwargs):
        """Build the template context for the audits page.

        Besides the table context, this provides the product list, the
        "new audit" dialog defaults, the recent-jobs strip, and the import
        manager rows. It also enforces that the two built-in CkUploadManager
        entries exist, and triggers a refresh of the import select lists via
        an external helper script.
        """
        create_time = datetime.now(pytz.utc)
        context = super(CveCheckerAuditsTable, self).get_context_data(**kwargs)
        context['orm_products'] = Product.objects.all().order_by('name')
        # Auto Builder branch names offered in the new-audit dialog
        context['ab_sets'] = ("master","nanbield","mickledore","langdale","kirkstone","dunfell")
        # Suggested default name for a new audit, e.g. "audit_20230501"
        context['new_audit_name'] = 'audit_%s' % (create_time.strftime('%Y%m%d'))
        context['default_product'] = 'Yocto Project master'
        context['srt_cvechecker_update'] = SrtSetting.get_setting('SRT_CVECHECKER_UPDATE','')
        context['mru'] = Job.get_recent()
        context['mrj_type'] = 'all'
        # Enforce at least the "Upload" import
        ck_import_obj,created = CkUploadManager.objects.get_or_create(name='Upload')
        if created:
            ck_import_obj.order = 1
            ck_import_obj.import_mode = 'Upload'
            ck_import_obj.path = ''
            ck_import_obj.pem = ''
            ck_import_obj.repo = ''
            ck_import_obj.branch = ''
            ck_import_obj.auto_refresh = False
            ck_import_obj.select_refresh = datetime.now(pytz.utc)
            ck_import_obj.select_list = "master|nanbield|mickledore|langdale|kirkstone|dunfell"
            ck_import_obj.save()
        # Also enforce a default Auto Builder repo import entry
        ck_import_obj,created = CkUploadManager.objects.get_or_create(name='Import from Auto Builder scan')
        if created:
            ck_import_obj.order = 2
            ck_import_obj.import_mode = 'Repo'
            ck_import_obj.path = 'yocto-metrics/cve-check'
            ck_import_obj.pem = ''
            ck_import_obj.repo = 'git://git.yoctoproject.org/yocto-metrics'
            ck_import_obj.branch = ''
            ck_import_obj.auto_refresh = True
            ck_import_obj.select_refresh = datetime.now(pytz.utc)
            ck_import_obj.select_list = "master|nanbield|mickledore|langdale|kirkstone|dunfell"
            ck_import_obj.save()
        context['ckuploadmanager'] = CkUploadManager.objects.all().order_by('order')
        # Update the Import select tables
        # (best-effort: a failure is logged but does not block the page)
        cmnd = ["bin/cve_checker/srtool_cvechecker.py","--update-imports","-f"]
        result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
        if 0 != result_returncode:
            _log(f"ERROR:{cmnd}: {result_stderr}:{result_stdout}:")
        return context

    def setup_queryset(self, *args, **kwargs):
        # All audits, newest first
        self.queryset = Ck_Audit.objects.all()
        self.queryset = self.queryset.order_by('-id')

    def setup_filters(self, *args, **kwargs):
        # No filters on the audits table
        pass

    def setup_columns(self, *args, **kwargs):

        self.add_column(title="Id",
                        field_name="id",
                        hideable=False,
                        orderable=True,
                        )

        # In-place edit support: a visible display span plus a hidden text
        # input toggled by the page's edit/save/cancel glyphicon handlers
        name_template = '''
            <span id="audit_name-disp-{{data.id}}"><td><a href="{% url 'cvechecker_audit' data.id %}">{{data.name}}</a></td></span>
            <span id="audit_name-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_name-text-{{data.id}}" value="{{data.name}}" size="50">
            </span>
            '''
        self.add_column(title="Name",
                        orderable=True,
                        static_data_name="name",
                        static_data_template=name_template,
                        )

        self.add_column(title="Create Time",
                        field_name="create_time",
                        hideable=True,
                        hidden=True,
                        orderable=True,
                        )

        # Package count links into the per-audit package table
        ck_package_link_template = '''
            <td><a href="{% url 'cvechecker_audit' data.id %}">{{data.get_package_count}}</a></td>
            '''
        self.add_column(title="Package Count",
                        static_data_name="count",
                        static_data_template=ck_package_link_template,
                        )

        # CVE status counts, computed by Ck_Audit helper methods
        self.add_column(title="Unpatched CVE",
                        static_data_name="unpatched_count",
                        static_data_template='<b><label style="color:DarkRed">{{data.get_unpatched_count}}</label></b>',
                        )
        self.add_column(title="Ignored CVE",
                        static_data_name="ignored_count",
                        static_data_template='<label style="color:green">{{data.get_ignored_count}}</label>',
                        )
        self.add_column(title="Patched CVE",
                        static_data_name="patched_count",
                        static_data_template='<label style="color:green">{{data.get_patched_count}}</label>',
                        )
        self.add_column(title="Undefined CVE",
                        static_data_name="undefined_count",
                        static_data_template='<label style="color:DarkRed">{{data.get_undefined_count}}</label>',
                        hideable=True,
                        hidden=True,
                        )

        self.add_column(title="YP Release",
                        static_data_name="orm_product__profile",
                        static_data_template='{{data.orm_product.long_name}}',
                        orderable=True,
                        )

        # Contributors additionally get the edit/save/cancel/delete controls
        if UserSafe.is_contributor(self.request.user):
            manage_link_template = '''
                <span class="glyphicon glyphicon-edit edit-ck-entry" id="edit-entry-{{data.id}}" x-data="{{data.id}}"></span>
                <span class="glyphicon glyphicon glyphicon glyphicon-ok save-ck-entry" id="save-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Chartreuse;"></span>
                &nbsp;&nbsp;&nbsp;&nbsp;
                <span class="glyphicon glyphicon glyphicon glyphicon-remove cancel-ck-entry" id="cancel-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Crimson;"></span>
                <span class="glyphicon glyphicon-trash trash-audit" x-data="{{data.create_time}}|{{data.id}}"></span>
                '''
            self.add_column(title="Manage",
                            hideable=True,
                            static_data_name="manage",
                            static_data_template=manage_link_template,
                            )
+
+
class CveCheckerAuditTable(ToasterTable):
    """Table of All entries in CvecheckerRecord"""
    # Package-centric view of a single audit: one row per Ck_Package.

    def __init__(self, *args, **kwargs):
        super(CveCheckerAuditTable, self).__init__(*args, **kwargs)
        self.default_orderby = "name"

    def get_context_data(self, **kwargs):
        # Expose the audit record plus the recent-jobs strip to the template
        context = super(CveCheckerAuditTable, self).get_context_data(**kwargs)
        audit_id = int(kwargs['audit_id'])
        context['Ck_Audit'] = Ck_Audit.objects.get(id=audit_id)
        context['mru'] = Job.get_recent()
        context['mrj_type'] = 'all'
        return context

    def setup_queryset(self, *args, **kwargs):
        # All packages belonging to this audit (audit id comes from the URL)
        audit_id = int(kwargs['audit_id'])
        self.queryset = Ck_Package.objects.filter(ck_audit_id=audit_id)
        self.queryset = self.queryset.order_by(self.default_orderby)

    def setup_filters(self, *args, **kwargs):
        # Status filter
        # "Unpatched" = at least one unpatched CVE; otherwise patched/ignored
        is_status = TableFilter(name="is_status", title="Status")
        # NOTE(review): audit_id is computed here but never used
        audit_id = int(kwargs['audit_id'])
        status_filter = TableFilterActionToggle(
            "unpatched",
            "Unpatched",
            Q(unpatched_cnt__gt=0))
        is_status.add_action(status_filter)
        status_filter = TableFilterActionToggle(
            "patched/ignored",
            "Patched/Ignored",
            Q(unpatched_cnt=0))
        is_status.add_action(status_filter)
        self.add_filter(is_status)

    def setup_columns(self, *args, **kwargs):

        self.add_column(title="Id",
                        field_name="id",
                        hideable=True,
                        hidden=True,
                        )

        self.add_column(title="Name",
                        field_name="name",
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="Version",
                        field_name="version",
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="Layer",
                        field_name="ck_layer",
                        hideable=False,
                        orderable=True,
                        static_data_name="ck_layer",
                        static_data_template="{{data.ck_layer.name}}",
                        )

        # Issue count links into the per-package issue table
        issue_link_template = '''
            <a href="{% url 'cvechecker_issue' data.id %}">{{data.get_issue_count}}</a>
            '''
        self.add_column(title="Issues",
                        static_data_name="issue_count",
                        static_data_template=issue_link_template,
                        )

        # Red when any unpatched CVEs remain, green otherwise
        unpatched_link_template = '''
            <label style="color:{% if data.unpatched_cnt %}DarkRed{% else %}green{% endif %}">{{data.unpatched_cnt}}</label>
            '''
        self.add_column(title="Unpatched CVE",
                        filter_name="is_status",
                        static_data_name="unpatched_count",
                        static_data_template=unpatched_link_template,
                        )

        # Product names link into the per-package product table
        product_link_template = '''
            <td><a href="{% url 'cvechecker_product' data.id %}">{{data.get_product_names}}</a></td>
            '''
        self.add_column(title="Products (cvesInRecord)",
                        static_data_name="product_count",
                        static_data_template=product_link_template,
                        )
+
+
class CveCheckerAuditCveTable(ToasterTable):
    """Table of All entries in CvecheckerRecord"""
    # CVE-centric view of a single audit: one row per (package, CVE) mapping.

    def __init__(self, *args, **kwargs):
        super(CveCheckerAuditCveTable, self).__init__(*args, **kwargs)
        self.default_orderby = "orm_cve__name"

    def get_context_data(self, **kwargs):
        # Expose the audit record plus the recent-jobs strip to the template
        context = super(CveCheckerAuditCveTable, self).get_context_data(**kwargs)
        audit_id = int(kwargs['audit_id'])
        context['Ck_Audit'] = Ck_Audit.objects.get(id=audit_id)
        context['mru'] = Job.get_recent()
        context['mrj_type'] = 'all'
        return context

    def setup_queryset(self, *args, **kwargs):
        # All CVE mappings belonging to this audit (audit id from the URL)
        audit_id = int(kwargs['audit_id'])
        self.queryset = CkPackage2Cve.objects.filter(ck_audit_id=audit_id)
        self.queryset = self.queryset.order_by(self.default_orderby)

    def setup_filters(self, *args, **kwargs):
        # Status filter: one toggle per cvechecker status (Unpatched..Patched)
        is_status = TableFilter(name="is_status", title="Status")
        for status_id in range(CkPackage2Cve.UNPATCHED,CkPackage2Cve.PATCHED+1):
            status_filter = TableFilterActionToggle(
                CkPackage2Cve.CK_STATUS[status_id][1],
                CkPackage2Cve.CK_STATUS[status_id][1],
                Q(ck_status=status_id))
            is_status.add_action(status_filter)
        self.add_filter(is_status)

    def setup_columns(self, *args, **kwargs):

        self.add_column(title="Id",
                        field_name="id",
                        hideable=True,
                        hidden=True,
                        )

        cve_link_template = '''
            <a href="{% url 'cve' data.orm_cve.name %}" target="_blank">{{data.orm_cve.name}}</a>
            '''
        self.add_column(title="Name",
                        static_data_name="orm_cve__name",
                        static_data_template=cve_link_template,
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="Status",
                        filter_name="is_status",
                        static_data_name="status",
                        static_data_template="{{data.get_status_text}}",
                        )

        self.add_column(title="V3 Severity",
                        orderable=True,
                        static_data_name="orm_cve__cvssV3_baseSeverity",
                        static_data_template="{{data.orm_cve.cvssV3_baseSeverity}}",
                        )

        self.add_column(title="V3 Score",
                        orderable=True,
                        static_data_name="orm_cve__cvssV3_baseScore",
                        static_data_template="{{data.orm_cve.cvssV3_baseScore}}",
                        )

        # FIX: the V2 and Published columns had static_data_name values with a
        # stray "data." prefix ("data.orm_cve__..."), unlike every other column.
        # That prefix is not a valid ORM field lookup, so sorting those columns
        # could not resolve; use the plain double-underscore lookup form.
        self.add_column(title="V2 Severity",
                        orderable=True,
                        static_data_name="orm_cve__cvssV2_severity",
                        static_data_template="{{data.orm_cve.cvssV2_severity}}",
                        )

        self.add_column(title="V2 Score",
                        orderable=True,
                        static_data_name="orm_cve__cvssV2_baseScore",
                        static_data_template="{{data.orm_cve.cvssV2_baseScore}}",
                        )

        self.add_column(title="Published",
                        static_data_name="orm_cve__publishedDate",
                        static_data_template="{{data.orm_cve.publishedDate}}",
                        )

        self.add_column(title="Package",
                        orderable=True,
                        static_data_name="ck_package__name",
                        static_data_template="{{data.ck_package.name}}",
                        )
+
+
class CveCheckerIssueTable(ToasterTable):
    """Table of Issues in CvecheckerRecord"""
    # Issue view for one package: one row per CVE mapped to the package.

    def __init__(self, *args, **kwargs):
        super(CveCheckerIssueTable, self).__init__(*args, **kwargs)
        self.default_orderby = "orm_cve__name"

    def get_context_data(self, **kwargs):
        # Expose the owning package plus the recent-jobs strip to the template
        context = super(CveCheckerIssueTable, self).get_context_data(**kwargs)
        package_id = int(kwargs['package_id'])
        context['Ck_Package'] = Ck_Package.objects.get(id=package_id)
        context['mru'] = Job.get_recent()
        context['mrj_type'] = 'all'
        return context

    def setup_queryset(self, *args, **kwargs):
        # All CVE mappings for this package (package id comes from the URL)
        package_id = int(kwargs['package_id'])
        self.queryset = CkPackage2Cve.objects.filter(ck_package_id=package_id)
        self.queryset = self.queryset.order_by(self.default_orderby)

    def setup_filters(self, *args, **kwargs):
        # Status filter: one toggle per cvechecker status (Unpatched..Patched)
        is_status = TableFilter(name="is_status", title="Status")
        for status_id in range(CkPackage2Cve.UNPATCHED,CkPackage2Cve.PATCHED+1):
            status_filter = TableFilterActionToggle(
                CkPackage2Cve.CK_STATUS[status_id][1],
                CkPackage2Cve.CK_STATUS[status_id][1],
                Q(ck_status=status_id))
            is_status.add_action(status_filter)
        self.add_filter(is_status)

    def setup_columns(self, *args, **kwargs):

        cve_link_template = '''
            <a href="{% url 'cve' data.orm_cve.name %}" target="_blank">{{data.orm_cve.name}}</a>
            '''
        self.add_column(title="Issue",
                        static_data_name="orm_cve__name",
                        static_data_template=cve_link_template,
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="CK Status",
                        filter_name="is_status",
                        static_data_name="ck_status",
                        static_data_template="{{data.get_status_text}}",
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="description",
                        static_data_name="orm_cve__description",
                        static_data_template="{{data.orm_cve.description}}",
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="V3 Score",
                        static_data_name="orm_cve__cvssV3_baseScore",
                        static_data_template="{{data.orm_cve.cvssV3_baseScore}}",
                        hideable=False,
                        orderable=True,
                        )

        self.add_column(title="V3 Severity",
                        static_data_name="orm_cve__cvssV3_baseSeverity",
                        static_data_template="{{data.orm_cve.cvssV3_baseSeverity}}",
                        hideable=False,
                        )

        self.add_column(title="V2 Score",
                        static_data_name="orm_cve__cvssV2_baseScore",
                        static_data_template="{{data.orm_cve.cvssV2_baseScore}}",
                        hideable=True,
                        )

        self.add_column(title="V2 Severity",
                        static_data_name="orm_cve__cvssV2_severity",
                        static_data_template="{{data.orm_cve.cvssV2_severity}}",
                        hideable=True,
                        )

        self.add_column(title="Publish Date",
                        static_data_name="orm_cve__publishedDate",
                        static_data_template="{{data.orm_cve.publishedDate}}",
                        hideable=True,
                        )

        self.add_column(title="Last Modified Date",
                        static_data_name="orm_cve__lastModifiedDate",
                        static_data_template="{{data.orm_cve.lastModifiedDate}}",
                        hideable=True,
                        )
+
+
class CveCheckerProductTable(ToasterTable):
    """Table of All entries in CvecheckerRecord"""
    # Product view for one package: one row per product mapping.

    def __init__(self, *args, **kwargs):
        super(CveCheckerProductTable, self).__init__(*args, **kwargs)
        self.default_orderby = "ck_product__name"

    def get_context_data(self, **kwargs):
        # Hand the owning package and the recent-jobs strip to the template
        context = super(CveCheckerProductTable, self).get_context_data(**kwargs)
        context['Ck_Package'] = Ck_Package.objects.get(id=int(kwargs['package_id']))
        context['mru'] = Job.get_recent()
        context['mrj_type'] = 'all'
        return context

    def setup_queryset(self, *args, **kwargs):
        # Product mappings for this package, sorted by product name
        self.queryset = CkPackage2CkProduct.objects.filter(
            ck_package_id=int(kwargs['package_id'])
        ).order_by(self.default_orderby)

    def setup_filters(self, *args, **kwargs):
        # This table offers no filters
        pass

    def setup_columns(self, *args, **kwargs):
        self.add_column(
            title="Product",
            hideable=False,
            orderable=True,
            static_data_name="ck_product__name",
            static_data_template="{{data.ck_product.name}}",
        )
        self.add_column(
            title="CvesInRecord",
            hideable=False,
            orderable=True,
            static_data_name="cvesInRecord",
            static_data_template="{{data.cvesInRecord}}",
        )
+
+
class CveCheckerImportManagementTable(ToasterTable):
    """Table of Audit import meta-management """
    # One row per CkUploadManager entry; most columns support in-place
    # editing via paired display/edit spans toggled by page javascript.

    def __init__(self, *args, **kwargs):
        super(CveCheckerImportManagementTable, self).__init__(*args, **kwargs)
        self.default_orderby = "order"

    def get_context_data(self, **kwargs):
        context = super(CveCheckerImportManagementTable, self).get_context_data(**kwargs)
        return context

    def setup_queryset(self, *args, **kwargs):
        # All import manager entries, in display order
        self.queryset = CkUploadManager.objects.all()
        self.queryset = self.queryset.order_by(self.default_orderby)

    def setup_filters(self, *args, **kwargs):
        # No filters on this table
        pass

    def setup_columns(self, *args, **kwargs):

        # Admin users additionally see the raw database id (hidden by default)
        if UserSafe.is_admin(self.request.user):
            self.add_column(title="ID",
                            field_name="id",
                            hideable=True,
                            hidden = True,
                            )

        order_template = '''
            <span id="audit_order-disp-{{data.id}}"><td><a href="{% url 'cvechecker_audit' data.id %}">{{data.order}}</a></td></span>
            <span id="audit_order-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_order-text-{{data.id}}" value="{{data.order}}" size="10">
            </span>
            '''
        self.add_column(title="Order",
                        static_data_name="order",
                        static_data_template=order_template,
                        orderable=True,
                        )

        name_template = '''
            <span id="audit_name-disp-{{data.id}}">{{data.name}}</span>
            <span id="audit_name-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_name-text-{{data.id}}" value="{{data.name}}" size="20">
            </span>
            '''
        self.add_column(title="Title",
                        static_data_name="name",
                        static_data_template=name_template,
                        )

        # Edit mode offers the three supported import mechanisms
        mode_template = '''
            <span id="audit_mode-disp-{{data.id}}">{{data.import_mode}}</span>
            <span id="audit_mode-edit-{{data.id}}" style="display:none;">
            <select id="audit_mode-text-{{data.id}}" name="audit_mode-text-{{data.id}}">
            <option value="Repo" {% if "Repo" == data.import_mode %}selected{% endif %} >Repo</option>
            <option value="SSL" {% if "SSL" == data.import_mode %}selected{% endif %} >SSL</option>
            <option value="File" {% if "File" == data.import_mode %}selected{% endif %} >File</option>
            </select>
            </span>
            '''
        self.add_column(title="Mode",
                        static_data_name="import_mode",
                        static_data_template=mode_template,
                        )

        repo_template = '''
            <span id="audit_repo-disp-{{data.id}}">{{data.repo}}</span>
            <span id="audit_repo-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_repo-text-{{data.id}}" value="{{data.repo}}" size="30">
            </span>
            '''
        self.add_column(title="Repo URL",
                        static_data_name="repo",
                        static_data_template=repo_template,
                        )

        path_template = '''
            <span id="audit_path-disp-{{data.id}}">{{data.path}}</span>
            <span id="audit_path-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_path-text-{{data.id}}" value="{{data.path}}" size="30">
            </span>
            '''
        self.add_column(title="Path",
                        static_data_name="path",
                        static_data_template=path_template,
                        )

        pem_template = '''
            <span id="audit_pem-disp-{{data.id}}">{{data.pem}}</span>
            <span id="audit_pem-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_pem-text-{{data.id}}" value="{{data.pem}}" size="20">
            </span>
            '''
        self.add_column(title="Pem File",
                        static_data_name="pem_file",
                        static_data_template=pem_template,
                        )

        branch_template = '''
            <span id="audit_branch-disp-{{data.id}}">{{data.branch}}</span>
            <span id="audit_branch-edit-{{data.id}}" style="display:none;">
            <input type="text" id="audit_branch-text-{{data.id}}" value="{{data.branch}}" size="10">
            </span>
            '''
        self.add_column(title="Branch",
                        static_data_name="branch",
                        static_data_template=branch_template,
                        )

        # Disabled dead code: the "Auto Refresh" column is intentionally
        # switched off (kept for possible future use)
        if False:
            refresh_template = '''
                {% if "Upload" == data.name %}{% else %}
                <span id="audit_refresh-disp-{{data.id}}">{{data.auto_refresh}}</span>
                <span id="audit_refresh-edit-{{data.id}}" style="display:none;">
                <select id="audit_refresh-text-{{data.id}}" name="audit_mode-text-{{data.id}}">
                <option value="False" {% if False == data.auto_refresh %}selected{% endif %} >Absolute choice</option>
                <option value="True" {% if True == data.auto_refresh %}selected{% endif %} >Automatic refresh choices</option>
                </select>

                </span>
                {% endif %}
                '''
            self.add_column(title="Auto Refresh",
                            static_data_name="auto_refresh",
                            static_data_template=refresh_template,
                            )

        self.add_column(title="Select Refresh",
                        field_name="select_refresh",
                        hideable=True,
                        hidden = True,
                        )
        self.add_column(title="Select List",
                        field_name="select_list",
                        hideable=True,
                        hidden = True,
                        )

        # Contributors get edit/save/cancel/delete controls; the built-in
        # "Upload" entry is not editable or deletable
        if UserSafe.is_contributor(self.request.user):
            manage_link_template = '''
                {% if "Upload" == data.name %}Built-in{% else %}
                <span class="glyphicon glyphicon-edit edit-ck-entry" id="edit-entry-{{data.id}}" x-data="{{data.id}}"></span>
                <span class="glyphicon glyphicon glyphicon glyphicon-ok save-ck-entry" id="save-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Chartreuse;"></span>
                &nbsp;&nbsp;&nbsp;&nbsp;
                <span class="glyphicon glyphicon glyphicon glyphicon-remove cancel-ck-entry" id="cancel-entry-{{data.id}}" x-data="{{data.id}}" style="display:none;color: Crimson;"></span>
                &nbsp;&nbsp;&nbsp;&nbsp;
                <span class="glyphicon glyphicon-trash trash-import" x-data="{{data.name}}|{{data.id}}"></span>
                {% endif %}
                '''
            self.add_column(title="Manage",
                            hideable=True,
                            static_data_name="manage",
                            static_data_template=manage_link_template,
                            )
diff --git a/lib/cve_checker/templates/ck-audit-toastertable.html b/lib/cve_checker/templates/ck-audit-toastertable.html
new file mode 100755
index 00000000..a9d4d227
--- /dev/null
+++ b/lib/cve_checker/templates/ck-audit-toastertable.html
@@ -0,0 +1,223 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 40%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} CVE Check Packages {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li>Packages for "{{Ck_Audit.name}}"</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Audit Name: <b>"{{Ck_Audit.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> YP Release: <b>"{{Ck_Audit.orm_product.long_name}}"</b> </h2>
+ </div>
+</div>
+
+<!-- <p><b><big>Actions: </big></b> -->
+
+<p><b><big>Actions: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" >Summary report</a>
+ <a class="btn btn-default navbar-btn " id="vex-report" disabled>VEX</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default btn-info" disabled>Package View</a>
+ <a class="btn btn-default " href="{% url 'cvechecker_audit_cve' Ck_Audit.id %}">CVE View</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_checker/gen_download_cvechecker_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="audit_id" value="{{Ck_Audit.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the diff report</button>
+</form>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="audit-summary" name="audit-summary" value="base-severity" checked>
+ <label for="audit-summary">Audit summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary" checked>
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="unpatched-summary" name="unpatched-summary" value="unpatched-summary" checked>
+ <label for="unpatched-summary">Unpatched summary</label><br>
+ <input type="checkbox" id="unpatched-summary-compare" name="unpatched-summary-compare" value="unpatched-summary-compare">
+ <label for="unpatched-summary-compare">Unpatched summary with comparables</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Audit packages (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No packages found";
+ }
+ else if (total > 0) {
+ title = total + " Packages" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ $('.remove-repo-audit').click(function() {
+ var result = confirm("Are you sure you want to remove artifact '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'remove-artifact-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Audit_id"] = {{ Ck_Audit.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ document.getElementById("summary-report").innerText = "Summary Report";
+ document.getElementById('vex-report').style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'none';
+ $("#summary-report-options").slideUp();
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('audit-summary').checked) {
+ options = options + "audit-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('unpatched-summary').checked) {
+ options = options + "unpatched-summary,";
+ }
+ if (document.getElementById('unpatched-summary-compare').checked) {
+ options = options + "unpatched-summary-compare,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('vex-report').style.display = 'none';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ document.getElementById("summary-report").innerText = "Summary Report";
+ document.getElementById('vex-report').style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'none';
+ $("#summary-report-options").slideUp();
+ selected_summary=false;
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-auditcve-toastertable.html b/lib/cve_checker/templates/ck-auditcve-toastertable.html
new file mode 100755
index 00000000..a7648c8b
--- /dev/null
+++ b/lib/cve_checker/templates/ck-auditcve-toastertable.html
@@ -0,0 +1,431 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 40%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} CVE Check CVEs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+                        <li>CVEs for "{{Ck_Audit.name}}"</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Audit Name: <b>"{{Ck_Audit.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> YP Release: <b>"{{Ck_Audit.orm_product.long_name}}"</b> </h2>
+ </div>
+</div>
+
+<!-- <p><b><big>Actions: </big></b> -->
+
+<p><b><big>Actions: </big></b>
+ <!-- <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a> -->
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default" href="{% url 'cvechecker_audit' Ck_Audit.id %}">Package View</a>
+ <a class="btn btn-default btn-info" disabled>CVE View</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="audit_id" value="{{Ck_Audit.id}}">
+ <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the diff report</button>
+</form>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Audit CVEs (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No CVEs found";
+ }
+ else if (total > 0) {
+ title = total + " CVE" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ $('.change-repo-type').change(function() {
+ var result = confirm("Are you sure you want to change to type '" + $(this).val().split('|')[1] + "'?");
+ postCommitAjaxRequest({
+ "action" : 'update-artifact-type',
+ "is_update": result,
+ "value" : $(this).val(),
+ });
+ });
+
+ $('.remove-repo-audit').click(function() {
+ var result = confirm("Are you sure you want to remove artifact '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'remove-artifact-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Audit_id"] = {{ Ck_Audit.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#analyze_artifacts').click(function(){
+            var result = confirm("This will analyze every CVE in this audit and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-analyze-artifacts',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+ $('#load_artifacts').click(function(){
+            var result = confirm("This will load all CVEs of registered artifacts, and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-load-artifacts',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+ $('#load_backfill').click(function(){
+ var result = confirm("Backfill missing vulnerabilities using selected audit?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-backfill-vulnerabilities',
+ "backfill_id" : $("#backfill_vulnerabilities").val(),
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ }
+ });
+
+
+ function setDefaultDisplay(is_default) {
+ var default_style = 'none';
+ if (is_default) {
+ default_style = 'inline';
+ {% if request.user.is_creator %}
+ document.getElementById("edit-settings").innerText = "Settings";
+ {% endif %}
+ document.getElementById("summary-report").innerText = "Summary Report";
+ selected_editsettings=false;
+ selected_summary=false;
+ } else {
+ document.getElementById('show-settings').style.display = 'none';
+ };
+ document.getElementById('browse-content').style.display = default_style;
+ {% if request.user.is_creator %}
+ document.getElementById("edit-settings").style.display = default_style;
+ {% endif %}
+ document.getElementById('load_artifacts').style.display = default_style;
+ document.getElementById('audit-import-tern').style.display = default_style;
+ document.getElementById('summary-report').style.display = default_style;
+ document.getElementById('cve-summary-report').style.display = default_style;
+ document.getElementById('prisma-merge-report').style.display = default_style;
+ document.getElementById('audit-package-versions').style.display = default_style;
+ document.getElementById('audit-artifacts').style.display = default_style;
+ /* Always pre-hide the pop-ups */
+ document.getElementById('summary-report-options').style.display = 'none';
+ {% if request.user.is_creator %}
+ document.getElementById('show-edit-settings').style.display = 'none';
+ {% endif %}
+ document.getElementById('edit-cancel').style.display = 'none';
+ if (is_default) {
+ $("#show-settings").slideDown();
+ };
+ };
+
+ {% if request.user.is_creator %}
+ $('#edit-settings').click(function() {
+ if (selected_editsettings) {
+ setDefaultDisplay(true);
+ postCommitAjaxRequest({
+ "action" : 'submit-editaudit',
+ "product_id" : $("#audit_product_id").val(),
+ "name" : $("#audit-name").val(),
+ "content" : $("#audit-content").val(),
+ "date" : $("#audit-date").val(),
+ "description" : $("#audit-desc").val(),
+ "tree_lock" : $("#tree_lock").val(),
+ "save_lock" : $("#save_lock").val(),
+ });
+ } else {
+ setDefaultDisplay(false);
+ document.getElementById("edit-settings").innerText = "Save Settings";
+ document.getElementById("edit-settings").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#show-edit-settings").slideDown();
+ selected_editsettings=true;
+ };
+ });
+ {% endif %}
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ "Ck_Audit_id" : '{{Ck_Audit.id}}',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ // Turn on the default controls
+ setDefaultDisplay(true);
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-audits-toastertable.html b/lib/cve_checker/templates/ck-audits-toastertable.html
new file mode 100755
index 00000000..f9a5c0e4
--- /dev/null
+++ b/lib/cve_checker/templates/ck-audits-toastertable.html
@@ -0,0 +1,425 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}"></script>
+ <script>
+ // Update product (per import's select string)
+ function reset_product(description) {
+ var version;
+ var product_elem_default;
+ var product_elem_matched = "0";
+ // This selector is used to see the x-data and y-data attributes
+ var products = document.querySelectorAll('[id^=product_id]');
+ for (var i in products) {
+ // Only include integer iterated indexes (hack due to selector extra data)
+ if (!isNaN(i)) {
+ product = products[i];
+ if ("default" == product.getAttribute('z-data')) {
+ product_elem_default = product.getAttribute('value');
+ }
+ version = product.getAttribute('y-data');
+ if (description.toUpperCase().includes(version.toUpperCase())) {
+ //alert("MATCH:"+ version + ":" + description + ":")
+ product_elem_matched = product.getAttribute('value');
+ }
+ }
+ }
+ // Update product select
+ const $select = document.querySelector('#audit_product_id');
+ if (product_elem_matched != "0") {
+ $select.value = product_elem_matched;
+ } else {
+ $select.value = product_elem_default;
+ }
+ }
+
+ // Use the select as the audit name extension
+ function new_import_set() {
+ var import_value = document.querySelector('input[name="content"]:checked').value;
+ var import_select = $("#"+import_value+"_list").val();
+ var audit_name = '';
+ if ("import_upload" == import_value) {
+ audit_name = 'download';
+ } else {
+ audit_name = import_select;
+ };
+ $("#audit-name").val("{{new_audit_name}}_" + audit_name);
+ reset_product(audit_name);
+ }
+ </script>
+{% endblock %}
+
+{% block title %} CVE Checker Audits {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+            <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Audits</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="container-fluid">
+ <h3><span>
+ Actions:
+ {% if request.user.is_creator %}
+ <a class="btn btn-default navbar-btn " id="new-audit-add" >Add an audit</a>
+ {% endif %}
+ <a class="btn btn-default navbar-btn " id="audit-diff" disabled>Audit Diff</a>
+ <a class="btn btn-default navbar-btn " id="new-audit-cancel" style="display:none" >Cancel</a>
+ {% if request.user.is_admin %}
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <!--<button id="purge-selected" class="btn btn-default" type="button">Purge selected</button> -->
+ {% endif %}
+ </span>
+ </h3>
+</div>
+
+<div id="show-new-audit" style="display:none;padding-left:25px;color:DarkCyan;">
+ <div style="margin-top: 10px;">
+ <label>Name:</label>
+ <input type="text" placeholder="name of audit" id="audit-name" size="50" value="{{new_audit_name}}_master">
+ </div>
+
+ <div>
+ <label style="margin-top: 10px;">Product:</label>
+ <select id="audit_product_id" name="audit_product_id">
+ {% for product in orm_products %}
+ {% with product.id as product_id %}
+ <option id="product_id_{{forloop.counter}}" value="{{product.id}}" x-data="{{product.name}}" y-data="{{product.version}}" z-data="{% if product.long_name == default_product %}default{% endif %}" {% if product.long_name == default_product %}selected{% endif %} >{{product.long_name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select>
+ <br>
+ </div>
+
+ <div style="margin-top: 10px;">
+ <label>Import:</label>
+ <br>
+ {% for import in ckuploadmanager %}
+ <div style="padding-left: 25px;">
+ {% if "Upload" == import.name %}
+ <input type="radio" name="content" value="import_upload" onclick="new_import_set()" checked>
+ <label for="import_{{import.id}}">&nbsp;&nbsp;{{import.name}}</label>
+
+ <form id="uploadbanner" enctype="multipart/form-data" method="post" action="{% url 'gen_upload_cvechecker' %}">{% csrf_token %}
+ <input id="fileUpload" name="fileUpload" type="file" />
+ <input type="hidden" id="action" name="action" value="upload" >
+ <input type="hidden" id="upload_product_id" name="orm_product_id" value="" >
+ <input type="hidden" id="upload_audit_name" name="audit_name" value="" >
+ <br>
+ <input type="submit" value="submit file" id="submit-upload-ck" />
+ </form>
+
+ {% else %}
+ <input type="radio" name="content" value="import_{{import.id}}" onclick="new_import_set()" ><label for="import_{{import.id}}">&nbsp;&nbsp;{{import.name}}</label>
+ {% if import.is_select_list %}
+ <select id="import_{{import.id}}_list" name="import_{{import.id}}_list" onclick="new_import_set()">
+ {% for item in import.get_select_list %}
+ <option value="{{item}}">{{item}}</option>
+ {% endfor %}
+ </select>
+ {% elif import.path %}
+ ({{import.get_path_filename}})
+ {% endif %}
+ {% endif %}
+ </div>
+ {% endfor %}
+
+ </div>
+</div>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="audit-diff-form" action="/wr_studio/gen_download_audit_diff/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download">
+ <input type="hidden" id="form_audit_1_id" name="audit_1_id" value="0">
+ <input type="hidden" id="form_audit_2_id" name="audit_2_id" value="0">
+ <input type="hidden" id="form_audit_scope_id" name="audit_scope" value="0">
+ <input type="hidden" id="form_audit_sort_id" name="audit_sort" value="0">
+ <button type="submit" form="audit-diff-form" value="Submit" style="display:none" id="download-audit-diff">Generate the diff report</button>
+</form>
+
+<div id="show-diff-audit" style="display:none;padding-left:25px;">
+ <span id="inherit-audit" style="color:DarkCyan;">Audit #1:
+ <span class="glyphicon glyphicon-question-sign get-help" title="Audit #1 for difference"></span>
+ <br>
+ <select id="audit_1_id" >
+ {% for audit in audits %}
+ {% with audit.id as audit_id %}
+ <option value="{{audit.id}}" {%if forloop.counter == 2%}selected{% endif %}>{{audit.id}}: {{audit.name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select></span>
+ <br>
+ <span id="inherit-audit" style="color:DarkCyan;">Audit #2:</span>
+ <span class="glyphicon glyphicon-question-sign get-help" title="Audit #2 for difference"></span>
+ <br>
+ <select id="audit_2_id" >
+ {% for audit in audits %}
+ {% with audit.id as audit_id %}
+ <option value="{{audit.id}}">{{audit.id}}: {{audit.name}}</option>
+ {% endwith %}
+ {% endfor %}
+ </select>
+ <br>
+ <br>Report Scope:&nbsp;&nbsp;
+ <select id="audit_diff_scope">
+ <option value="0" >Criticals Diff Report</option>
+ <option value="1" >Full Diff Report</option>
+ <option value="2" >Cross-product Diff Report</option>
+ </select>
+ <br><br>
+ <input type="checkbox" id="audit-diff-order" name="audit-diff-order" value="1" checked></input>
+ <label for="audit-diff-order">Auto-sort audit order</label>
+ <hr>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <script type="text/javascript">
+ selected_addaudit=false;
+ selected_diffaudit=false;
+
+ $(document).ready(function () {
+
+ // Hide the upload submit button and use our own
+ document.getElementById('submit-upload-ck').style.visibility = 'hidden';
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "All CVE Check Audits (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No CVE Check Audits found";
+ }
+ else if (total > 0) {
+ title = total + " CVE Check Audit" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ $('.edit-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'none';
+ document.getElementById('save-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'inline';
+ });
+ $('.save-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+
+ postCommitAjaxRequest({
+ "action" : 'submit-update-ck',
+ "ck_id" : ck_id,
+ "audit_name" : $("#audit_name-text-"+ck_id).val(),
+ });
+ });
+
+ $('.cancel-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+ });
+
+ /* Add handler into the Toaster Table context */
+ $('.trash-audit').click(function() {
+ var result = confirm("Are you sure you want to remove '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trash-audit',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error == "refresh_new") {
+ window.location.replace("{% url 'cvechecker_audits' %}");
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url="") {
+ if ("" == url) {
+ url = "{% url 'xhr_cvechecker_commit' %}";
+ };
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url: url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#new-audit-add').click(function(e) {
+ if (selected_addaudit) {
+ document.getElementById("new-audit-add").innerText = "Add an audit";
+ $("#show-new-audit").slideUp();
+ selected_addaudit=false;
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ document.getElementById('audit-diff').style.display = 'inline';
+
+ audit_name = $("#audit-name").val().trim();;
+ if ( "" == audit_name) {
+ alert("Error: an audit name is required");
+ return;
+ }
+
+ import_value = document.querySelector('input[name="content"]:checked').value;
+ import_select = $("#"+import_value+"_list").val();
+ if ("import_upload" == import_value) {
+ var $select = document.querySelector('#upload_product_id');
+ $select.value = $("#audit_product_id").val();
+ $select = document.querySelector('#upload_audit_name');
+ $select.value = $("#audit-name").val();
+ // Click the submit for the upload form
+ document.getElementById("submit-upload-ck").click();
+ } else {
+ postCommitAjaxRequest({
+ "action" : 'submit-createaudit',
+ "name" : $("#audit-name").val(),
+ "product_id" : $("#audit_product_id").val(),
+ "import_id" : import_value.replace("import_",""),
+ "import_select" : import_select,
+ "is-shift" : e.shiftKey,
+ });
+ }
+ } else {
+ document.getElementById("new-audit-add").innerText = "Create this Audit";
+ document.getElementById('audit-diff').style.display = 'none';
+ document.getElementById('new-audit-cancel').style.display = 'inline';
+ $("#show-new-audit").slideDown();
+ selected_addaudit=true;
+ }
+ });
+
+ $('#new-audit-cancel').click(function() {
+        document.getElementById("new-audit-add").innerText = "Add an audit";
+ document.getElementById('audit-diff').style.display = 'inline';
+ document.getElementById('new-audit-add').style.display = 'inline';
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ $("#show-new-audit").slideUp();
+ $("#show-diff-audit").slideUp();
+ selected_addaudit=false;
+ selected_diffaudit=false;
+ });
+
+ $('#audit-diff').click(function() {
+ if (selected_diffaudit) {
+ selected_diffaudit=false;
+ document.getElementById("audit-diff").innerText = "Audit Diff";
+ document.getElementById('new-audit-add').style.display = 'inline';
+ document.getElementById('new-audit-cancel').style.display = 'none';
+ $("#show-diff-audit").slideUp();
+ /* Trigger the computation and auto download */
+ audit_1_id = $("#audit_1_id").val();
+ audit_2_id = $("#audit_2_id").val();
+ if (audit_1_id == audit_2_id) {
+ alert("You have selected the same two audits for the difference.");
+ return
+ };
+ $("#form_audit_1_id").val(audit_1_id);
+ $("#form_audit_2_id").val(audit_2_id);
+ $("#form_audit_scope_id").val($("#audit_diff_scope").val());
+ if (document.getElementById('audit-diff-order').checked) {
+ $("#form_audit_sort_id").val('1');
+ } else {
+ $("#form_audit_sort_id").val('0');
+ }
+ document.getElementById("download-audit-diff").click();
+ } else {
+ document.getElementById("audit-diff").innerText = "Generate diff report";
+ document.getElementById('new-audit-cancel').style.display = 'inline';
+ document.getElementById('new-audit-add').style.display = 'none';
+ $("#show-diff-audit").slideDown();
+ selected_diffaudit=true;
+ }
+ });
+
+ $('#purge-selected').click(function(){
+ var audit_list=[];
+ $('#harborauditstable input').each(function(){
+ if ($(this).is(':checked')) {
+ audit_list.push($(this).prop('id'));
+ }
+ });
+ if (0 == audit_list.length) {
+ alert("No Audits were selected");
+ return;
+ }
+ var result = confirm("Are you sure you want to purge these " + audit_list.length + " audits (~9 secs/audit)?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-purge-audits',
+ "audit_list" : audit_list.join(","),
+ });
+ }
+ });
+
+ });
+
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-import_manager-toastertable.html b/lib/cve_checker/templates/ck-import_manager-toastertable.html
new file mode 100755
index 00000000..5661e732
--- /dev/null
+++ b/lib/cve_checker/templates/ck-import_manager-toastertable.html
@@ -0,0 +1,266 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Check Import Manager {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+            <li>Import Manager</li>
+ </ul>
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+ <a class="btn btn-default navbar-btn " id="new-import" >New Import</a>
+ <a class="btn btn-default navbar-btn " id="refresh-imports" >Refresh select lists</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ &nbsp;&nbsp;&nbsp;&nbsp;
+ <a class="btn btn-default navbar-btn " id="show-help" >Help</a>
+
+<div id="import_help" class="well" style="padding-left:25px;display:none;">
+ <h4>Using import management:</h4>
+ This page is used to drive the table of import sources for CVE Checker audits.<br>
+ <ol>
+ <li> If the import points to a file, it will be used. </li>
+ <li> If the import points to a directory of files, they will be offered in a select list</li>
+ <li> If the import points to a directory of directories, they will be offered in a select list, and the child file(s) will be imported</li>
+ </ol>
+ Fields:<br>
+ <div style="padding-left: 30px;">
+ <b>Title:</b> Displayed title for the import channel<br>
+ <b>Mode:</b> "Repo" is for repositories, "SSL" is for scp, "File" is for direct local or NFS<br>
+ <b>Repo URL:</b> The URL to use to clone git repositories<br>
+ <b>Path:</b><br>
+ </div>
+ <div style="padding-left: 50px;">
+ <b>Repo:</b> Relative path to the target directory or file within the repo tree<br>
+    <b>SSL:</b> The "user@ip:/path" to the remote target directory/file<br>
+ <b>File:</b> Absolute path to the local target directory/file<br>
+ </div>
+ <div style="padding-left: 30px;">
+ <b>Pem file:</b> Permissions file for SSH/scp access to the target directory/file<br>
+ <b>Branch:</b> optional branch for the git repo<br>
+ <b>Select List:</b> Extracted directories/files for the respective import's select list<br>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+ var selected_showhelp=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Import Manager (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Import Managers found";
+ }
+ else if (total > 0) {
+ title = total + " Import Manager" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ $('.edit-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ document.getElementById('audit_order-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_order-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_mode-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_mode-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_path-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_path-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_pem-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_pem-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_repo-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_repo-edit-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_branch-disp-'+ck_id).style.display = 'none';
+ document.getElementById('audit_branch-edit-'+ck_id).style.display = 'inline';
+ //document.getElementById('audit_refresh-disp-'+ck_id).style.display = 'none';
+ //document.getElementById('audit_refresh-edit-'+ck_id).style.display = 'inline';
+
+ document.getElementById('edit-entry-'+ck_id).style.display = 'none';
+ document.getElementById('save-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'inline';
+ });
+
+ function close_ck_edit(ck_id) {
+ document.getElementById('audit_order-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_order-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_name-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_name-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_mode-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_mode-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_path-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_path-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_pem-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_pem-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_repo-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_repo-edit-'+ck_id).style.display = 'none';
+ document.getElementById('audit_branch-disp-'+ck_id).style.display = 'inline';
+ document.getElementById('audit_branch-edit-'+ck_id).style.display = 'none';
+ //document.getElementById('audit_refresh-disp-'+ck_id).style.display = 'inline';
+ //document.getElementById('audit_refresh-edit-'+ck_id).style.display = 'none';
+
+ document.getElementById('edit-entry-'+ck_id).style.display = 'inline';
+ document.getElementById('save-entry-'+ck_id).style.display = 'none';
+ document.getElementById('cancel-entry-'+ck_id).style.display = 'none';
+ };
+
+ $('.cancel-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ close_ck_edit(ck_id);
+ });
+
+ $('.save-ck-entry').click(function() {
+ const ck_id=$(this).attr('x-data');
+ close_ck_edit(ck_id);
+ postCommitAjaxRequest({
+ "action" : 'submit-update-import-ck',
+ "ck_id" : ck_id,
+ "audit_order" : $("#audit_order-text-"+ck_id).val(),
+ "audit_name" : $("#audit_name-text-"+ck_id).val(),
+ "audit_mode" : $("#audit_mode-text-"+ck_id).val(),
+ "audit_path" : $("#audit_path-text-"+ck_id).val(),
+ "audit_pem" : $("#audit_pem-text-"+ck_id).val(),
+ "audit_repo" : $("#audit_repo-text-"+ck_id).val(),
+ "audit_branch" : $("#audit_branch-text-"+ck_id).val(),
+ });
+ });
+
+ $('.trash-import').click(function() {
+ var result = confirm("Are you sure you want to remove import '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-remove-import-ck',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#new-import').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-new-import-ck',
+ });
+ });
+
+ $('#refresh-imports').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-refresh',
+ });
+ });
+
+ $('#show-help').click(function() {
+ if (selected_showhelp) {
+ document.getElementById("show-help").innerText = "Help";
+ $("#import_help").slideUp();
+ selected_showhelp = false;
+ } else {
+ document.getElementById("show-help").innerText = "Close Help";
+ $("#import_help").slideDown();
+ selected_showhelp = true;
+ }
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-issue-toastertable.html b/lib/cve_checker/templates/ck-issue-toastertable.html
new file mode 100755
index 00000000..4768d21e
--- /dev/null
+++ b/lib/cve_checker/templates/ck-issue-toastertable.html
@@ -0,0 +1,347 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Checker Package Issues {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audit' Ck_Package.ck_audit.id %}">Audit packages</a></li><span class="divider">&rarr;</span>
+            <li>Package Issues for {{Ck_Package.name}} from audit {{Ck_Package.ck_audit.name}}</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row" style="margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h2> Package Name: <b>"{{Ck_Package.name}}"</b> </h2>
+ </div>
+ <div class="column2">
+ <h2> Audit Name: <b>"{{Ck_Package.ck_audit.name}}"</b> </h2>
+ </div>
+</div>
+
+<p><b><big>Reports: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="package_id" value="{{Ck_Package.id}}">
+    <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the summary report</button>
+</form>
+
+<div class="row" style="display:none;margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h4> Package: <b>{{Ck_Package.name}}</b> </h4>
+ <h4> Audit: <b>{{Ck_Package.ck_audit.name}}</b> </h4>
+ </div>
+</div>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Package Issues (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Package Issues found";
+ }
+ else if (total > 0) {
+ title = total + " Package Issue" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Package_id"] = {{ Ck_Package.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#analyze_artifacts').click(function(){
+        var result = confirm("This will analyze every CVE in this audit and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-analyze-artifacts',
+ },"");
+ }
+ });
+
+ $('#load_artifacts').click(function(){
+        var result = confirm("This will load all CVEs of registered artifacts, and will take some time. Proceed?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-load-artifacts',
+ },"");
+ }
+ });
+
+ $('#load_backfill').click(function(){
+ var result = confirm("Backfill missing vulnerabilities using selected audit?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-backfill-vulnerabilities',
+ "backfill_id" : $("#backfill_vulnerabilities").val(),
+ },"");
+ }
+ });
+
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ // Turn on the default controls
+ setDefaultDisplay(true);
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/templates/ck-product-toastertable.html b/lib/cve_checker/templates/ck-product-toastertable.html
new file mode 100755
index 00000000..bdf1509f
--- /dev/null
+++ b/lib/cve_checker/templates/ck-product-toastertable.html
@@ -0,0 +1,309 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+<style>
+.column1 {
+ float: left;
+ width: 30%;
+}
+.column2 {
+ float: left;
+ width: 60%;
+}
+/* Clear floats after the columns */
+.row:after {
+ content: "";
+ display: table;
+ clear: both;
+}
+</style>
+{% endblock %}
+
+{% load jobtags %}
+
+{% block title %} Cve Check Product Issues {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audits' %}">Audits</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'cvechecker_audit' Ck_Package.ck_audit.id %}">Audit packages</a></li><span class="divider">&rarr;</span>
+            <li>Package products for {{Ck_Package.name}} from audit {{Ck_Package.ck_audit.name}}</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru mrj_type=mrj_type %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+
+<p><b><big>Reports: </big></b>
+ <a class="btn btn-default navbar-btn " id="summary-report" disabled>Summary report</a>
+ <a class="btn btn-default navbar-btn " id="edit-cancel" style="display:none" >Cancel</a>
+
+<!-- Combination javascript plus redirected download for this ToasterTable (no-POST) page -->
+<form id="summary-report-form" action="/cve_check/gen_download_audit_summary/" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download">
+ <input type="hidden" name="options" value="" id="summary_report_options">
+ <input type="hidden" name="package_id" value="{{Ck_Package.id}}">
+    <button type="submit" form="summary-report-form" value="Submit2" style="display:none" id="download-summary-report">Generate the summary report</button>
+</form>
+
+<div class="row" style="display:none;margin-left:10px;" id="show-settings">
+ <div class="column1">
+ <h4> Package: <b>{{Ck_Package.name}}</b> </h4>
+ <h4> Audit: <b>{{Ck_Package.ck_audit.name}}</b> </h4>
+ </div>
+</div>
+
+<div id="summary-report-options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <br>
+ <h4> Select the pages to be included in the report:</h4>
+ <input type="checkbox" id="repo-severity" name="repo-severity" value="repo-severity" checked>
+ <label for="repo-severity">Severity by repository</label><br>
+ <input type="checkbox" id="package-severity" name="package-severity" value="package-severity" checked>
+ <label for="package-severity">Severity by package</label><br>
+ <input type="checkbox" id="base-severity" name="base-severity" value="base-severity" checked>
+ <label for="base-severity">Severity by base image</label><br>
+ <input type="checkbox" id="cve-summary" name="cve-summary" value="cve-summary">
+ <label for="cve-summary">CVE summary</label><br>
+ <input type="checkbox" id="package-summary" name="package-summary" value="package-summary">
+ <label for="package-summary">Package summary</label><br>
+ <input type="checkbox" id="artifact-labels" name="artifact-labels" value="artifact-labels">
+ <label for="artifact-labels">Artifact Labels</label><br>
+ <input type="checkbox" id="summary_baseline" name="summary_baseline" value="summary_baseline">
+ <label for="summary_baseline">Audit versus Baseline by repository</label><br>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var selected_editsettings=false;
+ var selected_summary=false;
+
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Products (" + total + ")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No products found";
+ }
+ else if (total > 0) {
+ title = total + " Product" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ //alert("AJAX RETURN");
+ $("#run-audit-analysis").removeAttr("disabled");
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+
+ if (data.error.startsWith('UPDATE_TYPE:')) {
+ var data = data.error.replace("UPDATE_TYPE:","");
+ var data_id=data.split('|')[0];
+ var data_value=data.replace('type_','');
+ $('#'+data_id).val(data_value);
+ $('#'+data_id).css({ "color": "blue" });
+ return;
+ }
+
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+// alert("PAGE REFRESH");
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ reqdata["Ck_Package_id"] = {{ Ck_Package.id }};
+ url = url || "{% url 'xhr_cvechecker_commit' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+
+ $('#summary-report').click(function() {
+ if (selected_summary) {
+ setDefaultDisplay(true);
+ selected_summary=false;
+ var options = "";
+ if (document.getElementById('repo-severity').checked) {
+ options = options + "repo-severity,";
+ }
+ if (document.getElementById('package-severity').checked) {
+ options = options + "package-severity,";
+ }
+ if (document.getElementById('base-severity').checked) {
+ options = options + "base-severity,";
+ }
+ if (document.getElementById('cve-summary').checked) {
+ options = options + "cve-summary,";
+ }
+ if (document.getElementById('package-summary').checked) {
+ options = options + "package-summary,";
+ }
+ if (document.getElementById('artifact-labels').checked) {
+ options = options + "artifact-labels,";
+ }
+ if (document.getElementById('summary_baseline').checked) {
+ options = options + "summary_baseline,";
+ }
+ document.getElementById("summary_report_options").value=options;
+ document.getElementById("download-summary-report").click();
+ } else {
+ document.getElementById("summary-report").innerText = "Generate Summary Report";
+ setDefaultDisplay(false);
+ document.getElementById("summary-report").style.display = 'inline';
+ document.getElementById('edit-cancel').style.display = 'inline';
+ $("#summary-report-options").slideDown();
+ selected_summary=true;
+ }
+ });
+
+ $('#edit-cancel').click(function() {
+ setDefaultDisplay(true);
+ });
+
+ $('#audit-import-tern').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-import-tern',
+ },"");
+ });
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('#audit-package-versions').click(function() {
+ document.getElementById("download-package-versions").click();
+ });
+
+ $('#audit-artifacts').click(function() {
+ document.getElementById("download-artifacts-summary").click();
+ });
+
+ $('#x_summary-report').click(function() {
+ document.getElementById("download-summary-report").click();
+ });
+
+ $('#submit-refresh-tops').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-tops',
+ },"");
+ });
+
+ $('#submit-grafana-add').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-add',
+ },"");
+ });
+
+ $('#submit-grafana-remove').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-grafana-remove',
+ },"");
+ });
+
+ $('#submit-ingest-update').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-ingest-update',
+ },"");
+ });
+
+
+ $('#submit-newowner').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addowner',
+ "owner_id" : $("#user-list").val(),
+ });
+ });
+
+ $('#submit-newgroup').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-addgroup',
+ "group_id" : $("#group-list").val(),
+ });
+ });
+
+ $('.detach-owner').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachowner',
+ "owner_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('.detach-group').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-detachgroup',
+ "group_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
+ $('#focus_select').on('change', function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-audit-focus',
+ "focus_select" : $("#focus_select").val(),
+ });
+ });
+
+ });
+ </script>
+{% endblock %}
diff --git a/lib/cve_checker/tests.py b/lib/cve_checker/tests.py
new file mode 100755
index 00000000..7ce503c2
--- /dev/null
+++ b/lib/cve_checker/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/lib/cve_checker/urls.py b/lib/cve_checker/urls.py
new file mode 100755
index 00000000..396c4fda
--- /dev/null
+++ b/lib/cve_checker/urls.py
@@ -0,0 +1,47 @@
+from django.urls import re_path as url,include
+from django.views.generic import RedirectView
+from . import views, tables
+
+urlpatterns = [
+ #
+ # Main pages
+ #
+
+ url(r'^cvechecker_audits/$',
+ tables.CveCheckerAuditsTable.as_view(template_name="ck-audits-toastertable.html"),
+ name='cvechecker_audits'),
+
+ url(r'^cvechecker/(?P<audit_id>\d+)$',
+ tables.CveCheckerAuditTable.as_view(template_name="ck-audit-toastertable.html"),
+ name='cvechecker_audit'),
+
+ url(r'^cvechecker_audit_cve/(?P<audit_id>\d+)$',
+ tables.CveCheckerAuditCveTable.as_view(template_name="ck-auditcve-toastertable.html"),
+ name='cvechecker_audit_cve'),
+
+ url(r'^cvechecker_issue/(?P<package_id>\d+)$',
+ tables.CveCheckerIssueTable.as_view(template_name="ck-issue-toastertable.html"),
+ name='cvechecker_issue'),
+
+ url(r'^cvechecker_product/(?P<package_id>\d+)$',
+ tables.CveCheckerProductTable.as_view(template_name="ck-product-toastertable.html"),
+ name='cvechecker_product'),
+
+ url(r'^cvechecker_import_manager/$',
+ tables.CveCheckerImportManagementTable.as_view(template_name="ck-import_manager-toastertable.html"),
+ name='cvechecker_import_manager'),
+
+ url(r'^gen_download_cvechecker_summary/$', views.gen_download_cvechecker_summary, name='gen_download_cvechecker_summary'),
+ url(r'^gen_download_cvechecker_audit_diff/$', views.gen_download_cvechecker_audit_diff, name='gen_download_cvechecker_audit_diff'),
+ url(r'^gen_upload_cvechecker/$', views.gen_upload_cvechecker, name='gen_upload_cvechecker'),
+ url(r'^cvechecker_clear_jobs/$', views.cvechecker_clear_jobs, name='cvechecker_clear_jobs'),
+
+ url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
+
+ #
+ # Ajax
+ #
+
+ url(r'^xhr_cvechecker_commit/$', views.xhr_cvechecker_commit,
+ name='xhr_cvechecker_commit'),
+]
diff --git a/lib/cve_checker/views.py b/lib/cve_checker/views.py
new file mode 100755
index 00000000..333cda97
--- /dev/null
+++ b/lib/cve_checker/views.py
@@ -0,0 +1,325 @@
+#
+# ex:ts=4:sw=4:sts=4:et
+# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
+#
+# Security Response Tool Implementation
+#
+# Copyright (C) 2023 Wind River Systems
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+import os
+import re
+from datetime import datetime, date
+import json
+import traceback
+
+from django.urls import reverse_lazy
+from django.views import generic
+from django.http import HttpResponse, HttpResponseNotFound, JsonResponse, HttpResponseRedirect
+
+from django.contrib.auth.forms import UserCreationForm, UserChangeForm, PasswordChangeForm
+from django.contrib import messages
+from django.contrib.auth import update_session_auth_hash
+from django.contrib.auth.models import Group
+from django.shortcuts import render, redirect
+
+from orm.models import SrtSetting, Product
+from orm.models import Job, ErrorLog
+from users.models import SrtUser, UserSafe
+from srtgui.api import execute_process
+from cve_checker.models import Ck_Audit, Ck_Package, CkUploadManager
+from cve_checker.reports import doCveCheckerAuditSummaryExcel, do_audit_cvechecker_diff_report
+
+from srtgui.views import MimeTypeFinder
+
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
+
+# quick development/debugging support
+from srtgui.api import _log
+
+#
+# Main pages
+#
+
+# determine in which mode we are running in, and redirect appropriately
+def landing(request):
+
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "yp"):
+ return redirect(f"/{SRT_MAIN_URL}/landing/")
+
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'srtgui',
+ }
+ return render(request, 'landing.html', context)
+
+def report(request,page_name):
+ if request.method == "GET":
+ context = GitcvecheckerReportManager.get_context_data(page_name,request=request)
+ record_list = request.GET.get('record_list', '')
+ _log("EXPORT_GET!:%s|%s|" % (request,record_list))
+ context['record_list'] = record_list
+ return render(request, 'report.html', context)
+ elif request.method == "POST":
+ _log("EXPORT_POST!:%s|%s" % (request,request.FILES))
+ parent_page = request.POST.get('parent_page', '')
+ file_name,response_file_name = GitcvecheckerReportManager.exec_report(parent_page,request=request)
+ if file_name and response_file_name:
+ fsock = open(file_name, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_name)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = "attachment; filename=" + response_file_name
+ response["Content-Disposition"] = disposition
+ _log("EXPORT_POST_Q{%s|" % (response))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", {})
+ return redirect('/')
+ raise Exception("Invalid HTTP method for this page")
+
+# Standalone download URL, for ToasterTable pages
+#
+# TBD
+#
+def gen_download_cvechecker_audit_diff(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log("GEN_DOWNLOAD_AUDIT_DIFF(%s)" % request.POST)
+ if request.POST["action"] == "download":
+ audit_1_id = int(request.POST.get('audit_1_id',0))
+ audit_2_id = int(request.POST.get('audit_2_id',0))
+ audit_1 = cvecheckerAudit.objects.get(id=audit_1_id)
+ audit_2 = cvecheckerAudit.objects.get(id=audit_2_id)
+ audit_scope = request.POST.get('audit_scope','0')
+ audit_sort = request.POST.get('audit_sort','1')
+ # Enforce older to newer
+ if ('1' == audit_sort) and (audit_1.id > audit_2.id):
+ audit_1,audit_2 = audit_2,audit_1
+ file_path = do_audit_cvechecker_diff_report(audit_1, audit_2, {'format':'xlsx','audit_scope':audit_scope,})
+ if os.path.isfile(file_path):
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(os.path.basename(file_path))
+ return response
+ else:
+ _log("ERROR:could not download '%s'" % file_path)
+ return render(request, "unavailable_artifact.html", context={})
+
+#
+# Upload pages
+#
+
+# Standalone download URL, for ToasterTable pages
+def gen_upload_cvechecker(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log(f"GEN_UPLOAD_CVECHECKER({request.POST})")
+
+ ck_upload_dir = os.path.join(SRT_BASE_DIR,'data/cve_checker/upload')
+ ck_upload_manager_id = CkUploadManager.objects.get(import_mode='Upload',path='').id
+
+ if request.POST["action"] == "upload":
+ audit_name = request.POST.get('audit_name',0)
+ orm_product_id = int(request.POST.get('orm_product_id',0))
+ orm_product = Product.objects.get(id=orm_product_id)
+
+ if not os.path.isdir(ck_upload_dir):
+ os.makedirs(ck_upload_dir)
+ try:
+ file = request.FILES['fileUpload']
+ except Exception as e:
+ _log("EXPORT_POST:'fileupload:' does not exist: %s" % e)
+ try:
+ ### TODO Error if not JSON file
+ pass
+ # Upload the file
+ local_file_path = os.path.join(ck_upload_dir,file.name)
+ _log("FOO:%s" % local_file_path)
+ if os.path.isfile(local_file_path):
+ os.remove(local_file_path)
+ with open(local_file_path, 'xb+') as destination:
+ for line in file:
+ destination.write(line)
+ # Create an audit from the imported file
+ cmnd = ['./bin/cve_checker/srtool_cvechecker.py','--import-cvechk', f"{ck_upload_manager_id},{orm_product.key},{local_file_path}", "--progress"]
+ cmnd = ['./bin/cve_checker/srtool_cvechecker.py','--import-cvechk', f"{ck_upload_manager_id},{orm_product.key},{local_file_path}", "--audit-name",audit_name,"--progress"]
+ Job.start('Audit from upload','Audit from upload',' '.join(cmnd),'')
+
+ except Exception as e:
+ _log("EXPORT_POST:'fileupload:var-1': %s" % e)
+ return redirect('cvechecker_audits')
+
+#
+# Download pages
+#
+
+# Standalone download URL, for ToasterTable pages
+def gen_download_cvechecker_summary(request):
+ if request.method == "GET":
+ return redirect(landing)
+ elif request.method == "POST":
+ _log("GEN_DOWNLOAD_CVECHECK_SUMMARY(%s)" % request.POST)
+ if request.POST["action"] == "download":
+ audit_id = int(request.POST.get('audit_id',0))
+ ck_audit = Ck_Audit.objects.get(id=audit_id)
+ queryString = request.POST.get('queryString','')
+ options = request.POST.get('options','')
+ options_dict = {'format':'xlsx','audit_id':audit_id}
+ for option in options.split(','):
+ if option: options_dict[option] = 1
+ # orderby=package&filter=is_severity:critical_not_base&search=CVE-2021-44228&default_orderby=name&filter_value=on&
+ for option in queryString.split('&'):
+ if option:
+ name,value = option.split('=')
+ options_dict[name] = value
+ file_path = doCveCheckerAuditSummaryExcel(ck_audit,options_dict)
+ if os.path.isfile(file_path):
+ fsock = open(file_path, "rb")
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(os.path.basename(file_path))
+ return response
+ else:
+ _log("ERROR:could not download '%s'" % file_path)
+ return render(request, "unavailable_artifact.html", context={})
+
+ return render(request, "unavailable_artifact.html", context={})
+
+#
+# XHR pages
+#
+
+def xhr_cvechecker_commit(request):
+ _log("XHR_CVECHECK_COMMIT(%s)" % request.POST)
+ if not 'action' in request.POST:
+ _log("xhr_cvechecker_commit:NO_ACTION")
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+
+ try:
+ error_message = "ok"
+
+ # Fetch cvechecker data from backend
+ if request.POST["action"] == "submit-createaudit":
+ # action': ['submit-createaudit'], 'product_id': ['6'], 'ab_set': ['mickledore'], 'project_name': [''], 'name': ['audit_20231114_mickledore'], 'is-shift': ['false']}>)|
+ audit_name = request.POST.get('name', 'audit_name')
+
+ product_id = int(request.POST.get('product_id', '0'))
+ product = Product.objects.get(id=product_id)
+
+ import_id = request.POST.get('import_id', '0')
+ import_select = request.POST.get('import_select', '_none_')
+
+ # bin/cve_checker/srtool_cvechecker.py --import-cvechk import_id,master,master --progress
+ cmnd = ['./bin/cve_checker/srtool_cvechecker.py','--import-cvechk',f"{import_id},{product.key},{import_select}","--audit-name",audit_name,"--progress"]
+ _log(f"FETCH_cvechecker:JOB:{cmnd}")
+ Job.start('Fetch CveChecker','Fetch CveChecker',' '.join(cmnd),'')
+ # Set update time
+ now = datetime.today().strftime('%Y/%m/%d %H:%M:%S')
+ SrtSetting.set_setting('SRT_CVECHECK_UPDATE',now)
+
+ # Delete a cvechecker
+ elif request.POST["action"] == "submit-trash-audit":
+ cvechecker_id = int(request.POST.get('record_id', '0'))
+ cvechecker_obj = Ck_Audit.objects.get(pk=cvechecker_id)
+ cvechecker_obj.delete()
+
+ # Update management cvechecker settings
+ elif request.POST["action"] == "submit-cvechecker-settings":
+ SrtSetting.set_setting('SRT_cvechecker_PATH',request.POST.get('cvechecker_path', ''))
+
+ # Update cvechecker status
+ elif request.POST["action"] == "submit-update-ck":
+ ck_id = int(request.POST.get('ck_id', '0'))
+ cvechecker_obj = Ck_Audit.objects.get(pk=ck_id)
+ cvechecker_obj.name = request.POST.get('audit_name', '0')
+ cvechecker_obj.save()
+
+ # Add cvechecker import blank
+ elif request.POST["action"] == "submit-new-import-ck":
+ ck_import_obj,created = CkUploadManager.objects.get_or_create(name='new')
+ ck_import_obj.order = 0
+ ck_import_obj.import_mode = 'File'
+ ck_import_obj.path = ''
+ ck_import_obj.pem = ''
+ ck_import_obj.repo = ''
+ ck_import_obj.branch = ''
+ ck_import_obj.auto_refresh = False
+ ck_import_obj.save()
+
+ # Update cvechecker import
+ elif request.POST["action"] == "submit-update-import-ck":
+ ck_id = int(request.POST.get('ck_id', '0'))
+ try:
+ order = int(request.POST.get('audit_order', '0').strip())
+ ck_import_obj = CkUploadManager.objects.get(id=ck_id)
+ ck_import_obj.order = order
+ ck_import_obj.name = request.POST.get('audit_name', 'new').strip()
+ ck_import_obj.import_mode = request.POST.get('audit_mode', 'File').strip()
+ ck_import_obj.path = request.POST.get('audit_path', '').strip()
+ ck_import_obj.pem = request.POST.get('audit_pem', '').strip()
+ ck_import_obj.repo = request.POST.get('audit_repo', '').strip()
+ ck_import_obj.branch = request.POST.get('audit_branch', '').strip()
+ ck_import_obj.auto_refresh = ('True' == request.POST.get('audit_refresh', 'False').strip())
+ ck_import_obj.save()
+ except:
+ error_message = "Error: order must be an integer"
+
+ # Update the Import select tables
+ elif request.POST["action"] == "submit-import-refresh":
+ cmnd = ["bin/cve_checker/srtool_cvechecker.py","--update-imports","-f"]
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnd)
+ if 0 != result_returncode:
+ error_message = f"ERROR:{cmnd}: {result_stderr}:{result_stdout}:"
+
+ # Delete an import
+ elif request.POST["action"] == "submit-remove-import-ck":
+ ck_id = int(request.POST.get('record_id', '0'))
+ ck_import_obj = CkUploadManager.objects.get(pk=ck_id)
+ ck_import_obj.delete()
+
+ # Clear the dead jobs
+ elif request.POST["action"] == "submit-clearjobs":
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+
+ # Undefined action
+ else:
+ error_message ="ERROR:unknown action '%s'" % request.POST["action"]
+
+ _log("XHR_CVECHECK_COMMIT:DONE:%s" % error_message)
+ return HttpResponse(json.dumps( {"error": error_message,} ), content_type = "application/json")
+
+ except Exception as e:
+ _log("XHR_CVECHECK_COMMIT:no(%s)(%s)" % (e,traceback.format_exc()))
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+
+# Delete jobs
+def cvechecker_clear_jobs(request):
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+ return redirect('manage')
diff --git a/lib/orm/management/commands/checksettings.py b/lib/orm/management/commands/checksettings.py
index f5e4df02..5701c0aa 100644
--- a/lib/orm/management/commands/checksettings.py
+++ b/lib/orm/management/commands/checksettings.py
@@ -36,7 +36,7 @@ class Command(BaseCommand):
# to allow embedding comments in the JSON files
def _load_datasource(self,dir):
for ds in glob.glob(os.path.join(dir,'datasource*.json')):
- _log("Load_Datasource:%s" % ds)
+ # _log("Load_Datasource:%s" % ds)
with open(ds) as json_data:
dct = json.load(json_data)
if 'srtsetting' in dct:
@@ -49,7 +49,17 @@ class Command(BaseCommand):
if 'datasource' in dct:
for datasource in dct['datasource']:
#print(" LOAD_DATASOURCE:%s:%s" % (datasource['key'],datasource['description']))
- ds,create = DataSource.objects.get_or_create(key=datasource['key'])
+ ds,created = DataSource.objects.get_or_create(key=datasource['key'])
+ if not created:
+ # Special handling for attributes, persistent enablement
+ new_attributes = datasource['attributes'] if 'attributes' in datasource else ''
+                        # An explicit "ENABLE" overrides any default "DISABLE"
+ if 'ENABLE ' in ds.attributes:
+ new_attributes = 'ENABLE ' + new_attributes.replace('DISABLE ','').replace('ENABLE ','')
+                        # An explicit "DISABLE" overrides the default enable
+ if 'DISABLE ' in ds.attributes:
+ new_attributes = 'DISABLE ' + new_attributes.replace('DISABLE ','').replace('ENABLE ','')
+ datasource['attributes'] = new_attributes
for key in datasource.keys():
if key.startswith("_comment"):
continue
diff --git a/lib/orm/management/commands/lsupdates.py b/lib/orm/management/commands/lsupdates.py
index ca67713a..1805142f 100644
--- a/lib/orm/management/commands/lsupdates.py
+++ b/lib/orm/management/commands/lsupdates.py
@@ -5,7 +5,7 @@
# Security Response Tool Implementation
#
# Copyright (C) 2013-2015 Intel Corp.
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -103,7 +103,7 @@ class Command(BaseCommand):
(what,
pec))
sys.stdout.flush()
- if int(pec) is 100:
+ if int(pec) == 100:
sys.stdout.write("\n")
sys.stdout.flush()
@@ -322,6 +322,16 @@ class Command(BaseCommand):
logger.info("***LS UPDATES***")
+ # Disable the background updates until these are all processed
+ SrtSetting.set_setting('SRT_DISABLE_UPDATES','yes')
+
+        # First process the pre-init data sources in strict pk order to ensure dependencies
+ data_sources=DataSource.objects.filter(update_frequency=DataSource.PREINIT).order_by('key')
+ for source in data_sources:
+ if source.init:
+ print("Fetching pre-init datasource '%s:%s'" % (source.source,source.description))
+ self.execute_script(source.init)
+
# Process the data sources in strict pk order to insure dependencies
data_sources=DataSource.objects.all().order_by('key')
for source in data_sources:
@@ -333,6 +343,10 @@ class Command(BaseCommand):
# No Init action?
print("Skipping datasource %s (no init action)" % (source.description))
continue
+ elif 'DISABLE ' in source.attributes:
+ # Data source disabled
+ print("Disabled datasource %s (%s)" % (source.description,source.attributes))
+ continue
else:
logger.info("Fetching datasource %s:%s" % (source.source,source.description))
print("Fetching datasource '%s:%s'" % (source.source,source.description))
@@ -395,10 +409,12 @@ class Command(BaseCommand):
logger.error("Unknown data source type for (%s,%s,%s) " % (source.data,source.source,source.name))
_log("Unknown data source type for %s,%s,%s) " % (source.data,source.source,source.name))
+        # Re-enable the background updates now that these are all processed
+ SrtSetting.set_setting('SRT_DISABLE_UPDATES','no')
+
os.system('setterm -cursor on')
def handle(self, *args, **options):
-
# testing shortcuts
if 'yes' == SrtSetting.objects.get(name='SRTDBG_MINIMAL_DB').value:
print("TEST: MINIMAL DATABASE LOADING")
@@ -407,5 +423,4 @@ class Command(BaseCommand):
Command.status_sustaining_limit = 10
Command.debug_defect_limit = 10
Command.cpe_limit = 10
-
self.update()
diff --git a/lib/orm/migrations/0007_components_errorlog.py b/lib/orm/migrations/0007_components_errorlog.py
new file mode 100755
index 00000000..88a02ee1
--- /dev/null
+++ b/lib/orm/migrations/0007_components_errorlog.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.5 on 2020-02-01 03:09
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+# Schema migration: adds the ErrorLog model and a free-form "packages"
+# text field to the Defect, Investigation and Vulnerability models.
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0006_reconcile'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='ErrorLog',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('severity', models.IntegerField(default=0)),
+                ('description', models.TextField(blank=True)),
+                ('srt_created', models.DateTimeField(auto_now_add=True, null=True)),
+            ],
+        ),
+        migrations.AddField(
+            model_name='defect',
+            name='packages',
+            field=models.TextField(blank=True),
+        ),
+        migrations.AddField(
+            model_name='investigation',
+            name='packages',
+            field=models.TextField(blank=True),
+        ),
+        migrations.AddField(
+            model_name='vulnerability',
+            name='packages',
+            field=models.TextField(blank=True),
+        ),
+    ]
diff --git a/lib/orm/migrations/0008_cveaccess.py b/lib/orm/migrations/0008_cveaccess.py
new file mode 100644
index 00000000..c12ac9ed
--- /dev/null
+++ b/lib/orm/migrations/0008_cveaccess.py
@@ -0,0 +1,24 @@
+# Generated by Django 2.2.11 on 2020-10-23 08:03
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+# Schema migration: adds CveAccess, a per-user access-grant table linking
+# a Cve to an AUTH_USER_MODEL user (used for private CVEs).
+class Migration(migrations.Migration):
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('orm', '0007_components_errorlog'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='CveAccess',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('cve', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_users', to='orm.Cve')),
+                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cve_user', to=settings.AUTH_USER_MODEL)),
+            ],
+        ),
+    ]
diff --git a/lib/orm/migrations/0009_recipetable.py b/lib/orm/migrations/0009_recipetable.py
new file mode 100644
index 00000000..4f3621f1
--- /dev/null
+++ b/lib/orm/migrations/0009_recipetable.py
@@ -0,0 +1,20 @@
+# Generated by Django 2.2.11 on 2020-11-13 21:48
+
+from django.db import migrations, models
+
+
+# Schema migration: adds the simple RecipeTable lookup model
+# (one recipe_name per row).
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0008_cveaccess'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='RecipeTable',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('recipe_name', models.CharField(max_length=50)),
+            ],
+        ),
+    ]
diff --git a/lib/orm/migrations/0010_job.py b/lib/orm/migrations/0010_job.py
new file mode 100644
index 00000000..4b837379
--- /dev/null
+++ b/lib/orm/migrations/0010_job.py
@@ -0,0 +1,35 @@
+# Generated by Django 2.2.11 on 2020-11-14 23:59
+
+from django.db import migrations, models
+
+
+# Schema migration: adds the Job model used to track background
+# command execution (status, pid, progress counters, timestamps).
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0009_recipetable'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Job',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(default='', max_length=50)),
+                ('description', models.TextField(blank=True)),
+                ('command', models.TextField(blank=True)),
+                ('log_file', models.TextField(blank=True)),
+                ('options', models.TextField(blank=True)),
+                ('status', models.IntegerField(choices=[(0, 'NotStarted'), (1, 'InProgress'), (2, 'Success'), (3, 'Errors'), (4, 'Cancelling'), (5, 'Cancelled')], default=0)),
+                ('parent_name', models.CharField(default='', max_length=50)),
+                ('pid', models.IntegerField(default=0)),
+                ('count', models.IntegerField(default=0)),
+                ('max', models.IntegerField(default=0)),
+                ('errors', models.IntegerField(default=0)),
+                ('warnings', models.IntegerField(default=0)),
+                ('refresh', models.IntegerField(default=0)),
+                ('message', models.CharField(default='', max_length=50)),
+                ('started_on', models.DateTimeField(null=True)),
+                ('completed_on', models.DateTimeField(null=True)),
+            ],
+        ),
+    ]
diff --git a/lib/orm/migrations/0011_extend_field_sizes.py b/lib/orm/migrations/0011_extend_field_sizes.py
new file mode 100644
index 00000000..830a2de3
--- /dev/null
+++ b/lib/orm/migrations/0011_extend_field_sizes.py
@@ -0,0 +1,33 @@
+# Generated by Django 2.2.17 on 2021-02-04 23:27
+
+from django.db import migrations, models
+
+
+# Schema migration: widens Package.name/realname (50→80),
+# Product.cpe (→255) and DataSource.key (→80) to hold longer values.
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0010_job'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='package',
+            name='name',
+            field=models.CharField(blank=True, max_length=80),
+        ),
+        migrations.AlterField(
+            model_name='package',
+            name='realname',
+            field=models.CharField(blank=True, max_length=80),
+        ),
+        migrations.AlterField(
+            model_name='product',
+            name='cpe',
+            field=models.CharField(max_length=255),
+        ),
+        migrations.AlterField(
+            model_name='datasource',
+            name='key',
+            field=models.CharField(max_length=80),
+        ),
+    ]
diff --git a/lib/orm/migrations/0012_job_user.py b/lib/orm/migrations/0012_job_user.py
new file mode 100755
index 00000000..09af561f
--- /dev/null
+++ b/lib/orm/migrations/0012_job_user.py
@@ -0,0 +1,21 @@
+# Generated by Django 2.2.11 on 2021-10-06 18:26
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+# Schema migration: adds an optional "user" foreign key to Job, recording
+# which user started the background job.
+class Migration(migrations.Migration):
+
+    dependencies = [
+        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+        ('orm', '0011_extend_field_sizes'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='job',
+            name='user',
+            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
+        ),
+    ]
diff --git a/lib/orm/migrations/0013_update_preinit.py b/lib/orm/migrations/0013_update_preinit.py
new file mode 100755
index 00000000..6711be9a
--- /dev/null
+++ b/lib/orm/migrations/0013_update_preinit.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2021-12-06 03:03
+
+from django.db import migrations, models
+
+
+# Schema migration: adds the new 'PreInit' (7) choice to
+# DataSource.update_frequency, matching DataSource.PREINIT in models.py.
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0012_job_user'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='datasource',
+            name='update_frequency',
+            field=models.IntegerField(choices=[(0, 'Minute'), (1, 'Hourly'), (2, 'Daily'), (3, 'Weekly'), (4, 'Monthly'), (5, 'OnDemand'), (6, 'OnStartup'), (7, 'PreInit')], default=2),
+        ),
+    ]
diff --git a/lib/orm/migrations/0014_alter_packagetocve_applicable.py b/lib/orm/migrations/0014_alter_packagetocve_applicable.py
new file mode 100644
index 00000000..0a7e2cc0
--- /dev/null
+++ b/lib/orm/migrations/0014_alter_packagetocve_applicable.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-01-30 18:58
+
+from django.db import migrations, models
+
+
+# Schema migration: converts PackageToCve.applicable from the deprecated
+# NullBooleanField to BooleanField(null=True) (required by Django 4.x).
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('orm', '0013_update_preinit'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='packagetocve',
+            name='applicable',
+            field=models.BooleanField(null=True),
+        ),
+    ]
diff --git a/lib/orm/models.py b/lib/orm/models.py
index 9b4f99ce..f5016b7d 100644
--- a/lib/orm/models.py
+++ b/lib/orm/models.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -27,21 +27,29 @@ from django.db import transaction
from django.core import validators
from django.conf import settings
import django.db.models.signals
+from django.db.models import F, Q, Sum, Count
+from django.contrib.auth.models import AbstractUser, Group, AnonymousUser
+from srtgui.api import execute_process, execute_process_close_fds
from users.models import SrtUser
import sys
import os
import re
+import itertools
from signal import SIGUSR1
from datetime import datetime
import json
+import subprocess
+import time
+import signal
+import pytz
import logging
logger = logging.getLogger("srt")
# quick development/debugging support
-from srtgui.api import _log
+from srtgui.api import _log, parameter_join
# Sqlite support
@@ -74,7 +82,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
return _base_insert(self, *args, **kwargs)
QuerySet._insert = _insert
- from django.utils import six
def _create_object_from_params(self, lookup, params):
"""
Tries to create an object using passed params.
@@ -89,7 +96,6 @@ if 'sqlite' in settings.DATABASES['default']['ENGINE']:
return self.get(**lookup), False
except self.model.DoesNotExist:
pass
- six.reraise(*exc_info)
QuerySet._create_object_from_params = _create_object_from_params
@@ -331,8 +337,10 @@ class Update():
PUBLISH_DATE = "Publish_Date(%s,%s)"
AFFECTED_COMPONENT = "Affected_Component(%s,%s)"
ACKNOWLEDGE_DATE = "AcknowledgeDate(%s,%s)"
+ PUBLIC = "Public(%s,%s)"
ATTACH_CVE = "Attach_CVE(%s)"
DETACH_CVE = "Detach_CVE(%s)"
+ MERGE_CVE = "Merge_CVE(%s)"
ATTACH_VUL = "Attach_Vulnerability(%s)"
DETACH_VUL = "Detach_Vulnerability(%s)"
ATTACH_INV = "Attach_Investigration(%s)"
@@ -404,11 +412,11 @@ class HelpText(models.Model):
text = models.TextField()
-#UPDATE_FREQUENCY: 0 = every minute, 1 = every hour, 2 = every day, 3 = every week, 4 = every month, 5 = every year
+#UPDATE_FREQUENCY: 0 = every n minutes, 1 = every hour, 2 = every day, 3 = every week, 4 = every month, 5 = on demand
class DataSource(models.Model):
search_allowed_fields = ['key', 'name', 'description', 'init', 'update', 'lookup']
- #UPDATE FREQUENCT
+ #UPDATE FREQUENCY
MINUTELY = 0
HOURLY = 1
DAILY = 2
@@ -416,6 +424,7 @@ class DataSource(models.Model):
MONTHLY = 4
ONDEMAND = 5
ONSTARTUP = 6
+ PREINIT = 7
FREQUENCY = (
(MINUTELY, 'Minute'),
(HOURLY, 'Hourly'),
@@ -424,6 +433,7 @@ class DataSource(models.Model):
(MONTHLY, 'Monthly'),
(ONDEMAND, 'OnDemand'),
(ONSTARTUP, 'OnStartup'),
+ (PREINIT, 'PreInit'),
)
# Global date format
@@ -434,7 +444,7 @@ class DataSource(models.Model):
LOOKUP_MISSING = 'LOOKUP-MISSING'
PREVIEW_SOURCE = 'PREVIEW-SOURCE'
- key = models.CharField(max_length=20)
+ key = models.CharField(max_length=80)
data = models.CharField(max_length=20)
source = models.CharField(max_length=20)
name = models.CharField(max_length=20)
@@ -570,6 +580,9 @@ class Cve(models.Model):
def get_publish_text(self):
return Cve.PUBLISH_STATE[int(self.publish_state)][1]
@property
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
+ @property
def is_local(self):
try:
CveLocal.objects.get(name=self.name)
@@ -592,6 +605,47 @@ class Cve(models.Model):
if the_comments == the_packages:
return the_comments
return '%s' % (the_comments)
+    def propagate_private(self):
+        """Propagate this CVE's public/private state down the object tree.
+
+        Copies self.public onto every attached Vulnerability and each of
+        that vulnerability's Investigations. When the CVE is private, the
+        CVE's per-user access list (CveAccess) is also replicated into
+        VulnerabilityAccess/InvestigationAccess, replacing any existing
+        grants. When the CVE is public, existing grants are left as-is.
+        """
+        # Gather allowed users
+        user_id_list = []
+        for cveaccess in CveAccess.objects.filter(cve=self):
+            user_id_list.append(cveaccess.user_id)
+            _log("BOO1:user_id=%s" % cveaccess.user_id)  # NOTE(review): leftover debug logging
+
+        # Descend the object tree
+        for c2v in CveToVulnerablility.objects.filter(cve=self):
+            vulnerability = Vulnerability.objects.get(id=c2v.vulnerability_id)
+            _log("BOO2:v=%s,%s" % (vulnerability.name,self.public))
+            vulnerability.public = self.public
+            vulnerability.save()
+            if not self.public:
+                # Remove existing users
+                for va in VulnerabilityAccess.objects.filter(vulnerability=vulnerability):
+                    _log("BOO3:DEL:v=%s,%s" % (vulnerability.name,va.id))
+                    va.delete()
+                # Add valid user list
+                for user_id in user_id_list:
+                    # get_or_create already persists the row; the save() is redundant but harmless
+                    va,create = VulnerabilityAccess.objects.get_or_create(vulnerability=vulnerability,user_id=user_id)
+                    _log("BOO4:ADD:v=%s,%s,%s" % (vulnerability.name,va.id,user_id))
+                    va.save()
+
+            # Repeat for the investigations attached to this vulnerability
+            for v2i in VulnerabilityToInvestigation.objects.filter(vulnerability = vulnerability):
+                investigation = Investigation.objects.get(id=v2i.investigation_id)
+                _log("BOO5:i=%s,%s" % (investigation.name,self.public))
+                investigation.public = self.public
+                investigation.save()
+                if not self.public:
+                    # Remove existing users
+                    for ia in InvestigationAccess.objects.filter(investigation=investigation):
+                        _log("BOO6:DEL:v=%s,%s" % (investigation.name,ia.id))
+                        ia.delete()
+                    # Add valid user list
+                    for user_id in user_id_list:
+                        ia,create = InvestigationAccess.objects.get_or_create(investigation=investigation,user_id=user_id)
+                        _log("BOO7:ADD:i=%s,%s,%s" % (investigation.name,ia.id,user_id))
+                        ia.save()
+
+
+
class CveDetail():
# CPE item list
@@ -727,6 +781,10 @@ class CveSource(models.Model):
cve = models.ForeignKey(Cve,related_name="cve_parent",blank=True, null=True,on_delete=models.CASCADE,)
datasource = models.ForeignKey(DataSource,related_name="cve_datasource", blank=True, null=True,on_delete=models.CASCADE,)
+# Per-user access grant for a private CVE: only users listed here (plus
+# admins) may view the CVE when it is not public.
+class CveAccess(models.Model):
+    cve = models.ForeignKey(Cve,related_name="cve_users",on_delete=models.CASCADE,)
+    user = models.ForeignKey(SrtUser,related_name="cve_user",on_delete=models.CASCADE,)
+
class CveHistory(models.Model):
search_allowed_fields = ['cve__name', 'comment', 'date', 'author']
cve = models.ForeignKey(Cve,related_name="cve_history",default=None, null=True, on_delete=models.CASCADE,)
@@ -764,8 +822,8 @@ class Package(models.Model):
)
mode = models.IntegerField(choices=MODE, default=FOR)
- name = models.CharField(max_length=50, blank=True)
- realname = models.CharField(max_length=50, blank=True)
+ name = models.CharField(max_length=80, blank=True)
+ realname = models.CharField(max_length=80, blank=True)
invalidname = models.TextField(blank=True)
weight = models.IntegerField(default=0)
# computed count data
@@ -812,7 +870,7 @@ class Package(models.Model):
class PackageToCve(models.Model):
package = models.ForeignKey(Package,related_name="package2cve",on_delete=models.CASCADE,)
cve = models.ForeignKey(Cve,related_name="cve2package",on_delete=models.CASCADE,)
- applicable = models.NullBooleanField(default=True, null=True)
+ applicable = models.BooleanField(null=True)
# CPE Filtering
@@ -860,6 +918,11 @@ class CveReference(models.Model):
name = models.CharField(max_length=100, null=True)
datasource = models.ForeignKey(DataSource,related_name="source_references", blank=True, null=True,on_delete=models.CASCADE,)
+# Simple lookup table of known recipe names (searchable by recipe_name).
+class RecipeTable(models.Model):
+    search_allowed_fields = ['recipe_name']
+    recipe_name = models.CharField(max_length=50)
+
+
# PRODUCT
class Product(models.Model):
@@ -870,7 +933,7 @@ class Product(models.Model):
name = models.CharField(max_length=40)
version = models.CharField(max_length=40)
profile = models.CharField(max_length=40)
- cpe = models.CharField(max_length=40)
+ cpe = models.CharField(max_length=255)
defect_tags = models.TextField(blank=True, default='')
product_tags = models.TextField(blank=True, default='')
@@ -971,6 +1034,9 @@ class Vulnerability(models.Model):
if self.cve_primary_name:
return "%s (%s)" % (self.name,self.cve_primary_name)
return "%s" % (self.name)
+ @property
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
@staticmethod
def new_vulnerability_name():
# get next vulnerability name atomically
@@ -1320,6 +1386,9 @@ class Investigation(models.Model):
if self.vulnerability and self.vulnerability.cve_primary_name:
return "%s (%s)" % (self.name,self.vulnerability.cve_primary_name.name)
return "%s" % (self.name)
+ @property
+ def get_public_text(self):
+ return 'Public' if self.public else 'Private'
@staticmethod
def new_investigation_name():
current_investigation_index,create = SrtSetting.objects.get_or_create(name='current_investigation_index')
@@ -1492,6 +1561,210 @@ class ErrorLog(models.Model):
def get_severity_text(self):
return ErrorLog.SEVERITY[int(self.severity)][1]
class Job(models.Model):
    """Tracks a background job (an external helper script or an internal
    task) so that its progress, status, and outcome can be shown in the UI.
    """
    # NOTE(review): 'title' has no matching field on this model — confirm
    search_allowed_fields = ['name', 'title', 'description', 'status']

    # Job Status
    NOTSTARTED = 0
    INPROGRESS = 1
    SUCCESS = 2
    ERRORS = 3
    CANCELLING = 4
    CANCELLED = 5
    STATUS = (
        (NOTSTARTED, 'NotStarted'),
        (INPROGRESS, 'InProgress'),
        (SUCCESS, 'Success'),
        (ERRORS, 'Errors'),
        (CANCELLING, 'Cancelling'),
        (CANCELLED, 'Cancelled'),
    )

    # Required
    name = models.CharField(max_length=50,default='')
    description = models.TextField(blank=True)
    command = models.TextField(blank=True)
    log_file = models.TextField(blank=True)
    # Optional
    parent_name = models.CharField(max_length=50,default='')
    options = models.TextField(blank=True)
    user = models.ForeignKey(SrtUser,default=None,null=True,on_delete=models.CASCADE,)
    # Managed
    status = models.IntegerField(choices=STATUS, default=NOTSTARTED)
    pid = models.IntegerField(default=0)
    count = models.IntegerField(default=0)
    max = models.IntegerField(default=0)
    errors = models.IntegerField(default=0)
    warnings = models.IntegerField(default=0)
    refresh = models.IntegerField(default=0)
    message = models.CharField(max_length=50,default='')
    started_on = models.DateTimeField(null=True)
    completed_on = models.DateTimeField(null=True)

    @property
    def get_status_text(self):
        """Human-readable form of the 'status' field."""
        for s_val,s_name in Job.STATUS:
            if s_val == self.status:
                return s_name
        return "?STATUS?"

    @staticmethod
    def get_recent(user=None):
        """
        Return recent jobs as a list: every in-progress job plus the three
        most recently finished ones. If 'user' is a real (non-anonymous)
        user, only that user's jobs are considered.
        """

        if user and not isinstance(user,AnonymousUser):
            jobs = Job.objects.filter(user=user)
        else:
            jobs = Job.objects.all()

        finished_criteria = \
            Q(status=Job.SUCCESS) | \
            Q(status=Job.ERRORS) | \
            Q(status=Job.CANCELLED)

        recent_jobs = list(itertools.chain(
            jobs.filter(status=Job.INPROGRESS).order_by("-started_on"),
            jobs.filter(finished_criteria).order_by("-completed_on")[:3]
        ))

        # add percentage done property to each job; this is used
        # to show job progress in mrj_section.html
        # (fix: annotate the instances actually returned — iterating the
        # 'jobs' queryset creates fresh objects whose annotations are lost)
        for job in recent_jobs:
            job.percentDone = job.completeper()
            job.outcomeText = job.get_status_text

        return recent_jobs

    def completeper(self):
        """Integer percentage complete; 0 while 'max' is not yet known."""
        if self.max > 0:
            return (self.count * 100) // self.max
        return 0

    def eta(self):
        """Estimated completion time, linearly extrapolated from progress."""
        eta = datetime.now()
        completeper = self.completeper()
        # fix: 'completeper' is an int here — it was previously re-called
        # as a function ('completeper()'), raising TypeError
        if completeper > 0:
            eta += ((eta - self.started_on)*(100-completeper))/completeper
        return eta

    @staticmethod
    def start(name,description,command,options='',log_file='logs/run_job.log',job_id=1):
        """Launch 'command' as a detached job via the srtool_job.py runner.

        The runner (not this method) records the pid and timestamps so
        there is no database race condition.
        """
        command = ['bin/common/srtool_job.py','--name',name,'--description',description,'--command',command,'--options',options,'--log',log_file]
        if job_id:
            command.extend(['--job-id',str(job_id)])
        _log("JOB_START:%s" % parameter_join(command))
        execute_process_close_fds(command)

    def cancel(self):
        """Request cancellation of an in-progress job: signal its process
        and mark the record CANCELLING (the runner finalizes via done())."""
        if self.status == Job.INPROGRESS:
            try:
                if self.pid:
                    os.kill(self.pid, signal.SIGTERM) #or signal.SIGKILL
            except Exception as e:
                _log("ERROR_JOB:Cancel:%s" % (e))
            try:
                self.status = Job.CANCELLING
                self.completed_on = datetime.now()
                self.pid = 0
                self.save()
            except Exception as e:
                _log("ERROR_JOB:Cancelled:%s" % (e))

    def done(self):
        """Finalize a running (or cancelling) job record."""
        # A job with no pid was never started (or already finalized)
        if not self.pid:
            return
        if self.status == Job.INPROGRESS:
            self.pid = 0
            self.completed_on = datetime.now()
            self.status = Job.SUCCESS
            ### TODO COUNT ERRORS AND WARNINGS
            self.save()
        elif self.status == Job.CANCELLING:
            self.pid = 0
            self.completed_on = datetime.now()
            self.status = Job.CANCELLED
            self.errors = 1
            self.save()

    @staticmethod
    def preclear_jobs(user=None,user_id=0,user_none=False):
        """Delete a user's completed jobs so pages come up without stale
        finished progress bars. Pass user_none=True to clear ownerless jobs."""
        if (not user_id) and (not user) and (not user_none):
            return
        if user_none:
            user_id = None
        elif not user_id:
            user_id = user.id
        for job in Job.objects.filter(user_id=user_id):
            if job.status in (Job.SUCCESS,Job.ERRORS):
                job.delete()
+
# Wrapper class to run internal 'jobs' with the progress bar
class Job_Local():
    """Drives a Job record (and therefore its UI progress bar) for work
    performed inside the server process rather than by an external script.
    """
    job = None
    log_file_fd = None
    INTERNAL_COMMAND = '<internal>'
    DEFAULT = -1
    DEFAULT_LOG = '.job_log.txt'

    def __init__(self, name, description='', options='', log_file=DEFAULT_LOG, user=None):
        self.job = Job(name=name, description=description, options=options, log_file=log_file, user=user)
        self.job.command = self.INTERNAL_COMMAND
        self.job.started_on = datetime.now(pytz.utc)
        self.job.completed_on = None
        if log_file:
            # The fd stays open for the wrapper's lifetime; done() closes it
            self.log_file_fd = open(self.job.log_file, 'w')
            self.log_file_fd.write(f"JOB_START: {name},{description} @{self.job.started_on}\n" )
        self.job.status = Job.INPROGRESS
        self.job.save()

    # If count == DEFAULT, increment the existing count value
    # If max == DEFAULT, keep the existing max value
    def update(self,message,count=DEFAULT,max=DEFAULT):
        """Advance the progress state and persist it."""
        if count == self.DEFAULT:
            self.job.count += 1
        else:
            self.job.count = count
        if max != self.DEFAULT:
            self.job.max = max
        # Clamp so the progress bar never shows more than 100%
        if self.job.count > self.job.max:
            self.job.count = self.job.max
        self.job.message = message
        # fix: dropped the dead 'True and' from the condition
        if self.log_file_fd:
            self.log_file_fd.write(f"JOB_UPDATE({self.job.message},{self.job.count},{self.job.max})\n")
            self.log_file_fd.flush()
        self.job.save()

    def add_warning(self,msg):
        """Record a warning against the job and append it to the log."""
        self.job.warnings += 1
        self.job.save()
        if self.log_file_fd:
            self.log_file_fd.write("WARNING: " + msg + "\n" )

    def add_error(self,msg):
        """Record an error against the job and append it to the log."""
        self.job.errors += 1
        self.job.save()
        if self.log_file_fd:
            self.log_file_fd.write("ERROR: " + msg + "\n" )

    def done(self,sleep_time=4):
        """Finalize the job; the default pause lets the UI show 100%."""
        if sleep_time:
            time.sleep(sleep_time)
        self.update('Done',self.job.max,self.job.max)
        self.job.completed_on = datetime.now(pytz.utc)
        self.job.status = Job.ERRORS if self.job.errors else Job.SUCCESS
        self.job.save()
        if self.log_file_fd:
            self.log_file_fd.write(f"JOB_STOP: W={self.job.warnings},E={self.job.errors} @{self.job.completed_on}\n" )
            self.log_file_fd.flush()
            self.log_file_fd.close()
            self.log_file_fd = None
+
#
# Database Cache Support
#
diff --git a/lib/srtgui/api.py b/lib/srtgui/api.py
index 761839a8..2478fb9e 100644
--- a/lib/srtgui/api.py
+++ b/lib/srtgui/api.py
@@ -2,6 +2,7 @@
# BitBake Toaster Implementation
#
# Copyright (C) 2016-2018 Intel Corporation
+# Copyright (C) 2018-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -28,6 +29,7 @@ import re
import json
from django.http import JsonResponse
+from django.views.generic import View
logger = logging.getLogger("srt")
@@ -54,38 +56,71 @@ def error_log(severity,description):
error = ErrorLog.objects.create(severity=severity,description=description,)
error.save()
# Quote parameters if spaces
def parameter_join(a):
    """Join the command parameters in 'a' into one display string,
    double-quoting any parameter that contains a space or is empty."""
    # fix: local renamed from 'str', which shadowed the builtin
    parts = []
    for s in a:
        if (' ' in s) or (0 == len(s)):
            parts.append('"%s"' % s)
        else:
            parts.append(s)
    return ' '.join(parts)
+
+
+#
# Sub Process calls
+#
+# Enforce that all scripts run from the SRT_BASE_DIR context
+#
+
def execute_process(*args):
    """Run a command, from the SRT_BASE_DIR context when it is set.

    Each argument must be a string (or stringifiable); list/tuple
    arguments are flattened one level. Falsy arguments are skipped.
    Returns (returncode, stdout, stderr) with the streams decoded
    to str.
    """
    cmd_list = []
    for arg in args:
        if not arg: continue
        if isinstance(arg, (list, tuple)):
            # Flatten one level of nesting
            for a in arg:
                if not a: continue
                cmd_list.append(str(a))
        else:
            cmd_list.append(str(arg))

    # Enforce that scripts run from SRT_BASE_DIR; make relative
    # bin/ invocations absolute so they resolve after the chdir
    # (fix: removed leftover FOOBAR debug logging)
    srt_base_dir = os.environ.get('SRT_BASE_DIR')
    if srt_base_dir and (srt_base_dir != os.getcwd()):
        os.chdir(srt_base_dir)
        if cmd_list[0].startswith('bin/') or cmd_list[0].startswith('./bin'):
            cmd_list[0] = os.path.join(srt_base_dir,cmd_list[0])

    result = subprocess.run(cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return(result.returncode,result.stdout.decode('utf-8'),result.stderr.decode('utf-8'))
+
# For Jobs: fire-and-forget launch (output is NOT captured here; the
# job runner writes its own log file)
def execute_process_close_fds(cmnd):
    base_dir = os.environ.get('SRT_BASE_DIR')
    if base_dir and (os.getcwd() != base_dir):
        os.chdir(base_dir)
        if cmnd[0].startswith(('bin/', './bin')):
            cmnd[0] = os.path.join(base_dir, cmnd[0])
    subprocess.Popen(cmnd, close_fds=True)
+
# For Jobs: run a shell command line via os.system()
def execute_system(cmnd):
    """Run shell command string 'cmnd' from the SRT_BASE_DIR context
    (when set) and return the os.system() exit status."""
    srt_base_dir = os.environ.get('SRT_BASE_DIR')
    if srt_base_dir and (srt_base_dir != os.getcwd()):
        os.chdir(srt_base_dir)
        if cmnd.startswith('bin/') or cmnd.startswith('./bin'):
            # fix: was "srt_base_dir + '/' + cmnd[0]", which kept only
            # the first CHARACTER of the command line
            cmnd = os.path.join(srt_base_dir, cmnd)
    return os.system(cmnd)
#
# Update CVE datasource list: (a) fetch alt sources, (b) refresh preview sources
#
# #### TODO
-def update_cve_datasources(source_filter=''):
+def update_cve_datasources(source_filter='',force_update_source=True):
# Attach all matching CVE sources
_log("Alternate1:%s" % (cve_object.name))
query_set = DataSource.objects.filter(data="cve")
@@ -98,12 +133,13 @@ def update_cve_datasources(source_filter=''):
_log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
# Force update the CVE summary data from sources
- result_returncode,result_stdout,result_stderr = execute_process(
- './bin/nist/srtool_nist.py',
- '--update-cve-list',
- cve_object.name,
- '--force'
- )
+ if force_update_source:
+ result_returncode,result_stdout,result_stderr = execute_process(
+ os.path.join(os.environ.get('SRT_BASE_DIR'),'bin/nist/srtool_nist.py'),
+ '--update-cve-list',
+ cve_object.name,
+ '--force'
+ )
#
# Extract Upstream CVE record details
@@ -123,14 +159,19 @@ def readCveDetails_Upstream(cve, cve_datasource):
v.description = "ERROR(%s):missing lookup command" % (cve_datasource.description)
return v
lookup_command = lookup_command.replace('%command%','--cve-detail=%s' % cve.name)
- result_returncode,result_stdout,result_stderr = execute_process(lookup_command.split(' '))
+ lookup_commands = lookup_command.split(' ')
+ # Convert local SRT bin calls to absolute path calls
+ if not lookup_commands[0].startswith('/'):
+ lookup_commands[0] = os.path.join(os.environ.get('SRT_BASE_DIR', './'),lookup_commands[0])
+ # Execute the call
+ result_returncode,result_stdout,result_stderr = execute_process(*lookup_commands)
#_log("SRT_%s=%s|%s|%s" % (cve_datasource.key,result_returncode,result_stdout,result_stderr))
if 0 != result_returncode:
result_stdout = str(result_stdout)
v.description = "ERROR(%s):%s" % (result_returncode,result_stderr)
return v
- for line in result_stdout.decode("utf-8").splitlines():
+ for line in result_stdout.splitlines():
try:
name = line[:line.index('=')]
value = line[line.index('=')+1:].replace("[EOL]","\n")
@@ -175,7 +216,7 @@ def readCveDetails_Upstream(cve, cve_datasource):
elif name == 'ATTRIBUTES':
# Returned metadata
lookup_attributes = value
- _log("NOTE:readCveDetails_Upstream:%s:%s" % (v.name,v.cvssV2_severity))
+ #_log("NOTE:readCveDetails_Upstream:%s:%s:%s:%s:" % (v.name,v.cvssV2_severity,cve_datasource.description,v.description[:20]))
# Check for metadata special cases
if cve_datasource.LOOKUP_MISSING in lookup_attributes:
@@ -345,7 +386,7 @@ def summaryCveDetails(cve,cve_sources):
# No data sources
if not cve_main:
- return cve_detail,cve_html
+ return readCveDetails_None(cve),cve_html
# Merge the data into summary record
summaryMerge(cve_detail,cve_main,cve_local,cve_html,'description')
@@ -724,3 +765,59 @@ def publishMarkNone(cve_list,date_start,date_stop):
cvehistory = CveHistory(cve=cve, comment=Update.MARK_UNMARK, date=mid_date, author='SRTool')
cvehistory.save()
+
class XhrJobRequest(View):
    """Job control endpoint.

    Entry point: /xhr_jobrequest/<project_id>
    Method: POST

    Args:
        jobCancel = space-separated job ids to cancel
        jobDelete = space-separated job ids to delete

    Returns:
        {"error": "ok"}
        or
        {"error": <error message>}
    """

    def get(self, request, *args, **kwargs):
        return HttpResponse()

    def post(self, request, *args, **kwargs):
        if 'jobCancel' in request.POST:
            for i in request.POST['jobCancel'].strip().split(" "):
                try:
                    job = Job.objects.get(pk=i)
                    job.cancel()
                except Job.DoesNotExist:
                    return error_response('No such job request id %s' % i)

            return error_response('ok')

        if 'jobDelete' in request.POST:
            for i in request.POST['jobDelete'].strip().split(" "):
                try:
                    # fix: removed undefined 'sprint' kwarg (NameError) and
                    # corrected 'state__lte' to the model's 'status' field
                    Job.objects.select_for_update().get(
                        pk=i,
                        status__lte=Job.INPROGRESS).delete()

                except Job.DoesNotExist:
                    pass
            return error_response("ok")

        # Unrecognized request body
        response = HttpResponse()
        response.status_code = 500
        return response
+
+
+
+
diff --git a/lib/srtgui/reports.py b/lib/srtgui/reports.py
index 715c5606..3a7414c6 100644
--- a/lib/srtgui/reports.py
+++ b/lib/srtgui/reports.py
@@ -22,6 +22,10 @@ import os
import logging
from datetime import datetime, timedelta
import csv
+from openpyxl import Workbook
+from openpyxl import load_workbook
+from openpyxl.styles import Border, Side, PatternFill, Font, GradientFill, Alignment
+from openpyxl.utils import get_column_letter
from orm.models import Cve, CveSource, Vulnerability, Investigation, Defect, Product
from orm.models import Package
@@ -34,7 +38,7 @@ from django.db.models import Q
logger = logging.getLogger("srt")
-SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
# quick development/debugging support
@@ -52,6 +56,54 @@ def _log_args(msg, *args, **kwargs):
s += ')'
_log(s)
+###############################################################################
+# Excel/openpyxl common look and feel formatting objects
+#
+
+#pyxl_border_all = Border(left=thin, right=thin, top=thin, bottom=thin) # , outline=True)
+pyxl_thin = Side(border_style="thin")
+pyxl_double = Side(border_style="double")
+pyxl_border_left = Border(left=pyxl_thin)
+pyxl_border_bottom = Border(bottom=pyxl_thin)
+pyxl_border_bottom_left = Border(bottom=pyxl_thin, left=pyxl_thin)
+pyxl_alignment_left = Alignment(horizontal='left')
+pyxl_alignment_right = Alignment(horizontal='right')
+pyxl_alignment_wrap = Alignment(wrap_text=True)
+pyxl_font_bold = Font(bold=True)
+pyxl_font_red = Font(color="A00000",bold=True,size = "13")
+pyxl_font_grn = Font(color="00A000",bold=True,size = "13")
+pyxl_font_blu = Font(color="0000A0",bold=True,size = "13")
+pyxl_font_orn = Font(color="FF6600",bold=True,size = "13")
+pyxl_fill_green = PatternFill(start_color="E0FFF0", end_color="E0FFF0", fill_type = "solid")
+# Warning: the form "PatternFill(bgColor="xxxxxx", fill_type = "solid")" returns black cells
+pyxl_backcolor_red = PatternFill(start_color='FCCDBA', end_color='FCCDBA', fill_type = "solid")
+pyxl_backcolor_orn = PatternFill(start_color='FBEAAB', end_color='FBEAAB', fill_type = "solid")
+pyxl_backcolor_yel = PatternFill(start_color='FCFDC7', end_color='FCFDC7', fill_type = "solid")
+pyxl_backcolor_blu = PatternFill(start_color='C5E2FF', end_color='C5E2FF', fill_type = "solid")
+pyxl_backcolor_grn = PatternFill(start_color='D6EDBD', end_color='D6EDBD', fill_type = "solid")
+pyxl_cve_fills = [pyxl_backcolor_red,pyxl_backcolor_orn,pyxl_backcolor_yel,pyxl_backcolor_blu,None,None,None]
+
def pyxl_write_cell(ws,row_num,column_num,value,border=None,font=None,fill=None,alignment=None):
    """Write 'value' into worksheet cell (row_num, column_num), applying
    any styling objects supplied; errors are reported, not raised.
    Returns the next column number for convenience."""
    cell = ws.cell(row=row_num, column=column_num)
    try:
        cell.value = value
        # Apply styling in the same order as before: fill, alignment,
        # border, font — skipping anything not supplied
        for attr, setting in (('fill', fill), ('alignment', alignment),
                              ('border', border), ('font', font)):
            if setting:
                setattr(cell, attr, setting)
    except Exception as e:
        print("ERROR:(%d,%d):%s" % (row_num,column_num,e))
    return(column_num+1)
+
+###############################################################################
+# Core report support
+#
+
class Report():
def __init__(self, parent_page, *args, **kwargs):
self.parent_page = parent_page
@@ -679,6 +731,8 @@ class CvesReport(Report):
context['report_type_list'] = '\
<option value="summary">CVEs Table</option> \
+ <option value="year_pub_summary">CVE by Year Prefix Summary</option> \
+ <option value="year_summary">CVE by Publish Date Summary</option> \
<option value="cve_defects">CVE to Defects Table</option> \
'
context['report_get_title'] = ''
@@ -692,7 +746,14 @@ class CvesReport(Report):
'
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text (comma delimited)<br> \
- <input type="radio" name="format" value="csv"> CSV (tab delimited)<br> \
+ <input type="radio" name="format" value="csv"> CSV \
+ (Separator: \
+ <select name="csv_separator"> \
+ <option value="comma" checked>Comma</option> \
+ <option value="semi">Semi-colon</option> \
+ <option value="tab">Tab</option> \
+ <br> \
+ </select>) \
'
context['report_custom_list'] = '\
CVE name filter = <input type="text" placeholder="e.g. CVE-2018" name="name_filter" size="40"> <br>\
@@ -877,17 +938,20 @@ class CvesReport(Report):
request_POST = self.request.POST
- range = request_POST.get('range', '')
+ range_rec = request_POST.get('range', '')
columns = request_POST.get('columns', '')
format = request_POST.get('format', '')
title = request_POST.get('title', '')
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
name_filter = request_POST.get('name_filter', '').upper()
+ csv_separator = request_POST.get('csv_separator', 'semi')
report_name = '%s/cves_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
if 'csv' == format:
- delimiter = '\t'
+ delimiter = ';'
+ if csv_separator == 'comma': delimiter = ','
+ if csv_separator == 'tab': delimiter = '\t'
else:
delimiter = ','
@@ -896,14 +960,14 @@ class CvesReport(Report):
quotechar='"', quoting=csv.QUOTE_MINIMAL)
if ('summary' == report_type):
self.print_row_summary(writer,True,"all" == columns,None)
- if 'displayed' == range:
+ if 'displayed' == range_rec:
for id in record_list.split(','):
if not id:
continue
cve = Cve.objects.get(id=id)
if not name_filter or (name_filter in cve.name):
self.print_row_summary(writer,False,"all" == columns,cve)
- elif 'all' == range:
+ elif 'all' == range_rec:
if name_filter:
query = Cve.objects.filter(name__contains=name_filter).order_by('name')
else:
@@ -913,14 +977,14 @@ class CvesReport(Report):
if ('cve_defects' == report_type):
self.print_row_cve_defects(writer,'header',"all" == columns,None,None,None,None)
- if 'displayed' == range:
+ if 'displayed' == range_rec:
for id in record_list.split(','):
if not id:
continue
cve = Cve.objects.get(id=id)
if not name_filter or (name_filter in cve.name):
self.print_row_cve_defects(writer,'cve',"all" == columns,cve,None,None,None)
- elif 'all' == range:
+ elif 'all' == range_rec:
if name_filter:
query = Cve.objects.filter(name__contains=name_filter).order_by('name')
else:
@@ -928,6 +992,114 @@ class CvesReport(Report):
for cve in query:
self.print_row_cve_defects(writer,'line',"all" == columns,cve,None,None,None)
+ if report_type in ['year_summary','year_pub_summary']:
+ columns = ["Year", "CVE_Total", "CVE_HIST", "CVE_NEW", "CVE_RES", "CVE_INV", "CVE_VUL", "CVE_NVUL", "Defect_Total", "DEFECT_HIST", "DEFECT_NEW", "DEFECT_RES", "DEFECT_INV", "DEFECT_VUL", "DEFECT_NVUL","BY_PUBLISH"]
+ for i,column in enumerate(columns):
+ csvfile.write("%s%s" % (columns[i],delimiter))
+ csvfile.write("\n")
+
+ summary = {}
+ YEAR_START = 1999
+ YEAR_STOP = 2020
+ for the_year in range(YEAR_START,YEAR_STOP+1):
+ summary[the_year] = {
+ 'CVE_TOTAL':0,
+ 'CVE_HISTORICAL':0,
+ 'CVE_NEW':0,
+ 'CVE_NEW_RESERVED':0,
+ 'CVE_INVESTIGATE':0,
+ 'CVE_VULNERABLE':0,
+ 'CVE_NOT_VULNERABLE':0,
+ 'DEFECT_TOTAL':0,
+ 'DEFECT_HISTORICAL':0,
+ 'DEFECT_NEW':0,
+ 'DEFECT_NEW_RESERVED':0,
+ 'DEFECT_INVESTIGATE':0,
+ 'DEFECT_VULNERABLE':0,
+ 'DEFECT_NOT_VULNERABLE':0,
+ 'PUBLISH_DATE':0,
+ }
+
+ # Gather histogram on CVE status
+ error_count = 0
+ for cve in Cve.objects.all():
+ # Extract the year created
+ if (report_type == 'year_pub_summary') and (not cve.status in [SRTool.HISTORICAL]) and cve.publishedDate:
+ the_year = cve.publishedDate.split('-')[0]
+ summary[the_year]['PUBLISH_DATE'] += 1
+ else:
+ the_year = cve.name.split('-')[1]
+
+ if (not the_year[0].isdigit()) or (the_year < '1999') or (the_year > '2020'):
+ if 10 > error_count:
+ _log('FOO_CVE_YEARLY:%s,%s' % (cve.name, cve.publishedDate))
+ error_count += 1
+ continue
+ the_year = int(the_year)
+
+ # Register the CVE status
+ summary[the_year]['CVE_TOTAL'] += 1
+ if cve.status in [SRTool.HISTORICAL]:
+ summary[the_year]['CVE_HISTORICAL'] += 1
+ if cve.status in [SRTool.NEW,SRTool.NEW_INACTIVE]:
+ summary[the_year]['CVE_NEW'] += 1
+ if cve.status in [SRTool.NEW_RESERVED]:
+ summary[the_year]['CVE_NEW_RESERVED'] += 1
+ if cve.status in [SRTool.INVESTIGATE,SRTool.INVESTIGATE_INACTIVE]:
+ summary[the_year]['CVE_INVESTIGATE'] += 1
+ if cve.status in [SRTool.VULNERABLE,SRTool.VULNERABLE_INACTIVE]:
+ summary[the_year]['CVE_VULNERABLE'] += 1
+ if cve.status in [SRTool.NOT_VULNERABLE,SRTool.NOT_VULNERABLE_INACTIVE]:
+ summary[the_year]['CVE_NOT_VULNERABLE'] += 1
+
+ # Register the related defects status
+ for cv in cve.cve_to_vulnerability.all():
+ for investigation in cv.vulnerability.vulnerability_investigation.all():
+ for id in investigation.investigation_to_defect.all():
+
+ # Only check defects for current and previously active products
+ if not id.product.get_product_tag('mode') in ['support','develop','eol']:
+ continue
+
+ # Register the defect status
+ summary[the_year]['DEFECT_TOTAL'] += 1
+ if id.defect.srt_status in [SRTool.HISTORICAL]:
+ summary[the_year]['DEFECT_HISTORICAL'] += 1
+ if id.defect.srt_status in [SRTool.NEW,SRTool.NEW_INACTIVE]:
+ summary[the_year]['DEFECT_NEW'] += 1
+ if id.defect.srt_status in [SRTool.NEW_RESERVED]:
+ summary[the_year]['DEFECT_NEW_RESERVED'] += 1
+ if id.defect.srt_status in [SRTool.INVESTIGATE,SRTool.INVESTIGATE_INACTIVE]:
+ summary[the_year]['DEFECT_INVESTIGATE'] += 1
+ if id.defect.srt_status in [SRTool.VULNERABLE,SRTool.VULNERABLE_INACTIVE]:
+ summary[the_year]['DEFECT_VULNERABLE'] += 1
+ if id.defect.srt_status in [SRTool.NOT_VULNERABLE,SRTool.NOT_VULNERABLE_INACTIVE]:
+ summary[the_year]['DEFECT_NOT_VULNERABLE'] += 1
+
+ # Print histogram
+ for the_year in range(YEAR_START,YEAR_STOP+1):
+ csvfile.write("%s%s" % (the_year,delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['CVE_TOTAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_HISTORICAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NEW'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NEW_RESERVED'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_INVESTIGATE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_VULNERABLE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['CVE_NOT_VULNERABLE'],delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_TOTAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_HISTORICAL'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NEW'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NEW_RESERVED'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_INVESTIGATE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_VULNERABLE'],delimiter))
+ csvfile.write("%s%s" % (summary[the_year]['DEFECT_NOT_VULNERABLE'],delimiter))
+
+ csvfile.write("%s%s" % (summary[the_year]['PUBLISH_DATE'],delimiter))
+ csvfile.write("\n")
+
+
return report_name,os.path.basename(report_name)
class SelectCvesReport(Report):
@@ -1002,6 +1174,8 @@ class SelectCvesReport(Report):
file.write("%s%s" % (cve.description,tab))
file.write("\n")
+
+
return report_name,os.path.basename(report_name)
class VulnerabilitiesReport(Report):
@@ -1458,8 +1632,72 @@ class DefectsReport(Report):
return report_name,os.path.basename(report_name)
+#
+# Products Reports
+#
+
product_summary = {}

def scan_product_jira(product):
    # Tally the SRTool severity, Jira priority, and Jira resolution of
    # every defect registered against 'product', and store the totals as
    # a fixed-order list in the module-level 'product_summary' table,
    # keyed by the product's long_name.
    global product_summary

    keys = (
        'critical', 'high', 'medium', 'low',
        'p1', 'p2', 'p3', 'p4', 'px',
        'unresolved', 'resolved', 'fixed',
        'wontfix', 'withdrawn', 'rejected',
    )
    tally = dict.fromkeys(keys, 0)

    for defect in product.product_defect.all():
        # SRTool severity
        if defect.srt_priority == Defect.CRITICAL:
            tally['critical'] += 1
        elif defect.srt_priority == Defect.HIGH:
            tally['high'] += 1
        elif defect.srt_priority == Defect.MEDIUM:
            tally['medium'] += 1
        elif defect.srt_priority == Defect.LOW:
            tally['low'] += 1
        # Jira priority
        if defect.priority == Defect.DEFECT_CRITICAL:
            tally['p1'] += 1
        elif defect.priority == Defect.DEFECT_HIGH:
            tally['p2'] += 1
        elif defect.priority == Defect.DEFECT_MEDIUM:
            tally['p3'] += 1
        elif defect.priority == Defect.DEFECT_LOW:
            tally['p4'] += 1
        # Jira resolution
        if defect.resolution == Defect.DEFECT_UNRESOLVED:
            tally['unresolved'] += 1
        elif defect.resolution == Defect.DEFECT_RESOLVED:
            tally['resolved'] += 1
        elif defect.resolution == Defect.DEFECT_FIXED:
            tally['fixed'] += 1
        elif defect.resolution == Defect.DEFECT_WILL_NOT_FIX:
            tally['wontfix'] += 1
        elif defect.resolution == Defect.DEFECT_WITHDRAWN:
            tally['withdrawn'] += 1
        elif defect.resolution == Defect.DEFECT_REJECTED:
            tally['rejected'] += 1

    # NOTE(review): 'px' is never incremented; it appears to be a
    # placeholder kept for column alignment in the report — confirm
    product_summary[product.long_name] = [tally[k] for k in keys]
+
class ProductsReport(Report):
"""Report for the Products Page"""
+ global product_summary
def __init__(self, parent_page, *args, **kwargs):
_log_args("REPORT_PRODUCTS_INIT(%s)" % parent_page, *args, **kwargs)
@@ -1471,6 +1709,7 @@ class ProductsReport(Report):
context['report_type_list'] = '\
<option value="summary">Products Table</option> \
+ <option value="status_jira">Product Jira Status</option> \
'
context['report_get_title'] = '1'
context['report_recordrange_list'] = '\
@@ -1480,6 +1719,7 @@ class ProductsReport(Report):
context['report_format_list'] = '\
<input type="radio" name="format" value="txt" checked> Text<br> \
<input type="radio" name="format" value="csv"> CSV<br> \
+ <input type="radio" name="excel" value="excel"> Excel<br> \
'
return context
@@ -1495,44 +1735,131 @@ class ProductsReport(Report):
report_type = request_POST.get('report_type', '')
record_list = request_POST.get('record_list', '')
- report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
- with open(report_name, 'w') as file:
+ if 'summary' == report_type:
+ report_name = '%s/products_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ with open(report_name, 'w') as file:
- if 'csv' == format:
- tab = "\t"
- else:
- tab = ","
-
- if ('summary' == report_type):
if 'csv' == format:
- file.write("Name\tVersion\tProfile\tCPE\tSRT SPE\tInvestigations\tDefects\n")
- if 'txt' == format:
- file.write("Report : Products Table\n")
- file.write("\n")
- file.write("Name,Version,Profile,CPE,SRT SPE,Investigations,Defects\n")
-
- for product in Product.objects.all():
- file.write("%s%s" % (product.name,tab))
- file.write("%s%s" % (product.version,tab))
- file.write("%s%s" % (product.profile,tab))
- file.write("%s%s" % (product.cpe,tab))
- file.write("%s%s" % (product.defect_tags,tab))
- file.write("%s%s" % (product.product_tags,tab))
+ tab = "\t"
+ else:
+ tab = ","
- for i,pi in enumerate(product.product_investigation.all()):
- if i > 0:
- file.write(" ")
- file.write("%s" % (pi.name))
- file.write("%s" % tab)
- for i,pd in enumerate(product.product_defect.all()):
- if i > 0:
- file.write(" ")
- file.write("%s" % (pd.name))
- #file.write("%s" % tab)
- file.write("\n")
+ if ('summary' == report_type):
+ if 'csv' == format:
+ file.write("Name\tVersion\tProfile\tCPE\tSRT SPE\tInvestigations\tDefects\n")
+ if 'txt' == format:
+ file.write("Report : Products Table\n")
+ file.write("\n")
+ file.write("Name,Version,Profile,CPE,SRT SPE,Investigations,Defects\n")
+
+ for product in Product.objects.all():
+ file.write("%s%s" % (product.name,tab))
+ file.write("%s%s" % (product.version,tab))
+ file.write("%s%s" % (product.profile,tab))
+ file.write("%s%s" % (product.cpe,tab))
+ file.write("%s%s" % (product.defect_tags,tab))
+ file.write("%s%s" % (product.product_tags,tab))
+
+ if False:
+ for i,pi in enumerate(product.product_investigation.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pi.name))
+ file.write("%s" % tab)
+ for i,pd in enumerate(product.product_defect.all()):
+ if i > 0:
+ file.write(" ")
+ file.write("%s" % (pd.name))
+ #file.write("%s" % tab)
+ file.write("\n")
+ elif 'status_jira' == report_type:
+ def resolution_color(i):
+ if 0 == i: fill = pyxl_backcolor_orn
+ elif 1 == i: fill = pyxl_backcolor_grn
+ elif 2 == i: fill = pyxl_backcolor_grn
+ elif 3 == i: fill = pyxl_backcolor_yel
+ elif 4 == i: fill = pyxl_backcolor_blu
+ elif 5 == i: fill = pyxl_backcolor_blu
+ else: fill = None
+ return(fill)
+
+ for product in Product.objects.all():
+ scan_product_jira(product)
+
+ format = "xlsx"
+ report_name = '%s/products_jira_%s_%s.%s' % (SRT_REPORT_DIR,report_type,datetime.today().strftime('%Y%m%d_%H%M'),format)
+ wb = Workbook()
+ ws = wb.active
+ ws.title = "Product Jira Summary"
+ ws.column_dimensions[get_column_letter(1)].width = 30
+
+ row = 1
+ first_row = 2
+
+ col = 1
+ for header in ('Product','Critical','High','Medium','Low','P1','P2','P3','P4','Unresolved','Resolved','Fixed',"Won't Fix",'Withdrawn','Rejected'):
+ border = pyxl_border_bottom_left if (col in (2,6,10)) else pyxl_border_bottom
+ pyxl_write_cell(ws,row,col,header,border=border)
+ col += 1
+ row += 1
+
+ for product in Product.objects.order_by("order"):
+ key = product.long_name
+ scan_product_jira(product)
+ pyxl_write_cell(ws,row,1,key)
+ # CVE Severity
+ col_excel = 2
+ col_summary = 1
+ for i in range(0,4):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=pyxl_cve_fills[i] if value else None)
+
+ # Jira Priority
+ col_excel = 6
+ col_summary = 5
+ for i in range(0,4):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=pyxl_cve_fills[i] if value else None)
+ # Jira Resolution
+ col_excel = 10
+ col_summary = 9
+ for i in range(0,6):
+ border = pyxl_border_left if (i==0) else None
+ value = product_summary[key][col_summary+i]
+ pyxl_write_cell(ws,row,col_excel+i,value,border=border,fill=resolution_color(i) if value else None)
+ row += 1
+
+ # Sums
+ row -= 1
+ for i in range(1,16):
+ border = pyxl_border_bottom_left if (i in (2,6,10)) else pyxl_border_bottom
+ ws.cell(row=row,column=i).border=border
+ row += 1
+ letters = (' ','A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q')
+ for col_excel in range(2,16):
+ # CVE Severity
+ col_excel = 2
+ for i in range(0,4):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=pyxl_cve_fills[i])
+ # Jira Priority
+ col_excel = 6
+ for i in range(0,4):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=pyxl_cve_fills[i])
+ # Jira Resolution
+ col_excel = 10
+ for i in range(0,6):
+ pyxl_write_cell(ws,row,col_excel+i,'=SUM(%s%d:%s%d)' % (letters[col_excel+i],first_row,letters[col_excel+i],row-1),fill=resolution_color(i))
+
+ wb.save(report_name)
return report_name,os.path.basename(report_name)
+#
+# CVE Reports
+#
+
class PublishCveReport(Report):
"""Report for the Publish Cve Page"""
diff --git a/lib/srtgui/static/js/libtoaster.js b/lib/srtgui/static/js/libtoaster.js
index 6f9b5d0f..b09511a1 100644
--- a/lib/srtgui/static/js/libtoaster.js
+++ b/lib/srtgui/static/js/libtoaster.js
@@ -81,57 +81,20 @@ var libtoaster = (function () {
});
}
- /* startABuild:
- * url: xhr_buildrequest or null for current project
- * targets: an array or space separated list of targets to build
+ /* cancelAJob:
+ * url: xhr_jobrequest url or null for current scrum
+ * jobRequestIds: space separated list of job request ids
* onsuccess: callback for successful execution
* onfail: callback for failed execution
*/
- function _startABuild (url, targets, onsuccess, onfail) {
-
+ function _cancelAJob(url, jobRequestIds, onsuccess, onfail){
if (!url)
- url = libtoaster.ctx.xhrBuildRequestUrl;
-
- /* Flatten the array of targets into a space spearated list */
- if (targets instanceof Array){
- targets = targets.reduce(function(prevV, nextV){
- return prev + ' ' + next;
- });
- }
+ url = libtoaster.ctx.xhrJobRequestUrl;
$.ajax( {
type: "POST",
url: url,
- data: { 'targets' : targets },
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (_data) {
- if (_data.error !== "ok") {
- console.warn(_data.error);
- } else {
- if (onsuccess !== undefined) onsuccess(_data);
- }
- },
- error: function (_data) {
- console.warn("Call failed");
- console.warn(_data);
- if (onfail) onfail(data);
- } });
- }
-
- /* cancelABuild:
- * url: xhr_buildrequest url or null for current project
- * buildRequestIds: space separated list of build request ids
- * onsuccess: callback for successful execution
- * onfail: callback for failed execution
- */
- function _cancelABuild(url, buildRequestIds, onsuccess, onfail){
- if (!url)
- url = libtoaster.ctx.xhrBuildRequestUrl;
-
- $.ajax( {
- type: "POST",
- url: url,
- data: { 'buildCancel': buildRequestIds },
+ data: { 'jobCancel': jobRequestIds },
headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
success: function (_data) {
if (_data.error !== "ok") {
@@ -148,7 +111,7 @@ var libtoaster = (function () {
});
}
- function _getMostRecentBuilds(url, onsuccess, onfail) {
+ function _getMostRecentJobs(url, onsuccess, onfail) {
$.ajax({
url: url,
type: 'GET',
@@ -163,80 +126,6 @@ var libtoaster = (function () {
});
}
- /* Get a project's configuration info */
- function _getProjectInfo(url, onsuccess, onfail){
- $.ajax({
- type: "GET",
- url: url,
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (_data) {
- if (_data.error !== "ok") {
- console.warn(_data.error);
- } else {
- if (onsuccess !== undefined) onsuccess(_data);
- }
- },
- error: function (_data) {
- console.warn(_data);
- if (onfail) onfail(_data);
- }
- });
- }
-
- /* Properties for data can be:
- * layerDel (csv)
- * layerAdd (csv)
- * projectName
- * projectVersion
- * machineName
- */
- function _editCurrentProject(data, onSuccess, onFail){
- $.ajax({
- type: "POST",
- url: libtoaster.ctx.xhrProjectUrl,
- data: data,
- headers: { 'X-CSRFToken' : $.cookie('csrftoken')},
- success: function (data) {
- if (data.error != "ok") {
- console.log(data.error);
- if (onFail !== undefined)
- onFail(data);
- } else {
- if (onSuccess !== undefined)
- onSuccess(data);
- }
- },
- error: function (data) {
- console.log("Call failed");
- console.log(data);
- }
- });
- }
-
- function _getLayerDepsForProject(url, onSuccess, onFail){
- /* Check for dependencies not in the current project */
- $.getJSON(url,
- { format: 'json' },
- function(data) {
- if (data.error != "ok") {
- console.log(data.error);
- if (onFail !== undefined)
- onFail(data);
- } else {
- var deps = {};
- /* Filter out layer dep ids which are in the
- * project already.
- */
- deps.list = data.layerdeps.list.filter(function(layerObj){
- return (data.projectlayers.lastIndexOf(layerObj.id) < 0);
- });
-
- onSuccess(deps);
- }
- }, function() {
- console.log("E: Failed to make request");
- });
- }
/* parses the query string of the current window.location to an object */
function _parseUrlParams() {
@@ -469,13 +358,9 @@ var libtoaster = (function () {
enableAjaxLoadingTimer: _enableAjaxLoadingTimer,
disableAjaxLoadingTimer: _disableAjaxLoadingTimer,
reload_params : reload_params,
- startABuild : _startABuild,
- cancelABuild : _cancelABuild,
- getMostRecentBuilds: _getMostRecentBuilds,
+ cancelAJob : _cancelAJob,
+ getMostRecentJobs: _getMostRecentJobs,
makeTypeahead : _makeTypeahead,
- getProjectInfo: _getProjectInfo,
- getLayerDepsForProject : _getLayerDepsForProject,
- editCurrentProject : _editCurrentProject,
debug: false,
parseUrlParams : _parseUrlParams,
dumpsUrlParams : _dumpsUrlParams,
diff --git a/lib/srtgui/static/js/mrjsection.js b/lib/srtgui/static/js/mrjsection.js
new file mode 100755
index 00000000..800f0e6f
--- /dev/null
+++ b/lib/srtgui/static/js/mrjsection.js
@@ -0,0 +1,131 @@
+
+function mrjSectionInit(ctx){
+ $('#latest-jobs').on('click', '.cancel-job-btn', function(e){
+ e.stopImmediatePropagation();
+ e.preventDefault();
+
+ var url = $(this).data('request-url');
+ var jobReqIds = $(this).data('jobrequest-id');
+
+ libtoaster.cancelAJob(url, jobReqIds, function () {
+ alert("CANCEL JOB");
+ window.location.reload();
+ }, null);
+ });
+
+ // cached version of jobData, so we can determine whether a job has
+ // changed since it was last fetched, and update the DOM appropriately
+ var jobData = {};
+
+ // returns the cached version of this job, or {} if there isn't a cached one
+ function getCached(job) {
+ return jobData[job.id] || {};
+ }
+
+ // returns true if a job's state changed to "Success", "Errors"
+ // or "Cancelled" from some other value
+ function jobFinished(job) {
+ var cached = getCached(job);
+ return cached.state &&
+ cached.state !== job.state &&
+ (job.state == 'Success' || job.state == 'Errors' ||
+ job.state == 'Cancelled');
+ }
+
+ // returns true if the state changed
+ function stateChanged(job) {
+ var cached = getCached(job);
+ return (cached.state !== job.state);
+ }
+
+ // returns true if the tasks_complete_percentage changed
+ function tasksProgressChanged(job) {
+ var cached = getCached(job);
+ var a = cached.tasks_complete_percentage;
+ var b = job.tasks_complete_percentage;
+ var c = cached.tasks_complete_percentage !== job.tasks_complete_percentage;
+ return (cached.tasks_complete_percentage !== job.tasks_complete_percentage);
+ }
+
+ // Auto-refresh 1500 ms AFTER its last successful refresh, to avoid refresh race conditions
+ function refreshMostRecentJobs(){
+ libtoaster.getMostRecentJobs(
+ libtoaster.ctx.mostRecentJobsUrl,
+
+ // success callback
+ function (data) {
+ var job;
+ var tmpl;
+ var container;
+ var selector;
+ var colourClass;
+ var elements;
+
+ for (var i = 0; i < data.length; i++) {
+ job = data[i];
+
+ var jobEle = document.getElementById("job-instance-"+job.id);
+ if (null == jobEle) {
+ // Job's display instance does not exist, so force refresh of page's Job MRU
+ // DISABLE THESE LINES TO Avoid a race condition loop
+// alert("NO JOB");
+ setTimeout(() => { console.log("NO_JOB_YET_DELAY!"); }, 2000);
+ window.location.reload();
+ return;
+ }
+ else if (jobFinished(job)) {
+ // a job finished: reload the whole page so that the job
+ // shows up in the jobs table
+// alert("DONE JOB");
+ window.location.reload();
+ return;
+ }
+ else if (stateChanged(job)) {
+ // update the whole template
+ job.warnings_pluralise = (job.warnings !== 1 ? 's' : '');
+ job.errors_pluralise = (job.errors !== 1 ? 's' : '');
+
+ tmpl = $.templates("#job-template");
+
+ html = $(tmpl.render(job));
+
+ selector = '[data-latest-job-result="' + job.id + '"] ' +
+ '[data-role="job-status-container"]';
+ container = $(selector);
+
+ // initialize bootstrap tooltips in the new HTML
+ html.find('span.glyphicon-question-sign').tooltip();
+
+ container.html(html);
+ }
+ else if (tasksProgressChanged(job)) {
+ // update the task progress text
+ selector = '#job-pc-done-' + job.id;
+ $(selector).html(job.tasks_complete_percentage);
+ selector = '#job-message-done-' + job.id;
+ $(selector).html(job.targets);
+
+ // update the task progress bar
+ selector = '#job-pc-done-bar-' + job.id;
+ $(selector).width(job.tasks_complete_percentage + '%');
+ }
+
+ jobData[job.id] = job;
+ }
+ },
+
+ // fail callback
+ function (data) {
+ console.error(data);
+ }
+ );
+ window.setTimeout(refreshMostRecentJobs, 1500);
+ var msg = "REFRESH:"+Date.now();
+ console.log(msg);
+ }
+
+ // window.setInterval(refreshMostRecentJobs, 1500);
+
+ // Self refresh every 1500 ms
+ refreshMostRecentJobs();
+}
diff --git a/lib/srtgui/static/js/table.js b/lib/srtgui/static/js/table.js
index fd241aa6..9d3030d7 100644
--- a/lib/srtgui/static/js/table.js
+++ b/lib/srtgui/static/js/table.js
@@ -1,11 +1,14 @@
'use strict';
-function tableInit(ctx){
+function tableInit(ctx, SelectedFilterVal = ""){
if (ctx.url.length === 0) {
throw "No url supplied for retreiving data";
}
+ var clearallfilterBtn = $("#clear-all-filter");
+ var lstRemoveCurrent = []
+ let result = ""
var tableChromeDone = false;
var tableTotal = 0;
@@ -32,6 +35,28 @@ function tableInit(ctx){
tableParams.limit = Number(tableParams.limit);
tableParams.page = Number(tableParams.page);
+ if (tableParams.filter != null && SelectedFilterVal != ""){
+ lstFilterval.splice(lstFilterval.indexOf(SelectedFilterVal),1)
+
+ lstRemoveCurrent = tableParams.filter.replace(/%20/g, " ").split(",");
+ lstRemoveCurrent.splice(lstRemoveCurrent.indexOf(SelectedFilterVal),1)
+ if (lstRemoveCurrent.length > 1){
+ tableParams.filter = lstRemoveCurrent.join(",")
+ }
+ else{
+ tableParams.filter = lstRemoveCurrent[0]
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
+ }
+
+ }
+ else if(tableParams.filter != null && SelectedFilterVal == ""){
+ tableParams.filter = null;
+ lstFilterval = [];
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
+ }
+
loadData(tableParams);
// clicking on this set of elements removes the search
@@ -263,10 +288,9 @@ function tableInit(ctx){
filterBtn.prop('id', col.filter_name);
filterBtn.click(filterOpenClicked);
- /* If we're currently being filtered setup the visial indicator */
+ /* If we're currently being filtered setup the visual indicator */
if (tableParams.filter &&
- tableParams.filter.match('^'+col.filter_name)) {
-
+ tableParams.filter.includes(col.filter_name)) {
filterBtnActive(filterBtn, true);
}
header.append(filterBtn);
@@ -310,24 +334,36 @@ function tableInit(ctx){
}
/* Toggles the active state of the filter button */
- function filterBtnActive(filterBtn, active){
+ function filterBtnActive(filterBtn, active, ActiveButton = ""){
+// var clearallfilterBtn = $("#clear-all-filter");
if (active) {
filterBtn.removeClass("btn-link");
filterBtn.addClass("btn-primary");
+ if(lstFilterval.length > 1){
+ clearallfilterBtn.addClass("btn-primary");
+ clearallfilterBtn.tooltip(
+ {
+ html: true,
+ title: '<button class="btn btn-sm btn-primary" onClick=\'$("#clear-filter-btn-'+ ctx.tableName +'").click();\'>Clear filter</button>',
+ placement: 'bottom',
+ delay: {
+ hide: 1500,
+ show: 400,
+ },
+ }
+ );
+ };
+
filterBtn.tooltip({
html: true,
- title: '<button class="btn btn-sm btn-primary" onClick=\'$("#clear-filter-btn-'+ ctx.tableName +'").click();\'>Clear filter</button>',
+ title: '<button class="btn btn-sm btn-primary" onClick=\'ClearFilter("'+ ActiveButton +'").click();\'>Clear filter</button>',
placement: 'bottom',
delay: {
hide: 1500,
show: 400,
},
});
- } else {
- filterBtn.removeClass("btn-primary");
- filterBtn.addClass("btn-link");
- filterBtn.tooltip('destroy');
}
}
@@ -627,16 +663,29 @@ function tableInit(ctx){
return action;
}
+ function table_objToString (obj) {
+ let str = '';
+ for (const [p, val] of Object.entries(obj)) {
+ str += `${p}=${val},`;
+ }
+ return str;
+ }
+
function filterOpenClicked(){
var filterName = $(this).data('filter-name');
/* We need to pass in the current search so that the filter counts take
- * into account the current search term
+ * into account the current search term.
+ *
+ * Also, pass all of the URL params via the tableParams object, in case
+ * the user's table needs custom params for processing.
*/
+
var params = {
'name' : filterName,
'search': tableParams.search,
'cmd': 'filterinfo',
+ 'tableParams': table_objToString(tableParams),
};
$.ajax({
@@ -816,11 +865,20 @@ function tableInit(ctx){
$("#clear-filter-btn-"+ctx.tableName).click(function(e){
e.preventDefault();
- var filterBtn = $("#" + tableParams.filter.split(":")[0]);
- filterBtnActive(filterBtn, false);
-
+ // var filterBtn = $("#" + tableParams.filter.split(":")[0]);
+ //filterBtnActive(filterBtn, false);
+ for(var i = 0, size = lstFilterval.length; i < size ; i++){
+ var item = lstFilterval[i];
+ var filterBtn = $("#" + item.split(":")[0]);
+ filterBtn.tooltip('destroy');
+ filterBtn.removeClass("btn-primary");
+ }
tableParams.filter = null;
+ lstFilterval = [];
loadData(tableParams);
+
+ clearallfilterBtn.tooltip('destroy');
+ clearallfilterBtn.removeClass("btn-primary");
});
$("#filter-modal-form-"+ctx.tableName).submit(function(e){
@@ -834,20 +892,33 @@ function tableInit(ctx){
// checked radio button
var checkedFilter = $(this).find("input[name='filter']:checked");
- tableParams.filter = checkedFilter.val();
+// # True? vvvv FOO
+// tableParams.filter = checkedFilter.val();
// hidden field holding the value for the checked filter
var checkedFilterValue = $(this).find("input[data-value-for='" +
tableParams.filter + "']");
tableParams.filter_value = checkedFilterValue.val();
+ if (lstFilterval.indexOf(checkedFilter.val()) == -1){
+ lstFilterval.push(checkedFilter.val());
+ tableParams.filter = lstFilterval.join(",")
+ }
+ else{
+ tableParams.filter =lstFilterval.join(",")
+ }
+ //tableParams.filter = checkedFilter.val() //lstFilterval
+ // hidden field holding the value for the checked filter
+ // tableParams.filter_value = checkedFilterValue.val();
+ var currentFilterValue = String(lstFilterval.slice(-1))
+
/* All === remove filter */
- if (tableParams.filter.match(":all$")) {
+ if (currentFilterValue.match(":all$")) {
tableParams.filter = null;
tableParams.filter_value = null;
} else {
- var filterBtn = $("#" + tableParams.filter.split(":")[0]);
- filterBtnActive(filterBtn, true);
+ var filterBtn = $("#" + currentFilterValue.split(":")[0]);
+ filterBtnActive(filterBtn, true,currentFilterValue);
}
loadData(tableParams);
diff --git a/lib/srtgui/static/js/typeahead_affected_components.js b/lib/srtgui/static/js/typeahead_affected_components.js
new file mode 100755
index 00000000..d8f5d25e
--- /dev/null
+++ b/lib/srtgui/static/js/typeahead_affected_components.js
@@ -0,0 +1,9 @@
+'use strict';
+
+function autocompInit() {
+ var newComponentInput = $("#input-isvulnerable-components");
+
+ libtoaster.makeTypeahead(newComponentInput,
+ libtoaster.ctx.recipeTypeAheadUrl, {}, function (item) {});
+
+} \ No newline at end of file
diff --git a/lib/srtgui/tables.py b/lib/srtgui/tables.py
index b8ff6f67..dfb0571b 100644
--- a/lib/srtgui/tables.py
+++ b/lib/srtgui/tables.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017 Wind River Systems
+# Copyright (C) 2017-2021 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -19,12 +19,58 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+#
+# NOTICE: Important ToasterTable implementation concepts and limitations
+#
+# 1) The order of table method execution:
+#
+# a) __init__
+# b) get_context_data
+# c) __init__ (second call reason unknown)
+# d) setup_queryset
+# e) setup_filters (if present)
+# f) setup_columns
+# g) apply_row_customization (if present)
+#
+# 2) Named URL path arguments from "urls.py" are accessible via kwargs
+# WARNING: these values not NOT available in "__init__"
+#
+# Example:
+# urls.ps : url(r'^foo/(?P<my_value>\d+)$',
+# tables.py: my_value = int(kwargs['my_value'])
+#
+# 3) Named URL query arguments the table's url are accessible via the request
+#
+# Example:
+# url : http://.../foo/bar/42605?my_value=25
+# tables.py: my_value = self.request.GET.get('my_value','0')
+#
+# 4) The context[] values are NOT present in the "setup_columns" context
+# They must be explicitly implemented into the column data
+#
+# 5) The HTML page's templatetags are NOT present in the "setup_columns" context
+# They must be explicitly added into the template code
+#
+# Example:
+# static_data_template = '''
+# {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+# '''
+#
+# WARNING: because there is no context (#4), you cannot for example use dictionary lookup filters
+# use apply_row_customization() method instead, and set the self.dict_name in setup_columns()
+#
+
+import os
import re
import json
+from datetime import timedelta, datetime
+import traceback
from srtgui.widgets import ToasterTable
+from srtgui.api import execute_process
from orm.models import SRTool
from orm.models import Cve, Vulnerability, Investigation, CweTable, Product
+from orm.models import CveAccess
from orm.models import Package
from orm.models import CpeTable, CpeFilter, Defect, DataSource, SrtSetting
from orm.models import PublishPending
@@ -32,8 +78,11 @@ from orm.models import Notify, NotifyCategories
from orm.models import CveHistory, VulnerabilityHistory, InvestigationHistory, DefectHistory
from orm.models import PublishSet
from orm.models import ErrorLog
+from orm.models import Job
from users.models import UserSafe
+from django.contrib.auth.models import AnonymousUser
+
from django.db.models import Q
from srtgui.tablefilter import TableFilter
@@ -105,7 +154,23 @@ class CvesTable(ToasterTable):
is_recommend.add_action(exec_p3)
self.add_filter(is_recommend)
- def setup_queryset(self, *args, **kwargs):
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter CVEs by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
+
+
+ def orig_setup_queryset(self, *args, **kwargs):
self.queryset = \
Cve.objects.all()
@@ -115,6 +180,27 @@ class CvesTable(ToasterTable):
self.queryset = self.queryset.order_by(self.default_orderby)
+ def setup_queryset(self, *args, **kwargs):
+ _log("FOO_PRIVATE0:%s:" % (self.request.user))
+ if UserSafe.is_admin(self.request.user):
+ self.queryset = Cve.objects.all()
+ else:
+ # Add all public records
+ self.queryset = Cve.objects.filter(public = True)
+ if not isinstance(self.request.user,AnonymousUser):
+ # Add all user accessible private records
+ for cve_private_access in CveAccess.objects.filter(user=self.request.user):
+ cve = cve_private_access.cve
+ _log("FOO_PRIVATE1:%s:%s" % (self.request.user.username,cve.name))
+ private_queryset = Cve.objects.filter(name=cve.name)
+ _log("FOO_PRIVATE2:%s:%s" % (self.request.user.username,cve.name))
+ self.queryset |= private_queryset
+ _log("FOO_PRIVATE3:%s:%s" % (self.request.user.username,cve.name))
+
+ _log("FOO_PRIVATE4")
+ self.queryset = self.queryset.order_by(self.default_orderby)
+ _log("FOO_PRIVATE5")
+
def setup_columns(self, *args, **kwargs):
@@ -145,6 +231,15 @@ class CvesTable(ToasterTable):
static_data_template="{{data.get_status_text}}"
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
score_link_template = '''
{% if 0 == data.recommend %}0{% else %}{{data.recommend}}{% endif %}
'''
@@ -313,7 +408,7 @@ class SelectCveTable(ToasterTable):
return context
- def apply_row_customization(self, row):
+ def apply_row_customization(self, row, **kwargs):
data = super(SelectCveTable, self).apply_row_customization(row)
# data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
@@ -417,11 +512,18 @@ class SelectCveTable(ToasterTable):
def setup_columns(self, *args, **kwargs):
+ self.add_column(title="Id (creation order)",
+ field_name="id",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ )
+
self.add_column(title="Select",
field_name="Select",
hideable=False,
static_data_name="select",
- static_data_template='<input type="checkbox" id="box_{{data.id}}" name="{{data.name}}" />',
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.id}}" name="{{data.name}}" />',
)
self.add_column(title="Status",
@@ -443,7 +545,7 @@ class SelectCveTable(ToasterTable):
)
recommend_link_template = '''
- {% load projecttags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
+ {% load jobtags %}<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.recommend|recommend_display}}</span>
'''
self.add_column(title="Recommendation",
hideable=False,
@@ -497,20 +599,20 @@ class SelectCveTable(ToasterTable):
)
self.add_column(title="publishedDate",
- field_name="publisheddate",
+ field_name="publishedDate",
hideable=True,
hidden=True,
orderable=True,
- static_data_name="publisheddate",
+ static_data_name="publishedDate",
static_data_template='<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.publishedDate}}</span>',
)
self.add_column(title="lastModifiedDate",
- field_name="lastmodifieddate",
+ field_name="lastModifiedDate",
hideable=True,
hidden=True,
orderable=True,
- static_data_name="lastmodifieddate",
+ static_data_name="lastModifiedDate",
static_data_template='<span onclick="toggle_select(\'box_{{data.id}}\');">{{data.lastModifiedDate}}</span>',
)
@@ -602,7 +704,7 @@ class DefectsTable(ToasterTable):
is_srt_priority.add_action(TableFilterActionToggle(
SRTool.SRT_PRIORITY[priority][1].lower().replace(' ','_'),
SRTool.SRT_PRIORITY[priority][1],
- Q(priority=SRTool.SRT_PRIORITY[priority][0]))
+ Q(srt_priority=SRTool.SRT_PRIORITY[priority][0]))
)
self.add_filter(is_srt_priority)
@@ -613,7 +715,7 @@ class DefectsTable(ToasterTable):
is_srt_status.add_action(TableFilterActionToggle(
SRTool.SRT_STATUS[status][1].lower().replace(' ','_'),
SRTool.SRT_STATUS[status][1],
- Q(status=SRTool.SRT_STATUS[status][0]))
+ Q(srt_status=SRTool.SRT_STATUS[status][0]))
)
self.add_filter(is_srt_status)
@@ -624,7 +726,7 @@ class DefectsTable(ToasterTable):
is_srt_outcome.add_action(TableFilterActionToggle(
Defect.SRT_OUTCOME[status][1].lower().replace(' ','_'),
Defect.SRT_OUTCOME[status][1],
- Q(status=Defect.SRT_OUTCOME[status][0]))
+ Q(srt_outcome=Defect.SRT_OUTCOME[status][0]))
)
self.add_filter(is_srt_outcome)
@@ -778,7 +880,7 @@ class DefectsTable(ToasterTable):
orderable=True,
field_name="date_created",
static_data_name="date_created",
- static_data_template='{{date_created}}'
+ static_data_template='{{data.date_created}}'
)
self.add_column(title="Defect Update",
hideable=True,
@@ -1370,6 +1472,22 @@ class VulnerabilitiesTable(ToasterTable):
is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter Vulnerabilities by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
+
+
def setup_queryset(self, *args, **kwargs):
self.queryset = \
Vulnerability.objects.all()
@@ -1393,6 +1511,15 @@ class VulnerabilitiesTable(ToasterTable):
static_data_template=id_link_template,
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
# !!! HACK: 'vc.cve.name' is returning '%s' when it is supposed to be null !!!
cve_link_template = '''
{% for vc in data.vulnerability_to_cve.all %}
@@ -1592,6 +1719,20 @@ class InvestigationsTable(ToasterTable):
is_priority.add_action(exec_is_critical)
self.add_filter(is_priority)
+ # Is Public filter
+ is_public = TableFilter(name="is_public",
+ title="Filter CVEs by 'Public'")
+ exec_public = TableFilterActionToggle(
+ "public",
+ "Public",
+ Q(public=True))
+ exec_private = TableFilterActionToggle(
+ "private",
+ "Private",
+ Q(public=False))
+ is_public.add_action(exec_public)
+ is_public.add_action(exec_private)
+ self.add_filter(is_public)
# Product filter
is_product = TableFilter(name="is_product",
@@ -1627,6 +1768,16 @@ class InvestigationsTable(ToasterTable):
static_data_template=id_link_template,
)
+ self.add_column(title="Public",
+ field_name="public",
+ hideable=True,
+ hidden=True,
+ orderable=True,
+ filter_name="is_public",
+ static_data_name="public",
+ static_data_template="{{data.get_public_text}}"
+ )
+
defect_link_template = '''
{% for ij in data.investigation_to_defect.all %}
{% if not forloop.first %} {% endif %}<a href="{% url 'defect_name' ij.defect.name %}">{{ij.defect.name}} </a>
@@ -1731,6 +1882,7 @@ class SourcesTable(ToasterTable):
def get_context_data(self, **kwargs):
context = super(SourcesTable, self).get_context_data(**kwargs)
+ context['mru'] = Job.get_recent()
return context
def setup_queryset(self, *args, **kwargs):
@@ -1740,6 +1892,20 @@ class SourcesTable(ToasterTable):
self.queryset = self.queryset.order_by(self.default_orderby)
def setup_columns(self, *args, **kwargs):
+ # Get the 'next update' values
+ source_update = {}
+ cmnds = [os.path.join(os.environ.get('SRT_BASE_DIR'),'./bin/common/srtool_update.py'),'--fetch-updates-dhm']
+ result_returncode,result_stdout,result_stderr = execute_process(*cmnds)
+ if 0 != result_returncode:
+ _log("ERROR:FETCH-UPDATES-DHM:%s" % result_stderr)
+ for line in result_stdout.splitlines():
+ try:
+ name = line[:line.index(',')]
+ value = line[line.index(',')+1:].strip()
+ source_update[name] = value
+ except:
+ continue
+ self.source_update = source_update
self.add_column(title="ID",
hideable=True,
@@ -1779,6 +1945,8 @@ class SourcesTable(ToasterTable):
hidden=True,
orderable=False,
field_name="attributes",
+ static_data_name="attributes",
+ static_data_template='''<span id="attr_{{data.id}}">{{data.attributes}}</span>''',
)
self.add_column(title="Description",
@@ -1808,11 +1976,14 @@ class SourcesTable(ToasterTable):
field_name="lookup",
)
+ last_modified_date_template = '{% load jobtags %}'
+ last_modified_date_template += '{{ data.lastModifiedDate|shift_timezone:"%d" }}' % self.request.user.get_timezone_offset
self.add_column(title="Data Modified",
help_text="Last upstream date",
hideable=False,
orderable=True,
- field_name="lastModifiedDate",
+ static_data_name="str_lastModifiedDate",
+ static_data_template=last_modified_date_template,
)
updated_template = '''
@@ -1831,7 +2002,8 @@ class SourcesTable(ToasterTable):
{% if data.update %}{{data.get_frequency_text}}{% else %}({{data.get_frequency_text}}){% endif %}
'''
self.add_column(title="Update Freq.",
- hideable=False,
+ hideable=True,
+ hidden=True,
orderable=True,
field_name="update_frequency",
static_data_name="update_frequency",
@@ -1855,6 +2027,54 @@ class SourcesTable(ToasterTable):
field_name="cve_filter",
)
+# update_now_template = '''
+# {% load jobtags %}<span id="next_{{data.id}}">{{source_update|get_dict_value:data.id}}{% if data.update %}<button class="execute run-update-job" style="float:right;" x-data="{{data.id}}">Now</button>{% endif %}</span>
+# '''
+ update_now_template = '''{{data.id}}|{{data.attributes}}'''
+ self.add_column(title="Update Next (D|H:M:S)",
+ hideable=True,
+ hidden=False,
+ static_data_name="update_now",
+ static_data_template=update_now_template,
+ )
+
+ source_enabled_template='''
+ <input type="checkbox" class="source-enabled" name="source_enabled" x-data="{{data.id}}" {% if "DISABLE " in data.attributes %}checked{% endif %}>
+ <label for="audit_top_artifact"> Disabled</label><br>
+ '''
+ self.add_column(title="Disable",
+ hideable=True,
+ hidden=True,
+ static_data_name="source_enabled",
+ static_data_template=source_enabled_template,
+ )
+
+ def apply_row_customization(self, row):
+ data = super(SourcesTable, self).apply_row_customization(row)
+ def get_key(key,dict):
+ if key in dict:
+ return(dict[key])
+ return ''
+ # {'Severity_V2': '["", "MEDIUM"]', 'Severity_V3': '["", "MEDIUM"]'}
+ for i in range(len(data['rows'])):
+ source_id,attributes = data['rows'][i]['update_now'].split('|')
+ try:
+ update_now_str = self.source_update[source_id]
+ disabled = ("DISABLE " in attributes)
+ hidden_on = 'style="display:none;"' if disabled else ''
+ hidden_off = 'style="display:none;"' if not disabled else ''
+# <span id="next_{{data.id}}">{{source_update|get_dict_value:data.id}}{% if data.update %}<button class="execute run-update-job" style="float:right;" x-data="{{data.id}}">Now</button>{% endif %}</span>
+ update_now = '<span id="next_on_%s" %s>%s' % (source_id,hidden_on,update_now_str)
+ if update_now_str and ('(' != update_now_str[0]):
+ update_now += '<button class="execute run-update-job" style="float:right;" x-data="%s">Now</button>' % source_id
+ update_now += '</span>'
+ update_now += '<span id="next_off_%s" %s>(Disabled)</span>' % (source_id,hidden_off)
+ data['rows'][i]['update_now'] = update_now
+ except Exception as e:
+ _log("ERROR_APPLY_ROW_CUSTOMIZATION:%s" % e)
+ continue
+ return data
+
class SelectPublishTable(ToasterTable):
"""Table of Publishable CVE's in SRTool"""
@@ -1919,8 +2139,7 @@ class SelectPublishTable(ToasterTable):
field_name="Select",
hideable=False,
static_data_name="select",
- static_data_template='<input type="checkbox" name="{{data.name}}" />',
- )
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.id}}" name="{{data.name}}" />', )
self.add_column(title="Status",
field_name="status",
@@ -2154,7 +2373,7 @@ class ErrorLogsTable(ToasterTable):
field_name="Select",
hideable=False,
static_data_name="select",
- static_data_template='<input type="checkbox" value="{{data.pk}}" name="select-notify" />',
+ static_data_template='<input type="checkbox" class="selectbox" id="box_{{data.pk}}" value="{{data.pk}}" name="select-notify" />',
)
self.add_column(title="SRT Created",
@@ -2925,3 +3144,158 @@ class PublishDefectTable(ToasterTable):
orderable=False,
)
+class ManageJobsTable(ToasterTable):
+    """Table of All Jobs """
+
+ def __init__(self, *args, **kwargs):
+ super(ManageJobsTable, self).__init__(*args, **kwargs)
+ _log("MANAGEJOBSTABLE:INIT|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ self.default_orderby = "-started_on"
+# _log("TRACE:%s" % str(traceback.print_stack()))
+
+ def get_context_data(self, **kwargs):
+ context = super(ManageJobsTable, self).get_context_data(**kwargs)
+ _log("MANAGEJOBSTABLE:GET_CONTEXT_DATA|%s|%s|" % (str(self),json.dumps(kwargs) ))
+ context['mru'] = Job.get_recent()
+ context['mrj_type'] = 'all'
+ return context
+
+ def setup_queryset(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_QUERYSET|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ self.queryset = Job.objects.all()
+ self.queryset = self.queryset.order_by(self.default_orderby)
+
+ def setup_filters(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_FILTERS|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+ # Is Status filter
+ is_status = TableFilter(name="is_status",
+ title="Filter Jobs by 'Status'")
+ for status in range(len(Job.STATUS)):
+ is_status.add_action(TableFilterActionToggle(
+ Job.STATUS[status][1].lower().replace(' ','_'),
+ Job.STATUS[status][1],
+ Q(status=Job.STATUS[status][0]))
+ )
+ self.add_filter(is_status)
+
+ def setup_columns(self, *args, **kwargs):
+ _log("MANAGEJOBSTABLE:SETUP_COLUMNS|%s|%s|%s|" % (str(self),','.join(args),json.dumps(kwargs) ))
+
+ # Fetch pid run data
+ pid_table = {}
+ pid_table[0] = 0
+ pid_table['0'] = 0
+ pid_table[''] = 0
+ result_returncode,result_stdout,result_stderr = execute_process(['bin/common/srtool_job.py','--job-pid-status'])
+ for line in result_stdout.splitlines():
+ pid = line[:line.index(':')]
+ value = line[line.index(':')+1:]
+ pid_table[pid] = value
+ self.pid_table = pid_table
+ _log("FOO:%s" % pid_table)
+
+ self.add_column(title="ID",
+ field_name="id",
+ orderable=True,
+ )
+
+ self.add_column(title="Name",
+ field_name="name",
+ orderable=True,
+ )
+
+ self.add_column(title="Status",
+ filter_name="is_status",
+ static_data_name="status",
+ static_data_template='{{data.get_status_text}}',
+ )
+
+ self.add_column(title="Description",
+ field_name="description",
+ hideable=True,
+ )
+
+ self.add_column(title="Command",
+ field_name="command",
+ hideable=True,
+ )
+
+ self.add_column(title="Message",
+ field_name="message",
+ hideable=True,
+ hidden=True,
+ )
+
+ job_pid_template = '''
+ {{data.pid}}
+ '''
+ self.add_column(title="PID",
+ hideable=True,
+ static_data_name="pid",
+ static_data_template=job_pid_template,
+ )
+
+ self.add_column(title="Started_On",
+ field_name="started_on",
+ hideable=True,
+ orderable=True,
+ )
+
+ self.add_column(title="Completed_On",
+ field_name="completed_on",
+ hideable=True,
+ )
+
+ self.add_column(title="Count",
+ field_name="count",
+ hideable=True,
+ hidden=True,
+ )
+ self.add_column(title="Max",
+ field_name="max",
+ hideable=True,
+ hidden=True,
+ )
+ self.add_column(title="Errors",
+ hideable=True,
+ hidden=True,
+ static_data_name="errors",
+ static_data_template='{{data.errors|default:"0"}}',
+ )
+ self.add_column(title="Warnings",
+ hideable=True,
+ hidden=True,
+ static_data_name="warnings",
+ static_data_template='{{data.warnings|default:"0"}}',
+ )
+
+ self.add_column(title="Log_File",
+ field_name="log_file",
+ hideable=True,
+ )
+
+ if UserSafe.is_creator(self.request.user):
+ trash_job_template = '''
+ <span class="glyphicon glyphicon-trash trash-job" x-data="{{data.name}}|{{data.id}}"></span>
+ '''
+ self.add_column(title="Manage",
+ hideable=False,
+ static_data_name="trash_job",
+ static_data_template=trash_job_template,
+ )
+
+ def apply_row_customization(self, row, **kwargs):
+ # data:dict_keys(['rows', 'total', 'default_orderby', 'error', 'columns'])
+ data = super(ManageJobsTable, self).apply_row_customization(row)
+ for i in range(len(data['rows'])):
+ pid = data['rows'][i]['pid'].strip()
+ status = data['rows'][i]['status'].strip()
+ if (pid in self.pid_table) and ('1' == self.pid_table[pid]):
+ if 'Success' == status:
+ data['rows'][i]['pid'] = '%s <span style="color:green"> (Done)</span>' % (pid)
+ else:
+ data['rows'][i]['pid'] = '%s <span style="color:red"> (Dead)</span>' % (pid)
+ else:
+ data['rows'][i]['pid'] = '%s <span style="color:blue"> (Running)</span>' % (pid)
+ return data
+
diff --git a/lib/srtgui/templates/base.html b/lib/srtgui/templates/base.html
index 623dfdd8..8967a104 100644
--- a/lib/srtgui/templates/base.html
+++ b/lib/srtgui/templates/base.html
@@ -1,7 +1,6 @@
<!DOCTYPE html>
{% load static %}
-{% load projecttags %}
-{% load project_url_tag %}
+{% load jobtags %}
<html lang="en">
<head>
<title>
@@ -39,6 +38,9 @@
libtoaster.ctx = {
jsUrl : "{% static 'js/' %}",
htmlUrl : "{% static 'html/' %}",
+ recipeTypeAheadUrl: {% url 'xhr_recipetypeahead' as paturl %}{{paturl|json}},
+ xhrJobRequestUrl: "{% url 'xhr_jobrequest' %}",
+ mostRecentJobsUrl: "{% url 'most_recent_jobs' %}",
};
</script>
{% block extraheadcontent %}
@@ -71,7 +73,7 @@
display: none;
position: absolute;
background-color: #f9f9f9;
- min-width: 160px;
+ min-width: 260px;
overflow: auto;
box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
z-index: 1;
@@ -112,6 +114,11 @@ toggle between hiding and showing the dropdown content */
function myFunction() {
document.getElementById("myDropdown").classList.toggle("show");
}
+/* When the user clicks on the Product,
+toggle between hiding and showing the dropdown content */
+function selectProduct() {
+ document.getElementById("ProductDropdown").classList.toggle("show");
+}
// Close the dropdown if the user clicks outside of it
window.onclick = function(event) {
@@ -158,7 +165,7 @@ window.onclick = function(event) {
<img class="logo" src="{{ srt_logo.1 }}" alt="{{srt_logo.0}}"/>
</a>
{% endif %}
- <a class="brand" href="/">SRTool:Security Response Tool</a>
+ <a class="brand" href="/">SRTool:Security Response Tool {{srt_mode}}</a>
{% if DEBUG %}
<span class="glyphicon glyphicon-info-sign" title="<strong>SRTool version information</strong>" data-content="<dl><dt>Git branch</dt><dd>{{TOASTER_BRANCH}}</dd><dt>Git revision</dt><dd>{{TOASTER_REVISION}}</dd></dl>"></i>
{% endif %}
@@ -168,7 +175,7 @@ window.onclick = function(event) {
<ul class="nav navbar-nav">
<li id="navbar-home" {% if request.resolver_match.url_name == 'landing' %}class="active"{% endif %}>
- <a href="{% url 'landing' %}">
+ <a href="/"> <!--href="{ % url 'landing' % }"> -->
<i class="glyphicon glyphicon-tasks"></i>
Home
</a>
@@ -234,7 +241,7 @@ window.onclick = function(event) {
<div class="dropdown navbar-right">
{% if user.is_authenticated %}
- <button onclick="myFunction()" class="dropbtn ">Hello '{{user.username}}'</button>
+ <button onclick="myFunction()" class="dropbtn ">Hello '{{user.username}}'</button>{% if user.timezone %}({{user.timezone}}){% endif %}
{% else %}
<button onclick="myFunction()" class="dropbtn ">Hello 'Guest' (Login here)</button>
{% endif %}
@@ -247,9 +254,11 @@ window.onclick = function(event) {
<a href="{% url 'password_reset' %}">Reset password</a>
<a href="{% url 'tbd' %}">Request permissions</a>
-->
+ <a href="{% url 'email_admin' %}">Request admin help</a>
{% else %}
<a href="{% url 'login' %}">Login</a>
<a href="{% url 'signup' %}">Request account</a>
+ <a href="{% url 'email_admin' %}">Request admin help</a>
{% endif %}
</div>
</div>
diff --git a/lib/srtgui/templates/basetable_top.html b/lib/srtgui/templates/basetable_top.html
index ce478c05..db9ca7ed 100644
--- a/lib/srtgui/templates/basetable_top.html
+++ b/lib/srtgui/templates/basetable_top.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- component to display a generic table -->
<script>
diff --git a/lib/srtgui/templates/create_vulnerability.html b/lib/srtgui/templates/create_vulnerability.html
index f8e56d24..c62aed2d 100644
--- a/lib/srtgui/templates/create_vulnerability.html
+++ b/lib/srtgui/templates/create_vulnerability.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Create New Vulnerability {% endblock %}
diff --git a/lib/srtgui/templates/cve-edit-local.html b/lib/srtgui/templates/cve-edit-local.html
index 7dde6b68..6c8bcdce 100755
--- a/lib/srtgui/templates/cve-edit-local.html
+++ b/lib/srtgui/templates/cve-edit-local.html
@@ -12,7 +12,7 @@
</div>
<div class="col-md-5">
<div class="well" style="width: 400px;">
- <h3>Quick Info: </h3>
+ <h3>EDIT Quick Info: </h3>
<p/>
<dl class="dl-horizontal">
diff --git a/lib/srtgui/templates/cve-nist-local.html b/lib/srtgui/templates/cve-nist-local.html
index 9c4c454c..3fe16e74 100755
--- a/lib/srtgui/templates/cve-nist-local.html
+++ b/lib/srtgui/templates/cve-nist-local.html
@@ -5,7 +5,7 @@
<div class="col-md-5">
<div>
<h3>Description</h3>
- <textarea rows="9" style="min-width: 100%" class="localblue">{{details.description}}</textarea>
+ <textarea rows="9" readonly style="min-width: 100%" class="localblue">{{details.description}}</textarea>
</div>
<p/>
</div>
@@ -31,7 +31,8 @@
<dd>
{% if object.cve_to_vulnerability.all %}
{% for cv in object.cve_to_vulnerability.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-vulnerability" id="detach_vulnerability_'+{{cv.vulnerability.id}}+'" x-data="{{cv.vulnerability.id}}"></span>
{% endfor %}
{% endif %}
<button class="execute btn btn-info" id="submit-create-vulnerability" style="margin-bottom: 5px; margin-top: 5px;">Create Vulnerability</button>
diff --git a/lib/srtgui/templates/cve-nist.html b/lib/srtgui/templates/cve-nist.html
index 9792865b..0b8cf3f0 100755
--- a/lib/srtgui/templates/cve-nist.html
+++ b/lib/srtgui/templates/cve-nist.html
@@ -1,13 +1,13 @@
<!-- vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv -->
-{% load projecttags %}
+{% load jobtags %}
<!-- Row: Description and Quick Info -->
<div class="row" style="padding-left: 25px;">
<div class="col-md-6">
<h3>Description</h3>
<div>
- <textarea rows="9" style="min-width: 100%" {{cve_html|get_dict_value:'description'}}>{{details.description}}</textarea>
+ <textarea rows="9" readonly style="min-width: 100%" {{cve_html|get_dict_value:'description'}}>{{details.description}}</textarea>
</div>
<p/>
</div>
@@ -33,7 +33,8 @@
<dd>
{% if object.cve_to_vulnerability.all %}
{% for cv in object.cve_to_vulnerability.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'vulnerability' cv.vulnerability.pk %}">{{cv.vulnerability.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-vulnerability" id="detach_vulnerability_'+{{cv.vulnerability.id}}+'" x-data="{{cv.vulnerability.id}}"></span>
{% endfor %}
{% endif %}
<button class="execute btn btn-info" id="submit-create-vulnerability" style="margin-bottom: 5px; margin-top: 5px;">Create Vulnerability</button>
@@ -231,6 +232,7 @@
{% for cpe in details.get_cpe_list %}
{% if not cpe %}
{% elif not cpe.0 %}
+ No CPE configurations
{% elif '[config' in cpe.0 %}
<div style="padding-left: 25px;">
<h4>&bull; Configuration </h3>
diff --git a/lib/srtgui/templates/cve.html b/lib/srtgui/templates/cve.html
index c3cfcac5..e3fe0ca0 100644
--- a/lib/srtgui/templates/cve.html
+++ b/lib/srtgui/templates/cve.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
@@ -29,8 +29,11 @@
<div class="col-md-12">
<div class="page-header build-data">
<span id="cve-name-container">
- <span id="cve-name" class="srt_h1">{{object.name}} {% if not object.public %} <font color="red">[PRIVATE]</font> {% endif %}</span>
- {% if object.is_local and request.user.is_contributor %}<span class="glyphicon glyphicon-edit" id="cve-change-form-toggle"></span>{% endif %}
+ &nbsp;&nbsp;
+ <span id="cve-name" class="srt_h1">{{object.name}}
+ {% if object.is_local and request.user.is_contributor %}&nbsp;&nbsp;<span class="glyphicon glyphicon-edit" id="cve-change-form-toggle"></span>{% endif %}
+ {% if not object.public %}&nbsp;&nbsp;<font color="red" >[PRIVATE]</font> {% endif %}
+ </span>
{% if request.user.is_creator %}
<span style="padding-left:30px;"><button id="select-quickedit" class="btn btn-default" type="button">Edit Status...</button></span>
<span style="padding-left:30px;"><button id="select-notification" class="btn btn-default" type="button">Create Notification ...</button></span>
@@ -41,6 +44,9 @@
<span style="padding-left:30px;"><button id="select-cveedit" class="btn btn-default" type="button">Edit CVE Data ...</button></span>
{% endif %}
<span style="padding-left:30px;"><button id="submit-delete-cve" class="btn btn-default" type="button">Delete CVE</button></span>
+ {% if object.is_local %}
+ <span style="padding-left:30px;"><button id="select-merge-cve" class="btn btn-default" type="button">Merge CVE</button></span>
+ {% endif %}
{% endif %}
</span>
{% if not is_edit %}
@@ -59,13 +65,26 @@
<!-- include SRtool Metadata/Notification -->
{% include "srtool_metadata_include.html" with default_category="CVE" default_url="cve" %}
+<!-- CVE Merge -->
+{% if object.is_local %}
+ <div id="details-cve-merge" style="display:none;padding-left:25px;">
+ <fieldset style="border: 1px solid Blue; background-color:LightBlue; padding-left: 25px; padding-right: 20px;"> <!-- class="fieldset-auto-width" -->
+ <p><p>
+ <button class="btn btn-primary btn-lg" id="submit-merge-cve"> Submit Merge </button>
+ <p>Target CVE: <input type="text" placeholder="CVE Number" id="target-cve-name" size="40" ></p>
+ </fieldset>
+ <p>
+ <p>
+ </div>
+{% endif %}
+
<div class="row">
<div class="col-md-12 tabbable">
<ul class="nav nav-tabs">
- {% for details,state,id,cve_html in cve_list_table %}
+ {% for details,state,id,cve_html,ds_id in cve_list_table %}
<li class="{{state}}">
<a href="#{{id}}" data-toggle="tab">
- {{id}}
+ {{id}}{% if request.user.is_admin %}({{ds_id}}){% endif %}
<span class="glyphicon glyphicon-question-sign get-help" title="{{id}} CVE data"></span>
</a>
</li>
@@ -73,7 +92,7 @@
</ul>
<div class="tab-content">
- {% for details,state,id,cve_html in cve_list_table %}
+ {% for details,state,id,cve_html,ds_id in cve_list_table %}
<div class="tab-pane {{state}}" id="{{id}}">
{% if 'Local' == id %}
@@ -95,6 +114,73 @@
</form>{% csrf_token %}
{% endif %}
+{% if not object.public %}
+ {% if request.user.is_creator %}
+
+ <div class="row" style="padding-left: 25px;">
+ <h3>User Access
+ {% if request.user.is_creator %}
+ <button id="select-adduseraccess" class="btn btn-default" type="button">Add user access ...</button>
+ {% endif %}
+ </h3>
+
+ <div id="details-adduseraccess" style="padding-left: 50px; display:none;">
+ <p><p>
+ <button class="execute" id="submit-adduseraccess"> Submit </button>
+ <div class="row">
+ <p>
+ <div id="all-users" class="scrolling" style="width: 300px;">
+ {% for user in users %}
+ <div class="checkbox">
+ <label>
+ <input class="checkbox-users" name="access-users" value="{{user.pk}}" type="checkbox">{{user.name}}
+ </label>
+ <p>
+ </div>
+ {% endfor %}
+ </div>
+ </div>
+ </div>
+
+ <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>User</th>
+ <th>Manage</th>
+ </tr>
+ </thead>
+
+ {% if object.public %}
+ <tr>
+ <td>All</td>
+ <td>
+ </td>
+ </tr>
+ {% endif %}
+
+ {% if object.cve_users.all %}
+ {% for u in object.cve_users.all %}
+ <tr>
+ <td>{{ u.user.username }}</td>
+ <td>
+ <span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
+ <span class="glyphicon glyphicon-trash trash-useraccess" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
+ </td>
+ </tr>
+ {% endfor %}
+ {% else %}
+ {% if not object.public %}
+ <tr>
+ <td>No users found</td>
+ </tr>
+ {% endif %}
+ {% endif %}
+ </table>
+
+ </div>
+ {% endif %}
+{% endif %}
+
<div class="row" style="padding-left: 25px;">
<h3>History</h3>
@@ -129,6 +215,8 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
<script>
var selected_quickedit=false;
var selected_notifyedit=false;
+ var selected_adduseraccess=false;
+ var selected_mergecve=false;
/* CVE Name change support */
var cveNameForm = $("#cve-name-change-form");
@@ -154,7 +242,7 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
if (('new_name' in data) && (0 == data.new_name.indexOf("url:"))) {
window.location.replace(data.new_name.replace("url:",""));
} else if (('new_name' in data) && ("" != data.new_name)) {
- var new_url = "{% url 'cve' object.name %}".replace("{{object.name}}",data.new_name);
+ var new_url = "{% url 'cve' 123 %}".replace("123",data.new_name);
window.location.replace(new_url);
} else {
location.reload(true);
@@ -190,7 +278,7 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -206,14 +294,31 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
+ var public=$('#select-public-state').val();
var publish_state=$('#select-publish-state').val();
var publish_date=$('#select-publish-date').val();
var acknowledge_date=$('#text-acknowledge-date').val();
var affected_components=$('#text-affected-components').val();
+ /* Double check any public status changes */
+ {% if object.public %}
+ if ("0" == public) {
+ if (! confirm("Are you sure you want to make this CVE and all its children as PRIVATE?")) {
+ return
+ }
+ }
+ {% endif %}
+ {% if not object.public %}
+ if ("1" == public) {
+ if (! confirm("Are you sure you want to make this CVE and all its children as PUBLIC?")) {
+ return
+ }
+ }
+ {% endif %}
postCommitAjaxRequest({
"action" : 'submit-quickedit',
"priority" : priority,
"status" : status,
+ "public" : public,
"note" : note,
"private_note" : private_note,
"tags" : tags,
@@ -301,6 +406,16 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
"vul_name" : $("#vulnerability_name").val(),
});
});
+ $('.detach-vulnerability').click(function() {
+ var result = confirm("Are you sure you want to detach this Vulnerability?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-detach-vulnerability',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
$("#submit-delete-cve").click(function(){
var result = confirm("Are you sure you want to permamently delete '{{object.name}}' and all its related records?");
@@ -311,7 +426,64 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
}
});
+ $('#select-merge-cve').click(function(){
+ if (selected_mergecve) {
+ selected_mergecve=false;
+ $("#details-cve-merge").slideUp();
+ document.getElementById("select-merge-cve").innerText = "Merge CVE";
+ $("#select-merge-cve").removeClass("blueborder");
+ } else {
+ selected_mergecve=true;
+ $("#details-cve-merge").slideDown();
+ document.getElementById("select-merge-cve").innerText = "Close merge CVE";
+ $("#select-merge-cve").addClass("blueborder");
+ document.getElementById("target-cve-name").focus();
+ }
+ });
+ $("#submit-merge-cve").click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-merge-cve',
+ "cve_merge_name" : $("#target-cve-name").val(),
+ });
+ });
+ $('#select-adduseraccess').click(function(){
+ if (selected_adduseraccess) {
+ selected_adduseraccess=false;
+ $("#details-adduseraccess").slideUp();
+ } else {
+ selected_adduseraccess=true;
+ $("#details-adduseraccess").slideDown();
+ }
+ });
+
+ $('#submit-adduseraccess').click(function(){
+ var user_list=[];
+ $('input[name="access-users"]').each(function(){
+ if ($(this).is(':checked')) {
+ user_list.push($(this).prop('value'));
+ }
+ });
+ user_list = user_list.join(",");
+ if ("" == user_list) {
+ alert("No users were selected");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'submit-adduseraccess',
+ "users" : user_list,
+ });
+ });
+
+ $('.trash-useraccess').click(function(){
+ var result = confirm("Are you sure?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashuseraccess',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ })
/* Set the report link */
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
diff --git a/lib/srtgui/templates/cve.html_orig b/lib/srtgui/templates/cve.html_orig
index e5ec7eff..f674c1d3 100755
--- a/lib/srtgui/templates/cve.html_orig
+++ b/lib/srtgui/templates/cve.html_orig
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{cve_list_table.0.0.name}} - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/cves-select-toastertable.html b/lib/srtgui/templates/cves-select-toastertable.html
index d29a2b92..d1b89f34 100644
--- a/lib/srtgui/templates/cves-select-toastertable.html
+++ b/lib/srtgui/templates/cves-select-toastertable.html
@@ -1,5 +1,5 @@
{% extends 'base.html' %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
@@ -9,8 +9,8 @@
<link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
<link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
<link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
- <script src="{% static 'js/jquery-ui.min.js' %}">
- </script>
+ <script src="{% static 'js/jquery-ui.min.js' %}"></script>
+ <script src="{% static 'js/typeahead_affected_components.js' %}"></script>
<script>
// Toggle the row checkbox if any column element is clicked
function toggle_select(toggle_id) {
@@ -140,7 +140,7 @@
<p><b><big>Reason: </big></b>
<input type="text" id="input-isvulnerable-reason" name="reason" size="40">&nbsp;&nbsp;<input id="markPublishIs" type="checkbox">&nbsp;Mark for Publish</input>&nbsp;&nbsp;<input id="markFor" type="checkbox"> Add Keywords to 'For' </input>
<p><b><big>Affected Components: </big></b>
- <input type="text" id="input-isvulnerable-components" name="components" size="40"> (e.g. space-separated list of packages, recipes, sub-system list, applications, )
+ <input type="text" id="input-isvulnerable-components" name="components" size="40" autocomplete="off"> (e.g. space-separated list of packages, recipes, sub-system list, applications, )
<div id="published-date-list">
<p><i>Acknowledge Date</i> =
@@ -207,7 +207,7 @@
</div>
<!-- Javascript support -->
- <script>
+ <script type="text/javascript">
//# sourceURL=somename.js
@@ -217,9 +217,19 @@
var selected_investigate=false;
var selected_other=false;
var cve_total=0;
+ var lastChecked = null;
+ var $selectboxes = null;
$(document).ready(function() {
+ // Init the recipe typeahead
+ try {
+ autocompInit();
+ } catch (e) {
+ document.write("Sorry, An error has occurred initiating the autocomplete feature");
+ console.warn(e);
+ }
+
function onCommitAjaxSuccess(data, textstatus) {
if (window.console && window.console.log) {
console.log("XHR returned:", data, "(" + textstatus + ")");
@@ -536,6 +546,21 @@
});
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+ /* Enable shift-select ranges */
+ $selectboxes = $('.selectbox');
+ $selectboxes.click(function(e) {
+ if (!lastChecked) {
+ lastChecked = this;
+ return;
+ }
+ if (e.shiftKey) {
+ var start = $selectboxes.index(this);
+ var end = $selectboxes.index(lastChecked);
+ $selectboxes.slice(Math.min(start,end), Math.max(start,end)+ 1).prop('checked', lastChecked.checked);
+ }
+ lastChecked = this;
+ });
+
});
});
</script>
diff --git a/lib/srtgui/templates/cves-toastertable.html b/lib/srtgui/templates/cves-toastertable.html
index 45dce261..1f6548f5 100644
--- a/lib/srtgui/templates/cves-toastertable.html
+++ b/lib/srtgui/templates/cves-toastertable.html
@@ -47,7 +47,7 @@
$("#table-loading").slideDown();
tableElt.on("table-done", function (e, total, tableParams) {
- var title = "All CVE's";
+ var title = "All CVE's (" + total + ")";
if (tableParams.search || tableParams.filter) {
if (total === 0) {
diff --git a/lib/srtgui/templates/date-time-test.html b/lib/srtgui/templates/date-time-test.html
new file mode 100755
index 00000000..d123f79c
--- /dev/null
+++ b/lib/srtgui/templates/date-time-test.html
@@ -0,0 +1,88 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} DateTime Test {% endblock %}
+{% block pagecontent %}
+
+ <div class="col-md-5">
+ <b>DateTime Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>UTC Current Time:</dt>
+ <dd>{{current_utc}}</dd>
+ <dt>Alameda Current Time:</dt>
+ <dd>{{current_ala}}</dd>
+ <dt>Your Current Time:</dt>
+ <dd>{{current_local}}</dd>
+ <dt>Datetime Shift</dt>
+ <dd>"2021-05-10 13:38:22"|shift_timezone:"{{user.get_timezone_offset}}" => {{ "2021-05-10 13:38:22"|shift_timezone:user.get_timezone_offset }},{{user.get_timezone_offset}}</dd>
+ </dl>
+ </div>
+ </div>
+ <p><i>Timezone</i> = {{user_timezone}}</p>
+
+ <fieldset style="border: 1px solid Blue; background-color:LightBlue; padding-left: 25px; padding-right: 20px;">
+ <button class="execute btn btn-primary btn-lg" id="submit-timezone"> Submit Changes </button>
+
+<select name="timezone" id="select-timezone">
+
+ {% for tz in timezone_list %}
+
+ <option value="{{tz}}" {% if user_timezone == tz %}selected{% endif %}>{{tz}}</option>
+
+ {% endfor %}
+
+</select>
+</fieldset>
+
+<script type="text/javascript">
+
+ $(document).ready(function() {
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_date_time_test' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+ $('#submit-timezone').click(function(){
+ var timezone=$('#select-timezone').val();
+
+ /* Double check any public status changes */
+ postCommitAjaxRequest({
+ "action" : 'submit-timezone',
+ "timezone" : timezone,
+ });
+ });
+
+ });
+
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/defect.html b/lib/srtgui/templates/defect.html
index 2cae9514..b1ccae31 100644
--- a/lib/srtgui/templates/defect.html
+++ b/lib/srtgui/templates/defect.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/detail_sorted_header.html b/lib/srtgui/templates/detail_sorted_header.html
index 4434df43..6554df2e 100644
--- a/lib/srtgui/templates/detail_sorted_header.html
+++ b/lib/srtgui/templates/detail_sorted_header.html
@@ -4,7 +4,7 @@
Must be followed by <tbody>...</tbody></table>.
Requires tablecols setup column fields dclass, clclass, qhelp, orderfield.
{% endcomment %}
-{% load projecttags %}
+{% load jobtags %}
{# <table class="table table-bordered table-hover tablesorter" id="otable"> #}
<thead>
<!-- Table header row; generated from "tablecols" entry in the context dict -->
diff --git a/lib/srtgui/templates/email_admin.html b/lib/srtgui/templates/email_admin.html
new file mode 100755
index 00000000..7c5f0fda
--- /dev/null
+++ b/lib/srtgui/templates/email_admin.html
@@ -0,0 +1,70 @@
+{% extends "base.html" %}
+
+{% load jobtags %}
+
+{% block title %} Admin Help - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Admin Help</li>
+ </ul>
+ </div>
+</div>
+
+<!-- Begin container -->
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header build-data">
+ <h1>Admin Help</h1>
+ Send an email to the SRTool Admin staff for assistance.
+ </div>
+ </div>
+</div>
+
+<form method="post">
+{% csrf_token %}
+
+ {% if error_message %} <h3 style="color:red">{{error_message}}</h3><br> {% endif %}
+
+ <button class="btn btn-primary btn-lg" name="action" value="submit">Submit Request</button>
+ <button class="btn btn-primary btn-lg" name="action" value="cancel">Cancel</button>
+ <br>
+
+ <h3>Request:
+ <select name="request-type">
+ <option value="Request account verification">Request account verification</option>
+ <option value="Request password reset">Request password reset</option>
+ <option value="Request new group name">Request new group name</option>
+ <option value="Request new repo source name">Request new repo source name</option>
+ <option value="Request 'Contributor' status">Request 'Contributor' status</option>
+ <option value="Request 'Creator' status">Request 'Creator' status</option>
+ <option value="Request general Help">Request general help</option>
+ </select>
+    </h3><br>
+
+ <p>Your name: <input type="text" placeholder="your name" name="user-name" size="80" value="{% if request.user.user_fullname %}{{request.user.user_fullname}}{% endif %}"></p>
+    <p>Your email: <input type="text" placeholder="your email" name="user-email" size="80" value="{% if request.user.user_fullname %}{{request.user.email}}{% endif %}"></p>
+
+    <h3>Message text:</h3>
+ <textarea rows="9" style="min-width: 50%" name="message"></textarea>
+
+<hr>
+
+</form>
+
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function() {
+
+ });
+</script>
+
+
+{% endblock %}
diff --git a/lib/srtgui/templates/email_success.html b/lib/srtgui/templates/email_success.html
new file mode 100755
index 00000000..baa44163
--- /dev/null
+++ b/lib/srtgui/templates/email_success.html
@@ -0,0 +1,49 @@
+{% extends "base.html" %}
+
+{% load jobtags %}
+
+{% block title %} Admin Help - SRTool {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Admin Help</li>
+ </ul>
+ </div>
+</div>
+
+<!-- Begin container -->
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header build-data">
+ <h1>Admin Success</h1>
+ Email sent to the SRTool Admin staff for assistance.
+ </div>
+ </div>
+</div>
+
+<form method="post">
+{% csrf_token %}
+
+ <h3> Email successfully sent!</h3>
+ <br>
+ <button class="btn btn-primary btn-lg" name="action" value="close">Close</button>
+ <br>
+
+</form>
+
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function() {
+
+ });
+</script>
+
+
+{% endblock %}
diff --git a/lib/srtgui/templates/errorlog-toastertable.html b/lib/srtgui/templates/errorlog-toastertable.html
new file mode 100755
index 00000000..91cf8d55
--- /dev/null
+++ b/lib/srtgui/templates/errorlog-toastertable.html
@@ -0,0 +1,142 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Error Logs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li>Error Logs</li>
+ </ul>
+ </div>
+</div>
+
+<div > <!--class="form-inline" -->
+ <b><big>Actions: </big></b>
+ <button id="delete-notification" class="btn btn-default" type="button">Delete Selected</button>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {# xhr_table_url is just the current url so leave it blank #}
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+ <!-- Javascript support -->
+ <script type="text/javascript">
+ var selected_notifyedit=false;
+ var lastChecked = null;
+ var $selectboxes = null;
+
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+        var title = "Error Logs";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+ title = "No Error Logs found";
+ }
+ else if (total > 0) {
+ title = total + " Error" + (total > 1 ? 's' : '') + " found";
+ }
+ }
+
+ titleElt.text(title);
+
+ /* Enable shift-select ranges */
+ $selectboxes = $('.selectbox');
+ $selectboxes.click(function(e) {
+ if (!lastChecked) {
+ lastChecked = this;
+ return;
+ }
+ if (e.shiftKey) {
+ var start = $selectboxes.index(this);
+ var end = $selectboxes.index(lastChecked);
+ $selectboxes.slice(Math.min(start,end), Math.max(start,end)+ 1).prop('checked', lastChecked.checked);
+ }
+ lastChecked = this;
+ });
+
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ } else if (('results_msg' in data) && ("" != data.results_msg)) {
+ alert("Results: " + data.results_msg);
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_errorlogs'%}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ })
+ }
+
+ $('#delete-notification').click(function(){
+ log_list = [];
+ $('#errorlogstable input').each(function(){
+ if ($(this).is(':checked')) {
+ log_list.push($(this).prop('value'));
+ }
+ });
+ log_list = log_list.join(",");
+ if ("" == log_list) {
+ alert("No Error Logs were selected");
+ return;
+ }
+ if ("" != log_list) {
+ postCommitAjaxRequest({
+ "action" : 'delete-errorlogs',
+ "log_list" : log_list,
+ });
+ }
+ });
+
+    }); // $(document).ready()
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/export.html b/lib/srtgui/templates/export.html
index 8b2309ca..82f48016 100644
--- a/lib/srtgui/templates/export.html
+++ b/lib/srtgui/templates/export.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Export Report {% endblock %}
diff --git a/lib/srtgui/templates/filtersnippet.html b/lib/srtgui/templates/filtersnippet.html
index 1286ca31..eb835c1a 100644
--- a/lib/srtgui/templates/filtersnippet.html
+++ b/lib/srtgui/templates/filtersnippet.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- '{{f.class}}' filter -->
{% with f.class as key %}
diff --git a/lib/srtgui/templates/generic-toastertable-page.html b/lib/srtgui/templates/generic-toastertable-page.html
index b3eabe1a..cecfacc9 100644
--- a/lib/srtgui/templates/generic-toastertable-page.html
+++ b/lib/srtgui/templates/generic-toastertable-page.html
@@ -1,5 +1,5 @@
{% extends "baseprojectpage.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
diff --git a/lib/srtgui/templates/guided_tour.html b/lib/srtgui/templates/guided_tour.html
index bdc8987e..68c114ef 100644
--- a/lib/srtgui/templates/guided_tour.html
+++ b/lib/srtgui/templates/guided_tour.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Guided Tour of SRTool {% endblock %}
diff --git a/lib/srtgui/templates/investigation.html b/lib/srtgui/templates/investigation.html
index c2bf92d7..bd974796 100644
--- a/lib/srtgui/templates/investigation.html
+++ b/lib/srtgui/templates/investigation.html
@@ -29,7 +29,7 @@
</style>
{% endblock %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
@@ -107,21 +107,32 @@
</div>
<div class="row">
<div class="column1">
- <p><b><label id="priority">Set Priority:</label></b>
+ <p><b><label id="priority">Set Priority ({{object.priority}}):</label></b>
+
<div id="priority-list" class="scrolling" style="width: 120px;">
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="4" type="checkbox"> P1
- </label><p></div>
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="3" type="checkbox" checked="yes"> P2
- </label><p></div>
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="2" type="checkbox"> P3
- </label><p></div>
- <div class="checkbox"> <label>
- <input type="radio" name="priority" value="1" type="checkbox"> P4
- </label><p></div>
+ <div>
+ <label for="priority_p4">P4</label>
+ <input type="radio" id="priority_p4" name="priority_defect" value="1"
+ {% if 1 == object.priority %}checked{% endif %}
+ {% if 0 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p3">P3</label>
+ <input type="radio" id="priority_p3" name="priority_defect" value="2"
+ {% if 2 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p2">P2</label>
+ <input type="radio" id="priority_p2" name="priority_defect" value="3"
+ {% if 3 == object.priority %}checked{% endif %}>
+ </div>
+ <div>
+ <label for="priority_p1">P1</label>
+ <input type="radio" id="priority_p1" name="priority_defect" value="4"
+ {% if 4 == object.priority %}checked{% endif %}>
+ </div>
</div>
+
</div>
<div class="column2">
<p><b><label id="components">Set Components:</label></b>
@@ -387,9 +398,11 @@
<thead>
<tr>
<th>User</th>
+<!--
{% if request.user.is_creator %}
<th>Manage</th>
{% endif %}
+-->
</tr>
</thead>
@@ -404,7 +417,8 @@
{% if object.investigation_users.all %}
{% for u in object.investigation_users.all %}
<tr>
- <td>{{ u.user.name }}</td>
+ <td>{{ u.user.username }}</td>
+<!--
{% if request.user.is_creator %}
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
@@ -413,6 +427,7 @@
</td>
{% endif %}
</tr>
+-->
{% endfor %}
{% else %}
{% if not object.public %}
@@ -630,7 +645,7 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
}
});
- $('.submit-downloadattac hment').click(function() {
+ $('.submit-downloadattachment').click(function() {
$("#downloadbanner-"+this.getAttribute("x-data")).submit();
});
@@ -735,7 +750,7 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -766,13 +781,13 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
selected_notifyedit=false;
$("#details-notify-edit").slideUp();
$("#display-status").slideDown();
- document.getElementById("select-notification").innerText = "Create Notification ...";
+ document.getElementById("select-notification").innerText = "Create notification ...";
$("#select-notification").removeClass("blueborder");
} else {
selected_notifyedit=true;
$("#display-status").slideUp();
$("#details-notify-edit").slideDown();
- document.getElementById("select-notification").innerText = "Close notification";
+ document.getElementById("select-notification").innerText = "Cancel notification";
$("#select-notification").addClass("blueborder");
document.getElementById("select-category-notify").focus();
}
diff --git a/lib/srtgui/templates/joblog.html b/lib/srtgui/templates/joblog.html
new file mode 100755
index 00000000..1e4abca0
--- /dev/null
+++ b/lib/srtgui/templates/joblog.html
@@ -0,0 +1,39 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Job Log {% endblock %}
+{% block pagecontent %}
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Job Log: {{object.name}} : {{log_date}}
+ <form id="downloadbanner-log" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" id="action" name="action" value="download-job-log">
+ <input type="hidden" id="report_path" name="report_path" value="JOBLOG">
+ <span class="glyphicon glyphicon-download-alt submit-download-log" x-data="log"></span>
+ </form>
+ </h1>
+ </div>
+</div>
+
+<div class="row" style="padding-left: 25px;">
+    <textarea id="log-text" readonly placeholder="Job log" cols="120" rows="30" style="background-color: #cccccc;">{{ log_text }}</textarea>
+</div>
+
+
+<!-- Javascript support -->
+<script>
+ $(document).ready(function() {
+
+ $('.submit-download-log').click(function() {
+ $("#downloadbanner-log").submit();
+ });
+
+ /* Set the report link */
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
+ });
+</script>
+
+{% endblock %}
diff --git a/lib/srtgui/templates/js-unit-tests.html b/lib/srtgui/templates/js-unit-tests.html
index ca248962..6ebca39f 100644
--- a/lib/srtgui/templates/js-unit-tests.html
+++ b/lib/srtgui/templates/js-unit-tests.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
{% block pagecontent %}
diff --git a/lib/srtgui/templates/landing.html b/lib/srtgui/templates/landing.html
index 67c61b1e..f0e4f13d 100644
--- a/lib/srtgui/templates/landing.html
+++ b/lib/srtgui/templates/landing.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Welcome to SRTool{% endblock %}
@@ -9,7 +9,7 @@
<div class="row">
<div class="col-md-7" style="padding-left: 50px;">
<h1>Security Response Tool (SRTool)</h1>
- <p>A web interface to SRTool CVE investigations</p>
+ <p>A web interface to SRTool CVE investigations ({{this_landing}})</p>
</div>
</div>
<div class="row">
@@ -67,6 +67,13 @@
<td>SRTool Products<td>
</tr>
+ {% for ext_url,ext_title,ext_description in landing_extensions_table %}
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url ext_url %}">{{ext_title}}</a></td>
+ <td>{{ext_description}}<td>
+ </tr>
+ {% endfor %}
+
</table>
</div>
diff --git a/lib/srtgui/templates/landing_not_managed.html b/lib/srtgui/templates/landing_not_managed.html
index baa4b72c..25e7f713 100644
--- a/lib/srtgui/templates/landing_not_managed.html
+++ b/lib/srtgui/templates/landing_not_managed.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Welcome to Toaster {% endblock %}
diff --git a/lib/srtgui/templates/login.html b/lib/srtgui/templates/login.html
index 96fb6fe1..49d4ab30 100644
--- a/lib/srtgui/templates/login.html
+++ b/lib/srtgui/templates/login.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Login Page {% endblock %}
diff --git a/lib/srtgui/templates/maintenance.html b/lib/srtgui/templates/maintenance.html
index a0bb1845..c35d6961 100755
--- a/lib/srtgui/templates/maintenance.html
+++ b/lib/srtgui/templates/maintenance.html
@@ -1,82 +1,216 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Maintenance tools {% endblock %}
{% block pagecontent %}
- <div class="row">
- <div class="col-md-7" style="padding-left: 50px;">
- <h1>Maintenance</h1>
- </div>
- </div>
- <div class="row">
- <div class="jumbotron well-transparent">
-
- <div class="col-md-6">
- <div>
- <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
- <thead>
- <tr>
- <th>Action</th>
- <th>Description</th>
- </tr>
- </thead>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'error_logs' %}">Error Logs</a></td>
- <td>Examine Error Logs ({{errorlog_total}})</td>
- </tr>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'history_cve' %}">History CVE</a></td>
- <td>Examine History for CVEs</td>
- </tr>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'history_vulnerability' %}">History Vulnerabilities</a></td>
- <td>Examine History for Vulnerabilities</td>
- </tr>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'history_investigation' %}">History Investigations</a></td>
- <td>Examine History for Investigations</td>
- </tr>
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'history_defect' %}">History Defects</a></td>
- <td>Examine History for Defects</td>
- </tr>
-
- </table>
- </div>
-
- </div>
-
- <div class="col-md-5">
- <b>Quick Info</b>
- <div class="well">
- <dl class="dl-horizontal">
-
- <dt>CVE History: Total Count =</dt>
- <dd>
- {{history_cve_total}}
- </dd>
- <dt>Vulnerability History: Total Count =</dt>
- <dd>
- {{history_vulnerability_total}}
- </dd>
- <dt>Investigation: Total Count =</dt>
- <dd>
- {{history_investigation_total}}
- </dd>
- <dt>Defect: Total Count =</dt>
- <dd>
- {{defect_investigation_total}}
- </dd>
-
- </dl>
- </div>
- </div>
-
- </div>
- </div>
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+            <li>Maintenance</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Maintenance</h1>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+<div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'error_logs' %}">Error Logs</a></td>
+ <td>Examine Error Logs ({{errorlog_total}})</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_cve' %}">History CVE</a></td>
+ <td>Examine History for CVEs</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_vulnerability' %}">History Vulnerabilities</a></td>
+ <td>Examine History for Vulnerabilities</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_investigation' %}">History Investigations</a></td>
+ <td>Examine History for Investigations</td>
+ </tr>
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'history_defect' %}">History Defects</a></td>
+ <td>Examine History for Defects</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_jobs' 77 %}">Manage Jobs</a></td>
+ <td>Manage the Jobs table</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-clearjobs"> Clear Jobs</button>
+ <td>Clear the Jobs table of all entries</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-testjob1"> Test Job #1 Progress </button>
+ <td>Test job progress bars support with default job #1</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-testjob1-2"> Test Job #2 Progress </button>
+ <td>Test job progress bars support with job #2</td>
+ </tr>
+
+ <tr>
+ <td>
+ <button class="execute" id="submit-parent-child"> Test Parent/Child Jobs</button>
+ <td>Test Progress using parent and child jobs</td>
+ </tr>
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+
+ <dt>CVE History: Total Count =</dt>
+ <dd>
+ {{history_cve_total}}
+ </dd>
+ <dt>Vulnerability History: Total Count =</dt>
+ <dd>
+ {{history_vulnerability_total}}
+ </dd>
+ <dt>Investigation: Total Count =</dt>
+ <dd>
+ {{history_investigation_total}}
+ </dd>
+ <dt>Defect: Total Count =</dt>
+ <dd>
+ {{defect_investigation_total}}
+ </dd>
+
+ </dl>
+ </div>
+
+ <form method="post"> {% csrf_token %}
+ <b>Remote Backup Path
+ <button class="execute btn btn-primary">Update</button> <!-- btn-lg -->
+ </b>
+ <div class="well">
+ <input type="hidden" name="action" value="submit-remote-backup-path">
+ Path = <input type="text" placeholder="remote backup path" name="text-remote-backup-path" size="60" value="{{remote_backup_path}}">
+ </div>
+        </form>
+ </div>
+
+</div>
+</div>
+
+
+<script type="text/javascript">
+
+ $(document).ready(function() {
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url: url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+
+ $('#submit-clearjobs').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-clearjobs',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-testjob1').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob',
+ "command" : 'SELFTEST',
+ "name" : 'Basic self test',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-testjob1-2').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob-j2',
+ "command" : 'SELFTEST',
+ "name" : 'Basic self test',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+ $('#submit-parent-child').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-testjob-parent',
+ "command" : 'PARENTTEST',
+ "name" : 'Test Parent/Children',
+ },"{% url 'xhr_job_post' %}");
+ });
+
+
+ /* Set the report link */
+ $('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}");
+ });
+</script>
{% endblock %}
diff --git a/lib/srtgui/templates/manage-jobs-toastertable.html b/lib/srtgui/templates/manage-jobs-toastertable.html
new file mode 100755
index 00000000..34e89c57
--- /dev/null
+++ b/lib/srtgui/templates/manage-jobs-toastertable.html
@@ -0,0 +1,126 @@
+{% extends 'base.html' %}
+{% load static %}
+
+{% block extraheadcontent %}
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.structure.min.css' %}" type='text/css'>
+ <link rel="stylesheet" href="{% static 'css/jquery-ui.theme.min.css' %}" type='text/css'>
+ <script src="{% static 'js/jquery-ui.min.js' %}">
+ </script>
+{% endblock %}
+
+{% block title %} Manage Jobs {% endblock %}
+
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'manage' %}">Management</a></li><span class="divider">&rarr;</span>
+ <li><a href="{% url 'maintenance' %}">Maintenance</a></li><span class="divider">&rarr;</span>
+            <li>Manage Jobs</li>
+ </ul>
+ </div>
+</div>
+
+<p><b><big>Actions: </big></b>
+<a class="btn btn-default navbar-btn " id="submit-clearjobs" href="">Clear Jobs</a>
+
+<div class="row">
+ <div class="col-md-12">
+ <div class="page-header">
+ <h1 class="top-air" data-role="page-title"></h1>
+ </div>
+
+ {% url '' as xhr_table_url %}
+ {% include 'toastertable.html' %}
+ </div>
+</div>
+
+<!-- Javascript support -->
+<script type="text/javascript">
+ $(document).ready(function () {
+ var tableElt = $("#{{table_name}}");
+ var titleElt = $("[data-role='page-title']");
+
+ tableElt.on("table-done", function (e, total, tableParams) {
+ var title = "Manage Jobs ("+total+")";
+
+ if (tableParams.search || tableParams.filter) {
+ if (total === 0) {
+          title = "No Jobs found";
+ }
+ else if (total > 0) {
+ title = total + " Job" + (total > 1 ? "s" : '') + " found";
+ }
+ }
+
+ /* Add handler into the Toaster Table context */
+ $('.trash-job').click(function() {
+ var result = confirm("Are you sure you want to remove Job '" + $(this).attr('x-data').split('|')[0] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trash-job',
+ "record_id" : $(this).attr('x-data').split('|')[1],
+ });
+ }
+ });
+
+ titleElt.text(title);
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata,url) {
+ url = url || "{% url 'xhr_job_post' %}";
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:url,
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
+ $('#project_refresh').click(function(){
+ postCommitAjaxRequest({
+ "action" : 'submit-refresh-projects',
+ "audit_id" : '{% if hb_audit %}{{hb_audit.id}}{% endif %}',
+ },"");
+ });
+
+ $('#submit-clearjobs').click(function(){
+ var result = confirm("Are you sure you want to remove all jobs?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-clearjobs',
+ },"");
+ }
+ });
+
+ });
+
+ </script>
+{% endblock %}
diff --git a/lib/srtgui/templates/management.html b/lib/srtgui/templates/management.html
index b99f4613..60769873 100644
--- a/lib/srtgui/templates/management.html
+++ b/lib/srtgui/templates/management.html
@@ -1,168 +1,188 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Manage Resources {% endblock %}
{% block pagecontent %}
- <div class="row">
- <div class="col-md-7" style="padding-left: 50px;">
- <h1>Management</h1>
- </div>
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+            <li>Management</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Management</h1>
</div>
- <div class="row">
- <div class="jumbotron well-transparent">
-
- <div class="col-md-6">
- <div>
- <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
- <thead>
- <tr>
- <th>Action</th>
- <th>Description</th>
- </tr>
- </thead>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
- <td>Triage the CVE's ({{cve_new}})</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
- <td>Triage the pending notifications ({{notification_total}})</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
- <td>Report on the over all response system status</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
- <td>Process items to be published from the SRTool</td>
- </tr>
-
- {% if request.user.is_admin %}
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
- <td>Add, edit, and remove users</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
- <td>Manage source list, perform manual pulls</td>
- </tr>
-
- <tr>
- <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
- <td>Maintenance utilities ({{errorlog_total}})</td>
- </tr>
- {% endif %}
-
- </table>
- </div>
-
- </div>
-
- <div class="col-md-5">
- <b>Quick Info</b>
- <div class="well">
- <dl class="dl-horizontal">
- <dt>CVE's: Total Count =</dt>
- <dd>
- <a href="{% url 'cves' %}"> {{cve_total}} </a>
- </dd>
- <dt>Pending triaged =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
- </dd>
- <dt>Investigate =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
- </dd>
- <dt>Vulnerable =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
- </dd>
- <dt>Not Vulnerable =</dt>
- <dd>
- <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
- </dd>
- <dt>Vulnerabilities: Total Count =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
- </dd>
- <dt>Critical active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{vulnerability_critical}} </a>
- </dd>
- <dt>High active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
- </dd>
- <dt>Medium active =</dt>
- <dd>
- <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
- </dd>
-
- <dt>Investigations: Total Count =</dt>
- <dd>
- <a href="{% url 'investigations' %}" %}> {{investigation_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
- </dd>
- <dt>Critical active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{investigation_critical}} </a>
- </dd>
- <dt>High active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
- </dd>
- <dt>Medium active =</dt>
- <dd>
- <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
- </dd>
-
- <dt>Defects: Total Count =</dt>
- <dd>
- <a href="{% url 'defects' %}" %}> {{defect_total}} </a>
- </dd>
- <dt>Open =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
- </dd>
- <dt>InProgress =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
- </dd>
- <dt>P1 active =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
- </dd>
- <dt>P2 active =</dt>
- <dd>
- <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
- </dd>
-
- <dt>Packages: Affected=</dt>
- <dd>
- <a href="{% url 'cpes_srtool' %}" %}> {{package_total}} </a>
- </dd>
-
- </dl>
- </div>
- </div>
-
- </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
+ <td>Triage the CVE's ({{cve_new}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
+ <td>Triage the pending notifications ({{notification_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
+ <td>Report on the over all response system status</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
+ <td>Process items to be published from the SRTool</td>
+ </tr>
+
+ {% if request.user.is_admin %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
+ <td>Add, edit, and remove users</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
+ <td>Manage source list, perform manual pulls</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
+ <td>Maintenance utilities ({{errorlog_total}})</td>
+ </tr>
+ {% endif %}
+
+ </table>
+ </div>
+
</div>
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>CVE's: Total Count =</dt>
+ <dd>
+ <a href="{% url 'cves' %}"> {{cve_total}} </a>
+ </dd>
+ <dt>Pending triaged =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
+ </dd>
+ <dt>Investigate =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
+ </dd>
+ <dt>Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
+ </dd>
+ <dt>Not Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
+ </dd>
+ <dt>Vulnerabilities: Total Count =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{vulnerability_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{vulnerability_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{vulnerability_medium}} </a>
+ </dd>
+
+ <dt>Investigations: Total Count =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}" %}> {{investigation_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&" %}> {{investigation_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&" %}> {{investigation_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&" %}> {{investigation_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&" %}> {{investigation_medium}} </a>
+ </dd>
+
+ <dt>Defects: Total Count =</dt>
+ <dd>
+ <a href="{% url 'defects' %}" %}> {{defect_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&" %}> {{defect_open}} </a>
+ </dd>
+ <dt>InProgress =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&" %}> {{defect_inprogress}} </a>
+ </dd>
+ <dt>P1 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&" %}> {{defect_p1}} </a>
+ </dd>
+ <dt>P2 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&" %}> {{defect_p2}} </a>
+ </dd>
+
+ <dt>Packages: Affected=</dt>
+ <dd>
+ <a href="{% url 'cpes_srtool' %}" %}> {{package_total}} </a>
+ </dd>
+
+ </dl>
+ </div>
+ </div>
+
+ </div>
+</div>
+
{% endblock %}
diff --git a/lib/srtgui/templates/mrj_section.html b/lib/srtgui/templates/mrj_section.html
new file mode 100755
index 00000000..480dfef2
--- /dev/null
+++ b/lib/srtgui/templates/mrj_section.html
@@ -0,0 +1,194 @@
+{% load static %}
+{% load humanize %}
+<script src="{% static 'js/mrjsection.js' %}"></script>
+
+{% if mru %}
+ <div id="latest-jobs">
+ {% for job in mru %}
+ <div id="job-instance-{{job.id}}" data-latest-job-result="{{job.id}}" class="alert job-result {% if job.status == job.SUCCESS %}alert-success{% elif job.status == job.ERRORS %}alert-danger{% else %}alert-info{% endif %}">
+ <!-- job title -->
+ <div class="row job-name">
+ <div class="col-md-12">
+ <small>
+ {{job.name}}{% if request.user.is_admin %} ({{job.id}}){% endif %}
+ </small>
+ </div>
+ </div>
+
+ <div class="row" data-role="job-status-container">
+ <div class="col-md-12">
+ Loading...
+ </div>
+ </div>
+ </div>
+ {% endfor %}
+ </div>
+{% endif %}
+
+<!-- job main template -->
+<script id="job-template" type="text/x-jsrender">
+ <div class="col-md-3">
+ <!-- only show link for completed jobs -->
+ <%if state == 'Success' || state == 'Errors'%>
+ <%:targets%>
+ <%else%>
+ <span data-toggle="tooltip" id="job-message-done-<%:id%>" data-role="targets-text" title="Job: <%:targets%>">
+ <%:targets%>
+ </span>
+ <%/if%>
+ </div>
+
+ <div data-job-state="<%:state%>">
+ <%if state == 'Success' || state == 'Errors'%>
+ <%include tmpl='#succeeded-or-failed-job-template'/%>
+ <%else state == 'Cancelling'%>
+ <%include tmpl='#cancelling-job-template'/%>
+ <%else state == 'NotStarted'%>
+ <%include tmpl='#starting-template'/%>
+ <%else state == 'InProgress'%>
+ <%include tmpl='#in-progress-job-template'/%>
+ <%else state == 'Cancelled'%>
+ <%include tmpl='#cancelled-job-template'/%>
+ <%/if%>
+ </div>
+</script>
+
+<!-- queued job -->
+<script id="queued-job-template" type="text/x-jsrender">
+ <div class="col-md-5">
+ <span class="glyphicon glyphicon-question-sign get-help get-help-blue"
+ title="This job is waiting for the background application to start">
+ </span>
+
+ Job queued
+ </div>
+
+ <div class="col-md-4">
+ <!-- cancel button -->
+ <%include tmpl='#cancel-template'/%>
+ </div>
+</script>
+
+<!-- in progress job; at least one task finished -->
+<script id="in-progress-job-template" type="text/x-jsrender">
+ <!-- progress bar and task completion percentage -->
+ <div data-role="job-status" class="col-md-4 col-md-offset-1 progress-info">
+ <!-- progress bar -->
+ <div class="progress" id="job-pc-done-title-<%:id%>">
+ <div id="job-pc-done-bar-<%:id%>"
+ style="width: <%:tasks_complete_percentage%>%;"
+ class="progress-bar">
+ </div>
+ </div>
+ </div>
+
+ <div class="col-md-4 progress-info">
+ <!-- task completion percentage -->
+ <span id="job-pc-done-<%:id%>"><%:tasks_complete_percentage%></span>% of
+ tasks complete
+
+ <!-- cancel button -->
+ <%include tmpl='#cancel-template'/%>
+ </div>
+</script>
+
+<!-- cancelling job -->
+<script id="cancelling-job-template" type="text/x-jsrender">
+ <div class="col-md-9">
+ Cancelling the job ...
+ </div>
+</script>
+
+<!-- succeeded or failed job -->
+<script id="succeeded-or-failed-job-template" type="text/x-jsrender">
+ <!-- completed_on -->
+ <div class="col-md-2">
+ <%:completed_on%>
+ </div>
+
+ <!-- errors -->
+ <div class="col-md-2">
+ <%if errors%>
+ <span class="glyphicon glyphicon-minus-sign"></span>
+ <a href="<%:dashboard_errors_url%>" class="alert-link">
+ <%:errors%> error<%:errors_pluralise%>
+ </a>
+ <%/if%>
+ </div>
+
+ <!-- warnings -->
+ <div class="col-md-2">
+ <%if warnings%>
+ <span class="glyphicon glyphicon-warning-sign job-warnings"></span>
+ <a href="<%:dashboard_warnings_url%>" class="alert-link job-warnings">
+ <%:warnings%> warning<%:warnings_pluralise%>
+ </a>
+ <%/if%>
+ </div>
+
+<!-- <%if errors == 0 and warnings == 0%>
+ <div class="col-md-2">
+ No Errors
+ </div>
+ <%/if%>
+-->
+
+ <!-- job time -->
+ <div class="col-md-3">
+ Job time:
+ <span data-role="data-recent-job-jobtime-field">
+ <b><%:jobtime%></b>
+ </span>
+ <form id="downloadbanner-log" enctype="multipart/form-data" method="post" >{% csrf_token %}
+ <input type="hidden" name="action" value="download-log">
+ <input type="hidden" name="report_path" value="JOBLOG">
+ <a href="/srtgui/joblog/<%:id%>" class="glyphicon glyphicon-download-alt submit-download-joblog" x-data="log" target="_blank"></a>
+ </form>
+ </div>
+</script>
+
+<!-- cancelled job -->
+<script id="cancelled-job-template" type="text/x-jsrender">
+ <!-- job cancelled message -->
+ <div class="col-md-6">
+ Job cancelled
+ </div>
+</script>
+
+<!-- cancel button or no cancel icon -->
+<script id="cancel-template" type="text/x-jsrender">
+ <!-- cancel button -->
+ <span class="cancel-job-btn pull-right alert-link"
+ data-jobrequest-id="<%:id%>" data-request-url="<%:cancel_url%>">
+ <span class="glyphicon glyphicon-remove-circle"></span>
+ Cancel
+ </span>
+</script>
+
+<script>
+ $(document).ready(function () {
+ var ctx = {
+ }
+
+ try {
+ mrjSectionInit(ctx);
+ $('.submit-download-joblog').click(function() {
+ alert("submit-download-joblog:"+this.getAttribute("x-data"));
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+ } catch (e) {
+ document.write("Sorry, an error has occurred loading this page");
+ console.warn(e);
+ }
+
+ $('.submit-downloadattachment').click(function() {
+ $("#downloadbanner-"+this.getAttribute("x-data")).submit();
+ });
+
+ $('.submit-download-joblog').click(function() {
+ alert("submit-download-joblog:"+this.getAttribute("x-data"));
+ $("#downloadbanner-job").submit();
+ });
+
+ });
+</script>
diff --git a/lib/srtgui/templates/product.html b/lib/srtgui/templates/product.html
index 7f8d1b1f..64234778 100644
--- a/lib/srtgui/templates/product.html
+++ b/lib/srtgui/templates/product.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} {{object.name}} - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/publish.html b/lib/srtgui/templates/publish.html
index 0e7f58e7..6c915f85 100644
--- a/lib/srtgui/templates/publish.html
+++ b/lib/srtgui/templates/publish.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Publish Reports {% endblock %}
diff --git a/lib/srtgui/templates/publish_diff_snapshot.html b/lib/srtgui/templates/publish_diff_snapshot.html
index cf0f2294..44958632 100644
--- a/lib/srtgui/templates/publish_diff_snapshot.html
+++ b/lib/srtgui/templates/publish_diff_snapshot.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Publish Requests {% endblock %}
@@ -18,6 +18,14 @@
</div>
</div>
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
<h2>Publish Report Management</h2>
<ul>
<li>The SRTool supports exporting new and updated CVEs to external publishing tools</li>
@@ -63,6 +71,7 @@
<div>
<span style="padding-left:30px;"><button id="export-snapshot" class="btn btn-default" type="button">Generate</button></span>
+ <span style="padding-left:30px;"><button id="export-snapshot-progress" class="btn btn-default" type="button">Generate (Progress)</button></span>
<!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
<span id="export-snapshot-text">Generate the publish table on-demand (using snapshots)</span>
<span id="generating-report" hidden style="color:red"><I>... Generating the report - this will take a few minutes ...</I></span>
@@ -80,7 +89,7 @@
</option>
{% endfor %}
</select>
- <span style="padding-left:30px;"><button id="export-snapshot" class="btn btn-default" type="button" disabled>Save</button></span>
+ <span style="padding-left:30px;"><button id="export-snapshot-XXX" class="btn btn-default" type="button" disabled>Save</button></span>
<!--<button type="submit" name="action" value="export-snapshot">Export</button> -->
Save the automatic publishing frequency
</div>
@@ -302,6 +311,35 @@
}
});
+ $('#export-snapshot-progress').click(function(){
+ snap_date_base = $("#snap_date_base").val();
+ snap_date_top = $("#snap_date_top").val();
+ snap_date_start = $("#snap_date_start").val();
+ snap_date_stop = $("#snap_date_stop").val();
+ if (snap_date_start > snap_date_stop) {
+ alert("Error: the start date is after the stop date");
+ return;
+ }
+ if (snap_date_start < snap_date_base) {
+ alert("Error: the start date is before the snapshot base date");
+ return;
+ }
+ if (snap_date_stop > snap_date_top) {
+ alert("Error: the stop date is after the snapshot top date");
+ return;
+ }
+ var result = confirm("Generate the report? This will take several minutes.");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'export-snapshot-progress',
+ "snap_date_base" : snap_date_base,
+ "snap_date_top" : snap_date_top,
+ "snap_date_start" : snap_date_start,
+ "snap_date_stop" : snap_date_stop
+ });
+ }
+ });
+
/* Manage report files */
diff --git a/lib/srtgui/templates/report.html b/lib/srtgui/templates/report.html
index 4c2b2450..f89628fe 100644
--- a/lib/srtgui/templates/report.html
+++ b/lib/srtgui/templates/report.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Report/Export {% endblock %}
@@ -34,6 +34,7 @@
Note: There is no report defined for this page.<br>
{% endif %}
+ <!--
{% if report_get_title %}
<hr>
Title:<br>
@@ -41,6 +42,7 @@
<br>
{% endif %}
<hr>
+ -->
{% if report_recordrange_list %}
Record Range:<br>
diff --git a/lib/srtgui/templates/snippets/gitrev_popover.html b/lib/srtgui/templates/snippets/gitrev_popover.html
index c1e3dabf..445c39cd 100644
--- a/lib/srtgui/templates/snippets/gitrev_popover.html
+++ b/lib/srtgui/templates/snippets/gitrev_popover.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
{% if vcs_ref|is_shaid %}
<a class="btn btn-default" data-content="{{vcs_ref}}">
{{vcs_ref|truncatechars:10}}
diff --git a/lib/srtgui/templates/snippets/investigations_popover.html b/lib/srtgui/templates/snippets/investigations_popover.html
index 0f65d3d4..22197a13 100644
--- a/lib/srtgui/templates/snippets/investigations_popover.html
+++ b/lib/srtgui/templates/snippets/investigations_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the Investigations related to this Product #}
-{% load projecttags %}
+{% load jobtags %}
{% with investigations='Wind River Linux 9' %}
{% with count_investigations=1 %}
diff --git a/lib/srtgui/templates/snippets/pkg_dependencies_popover.html b/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
index 273437e3..eefbc122 100644
--- a/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
+++ b/lib/srtgui/templates/snippets/pkg_dependencies_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the dependences and sizes of a package 'data' used in the Packages table #}
-{% load projecttags %}
+{% load jobtags %}
{% with package_deps=data.package_dependencies_source|for_target:extra.target_name %}
{% with count_package=package_deps.packages|length %}
diff --git a/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html b/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
index e6ef816e..8eca0357 100644
--- a/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
+++ b/lib/srtgui/templates/snippets/pkg_revdependencies_popover.html
@@ -1,5 +1,5 @@
{# Popover that displays the reverse dependences and sizes of a package 'data' used in the Packages table #}
-{% load projecttags %}
+{% load jobtags %}
{% with package_deps=data.package_dependencies_target|for_target:extra.target_name %}
{% with count_package=package_deps.packages|length %}
diff --git a/lib/srtgui/templates/sources-toastertable.html b/lib/srtgui/templates/sources-toastertable.html
index 1721e3b0..279f279c 100644
--- a/lib/srtgui/templates/sources-toastertable.html
+++ b/lib/srtgui/templates/sources-toastertable.html
@@ -1,5 +1,8 @@
{% extends 'base.html' %}
+
{% load static %}
+{% load jobtags %}
+{% load humanize %}
{% block extraheadcontent %}
<link rel="stylesheet" href="{% static 'css/jquery-ui.min.css' %}" type='text/css'>
@@ -24,6 +27,14 @@
</div>
</div>
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
<div class="row">
<div class="col-md-12">
@@ -66,6 +77,76 @@
});
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list="+record_list);
+ //
+ // Listeners that must reside in the Toaster table context
+ //
+
+ /* Dynamically run the job function */
+ $('.run-update-job').click(function(){
+ var datasource_id = $(this).attr('x-data');
+ var result = confirm("Are you sure you want to force update this datasource right now?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-run-update-job',
+ "id" : datasource_id,
+ });
+ }
+ });
+
+ // Toggle the data source init/update enables
+ $('.source-enabled').click(function() {
+ postCommitAjaxRequest({
+ "action" : 'submit-toggle-enable',
+ "id" : $(this).attr('x-data'),
+ });
+ });
+
+ function onCommitAjaxSuccess(data, textstatus) {
+ if (window.console && window.console.log) {
+ console.log("XHR returned:", data, "(" + textstatus + ")");
+ } else {
+ alert("NO CONSOLE:\n");
+ return;
+ }
+ if (data.error == "no_refresh") {
+ if (data.data_message) {
+ const nv_pair = data.data_message.split("=");
+ document.getElementById('attr_'+nv_pair[0]).innerText = nv_pair[1];
+ if (0 <= nv_pair[1].indexOf("DISABLE ")) {
+ document.getElementById('next_on_'+nv_pair[0]).style.display = 'none';
+ document.getElementById('next_off_'+nv_pair[0]).style.display = 'inline';
+ } else {
+ document.getElementById('next_on_'+nv_pair[0]).style.display = 'inline';
+ document.getElementById('next_off_'+nv_pair[0]).style.display = 'none';
+ };
+ };
+ return;
+ }
+ if (data.error != "ok") {
+ alert("error on request:\n" + data.error);
+ return;
+ }
+ // reload the page with the updated tables
+ location.reload(true);
+ }
+
+ function onCommitAjaxError(jqXHR, textstatus, error) {
+ console.log("ERROR:"+error+"|"+textstatus);
+ alert("XHR errored1:\n" + error + "\n(" + textstatus + ")");
+ }
+
+ /* ensure cookie exists {% csrf_token %} */
+ function postCommitAjaxRequest(reqdata) {
+ var ajax = $.ajax({
+ type:"POST",
+ data: reqdata,
+ url:"{% url 'xhr_sources_commit' %}",
+ headers: { 'X-CSRFToken': $.cookie("csrftoken")},
+ success: onCommitAjaxSuccess,
+ error: onCommitAjaxError,
+ });
+ }
+
});
});
</script>
diff --git a/lib/srtgui/templates/sources.html b/lib/srtgui/templates/sources.html
index 1b017c06..df2852a2 100644
--- a/lib/srtgui/templates/sources.html
+++ b/lib/srtgui/templates/sources.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} Data Sources - SRTool {% endblock %}
diff --git a/lib/srtgui/templates/srtool_metadata_include.html b/lib/srtgui/templates/srtool_metadata_include.html
index 05c62d3d..297510a1 100755
--- a/lib/srtgui/templates/srtool_metadata_include.html
+++ b/lib/srtgui/templates/srtool_metadata_include.html
@@ -11,10 +11,17 @@
{% if request.user.is_creator %}
<i>Publish</i> = {{object.get_publish_text}}, <i>Publish Date</i> = {{object.publish_date}}, <i>Acknowledge Date</i> = {{object.acknowledge_date|date:'Y-m-d'}}, <i>Initial Release</i> = {{object.publishedDate}}, <i>Last Modified</i> = {{object.lastModifiedDate}}
<!--<a class="btn btn-default navbar-btn " id="login-button" href="">Publish Now</a> -->
+ , <i>Public = </i> {{object.get_public_text}}&nbsp;&nbsp;
{% else %}
<i>Publish = {{object.get_publish_text}}</i>
{% endif %}
</LI>
+ {% elif default_category == "VULNERABILITY" %}
+ {% if request.user.is_creator %}
+ <i>Public = </i> {{object.get_public_text}}&nbsp;&nbsp;
+ {% endif %}
+ <i>Outcome:</i> {{object.get_outcome_text}}
+ <p>
{% else %}
<i>Outcome:</i> {{object.get_outcome_text}}
<p>
@@ -64,6 +71,16 @@
<option value="8" {% if 8 == object.status %}selected{% endif %}>(Vulnerable)</option>
<option value="9" {% if 9 == object.status %}selected{% endif %}>(Not Vulnerable)</option>
</select>
+
+ {% if default_category == "CVE" or default_category == "VULNERABILITY" %}
+ &nbsp;&nbsp;
+ <i>Public</i> =
+ <select name="Public" id="select-public-state">
+ <option value="1" {% if object.public %}selected{% endif %}>Public</option>
+ <option value="0" {% if not object.public %}selected{% endif %}>Private</option>
+ </select>
+ {% endif %}
+
<p>
{% if default_category == "CVE" %}
<i>Publish</i> =
@@ -85,15 +102,21 @@
<option value="3" {% if 3 == object.outcome_state %}selected{% endif %}>Closed (Won't Fix)</option>
</select>
{% endif %}
- <p>Comments: <input type="text" placeholder="Edit comments" id="text-note" size="80" value="{{object.comments}}"></p>
+ <p>Notes: <input type="text" placeholder="Edit comments" id="text-note" size="80" value="{{object.comments}}"></p>
{% if request.user.is_creator %}
- <p>Private Comments: <input type="text" placeholder="Edit private comments" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
+ <p>Private Notes: <input type="text" placeholder="Edit private comments" id="text-private-note" size="80" value="{{object.comments_private}}"></p>
{% endif %}
<p>Tags: <input type="text" placeholder="Edit tags" id="text-tags" size="80" value="{{object.tags}}"></p>
<p>Affected Components: <input type="text" placeholder="Edit affected components" id="text-affected-components" size="80" value="{{object.packages}}"></p>
{% if default_category == "CVE" %}
<i>Acknowledge Date</i> = <input type="text" placeholder="Acknowledge Date" id="text-acknowledge-date" size="40" value="{{object.acknowledge_date|date:'Y-m-d'}}"> (YYYY-MM-DD, or empty string for None)<p>
{% endif %}
+
+ {% if default_category == "VULNERABILITY" %}
+ <p>Description:<p>
+ <textarea name="description" rows="9" style="min-width: 100%" class="localblue" id="text-description">{{object.description}}</textarea>
+ {% endif %}
+
<p><p>
</fieldset>
</div>
diff --git a/lib/srtgui/templates/tablesort.html b/lib/srtgui/templates/tablesort.html
index 36247429..1224b3bf 100644
--- a/lib/srtgui/templates/tablesort.html
+++ b/lib/srtgui/templates/tablesort.html
@@ -1,4 +1,4 @@
-{% load projecttags %}
+{% load jobtags %}
<!-- component to display a generic table -->
{% if disable_sort %}
<table class="table table-bordered table-hover" id="detail_table">
diff --git a/lib/srtgui/templates/tbd.html b/lib/srtgui/templates/tbd.html
index d8979f6e..a50d806f 100644
--- a/lib/srtgui/templates/tbd.html
+++ b/lib/srtgui/templates/tbd.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} TBD {% endblock %}
diff --git a/lib/srtgui/templates/toastertable-simple.html b/lib/srtgui/templates/toastertable-simple.html
index 56cd2ce3..858c87e6 100644
--- a/lib/srtgui/templates/toastertable-simple.html
+++ b/lib/srtgui/templates/toastertable-simple.html
@@ -1,6 +1,6 @@
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
<script src="{% static 'js/table.js' %}"></script>
<script src="{% static 'js/layerBtn.js' %}"></script>
diff --git a/lib/srtgui/templates/toastertable.html b/lib/srtgui/templates/toastertable.html
index 99eb01e2..a33321a9 100644
--- a/lib/srtgui/templates/toastertable.html
+++ b/lib/srtgui/templates/toastertable.html
@@ -1,10 +1,28 @@
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
<script src="{% static 'js/table.js' %}"></script>
<script src="{% static 'js/layerBtn.js' %}"></script>
<script>
+
+ var lstFilterval = [];
+ function ClearFilter(test) {
+ (function(){
+ var ctx = {
+ tableName : "{{table_name}}",
+ url : "{{ xhr_table_url }}?format=json",
+ title : "{{title}}",
+ };
+
+ try {
+ tableInit(ctx,test);
+ } catch (e) {
+ document.write("Problem loading table widget: " + e);
+ }
+ })();
+ }
+
$(document).ready(function() {
(function(){
@@ -26,7 +44,9 @@
{% include 'toastertable-filter.html' %}
<div class="row-fluid" id="empty-state-{{table_name}}" style="display:none">
- <div class="alert alert-info">{{empty_state|safe}}</div>
+ <div class="alert alert-info">{{empty_state|safe}}
+ &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;<button id="clear-all-filter" class="btn btn-default navbar-btn" align="right">Clear All Filters</button>
+ </div>
</div>
<div id="no-results-{{table_name}}" style="display:none">
@@ -83,6 +103,11 @@
</select>
</div>
</form>
+
+ <div class="btn-group navbar-right">
+ <button id="clear-all-filter" class="btn btn-default navbar-btn " >Clear All Filters</button>&nbsp;&nbsp;&nbsp;&nbsp;
+ </div>
+
<div class="btn-group navbar-right">
<button id="edit-columns-button" class="btn btn-default navbar-btn dropdown-toggle" data-toggle="dropdown">Edit columns
<span class="caret"></span>
diff --git a/lib/srtgui/templates/triage_cves.html b/lib/srtgui/templates/triage_cves.html
index ddef1501..0cc774d3 100644
--- a/lib/srtgui/templates/triage_cves.html
+++ b/lib/srtgui/templates/triage_cves.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Export Report {% endblock %}
diff --git a/lib/srtgui/templates/unavailable_artifact.html b/lib/srtgui/templates/unavailable_artifact.html
index fc77e405..dedaa41b 100644
--- a/lib/srtgui/templates/unavailable_artifact.html
+++ b/lib/srtgui/templates/unavailable_artifact.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% load static %}
diff --git a/lib/srtgui/templates/users.html b/lib/srtgui/templates/users.html
index fd2c8c18..970291b6 100644
--- a/lib/srtgui/templates/users.html
+++ b/lib/srtgui/templates/users.html
@@ -1,6 +1,6 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block title %} Users - SRTool {% endblock %}
@@ -34,46 +34,56 @@
<div style="padding-left: 25px;">
<p><b>Reader</b>: User that can read the content (Field, TechPubs)</p>
<p><b>Contributor</b>: Reader that can can add notes and attachements (Engineers, Test, Managers)</p>
- <p><b>Creator</b>: Contributor that can create Investiations and defect records </p>
+ <p><b>Creator</b>: Contributor that can create Investigations and defect records </p>
<p><b>Admin</b>: Creator that can manage users, data sources</p>
</div>
</div>
- <p/>
</div>
</div>
<div class="row" style="padding-left: 25px;">
<h3>User List
- <a class="btn btn-default navbar-btn " id="new-investigation-attachement" href="{% url 'edit_user' 0 %}">Add user</a>
+ <a class="btn btn-default navbar-btn " href="{% url 'edit_user' 0 %}">Add user</a>
</h3>
- <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
+ <table class="table table-striped table-condensed">
<thead>
<tr>
+ {% if user.is_admin %}
+ <th>ID</th>
+ {% endif %}
<th>User</th>
<th>First</th>
<th>Last</th>
<th>Email</th>
<th>Role</th>
+ <th>Time zone</th>
<th>Group</th>
+ <th>Last Login</th>
<th>Manage</th>
</tr>
</thead>
{% if object.all %}
- {% for user in object.all %}
+ {% for user_obj in object.all %}
<tr>
- <td>{{ user.username }} </td>
- <td>{{ user.first_name }} </td>
- <td>{{ user.last_name }} </td>
- <td>{{ user.email }} </td>
- <td>{{ user.role }} </td>
- <td>{{ user.get_groups }} </td>
+ {% if user.is_admin %}
+ <td>{{ user_obj.id }}</td>
+ {% endif %}
+ <td>{{ user_obj.username }}</td>
+ <td>{{ user_obj.first_name }}</td>
+ <td>{{ user_obj.last_name }}</td>
+ <td>{{ user_obj.email }}</td>
+ <td>{{ user_obj.role }}</td>
+ <td>{{ user_obj.timezone }}</td>
+ <td>{% if user_obj.is_superuser %}SuperUser{% else %}{{ user_obj.get_groups }}{%endif %}</td>
+ <td>{{ user_obj.last_login|date:'Y-m-d'}}</td>
<td>
- {% if user.is_superuser or not user.is_staff %}
+ {% if user_obj.is_superuser or not user_obj.is_staff %}
<span id="user_'+{{user.id}}+'" class="js-user-name"></span>
- <a href="{% url 'edit_user' user.id %}"><span class="glyphicon glyphicon-edit js-icon-pencil-config_var"></span></a>
- <span class="glyphicon glyphicon-trash trash-user" id="user_trash_'+{{user.id}}+'" x-data="{{user.username}}:{{user.id}}"></span>
+ <a href="{% url 'edit_user' user_obj.id %}"><span class="glyphicon glyphicon-edit js-icon-pencil-config_var"></span></a>
+ &nbsp;&nbsp;
+ <span class="glyphicon glyphicon-trash trash-user" id="user_trash_'+{{user_obj.id}}+'" x-data="{{user_obj.username}}:{{user_obj.id}}"></span>
{% else %}
Built-in
{% endif %}
@@ -91,6 +101,89 @@
</div>
+<!-- pass the full user list here -->
+{% for user in object.all %}
+<input type="hidden" class="js-checkbox-users-list" value="{{user.id}}|{{user.user_fullname}}">
+{% endfor %}
+
+<div class="row" id="group-section" style="padding-left: 25px;width:70%;">
+
+ <h3 style="white-space: nowrap;">Group List ({{builtin_groups}})
+ <a class="btn btn-default navbar-btn" id="add_group">Add group</a>
+ <!--<button class="execute" id="add_group" style="display:inline-block;"> Add group: </button>-->
+ <input type="text" value="" style="width:16%;display:inline-block;" class="form-control" id="add-group-name" placeholder="Name for new group">
+ </h3>
+
+ <div class="row" id="edit_group_options" style="display:none;padding-left:25px;color:DarkCyan;">
+ <h3>Group Edit:
+ <a class="btn btn-default navbar-btn" style="color:DarkCyan;" id="edit-save" >Save</a>
+ <a class="btn btn-default navbar-btn" style="color:DarkCyan;" id="edit-cancel" >Cancel</a>
+ </h3>
+ <label style="width:100px;height:24px;">Group name:</label>
+ <input type="text" value="" style="width:25%;" class="form-control" id="new-group-name" placeholder="Name for the group">
+ <input type="text" style="display:none;" id="new-group-id" >
+ <br>
+ <label style="width:100px;height:24px;">User list:</label>
+ <div id="all-users" class="scrolling"></div>
+ <br>
+ <hr>
+ </div>
+
+ <table class="table table-striped table-condensed">
+ <thead>
+ <tr>
+ <th>Name</th>
+ <th>User</th>
+ <th>Manage User</th>
+ <th>Manage Group</th>
+ </tr>
+ </thead>
+
+ {% if groups.all %}
+ {% for group in groups.all %}
+ <tr>
+ <td>{{ group.name }} </td>
+ <td></td>
+ <td></td>
+ <td>
+ {% if group.name in builtin_groups %}
+ Built-in
+ {% else %}
+ <span id="group_'+{{group.id}}+'" class="js-group-name"></span>
+ <a id="edit_group">
+ <span class="glyphicon glyphicon-edit js-icon-pencil-config_var edit_group"
+ x-data="{{group.id}}|{{group.name}}|{% for user in group.user_set.all %}{{user.user_fullname}},{% endfor %}">
+ </span></a>
+ &nbsp;&nbsp;
+ <span class="glyphicon glyphicon-trash trash-group" x-data="{{group.id}}|{{group.name}}"></span>
+ {% endif %}
+ </td>
+ </tr>
+ {% for user in group.user_set.all %}
+ <tr>
+ <td></td>
+ <td>{{ user.user_fullname }} </td>
+ <td>
+ {% if group.name in builtin_groups %}
+ (Managed above)
+ {% else %}
+ <span class="glyphicon glyphicon-trash trash-user-from-group" x-data="{{group.id}}|{{group.name}}|{{user.id}}|{{user.user_fullname}}"></span>
+ {% endif %}
+ </td>
+ <td></td>
+ </tr>
+ {% endfor %}
+ {% endfor %}
+ {% else %}
+ <tr>
+ <td>No groups found</td>
+ </tr>
+ {% endif %}
+
+ </table>
+
+</div>
+
<!-- Javascript support -->
<script>
$(document).ready(function() {
@@ -137,8 +230,86 @@
}
});
- });
+ $('.edit_group').click(function() {
+ document.getElementById("new-group-name").value= $(this).attr('x-data').split('|')[1];
+ document.getElementById("new-group-id").value= $(this).attr('x-data').split('|')[0];
+ $("#edit_group_options").slideDown();
+ // build the user list: avoid false substring matches by including comma separators
+ var html = "";
+ var group_user_set = "," + $(this).attr('x-data').split('|')[2] + ",";
+ var users_list = document.getElementsByClassName('js-checkbox-users-list');
+ // Add the checked boxes first
+ for (var i = 0, length = users_list.length; i < length; i++) {
+ var status = '" >';
+ var user_id = users_list[i].value.split("|")[0];
+ var user_name = users_list[i].value.split("|")[1];
+ if (0 <= group_user_set.indexOf(","+user_name+",")) {
+ status = '" checked="checked">';
+ };
+ html += '<div class="checkbox"><label><input type="checkbox" class="checkbox-users" x-data="'+user_id+'" value="'+users_list[i].value+status+user_name+'</label></div>';
+ }
+ document.getElementById("all-users").innerHTML = html;
+ //document.getElementById("edit_group_options").focus();
+ document.getElementById("group-section").scrollIntoView();
+ });
+
+ $('#edit-save').click(function() {
+ $("#edit_group_options").slideUp();
+ var user_id_list = "";
+ $("input[type='checkbox']").each(function(){
+ var user_id = $(this).attr('x-data');
+ var ischecked = $(this).is(":checked");
+ if (ischecked) {
+ user_id_list = user_id_list + user_id + ',';
+ }
+ });
+ postCommitAjaxRequest({
+ "action" : 'submit-group-users',
+ "group_id" : document.getElementById("new-group-id").value,
+ "user_id_list" : user_id_list,
+ });
+ });
+
+ $('#edit-cancel').click(function() {
+ $("#edit_group_options").slideUp();
+ });
+
+ $('#add_group').click(function() {
+ var new_group_name = document.getElementById("add-group-name").value;
+ var result = confirm("Create new group '"+new_group_name+"'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-group-create',
+ "group_name" : new_group_name,
+ });
+ };
+ });
+
+ $('.trash-group').click(function() {
+ var result = confirm("Are you sure you want to remove group '" + $(this).attr('x-data').split('|')[1] + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashgroup',
+ "record_id" : $(this).attr('x-data').split('|')[0],
+ });
+ }
+ });
+ $('.trash-user-from-group').click(function() {
+ var group_id = $(this).attr('x-data').split('|')[0];
+ var group_name = $(this).attr('x-data').split('|')[1];
+ var user_id = $(this).attr('x-data').split('|')[2];
+ var user_name = $(this).attr('x-data').split('|')[3];
+ var result = confirm("Are you sure you want to remove user '" + user_name + "' from group '" + group_name + "'?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-trashusergroup',
+ "group_id" : group_id,
+ "record_id" : user_id,
+ });
+ }
+ });
+ });
</script>
diff --git a/lib/srtgui/templates/vulnerability.html b/lib/srtgui/templates/vulnerability.html
index cd174737..c8fdd995 100644
--- a/lib/srtgui/templates/vulnerability.html
+++ b/lib/srtgui/templates/vulnerability.html
@@ -1,5 +1,5 @@
{% extends "base.html" %}
-{% load projecttags %}
+{% load jobtags %}
{% block extraheadcontent %}
<style>
@@ -45,12 +45,25 @@
<div class="row">
<div class="col-md-12">
<div class="page-header build-data">
- <span class="srt_h1">Vulnerability {{object.get_long_name}} {% if not object.public %} <font color="red">[PRIVATE]</font> {% endif %}</span>
+ <span id="vul-name-container">
+ &nbsp;&nbsp;
+ <span id="vulnerability-name" class="srt_h1">Vulnerability {{object.get_long_name}}
+ {% if request.user.is_contributor %}&nbsp;&nbsp;<span class="glyphicon glyphicon-edit" id="vul-change-form-toggle"></span>{% endif %}
+ {% if not object.public %}&nbsp;&nbsp;<font color="red" >[PRIVATE]</font> {% endif %}
+ </span>
{% if request.user.is_creator %}
<span style="padding-left:30px;"><button id="select-quickedit" class="btn btn-default" type="button">Edit Status...</button></span>
<span style="padding-left:30px;"><button id="select-notification" class="btn btn-default" type="button">Create Notification ...</button></span>
<span style="padding-left:30px;"><button id="select-delete" class="btn btn-default" type="button" x-data="{{object.id}}">Delete</button></span>
{% endif %}
+ </span>
+ <form id="vul-name-change-form" class="form-inline" style="display: none;">
+ <div class="form-group">
+ <input class="form-control input-lg" type="text" id="vul-name-change-input" autocomplete="off" value="{{object.name}}">
+ </div>
+ <button id="vul-name-change-btn" class="btn btn-default btn-lg" type="button">Save</button>
+ <a href="#" id="vul-name-change-cancel" class="btn btn-lg btn-link">Cancel</a>
+ </form>
</div>
</div>
</div>
@@ -73,7 +86,8 @@
<dt>CVE Dictionary Entry:</dt>
<dd>
{% for vc in object.vulnerability_to_cve.all %}
- {% if not forloop.first %}| {% endif %}<a href="{% url 'cve' vc.cve.name %}">{{vc.cve.name}}</a>
+ {% if not forloop.first %}<p>{% endif %}<a href="{% url 'cve' vc.cve.name %}">{{vc.cve.name}}</a>
+ <span class="glyphicon glyphicon-trash detach-cve" id="detach_cve_'+{{vc.cve.id}}+'" x-data="{{vc.cve.id}}"></span>
{% endfor %}
</dd>
@@ -104,6 +118,15 @@
{% if not forloop.first %}| {% endif %}{{vc.cve.cvssV2_baseScore}},{{vc.cve.cvssV2_severity}} </a>
{% endfor %}
</dd>
+
+ {% if request.user.is_creator %}
+ <dt>Attach CVE:</dt>
+ <dd>
+ <input type="text" id="cve_name" name="cve_name" size="20" placeholder="(CVE name)">
+ <button class="execute btn btn-info" id="submit-attach-cve" style="margin-bottom: 5px; margin-top: 0px;">Attach CVE</button>
+ </dd>
+ {% endif %}
+
</dl>
</div>
</div>
@@ -148,21 +171,6 @@
</tr>
</thead>
- <table class="table table-striped table-condensed" data-testid="vuln-hyperlinks-table">
- <thead>
- <tr>
- <th>Product Name</th>
- <th>Investigation</th>
- <th>Status</th>
- <th>Outcome</th>
- <th>Defect</th>
- <th>Release Version</th>
- {% if request.user.is_creator %}
- <th>Manage</th>
- {% endif %}
- </tr>
- </thead>
-
{% if object.investigation_list %}
{% for v2i in object.investigation_list %}
<tr>
@@ -394,7 +402,9 @@
<thead>
<tr>
<th>User</th>
+<!--
<th>Manage</th>
+-->
</tr>
</thead>
@@ -409,11 +419,13 @@
{% if object.vulnerability_users.all %}
{% for u in object.vulnerability_users.all %}
<tr>
- <td>{{ u.user.name }}</td>
+ <td>{{ u.user.username }}</td>
+<!--
<td>
<span id="attachment_entry_'+{{u.id}}+'" class="js-config-var-name"></span>
<span class="glyphicon glyphicon-trash trash-useraccess" id="attachment_trash_'+{{u.id}}+'" x-data="{{u.id}}"></span>
</td>
+-->
</tr>
{% endfor %}
{% else %}
@@ -470,6 +482,13 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
var selected_quickedit=false;
var selected_notifyedit=false;
+ /* Vulnerability Name change support */
+ var vulNameForm = $("#vul-name-change-form");
+ var vulNameContainer = $("#vul-name-container");
+ var vulName = $(".vul-name");
+ var vulNameFormToggle = $("#vul-change-form-toggle");
+ var vulNameChangeCancel = $("#vul-name-change-cancel");
+
window.onload = function() {
$("input[name=status][value=" + {{ object.status }} + "]").prop('checked', true);
$("input[name=outcome][value=" + {{ object.outcome }} + "]").prop('checked', true);
@@ -488,8 +507,15 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
alert("error on request:\n" + data.error);
return;
}
- // reload the page with the updated tables
- location.reload(true);
+ // reload the page with the updated tables
+ if (('new_name' in data) && (0 == data.new_name.indexOf("url:"))) {
+ window.location.replace(data.new_name.replace("url:",""));
+ } else if (('new_name' in data) && ("" != data.new_name)) {
+ var new_url = "{% url 'vulnerability' 123 %}".replace("123",data.new_name);
+ window.location.replace(new_url);
+ } else {
+ location.reload(true);
+ }
}
function onCommitAjaxError(jqXHR, textstatus, error) {
@@ -703,7 +729,7 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
selected_quickedit=true;
$("#display-status").slideUp();
$("#details-quickedit").slideDown();
- document.getElementById("select-quickedit").innerText = "Close edit status...";
+ document.getElementById("select-quickedit").innerText = "Cancel edit status...";
$("#select-quickedit").addClass("blueborder");
document.getElementById("select-status-state").focus();
}
@@ -715,17 +741,36 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
var tags=$('#text-tags').val().trim();
var priority=$('#select-priority-state').val();
var status=$('#select-status-state').val();
+ var public=$('#select-public-state').val();
var outcome=$('#select-outcome-state').val();
var affected_components=$('#text-affected-components').val();
+ var description=$('#text-description').val();
+ /* Double check any public status changes */
+ {% if object.public %}
+ if ("0" == public) {
+ if (! confirm("Are you sure you want to make this Vulnerability and all its related records as PRIVATE?")) {
+ return
+ }
+ }
+ {% endif %}
+ {% if not object.public %}
+ if ("1" == public) {
+ if (! confirm("Are you sure you want to make this Vulnerability and all its related records as PUBLIC?")) {
+ return
+ }
+ }
+ {% endif %}
postCommitAjaxRequest({
"action" : 'submit-quickedit',
"note" : note,
"private_note" : private_note,
"tags" : tags,
"status" : status,
+ "public" : public,
"outcome" : outcome,
"priority" : priority,
"affected_components" : affected_components,
+ "description" : description,
});
});
@@ -734,13 +779,13 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
selected_notifyedit=false;
$("#details-notify-edit").slideUp();
$("#display-status").slideDown();
- document.getElementById("select-notification").innerText = "Create Notification ...";
+ document.getElementById("select-notification").innerText = "Create notification ...";
$("#select-notification").removeClass("blueborder");
} else {
selected_notifyedit=true;
$("#display-status").slideUp();
$("#details-notify-edit").slideDown();
- document.getElementById("select-notification").innerText = "Close notification";
+ document.getElementById("select-notification").innerText = "Cancel notification";
$("#select-notification").addClass("blueborder");
document.getElementById("select-category-notify").focus();
}
@@ -791,6 +836,47 @@ Created={{object.srt_created}} Updated={{object.srt_updated}}
}
});
+ /* Vulnerability name change functionality */
+ vulNameFormToggle.click(function(e){
+ e.preventDefault();
+ vulNameContainer.hide();
+ vulNameForm.fadeIn();
+ });
+ vulNameChangeCancel.click(function(e){
+ e.preventDefault();
+ vulNameForm.hide();
+ vulNameContainer.fadeIn();
+ });
+ $("#vul-name-change-btn").click(function(){
+ var newvulName = $("#vul-name-change-input").val();
+ postCommitAjaxRequest({
+ "action" : 'submit-newname',
+ "old_name" : '{{object.name}}',
+ "new_name" : newvulName,
+ });
+ });
+
+ $("#submit-attach-cve").click(function(){
+ var cve_name=$("#cve_name").val();
+ if ("" == cve_name) {
+ alert("No CVE name was entered");
+ return;
+ }
+ postCommitAjaxRequest({
+ "action" : 'submit-attach-cve',
+ "cve_name" : cve_name,
+ });
+ });
+ $('.detach-cve').click(function() {
+ var result = confirm("Are you sure you want to detach this CVE?");
+ if (result){
+ postCommitAjaxRequest({
+ "action" : 'submit-detach-cve',
+ "record_id" : $(this).attr('x-data'),
+ });
+ }
+ });
+
/* Set the report link */
$('#report_link').attr('href',"{% url 'report' request.resolver_match.url_name %}?record_list={{object.id}}");
});
diff --git a/lib/srtgui/templatetags/projecttags.py b/lib/srtgui/templatetags/jobtags.py
index 0c5efc29..4a987d99 100644..100755
--- a/lib/srtgui/templatetags/projecttags.py
+++ b/lib/srtgui/templatetags/jobtags.py
@@ -23,6 +23,7 @@ import os
from os.path import relpath
import re
import json as JsonLib
+from datetime import datetime, timedelta
from django import template
from django.template.defaultfilters import filesizeformat
@@ -342,3 +343,13 @@ def has_group(user, group_name):
group = Group.objects.get(name=group_name)
return group in user.groups.all()
+@register.filter(name='shift_timezone')
+def shift_timezone(datetime_str, hours_offset):
+ # do some calculation (offset + time passed)
+ try:
+ dt = datetime.strptime(datetime_str,'%Y-%m-%d %H:%M:%S')
+ dt += timedelta(hours=int(hours_offset))
+ return dt.strftime('%Y-%m-%d %H:%M:%S')
+ except:
+ return("TIME_ERROR:%s" % datetime_str)
+
diff --git a/lib/srtgui/templatetags/multi_tags.py b/lib/srtgui/templatetags/multi_tags.py
new file mode 100755
index 00000000..6a436825
--- /dev/null
+++ b/lib/srtgui/templatetags/multi_tags.py
@@ -0,0 +1,22 @@
+import os
+
+from django import template
+from django.utils.safestring import mark_safe
+
+ml_register = template.Library()
+
+@ml_register.filter(name = 'multitag')
+def multitag(tags):
+ """
+ Convert a comma-delimited list into HTML separate lines.
+ """
+ return mark_safe(tags.replace(',','<p>'))
+
+@ml_register.filter(name = 'get_dict_value')
+def get_dict_value(dictionary, key):
+ """ return the value of a dictionary key
+ """
+ try:
+ return dictionary[key]
+ except (KeyError, IndexError):
+ return ''
diff --git a/lib/srtgui/templatetags/project_url_tag.py b/lib/srtgui/templatetags/project_url_tag.py
deleted file mode 100644
index 51ccc560..00000000
--- a/lib/srtgui/templatetags/project_url_tag.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from django import template
-from django.urls import reverse
-
-register = template.Library()
-
-def project_url(parser, token):
- """
- Create a URL for a project's main page;
- for non-default projects, this is the configuration page;
- for the default project, this is the project builds page
- """
- try:
- tag_name, project = token.split_contents()
- except ValueError:
- raise template.TemplateSyntaxError(
- "%s tag requires exactly one argument" % tag_name
- )
- return ProjectUrlNode(project)
-
-class ProjectUrlNode(template.Node):
- def __init__(self, project):
- self.project = template.Variable(project)
-
- def render(self, context):
- try:
- project = self.project.resolve(context)
- if project.is_default:
- return reverse('projectbuilds', args=(project.id,))
- else:
- return reverse('project', args=(project.id,))
- except template.VariableDoesNotExist:
- return ''
-
-register.tag('project_url', project_url)
diff --git a/lib/srtgui/typeaheads.py b/lib/srtgui/typeaheads.py
index e32c16ad..800e9b0e 100644
--- a/lib/srtgui/typeaheads.py
+++ b/lib/srtgui/typeaheads.py
@@ -19,6 +19,8 @@
import subprocess
from srtgui.widgets import ToasterTypeAhead
+from orm.models import RecipeTable
+
from django.urls import reverse
from django.core.cache import cache
@@ -184,3 +186,27 @@ class GitRevisionTypeAhead(ToasterTypeAhead):
'detail': '[ %s ]' % str(rev)})
return results
+
+
+class RecipeTypeAhead(ToasterTypeAhead):
+ """ Typeahead for all the recipes """
+ def apply_search(self, search_term, cve, request):
+
+ recipes = RecipeTable.objects.all().order_by("recipe_name")
+
+ primary_results = recipes.filter(recipe_name__icontains=search_term)
+
+ results = []
+
+ for recipe in list(primary_results):
+
+ detail = ''
+ needed_fields = {
+ 'id': recipe.pk,
+ 'name': recipe.recipe_name,
+ 'detail': detail,
+ }
+
+ results.append(needed_fields)
+
+ return results
diff --git a/lib/srtgui/urls.py b/lib/srtgui/urls.py
index ef91f16b..45d15fde 100644
--- a/lib/srtgui/urls.py
+++ b/lib/srtgui/urls.py
@@ -16,11 +16,13 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView
from srtgui import tables
from srtgui import views
+from srtgui import typeaheads
+from srtgui import widgets
urlpatterns = [
# landing page
@@ -34,12 +36,15 @@ urlpatterns = [
url(r'^cves/$',
tables.CvesTable.as_view(template_name="cves-toastertable.html"),
name='cves'),
+
+ # "cve_status" is passed by URL: redirect("/srtgui/select-cves/?cve_status=%d" % cve_select_status)
url(r'^select-cves/$',
tables.SelectCveTable.as_view(template_name="cves-select-toastertable.html"),
name='select-cves'),
url(r'^select-cves/(?P<cve_status>\d+)$',
tables.SelectCveTable.as_view(template_name="cves-select-toastertable.html"),
name='select-cves'),
+
url(r'^cve-create/$', views.cve_create, name="cve_create"),
url(r'^cve-alternates/(?P<cve_pk>\d+)$', views.cve_alternates, name="cve_alternates"),
@@ -107,6 +112,7 @@ urlpatterns = [
url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
+ # XHR URLs
url(r'^xhr_triage_commit/$', views.xhr_triage_commit,
name='xhr_triage_commit'),
@@ -135,6 +141,8 @@ urlpatterns = [
url(r'^xhr_publish/$', views.xhr_publish,
name='xhr_publish'),
+ # Management URLs
+
url(r'^manage/$', views.management, name='manage'),
url(r'^manage_cpes/$',
tables.ManageCpeTable.as_view(template_name="manage-cpes-toastertable.html"),
@@ -172,10 +180,43 @@ urlpatterns = [
tables.HistoryDefectTable.as_view(template_name="history-defect-toastertable.html"),
name='history_defect'),
+ # typeahead api end points
+ url(r'^xhr_recipetypeahead/recipes$',
+ typeaheads.RecipeTypeAhead.as_view(), name='xhr_recipetypeahead'),
+
+ #
+ # Job progress URLs
+ #
+
+ url(r'^joblog/(?P<job_pk>\d+)$', views.joblog, name='joblog'),
+ url(r'^mostrecentjobs$', widgets.MostRecentJobsView.as_view(),
+ name='most_recent_jobs'),
+ url(r'^xhr_maintenance_commit/$', views.xhr_maintenance_commit,
+ name='xhr_maintenance_commit'),
+ url(r'^xhr_jobrequest/$',
+ widgets.XhrJobRequest.as_view(),
+ name='xhr_jobrequest'),
+ url(r'^xhr_job_post/$', views.xhr_job_post,
+ name='xhr_job_post'),
+ url(r'^xhr_sources_commit/$', views.xhr_sources_commit,
+ name='xhr_sources_commit'),
+
+ url(r'^manage_jobs/(?P<foo_id>\d+)$',
+ tables.ManageJobsTable.as_view(template_name="manage-jobs-toastertable.html"),
+ name='manage_jobs'),
+
+ #
+ # Extra
+ #
+
+ url(r'^email_admin/$', views.email_admin, name='email_admin'),
+ url(r'^email_success/$', views.email_success, name='email_success'),
+
url(r'^guided_tour/$', views.guided_tour, name='guided_tour'),
url(r'^quicklink/$', views.quicklink, name='quicklink'),
+ url(r'^date_time_test/$', views.date_time_test, name='date_time_test'),
url(r'^tbd/$', views.tbd, name='tbd'),
# default redirection
diff --git a/lib/srtgui/views.py b/lib/srtgui/views.py
index d3601181..2cfe0e19 100644
--- a/lib/srtgui/views.py
+++ b/lib/srtgui/views.py
@@ -4,7 +4,7 @@
#
# Security Response Tool Implementation
#
-# Copyright (C) 2017-2018 Wind River Systems
+# Copyright (C) 2017-2023 Wind River Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -23,15 +23,24 @@ import os
import traceback
import subprocess
from datetime import timedelta, datetime
+from datetime import timezone as datetime_timezone
from decimal import Decimal
import mimetypes
import json
import re
+import time
+import pytz
from django.db.models import Q
from django.shortcuts import render, redirect
from django.db.models.functions import Lower
-from orm.models import Cve, CveLocal, CveSource, CveHistory
+from django.contrib.auth.models import Group
+from django.urls import reverse, resolve
+from django.core.paginator import EmptyPage, PageNotAnInteger
+from django.http import HttpResponse
+from django.utils import timezone
+
+from orm.models import Cve, CveLocal, CveSource, CveHistory, CveAccess
from orm.models import Vulnerability, VulnerabilityHistory, CveToVulnerablility, VulnerabilityToInvestigation, VulnerabilityNotification, VulnerabilityAccess, VulnerabilityComments, VulnerabilityUploads
from orm.models import Investigation, InvestigationHistory, InvestigationToDefect, InvestigationComments, InvestigationNotification, InvestigationAccess, InvestigationUploads
from orm.models import SrtSetting, Product
@@ -41,21 +50,16 @@ from orm.models import Defect, DefectHistory, PublishPending, PublishSet
from orm.models import Notify, NotifyAccess, NotifyCategories
from orm.models import SRTool, Update
from orm.models import ErrorLog
-
+from orm.models import Job
from users.models import SrtUser, UserSafe
-
from srtgui.reports import ReportManager
from srtgui.api import readCveDetails, writeCveDetails, summaryCveDetails, execute_process
from srtgui.api import publishCalculate, publishReset, publishMarkNew, publishMarkModified, publishMarkNone
-from django.urls import reverse, resolve
-from django.core.paginator import EmptyPage, PageNotAnInteger
-from django.http import HttpResponse
-from django.utils import timezone
-
import logging
-SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
logger = logging.getLogger("srt")
@@ -105,17 +109,28 @@ def managedcontextprocessor(request):
ret['srt_logo'] = SrtSetting.objects.get(name='SRTOOL_LOGO').value.split(',')
# Add optional local logo link
ret['srt_local_logo'] = SrtSetting.objects.get(name='SRTOOL_LOCAL_LOGO').value.split(',')
+ # Add optional SRTool mode
+ ret['srt_mode'] = ' (%s)' % os.environ['SRT_MODE'] if (('SRT_MODE' in os.environ) and os.environ['SRT_MODE']) else ''
return ret
# determine in which mode we are running in, and redirect appropriately
def landing(request):
- # we only redirect to projects page if there is a user-generated project
-# num_builds = Build.objects.all().count()
-# user_projects = Project.objects.filter(is_default = False)
-# has_user_project = user_projects.count() > 0
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "srtgui"):
+ return redirect(f"/{SRT_MAIN_APP}/landing/")
- context = {}
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'srtgui',
+ }
return render(request, 'landing.html', context)
@@ -469,6 +484,7 @@ def management(request):
defects_inprogress = defects_inprogress.count()
context = {
+ 'mru' : Job.get_recent(),
'cve_total' : Cve.objects.all().count(),
'cve_new' : Cve.objects.filter(status=Cve.NEW).count(),
# 'cve_open' : Cve.objects.filter( Q(status=Cve.INVESTIGATE) & Q(status=Cve.VULNERABLE) ).count(),
@@ -503,19 +519,30 @@ def management(request):
return render(request, 'management.html', context)
def maintenance(request):
+ _log("MAINTENANCE: %s" % request)
# does this user have permission to see this record?
if not UserSafe.is_creator(request.user):
return redirect(landing)
- context = {
- 'errorlog_total' : ErrorLog.objects.all().count(),
- 'history_cve_total' : CveHistory.objects.all().count(),
- 'history_vulnerability_total' : VulnerabilityHistory.objects.all().count(),
- 'history_investigation_total' : InvestigationHistory.objects.all().count(),
- 'defect_investigation_total' : DefectHistory.objects.all().count(),
- }
- return render(request, 'maintenance.html', context)
+ if request.method == "GET":
+ context = {
+ 'errorlog_total' : ErrorLog.objects.all().count(),
+ 'history_cve_total' : CveHistory.objects.all().count(),
+ 'history_vulnerability_total' : VulnerabilityHistory.objects.all().count(),
+ 'history_investigation_total' : InvestigationHistory.objects.all().count(),
+ 'defect_investigation_total' : DefectHistory.objects.all().count(),
+ 'mru' : Job.get_recent(),
+ 'remote_backup_path' : SrtSetting.get_setting('SRT_REMOTE_BACKUP_PATH',''),
+ }
+ return render(request, 'maintenance.html', context)
+ elif request.method == "POST":
+ _log("EXPORT_POST:MAINTENANCE: %s" % request)
+ if request.POST["action"] == "submit-remote-backup-path":
+ SrtSetting.set_setting('SRT_REMOTE_BACKUP_PATH',request.POST["text-remote-backup-path"].strip()),
+ return redirect(maintenance)
+ else:
+ return render(request, "unavailable_artifact.html", context={})
def cve(request, cve_pk, active_tab="1"):
if request.method == "GET":
@@ -531,10 +558,17 @@ def cve(request, cve_pk, active_tab="1"):
_log("CVE_ERROR(%s)(%s):" % (cve_pk,e))
return redirect(landing)
- # does this user have permission to see this record?
+ # Does this user have permission to see this record?
if (not cve_object.public) and (not UserSafe.is_admin(request.user)):
- _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
- return redirect(landing)
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_object,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("CVE_PASS_PERMISSIONS:(%s)" % request.user)
# Set up the investigation link
investigation_records = Investigation.objects.filter(name=cve_object.name)
@@ -555,7 +589,7 @@ def cve(request, cve_pk, active_tab="1"):
# Always pre-pend a summary page
tab_states[chr(cve_index)] = 'active'
cveDetails,cve_html = summaryCveDetails(cve_object,cve_sources)
- cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html])
+ cve_list_table.append([cveDetails,tab_states[chr(cve_index)],'Summary',cve_html,''])
cve_index += 1
# Add the source/edit tabs
@@ -569,18 +603,26 @@ def cve(request, cve_pk, active_tab="1"):
if active_tab == cs.datasource.name:
active_tab = chr(cve_index)
if ('Edit' == active_tab) and ('Local' == cs.datasource.name):
- #tab_states[chr(cve_index)] = 'active'
- tab_states[chr(cve_index)] = ''
- cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],'Edit',{}])
+ if False:
+ tab_states[chr(cve_index)] = ''
+ else:
+ # Force the 'Edit' tab to start active
+ tab_states[chr(cve_index)] = 'active'
+ # Force the 'Summary' tab to start inactive
+ tab_states[chr(ord('1'))] = ''
+ cve_list_table[0][1] = ''
+ cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],'Edit',{},''])
else:
tab_states[chr(cve_index)] = ''
#tab_states[chr(cve_index)] = 'active' if (active_tab == chr(cve_index)) else ''
- cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],cs.datasource.name,{}])
+ tab_name = cs.datasource.name
+ cve_list_table.append([readCveDetails(cve_object,cs.datasource),tab_states[chr(cve_index)],tab_name,{},cs.datasource.id])
cve_index += 1
if 0 == len(cve_sources):
_log("CVE_0_Sources??:(%s,%s)" % (cve_pk, active_tab))
- tab_states['1'] = 'active'
- cve_list_table.append([readCveDetails(cve_object,None),tab_states['1'],'(No Source)',{}])
+ tab_states['1'] = ''
+ details = readCveDetails(cve_object,None)
+ cve_list_table.append([readCveDetails(cve_object,None),tab_states['1'],'No_Source',{},''])
# Check to make sure active_tab was applied
for tab in tab_states.keys():
@@ -618,10 +660,18 @@ def cve(request, cve_pk, active_tab="1"):
if not request.POST.get('cve-edit','').startswith('Save'):
return redirect(cve, cve_object.id, "Summary")
+
# does this user have permission to see this record?
if (not cve_object.public) and (not UserSafe.is_admin(request.user)):
- _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
- return redirect(landing)
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_object,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("CVE_PASS_PERMISSIONS:(%s)" % request.user)
# update the local CVE record
writeCveDetails(cve_object.name,request)
@@ -648,15 +698,23 @@ def cve_edit(request, cve_pk):
cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=source)
return cve(request, cve_object.name, active_tab="Edit")
-def cve_create(request):
+def _create_local_cve():
# Create the local CVE edit record
new_cve_name = CveLocal.new_cve_name()
cve_object = Cve.objects.create(name=new_cve_name,name_sort=get_name_sort(new_cve_name))
+ cve_object.save()
cve_local_object = CveLocal.objects.create(name=new_cve_name)
+ cve_local_object.save()
# Add the source mapping
source = DataSource.objects.get(name='Local')
cve_source_object = CveSource.objects.create(cve=cve_object,datasource=source)
- # Open the new CVE
+ cve_source_object.save()
+ return cve_object,cve_local_object
+
+def cve_create(request):
+ # Create the local CVE edit record
+ cve_object,cve_local_object = _create_local_cve()
+ # Open the new CVE page
return redirect(cve, cve_object.id, "Local")
@@ -678,7 +736,15 @@ def vulnerability(request, vulnerability_pk):
# does this user have permission to see this record?
if (not vulnerability_object.public) and (not UserSafe.is_admin(request.user)):
- return redirect(landing)
+ try:
+ vul_access = VulnerabilityAccess.objects.get(vulnerability=vulnerability_object,user=request.user)
+ except:
+ vul_access = None
+ if not vul_access:
+ _log("VUL_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("VUL_PASS_PERMISSIONS:(%s)" % request.user)
context = {
'object' : vulnerability_object,
@@ -757,6 +823,19 @@ def investigation(request, investigation_pk):
except:
return redirect(landing)
+ # does this user have permission to see this record?
+ if (not investigation_object.public) and (not UserSafe.is_admin(request.user)):
+ try:
+ inv_access = InvestigationAccess.objects.get(investigation=investigation_object,user=request.user)
+ except:
+ inv_access = None
+ if not inv_access:
+ _log("INV_ERROR_PERMISSIONS:(%s)" % request.user)
+ return redirect(landing)
+ else:
+ _log("INV_PASS_PERMISSIONS:(%s)" % request.user)
+
+
### TO-DO: replace with dynamic lookahead instead of static huge list
defects = Defect.objects.all()
@@ -881,7 +960,8 @@ def sources(request):
object = DataSource.objects.all()
context = {
- 'object' : object,
+ 'object' : object,
+ 'mru' : Job.get_recent(),
}
return render(request, template, context)
@@ -902,7 +982,7 @@ def login(request):
try:
### USER CONTROL
- user = SrtUser.objects.get(name=user_name)
+ user = SrtUser.objects.get(username=user_name)
request.session['srt_user_id'] = user.id
request.session.modified = True
_log("LOGIN_POST_SET:%s,%s" % (user.name,user.id))
@@ -929,6 +1009,8 @@ def users(request):
context = {
'object' : object,
+ 'groups' : Group.objects.all().order_by(Lower('name')),
+ 'builtin_groups' : ('Reader','Contributor','Creator','Admin'),
}
return render(request, template, context)
@@ -1022,7 +1104,6 @@ def publish_diff_snapshot(request):
if not UserSafe.is_creator(request.user):
return redirect(landing)
- main_app = SrtSetting.get_setting('SRT_MAIN_APP','yp')
if request.method == "GET":
# Prepare available snapshots
@@ -1095,6 +1176,7 @@ def publish_diff_snapshot(request):
'snapshot_frequency_list' : snapshot_frequency_list,
'snap_frequency_select' : snap_frequency_select,
+ 'mru' : Job.get_recent(),
}
return render(request, 'publish_diff_snapshot.html', context)
elif request.method == "POST":
@@ -1149,7 +1231,7 @@ def publish_diff_snapshot(request):
publishCalculate(date_start,date_stop)
return redirect('publish')
if 'export' == action:
- return redirect('/%s/report/publish' % main_app)
+ return redirect('/%s/report/publish' % SRT_MAIN_APP)
return redirect('publish')
def publish_diff_history(request):
@@ -1157,7 +1239,6 @@ def publish_diff_history(request):
if not UserSafe.is_creator(request.user):
return redirect(landing)
- main_app = SrtSetting.get_setting('SRT_MAIN_APP','yp')
if request.method == "GET":
# Prepare available snapshots
@@ -1284,7 +1365,7 @@ def publish_diff_history(request):
publishCalculate(date_start,date_stop)
return redirect('publish')
if 'export' == action:
- return redirect('/%s/report/publish' % main_app)
+ return redirect('/%s/report/publish' % SRT_MAIN_APP)
return redirect('publish')
@@ -1351,6 +1432,10 @@ def _create_defect(investigation,reason,defect_reason,domain_components,affected
# Assign the defect the same priority as the Investigation
priority = investigation.get_priority_text
+ # Protect Jira from undefined priorities
+ if priority == SRTool.priority_text(SRTool.UNDEFINED):
+ _log("WARNING:_create_defect:FIX_PRIORITY:'%s' to '%s" % (priority,SRTool.priority_text(SRTool.LOW)))
+ priority = SRTool.priority_text(SRTool.LOW)
_log("_create_defect:%s:%s:%s" % (investigation.name,priority,links))
# Offer a default defect summary
@@ -1755,6 +1840,7 @@ def _submit_notification(request):
NotifyAccess.objects.get_or_create(notify=notify, user=user)
_log("xhr_notifications5")
+
def xhr_cve_commit(request):
_log("xhr_cve_commit(%s)" % request.POST)
if not 'action' in request.POST:
@@ -1764,9 +1850,13 @@ def xhr_cve_commit(request):
action = request.POST['action']
history_update = []
new_name = ''
+ error_text = "ok"
+ username = UserSafe.user_name(request.user)
+
if 'submit-quickedit' == action:
priority = int(request.POST['priority'])
status = int(request.POST['status'])
+ public = (1 == int(request.POST['public']))
note = request.POST['note'].strip()
private_note = request.POST['private_note'].strip()
tags = request.POST['tags'].strip()
@@ -1817,13 +1907,33 @@ def xhr_cve_commit(request):
history_update.append(Update.ACKNOWLEDGE_DATE % (SRTool.date_ymd_text(cve.acknowledge_date),SRTool.date_ymd_text(acknowledge_date)))
cve.acknowledge_date = acknowledge_date
- cve.save()
- if 'submit-notification' == action:
+ # Process implications of 'public' change
+ if (cve.public != public):
+ history_update.append(Update.PUBLIC % (cve.public,public))
+ cve.public = public
+ # Ensure newly private record has at least this user
+ if not public:
+ cve_access,created = CveAccess.objects.get_or_create(cve=cve, user=request.user)
+ if created:
+ cve_access.save()
+ # If we are about to propagate, save current state first and once
+ cve.save()
+ cve.propagate_private()
+ else:
+ # No propagation, normal save
+ cve.save()
+ elif 'submit-notification' == action:
# Note: no history update
_submit_notification(request)
- if 'submit-newname' == action:
+ elif 'submit-newname' == action:
old_name = request.POST['old_name']
- new_name = request.POST['new_name']
+ new_name_input = request.POST['new_name'].strip()
+ new_name = ''
+ for i in range(len(new_name_input)):
+ if not new_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_ '):
+ new_name += '_'
+ else:
+ new_name += new_name_input[i]
try:
# Is name already used?
Cve.objects.get(name=new_name)
@@ -1839,7 +1949,7 @@ def xhr_cve_commit(request):
cveLocal = CveLocal.objects.get(name=old_name)
cveLocal.name = new_name
cveLocal.save()
- if 'submit-create-vulnerability' == action:
+ elif 'submit-create-vulnerability' == action:
_log("SUBMIT-CREATE-VULNERABILITY")
vname = Vulnerability.new_vulnerability_name()
vulnerability = Vulnerability.objects.create(
@@ -1849,13 +1959,18 @@ def xhr_cve_commit(request):
priority = cve.priority,
comments = cve.comments,
packages = cve.packages,
+ public = cve.public,
)
vulnerability.save()
+ # If private, add users
+ if not cve.public:
+ for cve_private_access in CveAccess.objects.filter(cve=cve,user=request.user):
+ VulnerabilityAccess.objects.create(vulnerability=vulnerability,user=cve_private_access.user)
history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
- if 'submit-attach-vulnerability' == action:
+ elif 'submit-attach-vulnerability' == action:
_log("SUBMIT-CREATE-VULNERABILITY")
vname = request.POST['vul_name'].strip()
try:
@@ -1866,20 +1981,97 @@ def xhr_cve_commit(request):
history_update.append(Update.ATTACH_INV % (vname))
cve2vul = CveToVulnerablility.objects.create(cve = cve,vulnerability = vulnerability)
cve2vul.save()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.ATTACH_CVE % cve.name)
+ vul_hist = VulnerabilityHistory.objects.create(vulnerability_id=vulnerability.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ vul_hist.save()
_log("SUBMIT-CREATE-VULNERABILITY:%s,%s,%s" % (cve.id,vulnerability.id,cve2vul.id))
- if 'submit-delete-cve' == action:
+ elif 'submit-detach-vulnerability' == action:
+ record_id = request.POST['record_id']
+ vulnerability = Vulnerability.objects.get(id=record_id)
+ c2v = CveToVulnerablility.objects.get(vulnerability=vulnerability,cve=cve)
+ c2v.delete()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.DETACH_CVE % cve.name)
+ vul_hist = VulnerabilityHistory.objects.create(vulnerability_id=vulnerability.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ vul_hist.save()
+ history_update.append(Update.DETACH_VUL % vulnerability.name)
+ elif 'submit-delete-cve' == action:
_log("SUBMIT-DELETE-CVE(%s)" % cve.name)
#history_update.append(Update.ATTACH_INV % (vname))
+
+ # Try remove the datasource map first
+ try:
+ cvesource = CveSource.objects.get(cve=cve)
+ if cvesource:
+ cvesource.delete()
+ except:
+ # NO CveSource record
+ pass
+
+ # First delete the Cve record (and its related records automatically)
+ cve_name = cve.name
cve.delete()
_log("SUBMIT-DELETED-CVE(%s)!" % cve.name)
+ # Now remove any related cvelocal records
+            # CveLocal records are keyed by name, since they are created dynamically from a local edit
+ try:
+ cvelocal = CveLocal.objects.get(name=cve_name)
+ if cvelocal:
+ cvelocal.delete()
+ except:
+ # NO CveLocal record
+ pass
new_name = 'url:/srtgui/cves'
+ elif 'submit-adduseraccess' == action:
+ users = request.POST['users']
+ usernames = []
+ for user_id in users.split(','):
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
+ CveAccess.objects.get_or_create(cve=cve, user_id=user_id)
+ history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
+ cve.propagate_private()
+ elif 'submit-trashuseraccess' == action:
+ record_id = request.POST['record_id']
+ access_record = CveAccess.objects.get(id=record_id)
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
+ access_record.delete()
+ cve.propagate_private()
+
+ elif 'submit-merge-cve' == action:
+ cve_merge_name = request.POST['cve_merge_name']
+ try:
+ cve_merge = Cve.objects.get(name=cve_merge_name)
+ # We found it, but does the user have access to it?
+ # TODO
+
+ # Merge/create the cvelocal data
+ pass
+
+ # Save the results
+ pass
+
+                # Delete the local CVE in favor of the merged CVE?
+ pass
+
+ # Jump to the new CVE
+ new_name = cve_merge_name
+ history_update.append(Update.MERGE_CVE % cve.name)
+ cve = cve_merge
+
+ except Exception as e:
+ error_text = "ERROR: unknown CVE name '%s'" % cve_merge_name
+ _log("ERROR:CVE_MERGE_NAME:%s" % e)
+
+ else:
+ error_text = "ERROR: unknown action '%s'" % action
+
+ _log("XHR_CVE_COMMIT:new_name=%s" % new_name)
+
return_data = {
- "error": "ok",
+ "error": error_text,
"new_name" : new_name,
}
- username = UserSafe.user_name(request.user)
if history_update:
update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
CveHistory.objects.create(cve_id=cve.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
@@ -1937,6 +2129,7 @@ def xhr_vulnerability_commit(request):
action = request.POST['action']
v_id = request.POST['vulnerability_id']
username = UserSafe.user_name(request.user)
+ new_name = ''
try:
history_update = []
if 'submit-quickedit' == action:
@@ -1945,8 +2138,10 @@ def xhr_vulnerability_commit(request):
tags = request.POST['tags'].strip()
priority = int(request.POST['priority'])
status = int(request.POST['status'])
+ public = (1 == int(request.POST['public']))
outcome = int(request.POST['outcome'])
affected_components = request.POST['affected_components'].strip()
+ description = request.POST['description'].strip()
v = Vulnerability.objects.get(id=v_id)
if (v.priority != priority):
history_update.append(Update.PRIORITY % (SRTool.priority_text(v.priority),SRTool.priority_text(priority)))
@@ -1969,8 +2164,68 @@ def xhr_vulnerability_commit(request):
if (affected_components != v.packages):
history_update.append(Update.AFFECTED_COMPONENT % (v.packages,affected_components))
v.packages = affected_components
- v.save()
- if 'submit-addproduct' == action:
+ if (description != v.description):
+ history_update.append(Update.DESCRIPTION)
+ v.description = description
+
+ # Process implications of 'public' change
+ _log("V2C:PRIVATE0:%s to %s" % (v.public,public))
+ if (public != v.public):
+ history_update.append(Update.PUBLIC % (v.public,public))
+ v.public = public
+ # Insure newly private record has at least this user
+ if not public:
+ vul_access,created = VulnerabilityAccess.objects.get_or_create(vulnerability=v, user=request.user)
+ if created:
+ vul_access.save()
+ # Since we are about to propagate, save current state first and once
+ v.save()
+ _log("V2C:PRIVATE1:%s" % v.public)
+ # Propagate the 'public' change via the parent CVEs (if any)
+ for c2v in CveToVulnerablility.objects.filter(vulnerability=v):
+ _log("V2C:PRIVATE2:%s" % c2v.cve.name)
+ # If now private, insure parent CVE has this user
+ if not public:
+ cve_access,created = CveAccess.objects.get_or_create(cve=c2v.cve, user=request.user)
+ if created:
+ cve_access.save()
+ c2v.cve.public = public
+ c2v.cve.save()
+ c2v.cve.propagate_private()
+ else:
+ # No propagation, normal save
+ v.save()
+ elif 'submit-newname' == action:
+ v = Vulnerability.objects.get(id=v_id)
+ old_name = request.POST['old_name']
+ new_name_input = request.POST['new_name'].strip()
+ new_name = ''
+ for i in range(len(new_name_input)):
+ if not new_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_'):
+ new_name += '_'
+ else:
+ new_name += new_name_input[i]
+ try:
+ # Is name already used?
+ is_existing_vul = Vulnerability.objects.get(name=new_name)
+ return HttpResponse(json.dumps({"error":"name '%s' is already used\n" % new_name}), content_type = "application/json")
+ except:
+ _log("NewName3:%s -> %s" % (old_name,new_name))
+ # Apply this unique name to CVE
+ v.name = new_name
+ v.save()
+ # Move any attached documents
+ path_old = os.path.join(SRT_BASE_DIR, "downloads/%s" % old_name)
+ path_new = os.path.join(SRT_BASE_DIR, "downloads/%s" % new_name)
+ doc_found = False
+ for doc in VulnerabilityUploads.objects.filter(vulnerability=v):
+ doc_found = True
+ doc.path = doc.path.replace(path_old,path_new)
+ doc.save()
+ if doc_found:
+ os.rename(path_old, path_new)
+ history_update.append(Update.NEW_NAME % (old_name,new_name))
+ elif 'submit-addproduct' == action:
products = request.POST['products']
investigation_names = []
vulnerability_obj = Vulnerability.objects.get(id=v_id)
@@ -1988,28 +2243,32 @@ def xhr_vulnerability_commit(request):
product = product_obj,
comments = vulnerability_obj.comments,
packages = vulnerability_obj.packages,
+ public = vulnerability_obj.public,
)
vul2inv = VulnerabilityToInvestigation.objects.create(vulnerability=vulnerability_obj,investigation=investigation_obj)
vul2inv.save()
investigation_names.append(iname)
+            # Assert parent CVE access rights
+ for c2v in CveToVulnerablility.objects.filter(vulnerability=vulnerability_obj):
+ c2v.cve.propagate_private()
history_update.append(Update.ATTACH_INV % ','.join(investigation_names))
- if 'submit-trashinvestigation' == action:
+ elif 'submit-trashinvestigation' == action:
inv_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=inv_id)
vul2inv = VulnerabilityToInvestigation.objects.filter(investigation=investigation_obj)
vul2inv.delete()
history_update.append(Update.DETACH_INV % (investigation_obj.name))
investigation_obj.delete()
- if 'submit-newcomment' == action:
+ elif 'submit-newcomment' == action:
comment = request.POST['comment']
VulnerabilityComments.objects.create(vulnerability_id=v_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
#NOTE: No History for this
- if 'submit-trashcomment' == action:
+ elif 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = VulnerabilityComments.objects.get(id=record_id)
comment.delete()
#NOTE: No History for this
- if 'submit-trashattachment' == action:
+ elif 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = VulnerabilityUploads.objects.get(id=record_id)
try:
@@ -2018,45 +2277,114 @@ def xhr_vulnerability_commit(request):
pass
history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
- if 'submit-addusernotify' == action:
+ elif 'submit-addusernotify' == action:
users = request.POST['users']
usernames = []
for user_id in users.split(','):
- usernames.append(SrtUser.objects.get(pk=user_id).name)
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
VulnerabilityNotification.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
- if 'submit-trashusernotification' == action:
+ elif 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = VulnerabilityNotification.objects.get(id=record_id)
- removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
+ removed_user = SrtUser.objects.get(pk=notification_record.user_id).username
notification_record.delete()
history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
- if 'submit-adduseraccess' == action:
+ elif 'submit-adduseraccess' == action:
users = request.POST['users']
usernames = []
for user_id in users.split(','):
- usernames.append(SrtUser.objects.get(pk=user_id).name)
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
VulnerabilityAccess.objects.get_or_create(vulnerability_id=v_id, user_id=user_id)
history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
- if 'submit-trashuseraccess' == action:
+ elif 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = VulnerabilityAccess.objects.get(id=record_id)
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
access_record.delete()
- history_update.append(Update.DETACH_ACCESS % username)
- if 'submit-notification' == action:
+ elif 'submit-notification' == action:
_submit_notification(request)
#NOTE: No History for this
- if 'submit-trashvulnerability' == action:
+ elif 'submit-trashvulnerability' == action:
record_id = request.POST['record_id']
vulnerability_obj = Vulnerability.objects.get(pk=record_id)
# history_update.append(Update.DETACH_VUL % vulnerability_obj.name)
vulnerability_obj.delete()
+ elif 'submit-attach-cve' == action:
+ vulnerability_obj = Vulnerability.objects.get(pk=v_id)
+ cve_name_input = request.POST['cve_name']
+ # Sanitize the CVE name
+ cve_name = ''
+ for i in range(len(cve_name_input)):
+ if not cve_name_input[i] in ('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-0123456789_ '):
+ cve_name += '_'
+ else:
+ cve_name += cve_name_input[i]
+ try:
+ cve_obj = Cve.objects.get(name=cve_name)
+ # Does the user have permission to see this CVE?
+ if (not cve_obj.public) and (not UserSafe.is_admin(request.user)):
+ try:
+ cveaccess = CveAccess.objects.get(cve=cve_obj,user=request.user)
+ except:
+ cveaccess = None
+ if not cveaccess:
+ _log("CVE_ATTACHE_ERROR_PERMISSIONS:(%s)" % request.user)
+ return HttpResponse(json.dumps( {"error":"Error: this CVE name is reserved"} ), content_type = "application/json")
+ except:
+ # Create local CVE with this name
+ cve_obj,cve_local_object = _create_local_cve()
+ old_name = cve_obj.name
+ cve_obj.description = vulnerability_obj.description
+ cve_obj.name = cve_name
+ cve_obj.save()
+ cve_local_object.description = vulnerability_obj.description
+ cve_local_object.save()
+ # Apply the new name to CveLocal
+ cveLocal = CveLocal.objects.get(name=old_name)
+ cveLocal.name = cve_name
+ cveLocal.save()
+
+ history_cve_update = []
+ if not vulnerability_obj.public:
+ history_cve_update.append(Update.PUBLIC % (cve_obj.public,vulnerability_obj.public))
+ cve_obj.public = vulnerability_obj.public
+ # Insure newly private record has at least this user
+ cve_access,created = CveAccess.objects.get_or_create(cve=cve_obj, user=request.user)
+ cve_access.save()
+ cve_obj.propagate_private()
+ cve_obj.save()
+
+ # Attach the CVE to the Vulnerability
+ c2v,create = CveToVulnerablility.objects.get_or_create(vulnerability=vulnerability_obj,cve=cve_obj)
+ c2v.save()
+ # Add history to CVE
+ username = UserSafe.user_name(request.user)
+ history_cve_update.append(Update.ATTACH_INV % (vulnerability_obj.name))
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_cve_update))
+ cve_hist = CveHistory.objects.create(cve_id=cve_obj.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ cve_hist.save()
+ history_update.append(Update.ATTACH_CVE % cve_obj.name)
+ elif 'submit-detach-cve' == action:
+ vulnerability_obj = Vulnerability.objects.get(pk=v_id)
+ record_id = request.POST['record_id']
+ cve_obj = Cve.objects.get(id=record_id)
+ c2v = CveToVulnerablility.objects.get(vulnerability=vulnerability_obj,cve=cve_obj)
+ c2v.delete()
+ update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,Update.DETACH_VUL % vulnerability_obj.name)
+ cve_hist = CveHistory.objects.create(cve_id=cve_obj.id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
+ cve_hist.save()
+ history_update.append(Update.DETACH_CVE % cve_obj.name)
+ else:
+ # Action not found
+ return HttpResponse(json.dumps( {"error": "ERROR:unknown action '%s'" % action} ), content_type = "application/json")
if history_update:
update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
VulnerabilityHistory.objects.create(vulnerability_id=v_id, comment=update_comment, date=datetime.now().strftime('%Y-%m-%d'), author=username)
return_data = {
"error": "ok",
+ "new_name" : new_name,
}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
except Exception as e:
@@ -2231,7 +2559,7 @@ def xhr_investigation_commit(request):
history_update.append(Update.AFFECTED_COMPONENT % (invst.packages,affected_components))
invst.packages = affected_components
invst.save()
- if 'submit-attachdefectlist' == action:
+ elif 'submit-attachdefectlist' == action:
defects = request.POST['defects']
product_id = Investigation.objects.get(id=invst_id).product_id
defect_names = []
@@ -2239,7 +2567,7 @@ def xhr_investigation_commit(request):
defect_names.append(Defect.objects.get(pk=defect_id).name)
InvestigationToDefect.objects.get_or_create(investigation_id=invst_id, defect_id=defect_id)
history_update.append(Update.ATTACH_DEV % ','.join(defect_names))
- if 'submit-attachdefect' == action:
+ elif 'submit-attachdefect' == action:
query = request.POST['query'].upper()
product_id = Investigation.objects.get(id=invst_id).product_id
# Courtesy removal of URL (or other) prefix
@@ -2271,31 +2599,40 @@ def xhr_investigation_commit(request):
defect.srt_status = invst.status
defect.save()
history_update.append(Update.ATTACH_DEV % defect.name)
- if 'submit-createdefect' == action:
+ elif 'submit-createdefect' == action:
investigation = Investigation.objects.get(id=invst_id)
defect_reason = request.POST['defect_reason']
components = request.POST['components']
priority = request.POST['priority']
+ try:
+                # if an explicitly selected priority, reset Investigation to that
+ priority = int(priority)
+ if priority != investigation.priority:
+ investigation.priority = priority
+ investigation.save()
+ except Exception as e:
+ _log("WARINING:defect_create:priority issue:'%s'" % priority)
+
affected_components = request.POST['affected_components'].strip()
defect_name,created = _create_defect(investigation,'',defect_reason,components,affected_components,username)
history_update.append(Update.ATTACH_DEV % defect_name)
xhr_note = defect_name
- if 'submit-detachdefect' == action:
+ elif 'submit-detachdefect' == action:
defect_name = request.POST['defect']
product_id = Investigation.objects.get(id=invst_id).product_id
defect_id = Defect.objects.get(name=defect_name).id
InvestigationToDefect.objects.get(investigation_id=invst_id, defect_id=defect_id).delete()
history_update.append(Update.DETACH_DEV % defect_name)
- if 'submit-newcomment' == action:
+ elif 'submit-newcomment' == action:
comment = request.POST['comment']
InvestigationComments.objects.create(investigation_id=invst_id, comment=comment, date=datetime.today().strftime('%Y-%m-%d'), author=username)
#NOTE: No History for this
- if 'submit-trashcomment' == action:
+ elif 'submit-trashcomment' == action:
record_id = request.POST['record_id']
comment = InvestigationComments.objects.get(id=record_id)
comment.delete()
#NOTE: No History for this
- if 'submit-trashattachment' == action:
+ elif 'submit-trashattachment' == action:
record_id = request.POST['record_id']
upload = InvestigationUploads.objects.get(id=record_id)
try:
@@ -2304,39 +2641,45 @@ def xhr_investigation_commit(request):
pass
history_update.append(Update.DETACH_DOC % (upload.path))
upload.delete()
- if 'submit-addusernotify' == action:
+ elif 'submit-addusernotify' == action:
users = request.POST['users']
usernames = []
for user_id in users.split(','):
- usernames.append(SrtUser.objects.get(pk=user_id).name)
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
InvestigationNotification.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
history_update.append(Update.ATTACH_USER_NOTIFY % ','.join(usernames))
- if 'submit-trashusernotification' == action:
+ elif 'submit-trashusernotification' == action:
record_id = request.POST['record_id']
notification_record = InvestigationNotification.objects.get(id=record_id)
- removed_user = SrtUser.objects.get(pk=notification_record.user_id).name
+ removed_user = SrtUser.objects.get(pk=notification_record.user_id).username
history_update.append(Update.DETACH_USER_NOTIFY % removed_user)
notification_record.delete()
- if 'submit-adduseraccess' == action:
+ elif 'submit-adduseraccess' == action:
users = request.POST['users']
usernames = []
for user_id in users.split(','):
- usernames.append(SrtUser.objects.get(pk=user_id).name)
+ usernames.append(SrtUser.objects.get(pk=user_id).username)
InvestigationAccess.objects.get_or_create(investigation_id=invst_id, user_id=user_id)
history_update.append(Update.ATTACH_ACCESS % ','.join(usernames))
- if 'submit-trashuseraccess' == action:
+ elif 'submit-trashuseraccess' == action:
record_id = request.POST['record_id']
access_record = InvestigationAccess.objects.get(id=record_id)
- history_update.append(Update.DETACH_ACCESS % username)
+ history_update.append(Update.DETACH_ACCESS % access_record.user.username)
access_record.delete()
- if 'submit-notification' == action:
+ elif 'submit-notification' == action:
_submit_notification(request)
#NOTE: No History for this
- if 'submit-trashinvestigation' == action:
+ elif 'submit-trashinvestigation' == action:
record_id = request.POST['record_id']
investigation_obj = Investigation.objects.get(pk=record_id)
# history_update.append(Update.DETACH_INV % investigation_obj.name)
investigation_obj.delete()
+ else:
+ return_data = {
+ "error": "ERROR:unknown action '%s'" % action,
+ "new_name" : new_name,
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
if history_update:
update_comment = "%s%s" % (Update.UPDATE_STR % Update.SOURCE_USER,';'.join(history_update))
@@ -2354,8 +2697,6 @@ def xhr_investigation_commit(request):
def xhr_publish(request):
_log("xhr_publish(%s)" % request.POST)
- main_app = SrtSetting.get_setting('SRT_MAIN_APP','yp')
-
def remove_mark(mark,line):
pos1 = line.find(mark)
if -1 == pos1:
@@ -2395,18 +2736,18 @@ def xhr_publish(request):
if (not top_dir) and (snap_date_top == backup_date) and ('Now' != backup_mode):
top_dir = 'backups/%s' % backup_dir
- _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (main_app,base_dir))
- report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % main_app,'--srt2update',base_dir)
+ _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,base_dir))
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,'--srt2update',base_dir)
if 0 != report_returncode:
return_data = {"error": "Error: base dir prep:%s:%s" % (report_error,report_stdout),}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
- _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (main_app,top_dir))
- report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % main_app,'--srt2update',top_dir)
+ _log('Publish:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,top_dir))
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,'--srt2update',top_dir)
if 0 != report_returncode:
return_data = {"error": "Error: top dir prep:%s:%s" % (report_error,report_stdout),}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
- _log('Publish:./bin/'+main_app+'/srtool_publish.py --validate-update-svns --previous '+base_dir+' --current '+top_dir+' --start '+snap_date_start+' --stop '+snap_date_stop)
- report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % main_app,
+ _log('Publish:./bin/'+SRT_MAIN_APP+'/srtool_publish.py --validate-update-svns --previous '+base_dir+' --current '+top_dir+' --start '+snap_date_start+' --stop '+snap_date_stop)
+ report_returncode,report_stdout,report_error = execute_process('./bin/%s/srtool_publish.py' % SRT_MAIN_APP,
'--validate-update-svns','--previous',base_dir,'--current',top_dir,
'--start',snap_date_start,'--stop',snap_date_stop)
if 0 != report_returncode:
@@ -2420,9 +2761,51 @@ def xhr_publish(request):
SrtSetting.set_setting('publish_snap_last_calc',publish_snap_last_calc)
_log('Publish:Done!')
+
+ if 'export-snapshot-progress' == action:
+ snap_date_base = request.POST['snap_date_base']
+ snap_date_top = request.POST['snap_date_top']
+ snap_date_start = request.POST['snap_date_start']
+ snap_date_stop = request.POST['snap_date_stop']
+ _log("xhr_publish:export-snapshot:%s,%s,%s,%s" % (snap_date_base,snap_date_top,snap_date_start,snap_date_stop))
+
+ SrtSetting.set_setting('publish_snap_date_base',snap_date_base)
+ SrtSetting.set_setting('publish_snap_date_top',snap_date_top)
+ SrtSetting.set_setting('publish_snap_date_start',snap_date_start)
+ SrtSetting.set_setting('publish_snap_date_stop',snap_date_stop)
+
+ backup_returncode,backup_stdout,backup_result = execute_process('bin/common/srtool_backup.py','--list-backups-db')
+ base_dir = ''
+ top_dir = ''
+ for i,line in enumerate(backup_stdout.decode("utf-8").splitlines()):
+ # Week|backup_2019_19|2019-05-18|12:51:51|Saturday, May 18 2019
+ backup_mode,backup_dir,backup_date,backup_time,backup_day = line.split('|')
+ if (not base_dir) and (snap_date_base == backup_date):
+ base_dir = 'backups/%s' % backup_dir
+ if (not top_dir) and (snap_date_top == backup_date) and ('Now' != backup_mode):
+ top_dir = 'backups/%s' % backup_dir
+
+ _log('PublishProgress:./bin/%s/srtool_publish.py --srt2update %s' % (SRT_MAIN_APP,base_dir))
+
+ command = [
+ './bin/%s/srtool_publish.py' % SRT_MAIN_APP,
+ '--validate-update-svns-progress','--previous',base_dir,'--current',top_dir,
+ '--start',snap_date_start,'--stop',snap_date_stop,
+ ' --progress'
+ ]
+ Job.start('Update svns progress','Create SVNS diff file',' '.join(command),'','update_logs/run_svns_job.log',job_id=2)
+
+ publish_snap_last_calc = 'Base:%s, Top:%s, Start:%s, Stop:%s, On:%s' % (
+ snap_date_base,snap_date_top,snap_date_start,snap_date_stop,
+ datetime.today().strftime("%Y-%m-%d %H:%M:%S")
+ )
+ SrtSetting.set_setting('publish_snap_last_calc',publish_snap_last_calc)
+
+ _log('PublishProgress:Done!')
+
elif 'submit-trashreport' == action:
report_name = request.POST['report_name']
- os.remove('data/%s/%s' % (main_app,report_name))
+ os.remove('data/%s/%s' % (SRT_MAIN_APP,report_name))
else:
srtool_today_time = datetime.today()
srtool_today = datetime.today().strftime("%Y-%m-%d")
@@ -2494,31 +2877,290 @@ def xhr_publish(request):
return HttpResponse(json.dumps({"error":str(e) + "\n"}), content_type = "application/json")
+def attach_cve_alternates(cve,force_nist_update=True):
+ # Attach all matching CVE sources
+ #_log("Alternate1:%s" % (cve.name))
+ for ds in DataSource.objects.filter(data="cve"):
+ #_log("Alternate2:%s:%s:%s:%s:" % (ds.key,ds.cve_filter,cve.name,ds.cve_filter))
+ if ds.cve_filter and cve.name.startswith(ds.cve_filter):
+ try:
+ cve_source_object,created = CveSource.objects.get_or_create(cve=cve,datasource=ds)
+ except:
+ ### WORKAROUND TODO TOFIX
+ cve_source_object = CveSource.objects.filter(cve=cve,datasource=ds).first()
+ created = False
+ #_log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve.name,created))
+
+ # Force update the CVE summary data from sources
+ if force_nist_update:
+ result_returncode,result_stdout,result_stderr = execute_process(
+ './bin/nist/srtool_nist.py',
+ '--update-cve-list',
+ cve.name,
+ '--force'
+ )
+ #_log("CVE_ALT_REFRESH=%s|%s|%s" % (result_returncode,result_stdout,result_stderr))
+
def cve_alternates(request, cve_pk):
try:
cve_object = Cve.objects.get(pk=cve_pk)
except Exception as e:
_log("CVE_ERROR(%s):" % e)
return redirect(landing)
-
# Attach all matching CVE sources
- _log("Alternate1:%s" % (cve_object.name))
- for ds in DataSource.objects.filter(data="cve"):
- _log("Alternate2:%s" % (ds.key))
- if ds.cve_filter and cve_object.name.startswith(ds.cve_filter):
- cve_source_object,created = CveSource.objects.get_or_create(cve=cve_object,datasource=ds)
- _log("Alternate CVE source %s for %s (created=%s)" % (ds.key,cve_object.name,created))
+ attach_cve_alternates(cve_object)
+ return redirect(cve, cve_pk)
- # Force update the CVE summary data from sources
- result_returncode,result_stdout,result_stderr = execute_process(
- './bin/nist/srtool_nist.py',
- '--update-cve-list',
- cve_object.name,
- '--force'
- )
- _log("CVE_ALT_REFRESH=%s|%s|%s" % (result_returncode,result_stdout,result_stderr))
+def xhr_maintenance_commit(request):
+ _log("xhr_maintenance_commit(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ if request.POST["action"] == "<some_action>":
+ pass
- return redirect(cve, cve_pk)
+ return_data = {
+ "error": "ok",
+ }
+ _log("xhr_maintenance_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_triage_commit:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_sources_commit(request):
+ _log("xhr_sources_commit(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ error_message = "ok";
+ data_message = "";
+ if request.POST["action"] == "submit-run-update-job":
+ ds_id = int(request.POST['id'])
+ datasource = DataSource.objects.get(id=ds_id)
+ #Job.start(name,description,command,options='',log_file=None,job_id=1):
+ name = datasource.name
+ description = datasource.description
+ options = ''
+ # Force update to execute now
+ command = datasource.update + ' --force'
+ _log("SUBMIT-RUN-UPDATE-JOB:Job.start(%s,%s,%s,%s)" % (name,description,command,options))
+ with open(f"{SRT_BASE_DIR}/update_logs/master_log.txt", "a") as update_log:
+ update_log.write("SRTOOL_UPDATE_MANUAL:%s:%s:%s:\n" % (datetime.now(),datasource.description,command))
+ Job.start(name,description,command,options)
+
+ elif request.POST["action"] == "submit-toggle-enable":
+ ds_id = int(request.POST['id'])
+ datasource = DataSource.objects.get(id=ds_id)
+ if 'DISABLE ' in datasource.attributes:
+ datasource.attributes = datasource.attributes.replace('DISABLE ','')
+ datasource.attributes = 'ENABLE ' + datasource.attributes
+ else:
+ datasource.attributes = 'DISABLE ' + datasource.attributes
+ datasource.attributes = datasource.attributes.replace('ENABLE ','')
+ datasource.save()
+ error_message = 'no_refresh'
+ data_message = '%d=%s' % (datasource.id,datasource.attributes)
+
+ else:
+ error_message = "ERROR:unknown action '%s'" % request.POST["action"]
+
+ return_data = {
+ "error": error_message,
+ "data_message": data_message,
+ }
+ _log("xhr_sources_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_triage_commit:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_job_post(request):
+ _log("xhr_job_post(%s)2" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+ try:
+ if request.POST["action"] == "submit-job":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'Submit Job',command,options,'update_logs/Job.start_user.log')
+ elif request.POST["action"] == "submit-testjob":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'This is a test',command,options,'update_logs/Job.start_user.log')
+ elif request.POST["action"] == "submit-testjob-j2":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ Job.start(name,'This is a test',command,options,'update_logs/Job.start_user.log',job_id=2)
+ elif request.POST["action"] == "submit-testjob-parent":
+ command = request.POST.get('command', 'NoCmnd')
+ name = request.POST.get('name', 'NoName')
+ options = request.POST.get('options', '')
+ # Preclear previously completed jobs from view
+ for job in Job.objects.all():
+ if not job.status in (Job.NOTSTARTED,Job.INPROGRESS):
+ job.status = Job.NOTSTARTED
+ job.save()
+ Job.start(name,'Parent/Children test',"./bin/common/srtool_job.py --test-parent-job",options,'update_logs/Job.start_user.log',job_id=9)
+ elif request.POST["action"] == "submit-trash-job":
+ record_id = int(request.POST.get('record_id', '0'))
+ if UserSafe.is_admin(request.user):
+ Job.objects.get(id=record_id).delete()
+ elif request.POST["action"] == "submit-clearjobs":
+ if UserSafe.is_admin(request.user):
+ Job.objects.all().delete()
+ else:
+ return_data = {
+ "error": "ERROR:unknown action '%s'" % request.POST["action"],
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ return_data = {
+ "error": "ok",
+ }
+ _log("xhr_maintenance_commit:SUCCESS")
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+
+ except Exception as e:
+ _log("xhr_job_post:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def joblog(request,job_pk):
+ if request.method == "GET":
+ _log("GET_JOBLOG:%s" % job_pk)
+ job, created = Job.objects.get_or_create(id=job_pk)
+ template = "joblog.html"
+ log_text = ''
+ log_file = job.log_file if (job.log_file and ('/' == job.log_file[0])) else os.path.join(SRT_BASE_DIR,job.log_file)
+ if job.log_file:
+ with open(os.path.join(SRT_BASE_DIR,log_file),'r') as file:
+ log_text = file.read() #Note: keep EOL chars
+ context = {
+ 'object' : job,
+ 'log_text' : log_text,
+ 'log_date' : time.asctime(time.localtime(os.path.getmtime(log_file))),
+ }
+ return render(request, template, context)
+ # No action if no log
+ return HttpResponse(json.dumps( {"error": "ok",} ), content_type = "application/json")
+ elif request.method == "POST":
+ _log("POST_JOBLOG: %s" % request)
+
+ if request.POST["action"] == "download-job-log":
+ try:
+ job = Job.objects.get(id=job_pk)
+ file_path = job.log_file
+ except:
+ # In case job was cleaned up but old link for log was still visible
+ file_path = ''
+ if file_path:
+ fsock = open(file_path, "rb")
+ file_name = os.path.basename(file_path)
+ content_type = MimeTypeFinder.get_mimetype(file_path)
+ response = HttpResponse(fsock, content_type = content_type)
+ disposition = 'attachment; filename="{}"'.format(file_name)
+ response['Content-Disposition'] = 'attachment; filename="{}"'.format(file_name)
+ _log("EXPORT_POST_Q{%s} %s || %s " % (response, response['Content-Disposition'], disposition))
+ return response
+ else:
+ return render(request, "unavailable_artifact.html", context={})
+
+ return redirect("/srtgui/joblog")
+
+ raise Exception("Invalid HTTP method for this page")
+
+def email_admin(request):
+ if request.method == "GET":
+ context = {
+ 'error_message' : '',
+ }
+ return render(request, 'email_admin.html', context)
+ elif request.method == "POST":
+ _log("EMAIL_ADMIN: %s" % request)
+
+ if request.POST["action"] == "submit":
+ request_type = request.POST.get('request-type', '')
+ user_name = request.POST.get('user-name', '').strip()
+ user_email = request.POST.get('user-email', '').strip()
+ message = request.POST.get('message', '').strip()
+ if (not user_name) or (not user_email):
+ return render(request, 'email_admin.html', {'error_message' : "Error:missing user name or email",})
+
+ email_list = []
+ for user in SrtUser.get_group_users('SRTool_Admins'):
+ if user.email:
+ email_list.append(user.email)
+ if not email_list:
+ return render(request, 'email_admin.html', {'error_message' : "Error:missing admin emails. Contact SRTool team",})
+# email_list.append(user_email)
+
+ email_temp_file = '.email.txt'
+ with open(email_temp_file, 'w') as file:
+ print("SRTool alert: %s for %s" % (request_type,user_name),file=file)
+ print("From: %s" % user_email,file=file)
+ for email in email_list:
+ print("To: %s" % email,file=file)
+ print("Subject: %s requests %s" % (user_name,request_type),file=file)
+ print("",file=file)
+ print("SRTool alert: %s" % request_type,file=file)
+ print("From: %s" % user_name,file=file)
+ print("Email: %s" % user_email,file=file)
+ print("",file=file)
+ print(message,file=file)
+
+ smtp_server = os.environ.get('SRT_EMAIL_SMTP', 'MISSING_SRT_EMAIL_SMTP')
+ email_command = ['git','send-email','--from='+user_email,'--thread','--quiet','--confirm=never',\
+ '--smtp-server',smtp_server,'--to=%s' % ','.join(email_list), email_temp_file]
+ email_returncode,email_stdout,email_stderr = execute_process(email_command)
+ if email_returncode:
+ return render(request, 'email_admin.html', {'error_message' : email_stderr,})
+ return redirect(email_success)
+
+ elif request.POST["action"] == "cancel":
+ return redirect('/')
+
+ else:
+ return render(request, 'email_admin.html', {'error_message' : "Error:no such action '%s'" % request.POST["action"]})
+
+ raise Exception("Invalid HTTP method for this page")
+
+def email_success(request):
+ if request.method == "GET":
+ context = {
+ }
+ return render(request, 'email_success.html', context)
+ elif request.method == "POST":
+ _log("EMAIL_SUCCESS: %s" % request)
+ if request.POST["action"] == "close":
+ return redirect('/')
+ return redirect('/')
+
+def date_time_test(request):
+ utc_dt = datetime.now(timezone.utc)
+ current_ala = utc_dt.astimezone(pytz.timezone('US/Pacific')).strftime(SRTool.DATETIME_FORMAT)
+ user_timezone_str = request.user.map_usertz_to_usertz_str()
+
+ # Replace with getting user_timezone_str from the user record
+ user_timezone = pytz.timezone(request.user.map_usertz_str_to_usertz(user_timezone_str))
+
+ epoch = time.time()
+ offset = utc_dt.astimezone(user_timezone).replace(tzinfo=None) - datetime.fromtimestamp(epoch)
+ local_time = utc_dt + offset
+ current_local = local_time.strftime(SRTool.DATETIME_FORMAT)
+
+ context = {
+ 'current_utc' : datetime.utcnow().strftime(SRTool.DATETIME_FORMAT),
+ 'current_ala' : current_ala,
+ 'current_local' : current_local,
+ 'timezone_list' : SrtUser.get_timezone_list(),
+ 'user_timezone' : user_timezone,
+ }
+ return render(request, 'date-time-test.html', context)
def tbd(request):
diff --git a/lib/srtgui/widgets.py b/lib/srtgui/widgets.py
index 5f5c54b1..ec2fbd42 100644
--- a/lib/srtgui/widgets.py
+++ b/lib/srtgui/widgets.py
@@ -4,7 +4,7 @@
#
# BitBake Toaster Implementation
#
-# Copyright (C) 2015 Intel Corporation
+# Copyright (C) 2023 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
@@ -21,6 +21,7 @@
from django.views.generic import View, TemplateView
from django.views.decorators.cache import cache_control
+from django.utils.decorators import method_decorator
from django.shortcuts import HttpResponse
from django.core.cache import cache
from django.core.paginator import Paginator, EmptyPage
@@ -29,8 +30,15 @@ from django.template import Context, Template
from django.template import VariableDoesNotExist
from django.template import TemplateSyntaxError
from django.core.serializers.json import DjangoJSONEncoder
+from django.urls import reverse, resolve
+from django.utils import timezone
+from django.http import JsonResponse
+
+from srtgui.templatetags.jobtags import json as template_json
+from srtgui.templatetags.jobtags import sectohms
from orm.models import SrtSetting, Cve
+from orm.models import Job
import types
import json
@@ -51,6 +59,38 @@ class NoFieldOrDataName(Exception):
# quick development/debugging support
from srtgui.api import _log
+################################################
+### Helper Routines
+
+def isbalanced(s):
+ c= 0
+ ans=False
+ list1 = []
+ list2 = []
+ strcheck = ""
+ lstcheck = []
+ for i in range(len(s)):
+ if s[i] == "(":
+ strcheck = strcheck + "_" + s[i]
+ lstcheck.append(i)
+ elif s[i] == ")" and "(" in strcheck:
+ list1 = strcheck.split("_")
+ list1.pop()
+ lstcheck.pop()
+ strcheck = "_".join(list1)
+ elif s[i] == ")" and "(" not in strcheck:
+ strcheck = strcheck + "_" + s[i]
+ lstcheck.append(i)
+ list1 = strcheck.split("_")
+ list2[:0] = s
+ if len(lstcheck) > 0 :
+        for i2 in sorted(lstcheck, reverse=True):
+            list2.pop(i2)
+ return "".join(list2)
+
+################################################
+### ToasterTable
+
class ToasterTable(TemplateView):
def __init__(self, *args, **kwargs):
super(ToasterTable, self).__init__()
@@ -60,7 +100,7 @@ class ToasterTable(TemplateView):
self.queryset = None
self.columns = []
- _log("ToasterTable:%s,%s" % (args,kwargs))
+# _log("ToasterTable:%s,%s" % (args,kwargs))
# map from field names to Filter instances
self.filter_map = TableFilterMap()
@@ -71,8 +111,8 @@ class ToasterTable(TemplateView):
self.default_orderby = ""
# prevent HTTP caching of table data
- @cache_control(must_revalidate=True,
- max_age=0, no_store=True, no_cache=True)
+ @method_decorator(cache_control(must_revalidate=True,
+ max_age=0, no_store=True, no_cache=True))
def dispatch(self, request, *args, **kwargs):
return super(ToasterTable, self).dispatch(request, *args, **kwargs)
@@ -96,6 +136,17 @@ class ToasterTable(TemplateView):
def get(self, request, *args, **kwargs):
if request.GET.get('format', None) == 'json':
+ # Add all URL parameters to kwargs, specifically for the
+ # case of the Toaster table JSON 'get' for the 'filter' AJAX
+ # call which does not include the request header parameters
+ tableParams = self.request.GET.get('tableParams','')
+ for param in tableParams.split(','):
+ pos = param.find('=')
+ if 0 < pos:
+ name = param[:param.find('=')]
+ value = param[param.find('=')+1:]
+ kwargs[name] = value
+
self.setup_queryset(*args, **kwargs)
# Put the project id into the context for the static_data_template
if 'pid' in kwargs:
@@ -228,25 +279,89 @@ class ToasterTable(TemplateView):
TableFilterAction* before its filter is applied and may modify the
queryset returned by the filter
"""
+
self.setup_filters(**kwargs)
try:
- filter_name, action_name = filters.split(':')
+# filter_name, action_name = filters.split(':')
+ if len(filters.split(",")) < 2 :
+ filter_name, action_name = filters.split(':')
action_params = unquote_plus(filter_value)
except ValueError:
return
- if "all" in action_name:
- return
-
- table_filter = self.filter_map.get_filter(filter_name)
- action = table_filter.get_action(action_name)
- action.set_filter_params(action_params)
- self.queryset = action.filter(self.queryset)
+# if "all" in action_name:
+# return
+
+ FilterString = ""
+ CriteriaString = ""
+ lstactionlist = []
+ if self.request.session.get('filterkey'):
+ if False:
+ if self.request.session['filterkey'].split("~")[0] != "":
+ CriteriaString = self.request.session['filterkey']
+ CriteriaString = CriteriaString.split("~")[0]
+ FilterString = str(filters) + str('|') + str(CriteriaString)
+ FilterList = FilterString.split('|')
+
+
+ # if self.request.session['filterkey'].split("~")[0] != "":
+ if len(filters.split(",")) > 1:
+ # CriteriaString = self.request.session['filterkey']
+ # CriteriaString = CriteriaString.split("~")[0]
+ # for CriteriaItem in filters.split(","):
+ # if not CriteriaItem in FilterString:
+ # FilterString = str(CriteriaItem) + str('|') + str(CriteriaString)
+ FilterList = filters.split(",")
+
+ q1 = None
+ q2 = self.queryset
+ for filterItem in FilterList:
+ #if counter == 0:
+ table_filter1 = self.filter_map.get_filter(filterItem.split(":")[0])
+ action1 = table_filter1.get_action(filterItem.split(":")[1])
+ action1.set_filter_params(action_params)
+ q1 = action1.filter(q2)
+ q2 = q1
+ lstactionlist.append(filterItem.split(":")[1])
+ self.queryset = q1
+ else:
+ table_filter = self.filter_map.get_filter(filter_name)
+ action = table_filter.get_action(action_name)
+ action.set_filter_params(action_params)
+ self.queryset = action.filter(self.queryset)
+ FilterString = str(filters)
+ lstactionlist.append(action_name)
+ else:
+ table_filter = self.filter_map.get_filter(filter_name)
+ action = table_filter.get_action(action_name)
+ action.set_filter_params(action_params)
+ self.queryset = action.filter(self.queryset)
+ FilterString = str(filters)
+ lstactionlist.append(action_name)
+
+ _log("FOO:APPLY_FILTER:FILTER:%s" % action_params)
+
+ strquerystring = self.queryset.query.__str__()
+ qstring1 = strquerystring.replace ('AND', 'AND\n')
+ qstring2 = qstring1.replace ('OR', 'OR\n')
+ tar = re.findall(r"(?<==).+(?= AND)|(?<==)(?<==).+(?= OR )|(?<==).+(?=[)])|(?<==).+(?= OR)", qstring2)
+ for item in tar:
+ if len(re.findall(r"[A-Za-z]", item.strip())) != 0 :
+ item = isbalanced(item)
+ strquerystring = strquerystring.replace(item, '"'+item.strip()+'"' )
+ self.request.session['filterkey'] = str(FilterString) + str('~') + strquerystring
def apply_orderby(self, orderby):
# Note that django will execute this when we try to retrieve the data
- self.queryset = self.queryset.order_by(orderby)
+ if False:
+ # Use parent order field if present (for column computed from existing column)
+ order_by = re.sub(r'.*__parent_', '', orderby)
+# order_by = orderby
+ self.queryset = self.queryset.order_by(order_by)
+ else:
+ self.queryset = self.queryset.order_by(orderby)
+ # self.request.session['filterkey'] = str('~') + str(self.queryset.query)
def apply_search(self, search_term):
"""Creates a query based on the model's search_allowed_fields"""
@@ -285,9 +400,19 @@ class ToasterTable(TemplateView):
else:
search_queries = queries
+ _log("FOO:APPLY_SEARCH:FILTER:%s" % search_queries)
self.queryset = self.queryset.filter(search_queries)
-
- def apply_row_customization(self, row):
+ strquerystring = self.queryset.query.__str__()
+ qstring1 = strquerystring.replace ('AND', 'AND\n')
+ qstring2 = qstring1.replace ('OR', 'OR\n')
+ tar = re.findall(r"(?<==).+(?= AND)|(?<==)(?<==).+(?= OR )|(?<==).+(?=[)])|(?<==).+(?= OR)", qstring2)
+ for item in tar:
+ if len(re.findall(r"[A-Za-z]", item.strip())) != 0 :
+ item = isbalanced(item)
+ strquerystring = strquerystring.replace(item, '"'+item.strip()+'"' )
+ self.request.session['filterkey'] = str('~') + str(strquerystring)
+
+ def apply_row_customization(self, row, **kwargs):
""" function to implement in the subclass which supports
row data customization in the respective table handler """
return row
@@ -311,6 +436,11 @@ class ToasterTable(TemplateView):
orderby = request.GET.get("orderby", None)
nocache = request.GET.get("nocache", None)
+ # Test if clear filters from session
+ if filters == "":
+ if request.session.get('filterkey'):
+ del request.session['filterkey']
+
# Make a unique cache name
cache_name = self.__class__.__name__
@@ -339,6 +469,8 @@ class ToasterTable(TemplateView):
self.setup_columns(**kwargs)
+ self.request.session['nofilterkey'] = str('~') + str(self.queryset.query)
+
if search:
self.apply_search(search)
if filters:
@@ -428,7 +560,7 @@ class ToasterTable(TemplateView):
data['rows'].append(required_data)
# apply any row data customization override before converted to JSON
- data = self.apply_row_customization(data)
+ data = self.apply_row_customization(data, **kwargs)
data = json.dumps(data, indent=2, cls=DjangoJSONEncoder)
cache.set(cache_name, data, 60*30)
@@ -491,3 +623,136 @@ class ToasterTypeAhead(View):
pass
+class MostRecentJobsView(View):
+ def _was_yesterday_or_earlier(self, completed_on):
+ now = timezone.now()
+ delta = now - completed_on
+
+ if delta.days >= 1:
+ return True
+
+ return False
+
+ def get(self, request, *args, **kwargs):
+ """
+ Returns a list of jobs in JSON format.
+ """
+
+ recent_job_objs = Job.get_recent()
+ recent_jobs = []
+
+ for job_obj in recent_job_objs:
+## cancel_url = \
+## reverse('xhr_jobrequest', args=(job_obj.sprint.pk,))
+# cancel_url = \
+# reverse('xhr_jobrequest', )
+ cancel_url = \
+ ''
+
+ job = {}
+ job['id'] = job_obj.pk
+
+ tasks_complete_percentage = 0
+ if job_obj.status in (Job.SUCCESS, Job.ERRORS):
+ tasks_complete_percentage = 100
+ elif job_obj.status == Job.INPROGRESS:
+ tasks_complete_percentage = job_obj.completeper()
+
+ job['tasks_complete_percentage'] = tasks_complete_percentage
+
+ job['state'] = job_obj.get_status_text
+
+ job['errors'] = job_obj.errors
+
+ job['warnings'] = job_obj.warnings
+
+ if job_obj.completed_on and job_obj.started_on:
+ timespent = job_obj.completed_on - job_obj.started_on
+ job['jobtime'] = sectohms(timespent.total_seconds())
+ else:
+ job['jobtime'] = 0
+
+ job['cancel_url'] = cancel_url
+
+ job['job_targets_json'] = \
+ template_json(job_obj.name)
+
+ # convert completed_on time to user's timezone
+ if job_obj.completed_on:
+ completed_on = job_obj.completed_on
+
+ completed_on_template = '%H:%M'
+ if self._was_yesterday_or_earlier(completed_on):
+ completed_on_template = '%d/%m/%Y ' + completed_on_template
+ else:
+ completed_on_template = 'Today ' + completed_on_template
+ job['completed_on'] = completed_on.strftime(
+ completed_on_template)
+ else:
+ job['completed_on'] = 'In progress...'
+
+ job['targets'] = job_obj.message #current remote command
+
+ if job_obj.refresh:
+ # Right now a binary flag, later maybe a timeout counter
+ job['refresh'] = '1' #remote page refresh request
+ job_obj.refresh = 0
+ job_obj.save()
+ else:
+ job['refresh'] = '0'
+
+ recent_jobs.append(job)
+
+ return JsonResponse(recent_jobs, safe=False)
+
+class XhrJobRequest(View):
+
+    def error_response(self, error):
+        return JsonResponse({"error": error})
+
+ def get(self, request, *args, **kwargs):
+ return HttpResponse()
+
+ def post(self, request, *args, **kwargs):
+ """
+ Job control
+
+ Entry point: /xhr_jobrequest/<project_id>
+ Method: POST
+
+ Args:
+ id: id of job to change
+ jobCancel = job_request_id ...
+ jobDelete = id ...
+
+ Returns:
+ {"error": "ok"}
+ or
+ {"error": <error message>}
+ """
+
+ if 'jobCancel' in request.POST:
+ for i in request.POST['jobCancel'].strip().split(" "):
+ try:
+ job = Job.objects.get(pk=i)
+ job.cancel()
+ except Job.DoesNotExist:
+                return self.error_response('No such job request id %s' % i)
+
+ return JsonResponse({"error": 'ok'})
+
+ if 'jobDelete' in request.POST:
+ for i in request.POST['jobDelete'].strip().split(" "):
+ try:
+ Job.objects.select_for_update().get(
+ pk=i,
+ state__lte=Job.INPROGRESS).delete()
+
+ except Job.DoesNotExist:
+ pass
+            return self.error_response("ok")
+
+ response = HttpResponse()
+ response.status_code = 500
+ return response
+
diff --git a/lib/srtmain/management/commands/checksocket.py b/lib/srtmain/management/commands/checksocket.py
index 19e75cb5..803009bc 100644
--- a/lib/srtmain/management/commands/checksocket.py
+++ b/lib/srtmain/management/commands/checksocket.py
@@ -25,7 +25,7 @@ import errno
import socket
from django.core.management.base import BaseCommand, CommandError
-from django.utils.encoding import force_text
+from django.utils.encoding import force_str
DEFAULT_ADDRPORT = "0.0.0.0:8000"
@@ -63,7 +63,7 @@ class Command(BaseCommand):
if hasattr(err, 'errno') and err.errno in errors:
errtext = errors[err.errno]
else:
- errtext = force_text(err)
+ errtext = force_str(err)
raise CommandError(errtext)
self.stdout.write("OK")
diff --git a/lib/srtmain/settings.py b/lib/srtmain/settings.py
index 0607fe9a..abd115cb 100644
--- a/lib/srtmain/settings.py
+++ b/lib/srtmain/settings.py
@@ -22,8 +22,8 @@
# Django settings for SRT
import os
-
from django import VERSION as DJANGO_VERSION
+import yaml
DEBUG = True
@@ -40,27 +40,47 @@ ADMINS = (
MANAGERS = ADMINS
-SRT_SQLITE_DEFAULT_DIR = os.environ.get('SRT_BASE_DIR')
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', ".")
+with open(f"{SRT_BASE_DIR}/srt_dbconfig.yml", "r") as ymlfile:
+ SRT_DBCONFIG = yaml.safe_load(ymlfile)
+ SRT_DBSELECT = SRT_DBCONFIG['dbselect']
+ srt_dbconfig = SRT_DBCONFIG[SRT_DBSELECT]
+ srt_dbtype = srt_dbconfig['dbtype']
DATABASES = {
'default': {
- # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'ENGINE': 'django.db.backends.sqlite3',
# DB name or full path to database file if using sqlite3.
- 'NAME': "%s/srt.sqlite" % SRT_SQLITE_DEFAULT_DIR,
+ 'NAME': f"{SRT_BASE_DIR}/{srt_dbconfig['path']}",
'USER': '',
'PASSWORD': '',
- #'HOST': '127.0.0.1', # e.g. mysql server
- #'PORT': '3306', # e.g. mysql port
},
# Sqlite database lock problem
'OPTIONS': {
'timeout': 20,
}
+} if srt_dbtype == "sqlite" else {
+ 'default': {
+ 'ENGINE': 'django.db.backends.mysql',
+ 'NAME': srt_dbconfig["name"],
+ 'USER': srt_dbconfig["user"],
+ 'PASSWORD': srt_dbconfig["passwd"],
+ 'HOST': srt_dbconfig["host"],
+ 'PORT': srt_dbconfig["port"],
+ },
+} if srt_dbtype == "mysql" else {
+ 'default': {
+ 'ENGINE': 'django.db.backends.postgresql_psycopg2',
+ 'NAME': srt_dbconfig["name"],
+ 'USER': srt_dbconfig["user"],
+ 'PASSWORD': srt_dbconfig["passwd"],
+ 'HOST': srt_dbconfig["host"],
+ 'PORT': srt_dbconfig["port"],
+ },
}
# Needed when Using sqlite especially to add a longer timeout for waiting
-# for the database lock to be released
+# for the database lock to be released
# https://docs.djangoproject.com/en/1.6/ref/databases/#database-is-locked-errors
if 'sqlite' in DATABASES['default']['ENGINE']:
DATABASES['default']['OPTIONS'] = { 'timeout': 20 }
@@ -180,7 +200,7 @@ TEMPLATES = [
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
- os.path.join(SRT_SQLITE_DEFAULT_DIR, 'lib/srtmain/templates'),
+ os.path.join(SRT_BASE_DIR, 'lib/srtmain/templates'),
],
'OPTIONS': {
'context_processors': [
@@ -278,7 +298,7 @@ INSTALLED_APPS = (
'srtgui',
'users',
)
-#print("DEBUG:INSTALLED_APPS:%s,%s" % (SRT_MAIN_APP,INSTALLED_APPS))
+##print("DEBUG:INSTALLED_APPS:%s,%s" % (SRT_MAIN_APP,INSTALLED_APPS))
INTERNAL_IPS = ['127.0.0.1', '192.168.2.28']
@@ -319,7 +339,7 @@ if os.environ.get('SRT_DEVEL', None) is not None:
SOUTH_TESTS_MIGRATE = False
-# We automatically detect and install applications here if
+# We automatically detect and install other applications here if
# they have a 'models.py' or 'views.py' file
import os
currentdir = os.path.dirname(__file__)
@@ -331,6 +351,7 @@ for t in os.walk(os.path.dirname(currentdir)):
if ("views.py" in t[2] or "models.py" in t[2]) and not modulename in INSTALLED_APPS:
INSTALLED_APPS = INSTALLED_APPS + (modulename,)
+##print("INSTALLED_APPS:%s" % ','.join(INSTALLED_APPS))
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
@@ -401,3 +422,10 @@ EMAIL_FILE_PATH = os.path.join(SITE_ROOT, "sent_emails")
# Custom SRTool Users
AUTH_USER_MODEL = 'users.SrtUser'
+DEFAULT_AUTO_FIELD='django.db.models.AutoField'
+
+# SSL support (Django 4)
+SRT_CSRF_TRUSTED_ORIGINS = os.environ.get("SRT_CSRF_TRUSTED_ORIGINS", None)
+if SRT_CSRF_TRUSTED_ORIGINS is not None:
+ CSRF_TRUSTED_ORIGINS = SRT_CSRF_TRUSTED_ORIGINS.split(',')
+
diff --git a/lib/srtmain/urls.py b/lib/srtmain/urls.py
index 2f330154..6bdc1581 100644
--- a/lib/srtmain/urls.py
+++ b/lib/srtmain/urls.py
@@ -22,7 +22,7 @@
import os
from django import VERSION as DJANGO_VERSION
-from django.conf.urls import url
+from django.urls import re_path as url
from django.views.generic import RedirectView, TemplateView
from django.views.decorators.cache import never_cache
@@ -39,6 +39,9 @@ logger = logging.getLogger("srt")
from django.contrib import admin
admin.autodiscover()
+# Fetch the main app URL
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', 'srtgui')
+
urlpatterns = [
# Examples:
@@ -50,7 +53,7 @@ urlpatterns = [
url(r'^health$', TemplateView.as_view(template_name="health.html"), name='Toaster Health'),
# if no application is selected, we have the magic srtgui app here
- url(r'^$', never_cache(RedirectView.as_view(url='/srtgui/', permanent=True))),
+ url(r'^$', never_cache(RedirectView.as_view(url='/'+SRT_MAIN_APP+'/', permanent=True)), name='Default URL=/'+SRT_MAIN_APP+'/'),
]
import srtmain.settings
@@ -76,12 +79,12 @@ if DJANGO_VERSION >= (2,0):
# Uncomment the next lines to enable the admin:
path('admin/', admin.site.urls),
- # Main application
- path(SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
# Default applications
path('srtgui/', include('srtgui.urls')),
path('users/', include('users.urls')),
path('users/', include('django.contrib.auth.urls')),
+ # Main application
+ path(SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
] + urlpatterns
else:
urlpatterns = [
@@ -96,7 +99,26 @@ else:
url('^' + SRT_MAIN_APP + '/', include(SRT_MAIN_APP + '.urls')),
] + urlpatterns
-#print("DEBUG:INSTALLED_URL_PATTERNS:%s,%s" % (SRT_MAIN_APP,urlpatterns))
+
+# We automatically detect and install other applications here
+# (at a lower precedence) if they have a 'urls.py'
+currentdir = os.path.dirname(__file__)
+urlpatterns_str = str(urlpatterns)
+for t in os.walk(os.path.dirname(currentdir)):
+ modulename = os.path.basename(t[0])
+ if 'srtmain' == modulename:
+ # Avoid infinite recursion
+ continue
+ if "urls.py" in t[2]:
+ found = False
+    for url_entry in urlpatterns:
+        if modulename+"/urls.py" in str(url_entry):
+ found = True
+ if not found:
+# urlpatterns.append(path(modulename + '/', include(modulename + '.urls')))
+ urlpatterns.insert(0,path(modulename + '/', include(modulename + '.urls')))
+
+##print("DEBUG:INSTALLED_URL_PATTERNS:%s,%s" % (SRT_MAIN_APP,urlpatterns))
currentdir = os.path.dirname(__file__)
diff --git a/lib/srtmain/wsgi.py b/lib/srtmain/wsgi.py
index 6b468e4f..42259a79 100644
--- a/lib/srtmain/wsgi.py
+++ b/lib/srtmain/wsgi.py
@@ -17,12 +17,29 @@ framework.
"""
import os
+from dotenv import load_dotenv
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "Toaster.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "srtmain.settings")
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR','.')
+PIDFILE = os.environ.get('PIDFILE','.')
+
+# quick development/debugging support
+def _log(msg):
+ f1=open(f"{SRT_BASE_DIR}/gunicorn_env.txt", 'w')
+ f1.write("|" + msg + "|\n" )
+ f1.close()
+
+# Spawn the updater, if not already running
+_log(str(os.environ))
+is_update_pid = os.path.isfile(f"{SRT_BASE_DIR}/.srtupdate.pid")
+if False and SRT_BASE_DIR and PIDFILE and (not is_update_pid):
+ cmnd = [f"{SRT_BASE_DIR}/bin/srt","start_update",f"update_follow_pid={PIDFILE}"]
+ _log(f"COMMAND:{cmnd}")
+ os.spawnv(os.P_NOWAIT, cmnd[0], cmnd)
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
diff --git a/lib/users/migrations/0002_last_name.py b/lib/users/migrations/0002_last_name.py
new file mode 100644
index 00000000..11560e6a
--- /dev/null
+++ b/lib/users/migrations/0002_last_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2020-11-25 05:02
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0001_initial'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='last_name',
+ field=models.CharField(blank=True, max_length=150, verbose_name='last name'),
+ ),
+ ]
diff --git a/lib/users/migrations/0003_srtuser_timezone.py b/lib/users/migrations/0003_srtuser_timezone.py
new file mode 100644
index 00000000..d69f62d6
--- /dev/null
+++ b/lib/users/migrations/0003_srtuser_timezone.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.20 on 2021-05-07 18:18
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0002_last_name'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='srtuser',
+ name='timezone',
+ field=models.CharField(blank=True, max_length=32),
+ ),
+ ]
diff --git a/lib/users/migrations/0004_timezone_default.py b/lib/users/migrations/0004_timezone_default.py
new file mode 100755
index 00000000..1d53468c
--- /dev/null
+++ b/lib/users/migrations/0004_timezone_default.py
@@ -0,0 +1,18 @@
+# Generated by Django 2.2.11 on 2021-10-31 02:24
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0003_srtuser_timezone'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='timezone',
+ field=models.CharField(blank=True, default='US/Pacific', max_length=32, null=True),
+ ),
+ ]
diff --git a/lib/users/migrations/0005_alter_srtuser_first_name.py b/lib/users/migrations/0005_alter_srtuser_first_name.py
new file mode 100644
index 00000000..9aa275ba
--- /dev/null
+++ b/lib/users/migrations/0005_alter_srtuser_first_name.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.0 on 2023-01-30 18:58
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('users', '0004_timezone_default'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='srtuser',
+ name='first_name',
+ field=models.CharField(blank=True, max_length=150, verbose_name='first name'),
+ ),
+ ]
diff --git a/lib/users/models.py b/lib/users/models.py
index b59f9fee..60391303 100755
--- a/lib/users/models.py
+++ b/lib/users/models.py
@@ -8,6 +8,7 @@ from srtgui.api import _log
class SrtUser(AbstractUser):
# add additional fields in here
role = models.CharField(max_length=128, verbose_name='security role')
+ timezone = models.CharField(max_length=32, default='US/Pacific', null=True, blank=True)
def __str__(self):
return "%s,%s" % (self.email,self.role)
@@ -69,9 +70,84 @@ class SrtUser(AbstractUser):
group.user_set.add(self)
return group.name
return ",".join(groups)
+ @staticmethod
+ def get_people_users():
+ names_to_exclude = ['admin','Guest','SRTool','All']
+ return(SrtUser.objects.exclude(username__in=names_to_exclude))
+ @staticmethod
+ def get_group_users(group_name):
+ try:
+ group = Group.objects.get(name=group_name)
+ except:
+ return([])
+ return([user for user in group.user_set.all()])
@property
def get_group_perm(self):
return self.get_group_permissions()
+ DEFAULT_TIMEZONE_INDEX = 21
+ @staticmethod
+ def get_timezone_list():
+ #timezone_list.split(':') = [GMT offset][pytz entry(key:can use ezlookup against pytz list)][Example cities/regions]
+ timezone_list = [
+ '+14:Pacific/Kiritimati:',
+ '+13:Pacific/Apia:',
+ '+12:Pacific/Auckland:NZ/Standard',
+ '+11:Pacific/Guadalcanal:SB/Standard RU/Magadan',
+ '+10:Pacific/Port_Moresby:PG/Standard RU/Vladivostok',
+ '+09:Asia/Tokyo:JP/Standard RU/Yakutsk ID/Jakarta',
+ '+08:Asia/Singapore:SG/Standard HK/Standard MN/Ulanbaatar',
+ '+07:Asia/Bangkok:TH/Bangkok RU/Krasnoyarsk',
+ '+06:Asia/Dhaka:BD/Dhaka RU/Omsk',
+ '+05:Asia/Karachi:PK/Karachi RU/Yekaterinburg',
+ '+04:Asia/Dubai:AE/Dubai MU/Standard RU/Samara',
+ '+03:Europe/Moscow:RU/Moscow KE/Nairobi',
+ '+02:Europe/Berlin:RU/Kaliningrad EG/Cairo RW/Kigali',
+ '+01:Europe/Dublin:NG/Lagos IE/Dublin',
+ '+00:Atlantic/Reykjavik:LR/Monrovia PT/Azores MA/Casablancas',
+ '-01:Atlantic/Cape_Verde:CV/Praia',
+ '-02:Etc/GMT-2:',
+ '-03:America/Argentina/Buenos_Aires:AR/Buenos Aires SR/Commewijne',
+ '-04:US/Eastern:US/New York City PR/San Juan',
+ '-05:US/Central:US/Houston MX/Mexico City',
+ '-06:US/Mountain:Denver CA/Edmonton',
+ '-07:US/Pacific:Los Angeles CA/Vancouver MX/Tijuana',
+ '-08:Pacific/Pitcairn:PN/Adamstown',
+ '-09:America/Anchorage:US/Anchorage',
+ '-10:Pacific/Tahiti:PF/Tahiti',
+ '-11:Pacific/Pago_Pago:AS/Pago Pago US/Midway',
+ '-12:Pacific/Kwajalein:',
+ ]
+ return timezone_list
+
+ # Set user timezone string from long string
+ def map_usertz_str_to_usertz(self, long_str):
+ short_timezone = ""
+ for tz in SrtUser.get_timezone_list():
+ if tz == long_str:
+ short_timezone = tz.split(':')[1]
+ break
+ if not short_timezone:
+            short_timezone = SrtUser.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX].split(':')[1]
+ self.timezone = short_timezone
+ return self.timezone
+
+ # Return long string format
+ def map_usertz_to_usertz_str(self):
+ for tz in SrtUser.get_timezone_list():
+ if self.timezone == tz.split(':')[1]:
+ return tz
+ break
+ return self.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX]
+
+ # Return offset from UTC -> jobtags.py
+ @property
+ def get_timezone_offset(self):
+ for tz in SrtUser.get_timezone_list():
+ if self.timezone == tz.split(':')[1]:
+ return int(tz.split(':')[0])
+ break
+ return int(self.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX].split(':')[0])
+
# Minimal and safe User object to pass to web pages (no passwords)
class UserSafe():
diff --git a/lib/users/templates/user_edit.html b/lib/users/templates/user_edit.html
index 26b18ea8..4d57ac97 100755
--- a/lib/users/templates/user_edit.html
+++ b/lib/users/templates/user_edit.html
@@ -1,8 +1,11 @@
<!-- templates/signup.html -->
{% extends 'base.html' %}
-{% block title %}{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}{% endblock %}
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+{% block title %}{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}{% endblock %}
{% block pagecontent %}
<div>
<h2>{% if 'new_admin' == mode %}New User{% else %}Edit User Settings{% endif %}</h2>
@@ -32,6 +35,16 @@
<dt>Role:</dt>
<dd><input type="text" placeholder="Edit role" name="user_role" size="80" value="{{user_role}}"></dd>
+ <!--Insert tz dropdown here-->
+ <dt>Timezone:</dt>
+ <dd>
+ <select name="timezone" id="select-timezone">
+ {% for tz in timezone_list %}
+ <option value="{{tz}}" {% if user_timezone == tz %}selected{% endif %}>{{tz}}</option>
+ {% endfor %}
+ </select>
+ </dd>
+
<dt>Group:</dt>
<dd>
{% if 'edit_user' == mode %}
@@ -42,6 +55,7 @@
<option value="Contributor" {% if 'Contributor' == group_name %}selected{% endif %}>Contributor</option>
<option value="Creator" {% if 'Creator' == group_name %}selected{% endif %}>Creator</option>
<option value="Admin" {% if 'Admin' == group_name %}selected{% endif %}>Admin</option>
+ <option value="SuperUser" {% if user_super %}selected{% endif %}>SuperUser</option>
</select>
{% endif %}
</dd>
diff --git a/lib/users/urls.py b/lib/users/urls.py
index 4c33cb18..f936d1d0 100755
--- a/lib/users/urls.py
+++ b/lib/users/urls.py
@@ -1,4 +1,4 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url, include
from . import views
urlpatterns = [
@@ -9,6 +9,6 @@ urlpatterns = [
url(r'^edit_user/(?P<user_pk>\d+)$', views.edit_user, name="edit_user"),
url(r'^xhr_user_commit/$', views.xhr_user_commit, name='xhr_user_commit'),
-
+ url(r'^xhr_date_time_test/$', views.xhr_date_time_test, name='xhr_date_time_test'),
]
diff --git a/lib/users/views.py b/lib/users/views.py
index 62163822..c5cce77d 100755
--- a/lib/users/views.py
+++ b/lib/users/views.py
@@ -103,6 +103,9 @@ def edit_user(request,user_pk):
'user_last' : '' if not pk else srtuser.last_name,
'user_email' : '' if not pk else srtuser.email,
'user_role' : '' if not pk else srtuser.role,
+ 'user_super' : False if not pk else srtuser.is_superuser,
+ 'user_timezone' : SrtUser.get_timezone_list()[SrtUser.DEFAULT_TIMEZONE_INDEX] if not pk else srtuser.map_usertz_to_usertz_str(),
+ 'timezone_list': SrtUser.get_timezone_list(),
'group_name' : 'Reader' if not pk else srtuser.get_groups.split(',')[0],
'validation_errors' : '',
}
@@ -115,12 +118,14 @@ def edit_user(request,user_pk):
else:
return redirect('/')
+ # added user_tz to POST method
mode = request.POST.get('mode', '')
user_name = request.POST.get('user_name', '')
user_first = request.POST.get('user_first', '')
user_last = request.POST.get('user_last', '')
user_email = request.POST.get('user_email', '')
user_role = request.POST.get('user_role', '')
+ user_tz = request.POST.get('timezone', '')
user_group = request.POST.get('user_group', '')
user_pass1 = request.POST.get('user_pass1', '')
user_pass2 = request.POST.get('user_pass2', '')
@@ -151,12 +156,18 @@ def edit_user(request,user_pk):
'user_last' : user_last,
'user_email' : user_email,
'user_role' : user_role,
+ 'user_tz' : user_tz,
'group_name' : user_group,
'validation_errors' : validation_errors[2:],
}
return render(request, 'user_edit.html', context)
# Process the post
+ if 'SuperUser' == user_group:
+ user_group = 'Admin'
+ is_superuser = True
+ else:
+ is_superuser = False
if 'new_admin' == mode:
srtuser = SrtUser(username=user_name)
else:
@@ -168,6 +179,8 @@ def edit_user(request,user_pk):
srtuser.last_name = user_last
srtuser.email = user_email
srtuser.role = user_role
+ srtuser.is_superuser = is_superuser
+ srtuser.timezone = srtuser.map_usertz_str_to_usertz(user_tz)
srtuser.save()
# Update Group
if user_group and (user_group != srtuser.get_groups.split(',')[0]):
@@ -198,14 +211,70 @@ def xhr_user_commit(request):
action = request.POST['action']
history_comment = ''
try:
+ error_message = "ok";
if 'submit-trashuser' == action:
record_id = request.POST['record_id']
user = SrtUser.objects.get(pk=record_id).delete()
+
+ elif 'submit-trashgroup' == action:
+ record_id = request.POST['record_id']
+ group = Group.objects.get(pk=record_id).delete()
+
+ elif 'submit-trashusergroup' == action:
+ group_id = int(request.POST.get('group_id','0'))
+ record_id = request.POST['record_id']
+ group = Group.objects.get(pk=group_id)
+ srtuser = SrtUser.objects.get(pk=record_id)
+ group.user_set.remove(srtuser)
+ ret=group.save()
+
+ elif 'submit-group-users' == action:
+ group_id = int(request.POST.get('group_id','0'))
+ user_id_list = request.POST['user_id_list']
+ group = Group.objects.get(pk=group_id)
+ # Add new users
+ for user_id in user_id_list.split(','):
+ if user_id:
+ srtuser = SrtUser.objects.get(id=int(user_id))
+ group.user_set.add(srtuser)
+ group.save()
+ # Remove old users
+ for srtuser in group.user_set.all():
+ if not str(srtuser.id) in user_id_list:
+ group.user_set.remove(srtuser)
+ group.save()
+
+ elif 'submit-group-create' == action:
+ group_name = request.POST['group_name'].strip()
+ group,created = Group.objects.get_or_create(name=group_name)
+ group.save()
+
+ else:
+ error_message = "ERROR:unknown action '%s'" % request.POST["action"]
+
return_data = {
- "error": "ok",
+ "error": error_message,
}
return HttpResponse(json.dumps( return_data ), content_type = "application/json")
except Exception as e:
_log("xhr_user_commit:no(%s)" % e)
return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
+
+def xhr_date_time_test(request):
+ _log("xhr_date_time_test(%s)" % request.POST)
+ if not 'action' in request.POST:
+ return HttpResponse(json.dumps({"error":"missing action\n"}), content_type = "application/json")
+
+ action = request.POST['action']
+ history_comment = ''
+ try:
+ if 'submit-timezone' == action:
+ timezone = request.POST['timezone']
+ return_data = {
+ "error": "ok",
+ }
+ return HttpResponse(json.dumps( return_data ), content_type = "application/json")
+ except Exception as e:
+ _log("xhr_date_time_test:no(%s)" % e)
+ return HttpResponse(json.dumps({"error":str(e) + "\n" + traceback.format_exc()}), content_type = "application/json")
diff --git a/lib/yp/reports.py b/lib/yp/reports.py
index dca99d10..565c2dc0 100755
--- a/lib/yp/reports.py
+++ b/lib/yp/reports.py
@@ -35,11 +35,11 @@ from srtgui.reports import Report, ReportManager, ProductsReport, ManagementRepo
from django.db.models import Q, F
from django.db import Error
-from srtgui.templatetags.projecttags import filtered_filesizeformat
+from srtgui.templatetags.jobtags import filtered_filesizeformat
logger = logging.getLogger("srt")
-SRT_BASE_DIR = os.environ['SRT_BASE_DIR']
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
SRT_REPORT_DIR = '%s/reports' % SRT_BASE_DIR
# quick development/debugging support
diff --git a/lib/yp/templates/landing.html b/lib/yp/templates/landing.html
new file mode 100755
index 00000000..0a488398
--- /dev/null
+++ b/lib/yp/templates/landing.html
@@ -0,0 +1,93 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Welcome to SRTool{% endblock %}
+{% block pagecontent %}
+ <div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Security Response Tool (SRTool)</h1>
+ <p>A web interface to SRTool CVE investigations ({{this_landing}})</p>
+ </div>
+ </div>
+ <div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Table</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_audits' %}">CVE Check Audits</a></td>
+ <td>CVE Check Audits</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cves' %}">CVE's</a></td>
+ <td>Common Vulnerability Enumeration</td>
+ </tr>
+
+<!-- <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'investigations' %}">Investigations</a></td>
+ <td>SRTool Investigations (product level)</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'defects' %}">Defects</a></td>
+ <td>SRTool Defects</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cpes_srtool' %}">Package CPE's</a></td>
+ <td>Affected packages (Common Platform Enumeration)</td>
+ </tr>
+
+-->
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cwes' %}">CWE's</a></td>
+ <td>Common Weakness Enumeration</td>
+ </tr>
+
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url 'products' %}">Products</a></td>
+ <td>SRTool Products</td>
+ </tr>
+
+ {% for ext_url,ext_title,ext_description in landing_extensions_table %}
+ <tr>
+ <td> <a class="btn btn-info btn-lg" href="{% url ext_url %}">{{ext_title}}</a></td>
+ <td>{{ext_description}}</td>
+ </tr>
+ {% endfor %}
+
+ {% if request.user.is_creator %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage' %}">Management</a></td>
+ <td>Triage CVE's, Create Vulnerabilities, Manage Users</td>
+ </tr>
+ {% endif %}
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-6">
+ <div align="center"><a class="btn btn-primary btn-lg" href="{% url 'guided_tour' %}">Click here to take a Guided Tour!</a></div>
+ <p />
+ <p />
+ <img alt="CVE preview" class="img-thumbnail" src="{% static 'img/cve_splash.png' %}"/>
+ </div>
+
+ </div>
+ </div>
+
+{% endblock %}
diff --git a/lib/yp/templates/management.html b/lib/yp/templates/management.html
new file mode 100755
index 00000000..013a9d87
--- /dev/null
+++ b/lib/yp/templates/management.html
@@ -0,0 +1,199 @@
+{% extends "base.html" %}
+
+{% load static %}
+{% load jobtags %}
+{% load humanize %}
+
+{% block title %} Manage Resources {% endblock %}
+{% block pagecontent %}
+
+<div class="row">
+ <!-- Breadcrumbs -->
+ <div class="col-md-12">
+ <ul class="breadcrumb" id="breadcrumb">
+ <li><a href="{% url 'landing' %}">Home</a></li><span class="divider">&rarr;</span>
+ <li>Management</li>
+ </ul>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-7" style="padding-left: 50px;">
+ <h1>Management</h1>
+ </div>
+</div>
+
+<div class="row">
+ <div class="col-md-12">
+ {% with mru=mru %}
+ {% include 'mrj_section.html' %}
+ {% endwith %}
+ </div>
+</div>
+
+<div class="row">
+ <div class="jumbotron well-transparent">
+
+ <div class="col-md-6">
+ <div>
+ <table class="table table-striped table-condensed" data-testid="landing-hyperlinks-table">
+ <thead>
+ <tr>
+ <th>Action</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_import_manager' %}">Import Manager</a></td>
+ <td>Manage the CVE Check report import modes</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'triage_cves' %}">Triage CVE's</a></td>
+ <td>Triage the CVE's ({{cve_new}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_notifications' %}">Pending notifications</a></td>
+ <td>Triage the pending notifications ({{notification_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'manage_report' %}">Summary Report</a></td>
+ <td>Report on the over all response system status</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'publish' %}">Publish Reports</a></td>
+ <td>Process items to be published from the SRTool</td>
+ </tr>
+
+ {% if request.user.is_admin %}
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'users' %}">Manage Users</a></td>
+ <td>Add, edit, and remove users</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'sources' %}?nocache=1">Manage Sources</a></td>
+ <td>Manage source list, perform manual pulls</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'maintenance' %}?nocache=1">Maintenance</a></td>
+ <td>Maintenance utilities ({{errorlog_total}})</td>
+ </tr>
+
+ <tr>
+ <td><a class="btn btn-info btn-lg" href="{% url 'cvechecker_clear_jobs' %}">Clear Jobs</a></td>
+ <td>Clear the Jobs table of all entries</td>
+ </tr>
+
+ {% endif %}
+
+ </table>
+ </div>
+
+ </div>
+
+ <div class="col-md-5">
+ <b>Quick Info</b>
+ <div class="well">
+ <dl class="dl-horizontal">
+ <dt>CVE's: Total Count =</dt>
+ <dd>
+ <a href="{% url 'cves' %}"> {{cve_total}} </a>
+ </dd>
+ <dt>Pending triaged =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:new&default_orderby=name&filter_value=on&"> {{cve_new}} </a>
+ </dd>
+ <dt>Investigate =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:investigate&default_orderby=name&filter_value=on&"> {{cve_investigate}} </a>
+ </dd>
+ <dt>Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:vulnerable&default_orderby=name&filter_value=on&"> {{cve_vulnerable}} </a>
+ </dd>
+ <dt>Not Vulnerable =</dt>
+ <dd>
+ <a href="{% url 'cves' %}?limit=25&page=1&orderby=name&filter=is_status:not_vulnerable&default_orderby=name&filter_value=on&"> {{cve_not_vulnerable}} </a>
+ </dd>
+ <dt>Vulnerabilities: Total Count =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}"> {{vulnerability_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{vulnerability_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&"> {{vulnerability_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&"> {{vulnerability_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'vulnerabilities' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&"> {{vulnerability_medium}} </a>
+ </dd>
+
+ <dt>Investigations: Total Count =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}"> {{investigation_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_outcome:open&default_orderby=name&filter_value=on&"> {{investigation_open}} </a>
+ </dd>
+ <dt>Critical active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:critical&default_orderby=name&filter_value=on&"> {{investigation_critical}} </a>
+ </dd>
+ <dt>High active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:high&default_orderby=name&filter_value=on&"> {{investigation_high}} </a>
+ </dd>
+ <dt>Medium active =</dt>
+ <dd>
+ <a href="{% url 'investigations' %}?limit=25&page=1&orderby=name&filter=is_priority:medium&default_orderby=name&filter_value=on&"> {{investigation_medium}} </a>
+ </dd>
+
+ <dt>Defects: Total Count =</dt>
+ <dd>
+ <a href="{% url 'defects' %}"> {{defect_total}} </a>
+ </dd>
+ <dt>Open =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_srt_outcome:open&default_orderby=name&filter_value=on&"> {{defect_open}} </a>
+ </dd>
+ <dt>InProgress =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_status:in_progress&default_orderby=name&filter_value=on&"> {{defect_inprogress}} </a>
+ </dd>
+ <dt>P1 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:critical&default_orderby=name&filter_value=on&"> {{defect_p1}} </a>
+ </dd>
+ <dt>P2 active =</dt>
+ <dd>
+ <a href="{% url 'defects' %}?limit=25&page=1&orderby=-priority&filter=is_defect_priority:high&default_orderby=name&filter_value=on&"> {{defect_p2}} </a>
+ </dd>
+
+ <dt>Packages: Affected=</dt>
+ <dd>
+ <a href="{% url 'cpes_srtool' %}"> {{package_total}} </a>
+ </dd>
+
+ </dl>
+ </div>
+ </div>
+
+ </div>
+</div>
+
+{% endblock %}
diff --git a/lib/yp/templates/yp_hello.html b/lib/yp/templates/yp_hello.html
index 15ab9a7b..95ee8e43 100755
--- a/lib/yp/templates/yp_hello.html
+++ b/lib/yp/templates/yp_hello.html
@@ -1,7 +1,7 @@
{% extends "base.html" %}
{% load static %}
-{% load projecttags %}
+{% load jobtags %}
{% load humanize %}
{% block title %} Yocto Project {% endblock %}
diff --git a/lib/yp/urls.py b/lib/yp/urls.py
index 494de9ae..d60360b7 100755
--- a/lib/yp/urls.py
+++ b/lib/yp/urls.py
@@ -1,12 +1,14 @@
-from django.conf.urls import include, url
+from django.urls import re_path as url,include
+from django.views.generic import RedirectView
from . import views
urlpatterns = [
- url(r'^hello/$', views.yp_hello, name='yp_hello'),
-
- url(r'^$', views.yp_hello, name='yp_default'),
+ # landing page
+ url(r'^landing/$', views.landing, name='landing'),
url(r'^report/(?P<page_name>\D+)$', views.report, name='report'),
url(r'^manage_report/$', views.manage_report, name='manage_report'),
+ # default redirection
+ url(r'^$', RedirectView.as_view(url='landing', permanent=True)),
]
diff --git a/lib/yp/views.py b/lib/yp/views.py
index 3310e7e6..6f722479 100755
--- a/lib/yp/views.py
+++ b/lib/yp/views.py
@@ -19,12 +19,15 @@
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+import os
+
#from django.urls import reverse_lazy
#from django.views import generic
from django.http import HttpResponse, HttpResponseNotFound, JsonResponse, HttpResponseRedirect
from django.shortcuts import render, redirect
from users.models import SrtUser, UserSafe
+from orm.models import SrtSetting
from srtgui.views import MimeTypeFinder
from yp.reports import YPReportManager
@@ -33,10 +36,29 @@ from yp.reports import YPReportManager
# quick development/debugging support
from srtgui.api import _log
-def yp_hello(request):
- context = {}
- _log("Note:yp_hello")
- return render(request, 'yp_hello.html', context)
+SRT_BASE_DIR = os.environ.get('SRT_BASE_DIR', '.')
+SRT_MAIN_APP = os.environ.get('SRT_MAIN_APP', '.')
+
+# determine in which mode we are running in, and redirect appropriately
+def landing(request):
+
+ # Django sometimes has a race condition with this view executing
+ # for the master app's landing page HTML which can lead to context
+ # errors, so hard enforce the default re-direction
+ if SRT_MAIN_APP and (SRT_MAIN_APP != "yp"):
+ return redirect(f"/{SRT_MAIN_APP}/landing/")
+
+ # Append the list of landing page extensions
+ landing_extensions_table = []
+ for landing_extension in SrtSetting.objects.filter(name__startswith='LANDING_LINK').order_by('name'):
+ landing_extensions_table.append(landing_extension.value.split('|'))
+
+ context = {
+ 'landing_extensions_table' : landing_extensions_table,
+ 'this_landing' : 'yp',
+ }
+
+ return render(request, 'landing.html', context)
def report(request,page_name):
if request.method == "GET":