Commit
Merge pull request #9655 from DefectDojo/release/2.31.5
Release: Merge release into master from: release/2.31.5
Maffooch authored Mar 1, 2024
2 parents c5d896b + 95a774f commit 2181c61
Showing 9 changed files with 91 additions and 120 deletions.
2 changes: 1 addition & 1 deletion components/package.json
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
"version": "2.31.4",
"version": "2.31.5",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
17 changes: 14 additions & 3 deletions components/yarn.lock
@@ -395,12 +395,13 @@ easymde@^2.18.0:
marked "^4.1.0"

es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@^0.10.62, es5-ext@~0.10.14:
version "0.10.62"
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5"
integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA==
version "0.10.64"
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.64.tgz#12e4ffb48f1ba2ea777f1fcdd1918ef73ea21714"
integrity sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==
dependencies:
es6-iterator "^2.0.3"
es6-symbol "^3.1.3"
esniff "^2.0.1"
next-tick "^1.1.0"

es6-iterator@^2.0.3, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
@@ -467,6 +468,16 @@ escodegen@~1.2.0:
optionalDependencies:
source-map "~0.1.30"

esniff@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308"
integrity sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==
dependencies:
d "^1.0.1"
es5-ext "^0.10.62"
event-emitter "^0.3.5"
type "^2.7.2"

esprima@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
2 changes: 1 addition & 1 deletion dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa

__version__ = '2.31.4'
__version__ = '2.31.5'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'https://documentation.defectdojo.com'
57 changes: 22 additions & 35 deletions dojo/metrics/views.py
@@ -32,6 +32,7 @@
from dojo.product.queries import get_authorized_products
from dojo.product_type.queries import get_authorized_product_types
from dojo.finding.queries import get_authorized_findings
from dojo.finding.helper import ACCEPTED_FINDINGS_QUERY, CLOSED_FINDINGS_QUERY
from dojo.endpoint.queries import get_authorized_endpoint_status
from dojo.authorization.authorization import user_has_permission_or_403
from django.utils.translation import gettext as _
@@ -126,9 +127,10 @@ def identify_view(request):


def finding_querys(prod_type, request):
findings_query = Finding.objects.filter(
verified=True,
severity__in=('Critical', 'High', 'Medium', 'Low', 'Info')
# Get the initial list of findings the user is authorized to see
findings_query = get_authorized_findings(
Permissions.Finding_View,
user=request.user,
).select_related(
'reporter',
'test',
@@ -139,49 +141,34 @@ def finding_querys(prod_type, request):
'test__engagement__risk_acceptance',
'test__test_type',
)

findings_query = get_authorized_findings(Permissions.Finding_View, findings_query, request.user)

findings = MetricsFindingFilter(request.GET, queryset=findings_query)
findings_qs = queryset_check(findings)

# Quick check to determine if the filters were too tight and filtered everything away
if not findings_qs and not findings_query:
findings = findings_query
findings_qs = findings if isinstance(findings, QuerySet) else findings.qs
messages.add_message(request,
messages.ERROR,
_('All objects have been filtered away. Displaying all objects'),
extra_tags='alert-danger')

messages.add_message(
request,
messages.ERROR,
_('All objects have been filtered away. Displaying all objects'),
extra_tags='alert-danger')
# Attempt to parse the date ranges
try:
start_date, end_date = get_date_range(findings_qs)
except:
start_date = timezone.now()
end_date = timezone.now()

# Filter by the date ranges supplied
findings_query = findings_query.filter(date__range=[start_date, end_date])
# Get the list of closed and risk accepted findings
findings_closed = findings_query.filter(CLOSED_FINDINGS_QUERY)
accepted_findings = findings_query.filter(ACCEPTED_FINDINGS_QUERY)
# filter by product type if applicable
if len(prod_type) > 0:
findings_closed = Finding.objects.filter(mitigated__date__range=[start_date, end_date],
test__engagement__product__prod_type__in=prod_type).prefetch_related(
'test__engagement__product')
# capture the accepted findings in period
accepted_findings = Finding.objects.filter(risk_accepted=True, date__range=[start_date, end_date],
test__engagement__product__prod_type__in=prod_type). \
prefetch_related('test__engagement__product')
accepted_findings_counts = Finding.objects.filter(risk_accepted=True, date__range=[start_date, end_date],
test__engagement__product__prod_type__in=prod_type). \
prefetch_related('test__engagement__product')
else:
findings_closed = Finding.objects.filter(mitigated__date__range=[start_date, end_date]).prefetch_related(
'test__engagement__product')
accepted_findings = Finding.objects.filter(risk_accepted=True, date__range=[start_date, end_date]). \
prefetch_related('test__engagement__product')
accepted_findings_counts = Finding.objects.filter(risk_accepted=True, date__range=[start_date, end_date]). \
prefetch_related('test__engagement__product')

findings_closed = get_authorized_findings(Permissions.Finding_View, findings_closed, request.user)
accepted_findings = get_authorized_findings(Permissions.Finding_View, accepted_findings, request.user)
accepted_findings_counts = get_authorized_findings(Permissions.Finding_View, accepted_findings_counts, request.user)
accepted_findings_counts = severity_count(accepted_findings_counts, 'aggregate', 'severity')
findings_closed = findings_closed.filter(test__engagement__product__prod_type__in=prod_type)
accepted_findings = accepted_findings.filter(test__engagement__product__prod_type__in=prod_type)
# Get the severity counts of risk accepted findings
accepted_findings_counts = severity_count(accepted_findings, 'aggregate', 'severity')

r = relativedelta(end_date, start_date)
months_between = (r.years * 12) + r.months
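The net effect in finding_querys: the permission scope is applied first via get_authorized_findings, and the closed and risk-accepted subsets are then derived from that single base queryset with the shared CLOSED_FINDINGS_QUERY and ACCEPTED_FINDINGS_QUERY constants, instead of three separate Finding.objects queries. A minimal sketch of the pattern, assuming the constants are Django Q objects (the field names below are illustrative guesses, not the exact definitions in dojo/finding/helper.py):

from django.db.models import Q

# Assumed shapes of the shared helper constants; the real definitions live in
# dojo/finding/helper.py and may differ.
CLOSED_FINDINGS_QUERY = Q(is_mitigated=True)
ACCEPTED_FINDINGS_QUERY = Q(risk_accepted=True)

def closed_and_accepted(findings_query, start_date, end_date, prod_type):
    # Bound the (already permission-scoped) base queryset by date, then derive
    # the closed and risk-accepted subsets from it with the shared Q constants.
    findings_query = findings_query.filter(date__range=[start_date, end_date])
    findings_closed = findings_query.filter(CLOSED_FINDINGS_QUERY)
    accepted_findings = findings_query.filter(ACCEPTED_FINDINGS_QUERY)
    if prod_type:
        findings_closed = findings_closed.filter(
            test__engagement__product__prod_type__in=prod_type)
        accepted_findings = accepted_findings.filter(
            test__engagement__product__prod_type__in=prod_type)
    return findings_closed, accepted_findings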
90 changes: 20 additions & 70 deletions dojo/product/views.py
Expand Up @@ -290,10 +290,7 @@ def identify_view(request):

def finding_querys(request, prod):
filters = dict()

findings_query = Finding.objects.filter(test__engagement__product=prod,
severity__in=('Critical', 'High', 'Medium', 'Low', 'Info'))

findings_query = Finding.objects.filter(test__engagement__product=prod)
# prefetch only what's needed to avoid lots of repeated queries
findings_query = findings_query.prefetch_related(
# 'test__engagement',
@@ -307,81 +304,34 @@ def finding_querys(request, prod):
findings_qs = queryset_check(findings)
filters['form'] = findings.form

# dead code:
# if not findings_qs and not findings_query:
# # logger.debug('all filtered')
# findings = findings_query
# findings_qs = queryset_check(findings)
# messages.add_message(request,
# messages.ERROR,
# 'All objects have been filtered away. Displaying all objects',
# extra_tags='alert-danger')

try:
# logger.debug(findings_qs.query)
start_date = findings_qs.earliest('date').date
start_date = datetime(start_date.year,
start_date.month, start_date.day,
tzinfo=timezone.get_current_timezone())
start_date = datetime(
start_date.year,
start_date.month, start_date.day,
tzinfo=timezone.get_current_timezone())
end_date = findings_qs.latest('date').date
end_date = datetime(end_date.year,
end_date.month, end_date.day,
tzinfo=timezone.get_current_timezone())
end_date = datetime(
end_date.year,
end_date.month, end_date.day,
tzinfo=timezone.get_current_timezone())
except Exception as e:
logger.debug(e)
start_date = timezone.now()
end_date = timezone.now()
week = end_date - timedelta(days=7) # seven days and /newer are considered "new"

# risk_acceptances = Risk_Acceptance.objects.filter(engagement__in=Engagement.objects.filter(product=prod)).prefetch_related('accepted_findings')
# filters['accepted'] = [finding for ra in risk_acceptances for finding in ra.accepted_findings.all()]

from dojo.finding.helper import ACCEPTED_FINDINGS_QUERY
filters['accepted'] = findings_qs.filter(ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['verified'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
active=True,
verified=True,
duplicate=False,
out_of_scope=False).order_by("date")
filters['new_verified'] = findings_qs.filter(date__range=[week, end_date],
false_p=False,
verified=True,
active=True,
duplicate=False,
out_of_scope=False).order_by("date")
filters['open'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=False,
active=True,
is_mitigated=False)
filters['inactive'] = findings_qs.filter(date__range=[start_date, end_date],
duplicate=False,
out_of_scope=False,
active=False,
is_mitigated=False)
filters['closed'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=False,
active=False,
is_mitigated=True)
filters['false_positive'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=True,
duplicate=False,
out_of_scope=False)
filters['out_of_scope'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=True)
week = end_date - timedelta(days=7) # seven days and /newer are considered "new"

filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date")
filters['new_verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date")
filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date])
filters['all'] = findings_qs
filters['open_vulns'] = findings_qs.filter(
false_p=False,
duplicate=False,
out_of_scope=False,
active=True,
mitigated__isnull=True,
filters['open_vulns'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(
cwe__isnull=False,
).order_by('cwe').values(
'cwe'
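Both the removed and the retained code bound the per-product filters by the earliest and latest finding dates, converted to timezone-aware datetimes before they are used in date__range lookups. A minimal sketch of that date-bounding step, assuming a queryset whose model has a date field (the helper name is hypothetical):

from datetime import datetime, timedelta
from django.utils import timezone

def get_date_bounds(findings_qs):
    # Derive timezone-aware start/end datetimes from the oldest and newest findings,
    # falling back to "now" when the queryset is empty or has no usable dates.
    try:
        first = findings_qs.earliest('date').date
        last = findings_qs.latest('date').date
        tz = timezone.get_current_timezone()
        start_date = datetime(first.year, first.month, first.day, tzinfo=tz)
        end_date = datetime(last.year, last.month, last.day, tzinfo=tz)
    except Exception:
        start_date = end_date = timezone.now()
    week = end_date - timedelta(days=7)  # seven days and newer are considered "new"
    return start_date, end_date, week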
2 changes: 1 addition & 1 deletion dojo/templates/dojo/product_metrics.html
@@ -313,7 +313,7 @@ <h3 class="pull-left">
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading">
Open, Closed, and Accepted Week to Week
Open, Closed, and Risk Accepted Week to Week
<i title="Weeks are only displayed if findings are available."
class="text-info fa-solid fa-circle-info"></i>
</div>
35 changes: 29 additions & 6 deletions dojo/tools/qualys/csv_parser.py
@@ -93,6 +93,22 @@ def _extract_cvss_vectors(cvss_base, cvss_temporal):
return cvss_vector


def _clean_cve_data(cve_string: str) -> list:
# Determine if a CVE was even provided
if len(cve_string) == 0:
return []
# Determine if there is more than one CVE
cve_list = []
if "," in cve_string:
# Split everything up
cve_list = [single_cve.strip() for single_cve in cve_string.split(",")]
else:
# There is just one CVE here, but we must return a list
cve_list = [cve_string.strip()]

return cve_list


def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
"""
Takes a list of Dictionaries built from CSV and creates a Finding object
@@ -110,13 +126,19 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
}
dojo_findings = []
for report_finding in report_findings:
# Get endpoint meta
if report_finding.get("FQDN"):
endpoint = Endpoint.from_uri(report_finding.get("FQDN"))
elif report_finding.get("DNS"):
endpoint = Endpoint(host=report_finding.get("DNS"))
else:
endpoint = Endpoint(host=report_finding["IP"])

# Get CVE meta
cve_data = report_finding.get("CVE ID", report_finding.get("CVEID", ""))
# Clean up the CVE data appropriately
cve_list = _clean_cve_data(cve_data)

if "CVSS3 Base" in report_finding:
cvssv3 = _extract_cvss_vectors(
report_finding["CVSS3 Base"], report_finding["CVSS3 Temporal"]
@@ -151,7 +173,6 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
vuln_id_from_tool=report_finding["QID"],
cvssv3=cvssv3
)
cve_data = report_finding.get("CVE ID")
# Qualys reports regression findings as active, but with a Date Last
# Fixed.
if report_finding["Date Last Fixed"]:
@@ -192,11 +213,13 @@ def build_findings_from_dict(report_findings: [dict]) -> [Finding]:
date=date,
vuln_id_from_tool=report_finding["QID"]
)
cve_data = report_finding.get("CVEID")

finding.unsaved_vulnerability_ids = (
cve_data.split(",") if "," in cve_data else [cve_data]
)
# Make sure we have something to append to
if isinstance(finding.unsaved_vulnerability_ids, list):
# Append CVEs if there is a chance for duplicates
finding.unsaved_vulnerability_ids += cve_list
else:
# Set the initial cve list for new findings
finding.unsaved_vulnerability_ids = cve_list
finding.verified = True
finding.unsaved_endpoints.append(endpoint)
if not finding_with_id:
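The new _clean_cve_data helper normalizes the raw "CVE ID" / "CVEID" column into a list of trimmed CVE identifiers before it is merged into finding.unsaved_vulnerability_ids. A short usage sketch with illustrative inputs (the import path matches the file changed above):

from dojo.tools.qualys.csv_parser import _clean_cve_data

# Illustrative inputs; output follows the helper as added in this commit.
print(_clean_cve_data(""))                              # []
print(_clean_cve_data("CVE-2023-1234"))                 # ["CVE-2023-1234"]
print(_clean_cve_data("CVE-2023-1234, CVE-2023-5678"))  # ["CVE-2023-1234", "CVE-2023-5678"]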
4 changes: 2 additions & 2 deletions helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
appVersion: "2.31.4"
appVersion: "2.31.5"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
version: 1.6.112
version: 1.6.113
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
2 changes: 1 addition & 1 deletion unittests/test_metrics_queries.py
@@ -49,7 +49,7 @@ def test_finding_queries(self, mock_timezone):
mock_timezone.return_value = mock_datetime

# Queries over Finding and Risk_Acceptance
with self.assertNumQueries(27):
with self.assertNumQueries(24):
product_types = []
finding_queries = views.finding_querys(
product_types,
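The expected count drops from 27 to 24, consistent with the refactored finding_querys deriving its closed and risk-accepted subsets from one authorized base queryset instead of issuing separate Finding.objects queries. Django's assertNumQueries fails unless exactly the stated number of SQL queries runs inside the with block, so this constant has to track the view's query pattern. A minimal, hypothetical example of the assertion pattern (not part of this PR):

from django.contrib.auth.models import User
from django.test import TestCase

class QueryCountExample(TestCase):
    def test_user_list_is_one_query(self):
        # The block below must execute exactly one SQL query, or the test fails
        # and prints the captured queries.
        with self.assertNumQueries(1):
            list(User.objects.all())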
