Merge pull request #11246 from DefectDojo/release/2.40.1
Release: Merge release into master from: release/2.40.1
rossops authored Nov 12, 2024
2 parents fafe5c3 + 28569c0 commit 090b2c7
Showing 40 changed files with 7,836 additions and 274 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build-docker-images-for-testing.yml
@@ -49,8 +49,8 @@ jobs:
# export docker images to be used in next jobs below
- name: Upload image ${{ matrix.docker-image }} as artifact
timeout-minutes: 10
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
- name: ${{ matrix.docker-image }}
+ name: built-docker-image-${{ matrix.docker-image }}-${{ matrix.os }}
path: ${{ matrix.docker-image }}-${{ matrix.os }}_img
retention-days: 1
2 changes: 1 addition & 1 deletion .github/workflows/fetch-oas.yml
@@ -51,7 +51,7 @@ jobs:
run: docker compose down

- name: Upload oas.${{ matrix.file-type }} as artifact
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: oas-${{ matrix.file-type }}
path: oas.${{ matrix.file-type }}
12 changes: 8 additions & 4 deletions .github/workflows/integration-tests.yml
@@ -45,14 +45,18 @@ jobs:

# load docker images from build jobs
- name: Load images from artifacts
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
+ with:
+   path: built-docker-image
+   pattern: built-docker-image-*
+   merge-multiple: true

- name: Load docker images
timeout-minutes: 10
run: |-
- docker load -i nginx/nginx-${{ matrix.os }}_img
- docker load -i django/django-${{ matrix.os }}_img
- docker load -i integration-tests/integration-tests-debian_img
+ docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+ docker load -i built-docker-image/django-${{ matrix.os }}_img
+ docker load -i built-docker-image/integration-tests-debian_img
docker images
- name: Set integration-test mode
10 changes: 7 additions & 3 deletions .github/workflows/k8s-tests.yml
@@ -48,14 +48,18 @@ jobs:
minikube status
- name: Load images from artifacts
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
+ with:
+   path: built-docker-image
+   pattern: built-docker-image-*
+   merge-multiple: true

- name: Load docker images
timeout-minutes: 10
run: |-
eval $(minikube docker-env)
- docker load -i nginx/nginx-${{ matrix.os }}_img
- docker load -i django/django-${{ matrix.os }}_img
+ docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+ docker load -i built-docker-image/django-${{ matrix.os }}_img
docker images
- name: Configure HELM repos
2 changes: 1 addition & 1 deletion .github/workflows/release-drafter.yml
@@ -47,7 +47,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Load OAS files from artifacts
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4

- name: Upload Release Asset - OpenAPI Specification - YAML
id: upload-release-asset-yaml
10 changes: 7 additions & 3 deletions .github/workflows/rest-framework-tests.yml
@@ -20,13 +20,17 @@ jobs:

# load docker images from build jobs
- name: Load images from artifacts
- uses: actions/download-artifact@v3
+ uses: actions/download-artifact@v4
+ with:
+   path: built-docker-image
+   pattern: built-docker-image-*
+   merge-multiple: true

- name: Load docker images
timeout-minutes: 10
run: |-
- docker load -i nginx/nginx-${{ matrix.os }}_img
- docker load -i django/django-${{ matrix.os }}_img
+ docker load -i built-docker-image/nginx-${{ matrix.os }}_img
+ docker load -i built-docker-image/django-${{ matrix.os }}_img
docker images
# run tests with docker compose
2 changes: 1 addition & 1 deletion components/package.json
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
"version": "2.40.0",
"version": "2.40.1",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
2 changes: 1 addition & 1 deletion dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401

__version__ = "2.40.0"
__version__ = "2.40.1"
__url__ = "https://github.com/DefectDojo/django-DefectDojo"
__docs__ = "https://documentation.defectdojo.com"
17 changes: 2 additions & 15 deletions dojo/api_v2/serializers.py
@@ -1,6 +1,5 @@
import json
import logging
- import os
import re
from datetime import datetime

@@ -803,20 +802,8 @@ class Meta:

def validate(self, data):
if file := data.get("file"):
- ext = os.path.splitext(file.name)[1]  # [0] returns path+filename
- valid_extensions = settings.FILE_UPLOAD_TYPES
- if ext.lower() not in valid_extensions:
-     if accepted_extensions := f"{', '.join(valid_extensions)}":
-         msg = (
-             "Unsupported extension. Supported extensions are as "
-             f"follows: {accepted_extensions}"
-         )
-     else:
-         msg = (
-             "File uploads are prohibited due to the list of acceptable "
-             "file extensions being empty"
-         )
-     raise ValidationError(msg)
+ # the clean will validate the file extensions and raise a Validation error if the extensions are not accepted
+ FileUpload(title=file.name, file=file).clean()
return data
return None

4 changes: 3 additions & 1 deletion dojo/importers/options.py
@@ -530,13 +530,15 @@ def validate_tags(
*args: list,
**kwargs: dict,
) -> list:
- return self.validate(
+ tags = self.validate(
"tags",
expected_types=[list],
required=False,
default=[],
**kwargs,
)
+ # Force all tags to be lowercase
+ return [tag.lower() for tag in tags]

def validate_test(
self,
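A quick sketch of what the new lowercasing step in validate_tags does to an incoming tag list (the tag values below are made up for illustration):

    tags = ["Prod", "External-Facing", "pci"]
    normalized = [tag.lower() for tag in tags]  # mirrors the list comprehension added above
    print(normalized)  # ['prod', 'external-facing', 'pci']
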
23 changes: 23 additions & 0 deletions dojo/models.py
@@ -6,6 +6,7 @@
import re
import warnings
from datetime import datetime
+ from pathlib import Path
from uuid import uuid4

import hyperlink
@@ -741,6 +742,28 @@ def get_accessible_url(self, obj, obj_id):

return f"access_file/{self.id}/{obj_id}/{obj_type}"

+ def clean(self):
+     if not self.title:
+         self.title = "<No Title>"
+
+     valid_extensions = settings.FILE_UPLOAD_TYPES
+
+     # why does this not work with self.file....
+     if self.file:
+         file_name = self.file.url
+     else:
+         file_name = self.title
+     if Path(file_name).suffix.lower() not in valid_extensions:
+         if accepted_extensions := f"{', '.join(valid_extensions)}":
+             msg = (
+                 _("Unsupported extension. Supported extensions are as follows: %s") % accepted_extensions
+             )
+         else:
+             msg = (
+                 _("File uploads are prohibited due to the list of acceptable file extensions being empty")
+             )
+         raise ValidationError(msg)


class Product_Type(models.Model):

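The serializer change earlier in this diff now routes file uploads through this new FileUpload.clean(), so the extension check lives in one place. A rough standalone sketch of the same check, using an illustrative extension list (the real list comes from the DD_FILE_UPLOAD_TYPES setting shown in settings.dist.py below):

    from pathlib import Path

    FILE_UPLOAD_TYPES = [".csv", ".json", ".pdf", ".png", ".txt"]  # stand-in for settings.FILE_UPLOAD_TYPES

    def check_extension(file_name: str) -> None:
        # same test FileUpload.clean() performs; DefectDojo raises Django's ValidationError instead
        if Path(file_name).suffix.lower() not in FILE_UPLOAD_TYPES:
            accepted = ", ".join(FILE_UPLOAD_TYPES)
            if accepted:
                raise ValueError(f"Unsupported extension. Supported extensions are as follows: {accepted}")
            raise ValueError("File uploads are prohibited due to the list of acceptable file extensions being empty")

    check_extension("scan-report.json")   # passes
    check_extension("payload.exe")        # raises
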
2 changes: 1 addition & 1 deletion dojo/settings/.settings.dist.py.sha256sum
@@ -1 +1 @@
- 6b9365d002880ae64ab54da905ede076db5a8661960f8f1e2793b7f4d25ff7e8
+ 58e2f6cb0ed2c041fe2741d955b72cb7540bfb0923f489d6324717fcf00039da
3 changes: 3 additions & 0 deletions dojo/settings/settings.dist.py
@@ -1744,6 +1744,9 @@ def saml2_attrib_map_format(dict):
"ELSA": "https://linux.oracle.com/errata/&&.html", # e.g. https://linux.oracle.com/errata/ELSA-2024-12714.html
"ELBA": "https://linux.oracle.com/errata/&&.html", # e.g. https://linux.oracle.com/errata/ELBA-2024-7457.html
"RXSA": "https://errata.rockylinux.org/", # e.g. https://errata.rockylinux.org/RXSA-2024:4928
"CAPEC": "https://capec.mitre.org/data/definitions/&&.html", # e.g. https://capec.mitre.org/data/definitions/157.html
"CWE": "https://cwe.mitre.org/data/definitions/&&.html", # e.g. https://cwe.mitre.org/data/definitions/79.html
"TEMP": "https://security-tracker.debian.org/tracker/", # e.g. https://security-tracker.debian.org/tracker/TEMP-0841856-B18BAF
}
# List of acceptable file types that can be uploaded to a given object via arbitrary file upload
FILE_UPLOAD_TYPES = env("DD_FILE_UPLOAD_TYPES")
7 changes: 6 additions & 1 deletion dojo/templatetags/display_tags.py
@@ -781,7 +781,12 @@ def vulnerability_url(vulnerability_id):
for key in settings.VULNERABILITY_URLS:
if vulnerability_id.upper().startswith(key):
if "&&" in settings.VULNERABILITY_URLS[key]:
- return settings.VULNERABILITY_URLS[key].split("&&")[0] + str(vulnerability_id) + settings.VULNERABILITY_URLS[key].split("&&")[1]
+ # Process specific keys specially if need
+ if key in ["CAPEC", "CWE"]:
+     vuln_id = str(vulnerability_id).replace(f"{key}-", "")
+ else:
+     vuln_id = str(vulnerability_id)
+ return f'{settings.VULNERABILITY_URLS[key].split("&&")[0]}{vuln_id}{settings.VULNERABILITY_URLS[key].split("&&")[1]}'
return settings.VULNERABILITY_URLS[key] + str(vulnerability_id)
return ""

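A minimal sketch of how the updated vulnerability_url template tag resolves the new CAPEC and CWE entries added to VULNERABILITY_URLS above; this is simplified and omits the branch for URL templates without a "&&" placeholder:

    VULNERABILITY_URLS = {
        "CAPEC": "https://capec.mitre.org/data/definitions/&&.html",
        "CWE": "https://cwe.mitre.org/data/definitions/&&.html",
    }

    def vulnerability_url(vulnerability_id: str) -> str:
        for key, template in VULNERABILITY_URLS.items():
            if vulnerability_id.upper().startswith(key):
                # CAPEC/CWE URLs expect the bare number, so the "CAPEC-"/"CWE-" prefix is stripped
                vuln_id = vulnerability_id.replace(f"{key}-", "") if key in ["CAPEC", "CWE"] else vulnerability_id
                prefix, suffix = template.split("&&")
                return f"{prefix}{vuln_id}{suffix}"
        return ""

    print(vulnerability_url("CWE-79"))     # https://cwe.mitre.org/data/definitions/79.html
    print(vulnerability_url("CAPEC-157"))  # https://capec.mitre.org/data/definitions/157.html
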
4 changes: 2 additions & 2 deletions dojo/tools/acunetix/parse_acunetix360_json.py
@@ -15,7 +15,7 @@ def get_findings(self, filename, test):
dupes = {}
data = json.load(filename)
dupes = {}
- scan_date = parser.parse(data["Generated"])
+ scan_date = parser.parse(data["Generated"], dayfirst=True)
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
for item in data["Vulnerabilities"]:
@@ -96,7 +96,7 @@ def get_findings(self, filename, test):
finding.unsaved_req_resp = [{"req": request, "resp": response}]
finding.unsaved_endpoints = [Endpoint.from_uri(url)]
if item.get("FirstSeenDate"):
- parseddate = parser.parse(item["FirstSeenDate"])
+ parseddate = parser.parse(item["FirstSeenDate"], dayfirst=True)
finding.date = parseddate
if dupe_key in dupes:
find = dupes[dupe_key]
2 changes: 1 addition & 1 deletion dojo/tools/acunetix/parse_acunetix_xml.py
@@ -26,7 +26,7 @@ def get_findings(self, filename, test):
# get report date
if scan.findtext("StartTime") and "" != scan.findtext("StartTime"):
report_date = dateutil.parser.parse(
scan.findtext("StartTime"),
scan.findtext("StartTime"), dayfirst=True,
).date()
for item in scan.findall("ReportItems/ReportItem"):
finding = Finding(
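The dayfirst=True additions in both Acunetix parsers matter for ambiguous day/month dates (the change assumes the report dates are day-first); a short dateutil illustration:

    from dateutil import parser

    # "03/04/2024" is ambiguous: dateutil assumes month-first by default
    print(parser.parse("03/04/2024 10:15:00"))                 # 2024-03-04 10:15:00
    print(parser.parse("03/04/2024 10:15:00", dayfirst=True))  # 2024-04-03 10:15:00
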
8 changes: 4 additions & 4 deletions dojo/tools/awssecurityhub/compliance.py
@@ -1,4 +1,4 @@
- from datetime import datetime
+ import datetime

from dojo.models import Finding

@@ -31,11 +31,11 @@ def get_item(self, finding: dict, test):
active = False
if finding.get("LastObservedAt", None):
try:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
except Exception:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
else:
- mitigated = datetime.utcnow()
+ mitigated = datetime.datetime.now(datetime.UTC)
else:
mitigated = None
is_Mitigated = False
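The same datetime rework appears in guardduty.py and inspector.py below. datetime.utcnow() returns a naive timestamp and is deprecated as of Python 3.12, while datetime.now(datetime.UTC) returns a timezone-aware one:

    import datetime

    naive = datetime.datetime.utcnow()           # deprecated; tzinfo is None
    aware = datetime.datetime.now(datetime.UTC)  # timezone-aware UTC timestamp
    print(naive.tzinfo, aware.tzinfo)            # None UTC
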
8 changes: 4 additions & 4 deletions dojo/tools/awssecurityhub/guardduty.py
@@ -1,4 +1,4 @@
- from datetime import datetime
+ import datetime

from dojo.models import Endpoint, Finding

@@ -25,11 +25,11 @@ def get_item(self, finding: dict, test):
is_Mitigated = True
if finding.get("LastObservedAt", None):
try:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
except Exception:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
else:
- mitigated = datetime.utcnow()
+ mitigated = datetime.datetime.now(datetime.UTC)
description = f"This is a GuardDuty Finding\n{finding.get('Description', '')}" + "\n"
description += f"**AWS Finding ARN:** {finding_id}\n"
if finding.get("SourceUrl"):
8 changes: 4 additions & 4 deletions dojo/tools/awssecurityhub/inspector.py
@@ -1,4 +1,4 @@
- from datetime import datetime
+ import datetime

from dojo.models import Endpoint, Finding

@@ -48,11 +48,11 @@ def get_item(self, finding: dict, test):
active = False
if finding.get("LastObservedAt", None):
try:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%S.%fZ")
except Exception:
- mitigated = datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
+ mitigated = datetime.datetime.strptime(finding.get("LastObservedAt"), "%Y-%m-%dT%H:%M:%fZ")
else:
- mitigated = datetime.utcnow()
+ mitigated = datetime.datetime.now(datetime.UTC)
title_suffix = ""
hosts = []
for resource in finding.get("Resources", []):