diff --git a/dojo/__init__.py b/dojo/__init__.py
index 82fe1512626..b094d840779 100644
--- a/dojo/__init__.py
+++ b/dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa: F401
-__version__ = "2.50.1"
+__version__ = "2.50.2"
__url__ = "https://github.com/DefectDojo/django-DefectDojo"
__docs__ = "https://documentation.defectdojo.com"
diff --git a/dojo/context_processors.py b/dojo/context_processors.py
index 7a3c84af035..39385ef3440 100644
--- a/dojo/context_processors.py
+++ b/dojo/context_processors.py
@@ -12,6 +12,7 @@ def globalize_vars(request):
"FORGOT_USERNAME": settings.FORGOT_USERNAME,
"CLASSIC_AUTH_ENABLED": settings.CLASSIC_AUTH_ENABLED,
"OIDC_ENABLED": settings.OIDC_AUTH_ENABLED,
+ "SOCIAL_AUTH_OIDC_LOGIN_BUTTON_TEXT": settings.SOCIAL_AUTH_OIDC_LOGIN_BUTTON_TEXT,
"AUTH0_ENABLED": settings.AUTH0_OAUTH2_ENABLED,
"GOOGLE_ENABLED": settings.GOOGLE_OAUTH_ENABLED,
"OKTA_ENABLED": settings.OKTA_OAUTH_ENABLED,
diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py
index 00385e733e4..87dab3d291b 100644
--- a/dojo/jira_link/helper.py
+++ b/dojo/jira_link/helper.py
@@ -148,7 +148,9 @@ def is_keep_in_sync_with_jira(finding):
jira_issue_exists = finding.has_jira_issue or (finding.finding_group and finding.finding_group.has_jira_issue)
if jira_issue_exists:
# Determine if any automatic sync should occur
- keep_in_sync_enabled = get_jira_instance(finding).finding_jira_sync
+ jira_instance = get_jira_instance(finding)
+ if jira_instance:
+ keep_in_sync_enabled = jira_instance.finding_jira_sync
return keep_in_sync_enabled
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py
index b71e4e4520c..fb2d74e82e6 100644
--- a/dojo/settings/settings.dist.py
+++ b/dojo/settings/settings.dist.py
@@ -29,6 +29,7 @@
# Set casting and default values
DD_SITE_URL=(str, "http://localhost:8080"),
DD_DEBUG=(bool, False),
+ DD_DJANGO_DEBUG_TOOLBAR_ENABLED=(bool, False),
DD_TEMPLATE_DEBUG=(bool, False),
DD_LOG_LEVEL=(str, ""),
DD_DJANGO_METRICS_ENABLED=(bool, False),
@@ -120,6 +121,7 @@
DD_SOCIAL_AUTH_OIDC_AUTHORIZATION_URL=(str, ""),
DD_SOCIAL_AUTH_OIDC_USERINFO_URL=(str, ""),
DD_SOCIAL_AUTH_OIDC_JWKS_URI=(str, ""),
+ DD_SOCIAL_AUTH_OIDC_LOGIN_BUTTON_TEXT=(str, "Login with OIDC"),
DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED=(bool, False),
DD_SOCIAL_AUTH_AUTH0_KEY=(str, ""),
DD_SOCIAL_AUTH_AUTH0_SECRET=(str, ""),
@@ -356,6 +358,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param
# False if not in os.environ
DEBUG = env("DD_DEBUG")
+DJANGO_DEBUG_TOOLBAR_ENABLED = env("DD_DJANGO_DEBUG_TOOLBAR_ENABLED")
TEMPLATE_DEBUG = env("DD_TEMPLATE_DEBUG")
# Hosts/domain names that are valid for this site; required if DEBUG is False
@@ -618,6 +621,8 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param
SOCIAL_AUTH_OIDC_USERINFO_URL = value
if value := env("DD_SOCIAL_AUTH_OIDC_JWKS_URI"):
SOCIAL_AUTH_OIDC_JWKS_URI = value
+if value := env("DD_SOCIAL_AUTH_OIDC_LOGIN_BUTTON_TEXT"):
+ SOCIAL_AUTH_OIDC_LOGIN_BUTTON_TEXT = value
AUTH0_OAUTH2_ENABLED = env("DD_SOCIAL_AUTH_AUTH0_OAUTH2_ENABLED")
SOCIAL_AUTH_AUTH0_KEY = env("DD_SOCIAL_AUTH_AUTH0_KEY")
@@ -1853,6 +1858,7 @@ def saml2_attrib_map_format(din):
"NTAP-": "https://security.netapp.com/advisory/", # e.g. https://security.netapp.com/advisory/ntap-20250328-0007
"OPENSUSE-SU-": "https://osv.dev/vulnerability/", # e.g. https://osv.dev/vulnerability/openSUSE-SU-2025:14898-1
"OSV-": "https://osv.dev/vulnerability/", # e.g. https://osv.dev/vulnerability/OSV-2024-1330
+ "OXAS-ADV-": "https://cvepremium.circl.lu/vuln/", # e.g. https://cvepremium.circl.lu/vuln/OXAS-ADV-2023-0001
"PAN-SA-": "https://security.paloaltonetworks.com/", # e.g. https://security.paloaltonetworks.com/PAN-SA-2024-0010
"PFPT-SA-": "https://www.proofpoint.com/us/security/security-advisories/", # e.g. https://www.proofpoint.com/us/security/security-advisories/pfpt-sa-0002
"PMASA-": "https://www.phpmyadmin.net/security/", # e.g. https://www.phpmyadmin.net/security/PMASA-2025-1
@@ -1941,3 +1947,45 @@ def saml2_attrib_map_format(din):
warnings.filterwarnings("ignore", "The FORMS_URLFIELD_ASSUME_HTTPS transitional setting is deprecated.")
FORMS_URLFIELD_ASSUME_HTTPS = True
# Inspired by https://adamj.eu/tech/2023/12/07/django-fix-urlfield-assume-scheme-warnings/
+
+if DEBUG:
+ # adding DEBUG logging for all of Django.
+ LOGGING["loggers"]["root"] = {
+ "handlers": ["console"],
+ "level": "DEBUG",
+ }
+
+if DJANGO_DEBUG_TOOLBAR_ENABLED:
+
+ INSTALLED_APPS += (
+ "debug_toolbar",
+ )
+
+ MIDDLEWARE = ["debug_toolbar.middleware.DebugToolbarMiddleware", *MIDDLEWARE]
+
+ def show_toolbar(request):
+ return True
+
+ DEBUG_TOOLBAR_CONFIG = {
+ "SHOW_TOOLBAR_CALLBACK": show_toolbar,
+ "INTERCEPT_REDIRECTS": False,
+ "SHOW_COLLAPSED": True,
+ }
+
+ DEBUG_TOOLBAR_PANELS = [
+ # 'ddt_request_history.panels.request_history.RequestHistoryPanel', # Here it is
+ "debug_toolbar.panels.versions.VersionsPanel",
+ "debug_toolbar.panels.timer.TimerPanel",
+ "debug_toolbar.panels.settings.SettingsPanel",
+ "debug_toolbar.panels.headers.HeadersPanel",
+ "debug_toolbar.panels.request.RequestPanel",
+ "debug_toolbar.panels.sql.SQLPanel",
+ "debug_toolbar.panels.templates.TemplatesPanel",
+ # 'debug_toolbar.panels.staticfiles.StaticFilesPanel',
+ "debug_toolbar.panels.cache.CachePanel",
+ "debug_toolbar.panels.signals.SignalsPanel",
+ # 'debug_toolbar.panels.logging.LoggingPanel',
+ "debug_toolbar.panels.redirects.RedirectsPanel",
+ "debug_toolbar.panels.profiling.ProfilingPanel",
+ # 'cachalot.panels.CachalotPanel',
+ ]
diff --git a/dojo/settings/template-local_settings b/dojo/settings/template-local_settings
index 337027bff92..c710201d620 100644
--- a/dojo/settings/template-local_settings
+++ b/dojo/settings/template-local_settings
@@ -1,21 +1,7 @@
# local_settings.py
# this file will be included by settings.py *after* loading settings.dist.py
-# this example configures the django debug toolbar and sets some loglevels to DEBUG
-
-from django.urls import re_path
-from django.conf.urls import include
-
-# UPDATE: Adding debug_toolbar to to INSTALLED_APPS here prevents the nginx container from generating the correct static files
-# So add debug_toolbar to INSTALLED_APPS in settings.dist.py and rebuild to get started with the debug_toolbar.
-# Thje middleware and other config can remain in this file (local_settings.py) to avoid chance of conflicts on upgrades.
-INSTALLED_APPS += (
-# 'debug_toolbar',
-)
-
-MIDDLEWARE = [
- 'debug_toolbar.middleware.DebugToolbarMiddleware',
-] + MIDDLEWARE
+# this example sets some loglevels to DEBUG
# adding DEBUG logging for all of Django.
LOGGING['loggers']['root'] = {
@@ -27,35 +13,3 @@ LOGGING['loggers']['root'] = {
# output DEBUG logging for deduplication
# LOGGING['loggers']['dojo.specific-loggers.deduplication']['level'] = 'DEBUG'
-
-
-def show_toolbar(request):
- return True
-
-
-DEBUG_TOOLBAR_CONFIG = {
- "SHOW_TOOLBAR_CALLBACK": show_toolbar,
- "INTERCEPT_REDIRECTS": False,
- "SHOW_COLLAPSED": True,
-}
-
-DEBUG_TOOLBAR_PANELS = [
- # 'ddt_request_history.panels.request_history.RequestHistoryPanel', # Here it is
- 'debug_toolbar.panels.versions.VersionsPanel',
- 'debug_toolbar.panels.timer.TimerPanel',
- 'debug_toolbar.panels.settings.SettingsPanel',
- 'debug_toolbar.panels.headers.HeadersPanel',
- 'debug_toolbar.panels.request.RequestPanel',
- 'debug_toolbar.panels.sql.SQLPanel',
- 'debug_toolbar.panels.templates.TemplatesPanel',
- # 'debug_toolbar.panels.staticfiles.StaticFilesPanel',
- 'debug_toolbar.panels.cache.CachePanel',
- 'debug_toolbar.panels.signals.SignalsPanel',
- 'debug_toolbar.panels.logging.LoggingPanel',
- 'debug_toolbar.panels.redirects.RedirectsPanel',
- 'debug_toolbar.panels.profiling.ProfilingPanel',
- # 'cachalot.panels.CachalotPanel',
-]
-
-import debug_toolbar
-EXTRA_URL_PATTERNS = [re_path(r"^__debug__/", include(debug_toolbar.urls))]
diff --git a/dojo/templates/dojo/login.html b/dojo/templates/dojo/login.html
index 430539f4930..fe54191f2a6 100644
--- a/dojo/templates/dojo/login.html
+++ b/dojo/templates/dojo/login.html
@@ -49,7 +49,7 @@
{% if OIDC_ENABLED is True %}
{% endif %}
diff --git a/dojo/tools/fortify/fpr_parser.py b/dojo/tools/fortify/fpr_parser.py
index f348aa265d6..21c981eb75f 100644
--- a/dojo/tools/fortify/fpr_parser.py
+++ b/dojo/tools/fortify/fpr_parser.py
@@ -133,14 +133,14 @@ def convert_vulnerabilities_to_findings(self, root: Element, audit_log: Element,
finding = Finding(test=test, static_finding=True)
finding.active, finding.false_p = self.compute_status(related_data, vuln_data)
- finding.title = self.format_title(vuln_data, snippet, description, rule)
+ finding.title = self.format_title(vuln_data, snippet)
finding.description = self.format_description(vuln_data, snippet, description, rule)
finding.mitigation = self.format_mitigation(vuln_data, snippet, description, rule)
- finding.severity = self.compute_severity(vuln_data, snippet, description, rule)
+ finding.severity = self.compute_severity(vuln_data, rule)
finding.impact = self.format_impact(related_data, vuln_data)
finding.file_path = vuln_data.source_location_path
- finding.line = int(self.compute_line(vuln_data, snippet, description, rule))
+ finding.line = int(self.compute_line(vuln_data, snippet))
finding.unique_id_from_tool = vuln_data.instance_id
findings.append(finding)
@@ -225,26 +225,28 @@ def parse_description_information(self, description: Element) -> DescriptionData
def parse_rule_information(self, rule: Element) -> RuleData:
"""Parse the rule information and return a RuleData object."""
rule_data = RuleData()
- rule_data.accuracy = rule.findtext("Group[@name='Accuracy']", None, self.namespaces)
- rule_data.impact = rule.findtext("Group[@name='Impact']", None, self.namespaces)
- rule_data.probability = rule.findtext("Group[@name='Probability']", None, self.namespaces)
- rule_data.impact_bias = rule.findtext("Group[@name='ImpactBias']", None, self.namespaces)
- rule_data.confidentiality_impact = rule.findtext("Group[@name='ConfidentialityImpact']", None, self.namespaces)
- rule_data.integrity_impact = rule.findtext("Group[@name='IntegrityImpact']", None, self.namespaces)
- rule_data.remediation_effort = rule.findtext("Group[@name='Recommendations']", None, self.namespaces)
- logger.debug(f"Rule Impact: {rule_data.impact}")
+ if rule is not None:
+ rule_data.accuracy = rule.findtext("Group[@name='Accuracy']", None, self.namespaces)
+ rule_data.impact = rule.findtext("Group[@name='Impact']", None, self.namespaces)
+ rule_data.probability = rule.findtext("Group[@name='Probability']", None, self.namespaces)
+ rule_data.impact_bias = rule.findtext("Group[@name='ImpactBias']", None, self.namespaces)
+ rule_data.confidentiality_impact = rule.findtext("Group[@name='ConfidentialityImpact']", None, self.namespaces)
+ rule_data.integrity_impact = rule.findtext("Group[@name='IntegrityImpact']", None, self.namespaces)
+ rule_data.remediation_effort = rule.findtext("Group[@name='Recommendations']", None, self.namespaces)
+ logger.debug(f"Rule Impact: {rule_data.impact}")
return rule_data
- def format_title(self, vulnerability, snippet, description, rule) -> str:
+ def format_title(self, vulnerability, snippet) -> str:
# defaults for when there is no snippet (shouldn't happen, future improvement: parser might also parse ReplacementDefinitions and/or Context elements)
file_name = vulnerability.source_location_path.split("/")[-1]
- line = self.compute_line(vulnerability, snippet, description, rule)
+ line = self.compute_line(vulnerability, snippet)
return f"{vulnerability.vulnerability_type} - {file_name}: {line} ({vulnerability.class_id})"
def format_description(self, vulnerability, snippet, description, rule) -> str:
desc = f"##Catagory: {vulnerability.vulnerability_type}\n"
- desc += f"###Abstract:\n{description.abstract}\n"
+ if description:
+ desc += f"###Abstract:\n{description.abstract}\n"
desc += f"**SourceLocationPath:** {vulnerability.source_location_path}\n"
desc += f"**SourceLocationLine:** {vulnerability.source_location_line}\n"
@@ -258,7 +260,8 @@ def format_description(self, vulnerability, snippet, description, rule) -> str:
"leads to this finding. \n")
desc += f"###Snippet:\n**File: {snippet.file_name}: {snippet.start_line}**\n```\n{snippet.text}\n```\n"
- desc += f"##Explanation:\n {description.explanation}"
+ if description:
+ desc += f"##Explanation:\n {description.explanation}"
desc += f"##Details: {vulnerability.instance_id}\n"
desc += f"**InstanceID:** {vulnerability.instance_id}\n"
@@ -273,14 +276,14 @@ def format_description(self, vulnerability, snippet, description, rule) -> str:
def format_mitigation(self, vulnerability, snippet, description, rule) -> str:
mitigation = ""
- if description.recommendations:
+ if description and description.recommendations:
mitigation += f"###Recommendation:\n {description.recommendations}\n"
- if description.tips:
+ if description and description.tips:
mitigation += f"###Tips:\n {description.tips}"
return mitigation
- def compute_severity(self, vulnerability, snippet, description, rule) -> str:
+ def compute_severity(self, vulnerability, rule) -> str:
"""Convert the the float representation of severity and confidence to a string severity."""
if not rule.impact:
logger.debug("No rule impact found, setting severity to Informational")
@@ -330,7 +333,7 @@ def compute_status(self, related_data, vulnerability) -> tuple[bool, bool]:
return False, True
return True, False
- def compute_line(self, vulnerability, snippet, description, rule) -> str:
+ def compute_line(self, vulnerability, snippet) -> str:
if snippet and snippet.start_line:
return snippet.start_line
return vulnerability.source_location_line
diff --git a/dojo/tools/generic/csv_parser.py b/dojo/tools/generic/csv_parser.py
index f29f434f198..55d0845f0a0 100644
--- a/dojo/tools/generic/csv_parser.py
+++ b/dojo/tools/generic/csv_parser.py
@@ -81,6 +81,26 @@ def _get_findings_csv(self, filename):
if len(cvss_objects) > 0:
finding.cvssv3 = cvss_objects[0].clean_vector()
+ if "CVSSV4" in row:
+ cvss4_objects = cvss_parser.parse_cvss_from_text(row["CVSSV4"])
+ if len(cvss4_objects) > 0:
+ finding.cvssv4 = cvss4_objects[0].clean_vector()
+
+            if row.get("CVSSV4_score"):
+                finding.cvssv4_score = float(row["CVSSV4_score"])
+
+            if row.get("kev_date"):
+                finding.kev_date = parse(row["kev_date"])
+
+            if "known_exploited" in row:
+                finding.known_exploited = row["known_exploited"].strip().lower() == "true"
+
+            if "ransomware_used" in row:
+                finding.ransomware_used = row["ransomware_used"].strip().lower() == "true"
+
+            if "fix_available" in row:
+                finding.fix_available = row["fix_available"].strip().lower() == "true"
+
# manage endpoints
if "Url" in row:
finding.unsaved_endpoints = [
diff --git a/dojo/tools/generic/json_parser.py b/dojo/tools/generic/json_parser.py
index 1a77bd0551e..b5de914d393 100644
--- a/dojo/tools/generic/json_parser.py
+++ b/dojo/tools/generic/json_parser.py
@@ -65,6 +65,8 @@ def _get_test_json(self, data):
"epss_percentile",
"cvssv3",
"cvssv3_score",
+ "cvssv4",
+ "cvssv4_score",
"mitigation",
"impact",
"steps_to_reproduce",
@@ -102,6 +104,10 @@ def _get_test_json(self, data):
"planned_remediation_version",
"effort_for_fixing",
"tags",
+ "kev_date",
+ "known_exploited",
+ "ransomware_used",
+ "fix_available",
}.union(required)
not_allowed = sorted(set(item).difference(allowed))
if not_allowed:
diff --git a/dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py b/dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py
index 1fcaee93843..021f3cdcb81 100644
--- a/dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py
+++ b/dojo/tools/jfrog_xray_on_demand_binary_scan/parser.py
@@ -102,10 +102,12 @@ def get_severity_justification(vulnerability):
def process_component(component):
mitigation = ""
impact = "**Impact paths**\n\n- "
+ fix_available = False
fixed_versions = component.get("fixed_versions")
if fixed_versions:
mitigation = "**Versions containing a fix:**\n\n- "
mitigation += "\n- ".join(fixed_versions)
+ fix_available = True
if "impact_paths" in component:
refs = []
impact_paths_l1 = component["impact_paths"]
@@ -117,7 +119,7 @@ def process_component(component):
refs.append(item["full_path"])
if refs:
impact += "\n- ".join(sorted(set(refs))) # deduplication
- return mitigation, impact
+ return mitigation, impact, fix_available
def get_cve(vulnerability):
@@ -158,7 +160,7 @@ def get_item_set(vulnerability):
for component_name_with_version, component in vulnerability.get("components", {}).items():
component_name, component_version = get_component_name_version(component_name_with_version)
- mitigation, impact = process_component(component)
+ mitigation, impact, fix_available = process_component(component)
title = clean_title(vulnerability["summary"])
# create the finding object
@@ -176,6 +178,7 @@ def get_item_set(vulnerability):
dynamic_finding=False,
cvssv3=cvssv3,
vuln_id_from_tool=vuln_id_from_tool,
+ fix_available=fix_available,
)
if vulnerability_ids:
finding.unsaved_vulnerability_ids = vulnerability_ids
diff --git a/dojo/tools/mend/parser.py b/dojo/tools/mend/parser.py
index c71ed89e2f3..37bef7667cc 100644
--- a/dojo/tools/mend/parser.py
+++ b/dojo/tools/mend/parser.py
@@ -77,6 +77,7 @@ def _build_common_output(node, lib_name=None):
ransomware_used = node.get("malicious", None)
known_exploited = node.get("exploitable", None)
component_path = node["component"].get("path", None)
+ fix_available = False
if component_path:
locations.append(component_path)
if "topFix" in node:
@@ -91,6 +92,7 @@ def _build_common_output(node, lib_name=None):
+ topfix_node.get("fixResolution", "")
+ "\n"
)
+ fix_available = True
except Exception:
logger.exception("Error handling topFix node.")
elif "library" in node:
@@ -116,6 +118,7 @@ def _build_common_output(node, lib_name=None):
component_name = node["library"].get("artifactId")
component_version = node["library"].get("version")
cvss3_score = node.get("cvss3_score", None)
+ fix_available = False
if "topFix" in node:
try:
topfix_node = node.get("topFix")
@@ -123,10 +126,12 @@ def _build_common_output(node, lib_name=None):
topfix_node.get("date"),
topfix_node.get("fixResolution"),
)
+ fix_available = True
except Exception:
logger.exception("Error handling topFix node.")
else:
description = node.get("description", "Unknown")
+ fix_available = False
cve = node.get("name")
title = "CVE-None | " + lib_name if cve is None else cve + " | " + lib_name
@@ -208,6 +213,7 @@ def _build_common_output(node, lib_name=None):
impact=impact if impact is not None else None,
steps_to_reproduce="**Locations Found**: " + ", ".join(locations) if locations is not None else None,
kev_date=kev_date if kev_date is not None else None,
+ fix_available=fix_available,
)
# only overwrite default values if they are not None #12989
if known_exploited is not None:
diff --git a/dojo/tools/wpscan/parser.py b/dojo/tools/wpscan/parser.py
index 2ba6b5016b7..261b65220d8 100644
--- a/dojo/tools/wpscan/parser.py
+++ b/dojo/tools/wpscan/parser.py
@@ -62,8 +62,10 @@ def get_vulnerabilities(
if report_date:
finding.date = report_date
# if there is a fixed version fill mitigation
+ finding.fix_available = False
if vul.get("fixed_in"):
finding.mitigation = "fixed in : " + vul["fixed_in"]
+ finding.fix_available = True
# manage CVE
if "cve" in vul["references"]:
finding.unsaved_vulnerability_ids = []
diff --git a/dojo/urls.py b/dojo/urls.py
index f7467f477f4..06ae8eb86ee 100644
--- a/dojo/urls.py
+++ b/dojo/urls.py
@@ -281,3 +281,9 @@ def drf_spectacular_preprocessing_filter_spec(endpoints):
if path.startswith("/api/v2/"):
filtered.append((path, path_regex, method, callback))
return filtered
+
+
+if hasattr(settings, "DJANGO_DEBUG_TOOLBAR_ENABLED"):
+ if settings.DJANGO_DEBUG_TOOLBAR_ENABLED:
+ from debug_toolbar.toolbar import debug_toolbar_urls
+ urlpatterns += debug_toolbar_urls()
diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml
index eaba06160a6..3931a9c6f43 100644
--- a/helm/defectdojo/Chart.yaml
+++ b/helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
-appVersion: "2.50.1"
+appVersion: "2.50.2"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
-version: 1.6.206
+version: 1.6.207
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
diff --git a/helm/defectdojo/values.yaml b/helm/defectdojo/values.yaml
index 5d073474a08..acd06793da0 100644
--- a/helm/defectdojo/values.yaml
+++ b/helm/defectdojo/values.yaml
@@ -283,8 +283,8 @@ django:
cpu: 2000m
memory: 512Mi
appSettings:
- processes: 2
- threads: 2
+ processes: 4
+ threads: 4
# maxFd: 102400 # Uncomment to set the maximum number of file descriptors. If not set will be detected by uwsgi
enableDebug: false # this also requires DD_DEBUG to be set to True
certificates:
diff --git a/readme-docs/DOCKER.md b/readme-docs/DOCKER.md
index 30ebdaa57c2..0f9c5bcedf2 100644
--- a/readme-docs/DOCKER.md
+++ b/readme-docs/DOCKER.md
@@ -176,8 +176,8 @@ Or you can modify `settings.dist.py` directly, but this adds the risk of having
```
## Debug Toolbar
-In the `dojo/settings/template-local_settings.py` you'll find instructions on how to enable the [Django Debug Toolbar](https://github.com/jazzband/django-debug-toolbar).
-This toolbar allows you to debug SQL queries, and shows some other interesting information.
+The [Django Debug Toolbar](https://github.com/jazzband/django-debug-toolbar) can be enabled via the `DD_DJANGO_DEBUG_TOOLBAR_ENABLED` environment variable.
+This toolbar allows you to debug SQL queries, and shows some other interesting information. Do NOT enable this in Production environments.
# Explicit Versioning
diff --git a/requirements.txt b/requirements.txt
index 350b8d03dcc..dc0f1f2f8a2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -21,7 +21,7 @@ django-slack==5.19.0
git+https://github.com/DefectDojo/django-tagging@develop#egg=django-tagging
django-watson==1.6.3
django-prometheus==2.4.1
-Django==5.1.11
+Django==5.1.12
djangorestframework==3.16.1
html2text==2025.4.15
humanize==4.13.0
diff --git a/unittests/scans/fortify/hello_world_no_metainfo.fpr b/unittests/scans/fortify/hello_world_no_metainfo.fpr
new file mode 100644
index 00000000000..c502c41ba9f
Binary files /dev/null and b/unittests/scans/fortify/hello_world_no_metainfo.fpr differ
diff --git a/unittests/scans/generic/generic_report_kev_cvssv4.csv b/unittests/scans/generic/generic_report_kev_cvssv4.csv
new file mode 100644
index 00000000000..7b7054f137b
--- /dev/null
+++ b/unittests/scans/generic/generic_report_kev_cvssv4.csv
@@ -0,0 +1,2 @@
+Date,Title,CweId,epss_score,epss_percentile,Url,Severity,Description,Mitigation,Impact,References,Active,Verified,FalsePositive,Duplicate,CVSSV4,CVSSV4_score,known_exploited,ransomware_used,fix_available,kev_date,CVSSV3
+01/30/2018,"Test finding",0,.00042,.23474,https://192.168.1.1/,Low,"Test finding description",,,,False,False,False,False,"CVSS:4.0/AV:N/AC:L/AT:N/PR:H/UI:N/VC:L/VI:L/VA:N/SC:N/SI:N/SA:N","7.3",True,True,True,"09/11/2025","CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N"
diff --git a/unittests/scans/generic/generic_report_kev_cvssv4.json b/unittests/scans/generic/generic_report_kev_cvssv4.json
new file mode 100644
index 00000000000..ee75bc67508
--- /dev/null
+++ b/unittests/scans/generic/generic_report_kev_cvssv4.json
@@ -0,0 +1,46 @@
+{
+ "findings": [
+ {
+ "title": "test title",
+ "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau",
+ "active": true,
+ "verified": true,
+ "severity": "Medium",
+ "impact": "Some impact",
+ "date": "2021-01-06",
+ "cve": "CVE-2020-36234",
+ "cwe": 261,
+ "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "tags": [
+ "security",
+ "network"
+ ],
+ "unique_id_from_tool": "3287f2d0-554f-491b-8516-3c349ead8ee5",
+ "vuln_id_from_tool": "TEST1",
+ "known_exploited": true,
+ "ransomware_used": true,
+ "fix_available": true,
+ "kev_date": "2024-05-01",
+ "cvssv4": "CVSS:4.0/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "cvssv4_score": 7.3
+ },
+ {
+ "title": "test title2",
+ "description": "Some very long description with\n\n some UTF-8 chars à qu'il est beau2",
+ "active": true,
+ "verified": false,
+ "severity": "Medium",
+ "impact": "Some impact",
+ "date": "2021-01-06",
+ "cve": "CVE-2020-36235",
+ "cwe": 287,
+ "cvssv3": "CVSS:3.1/AV:N/AC:L/PR:H/UI:R/S:C/C:L/I:L/A:N",
+ "tags": [
+ "security",
+ "network"
+ ],
+ "unique_id_from_tool": "42500af3-68c5-4dc3-8022-191d93c2f1f7",
+ "vuln_id_from_tool": "TEST2"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/unittests/tools/test_fortify_parser.py b/unittests/tools/test_fortify_parser.py
index c0d3b3d191c..144eb502615 100644
--- a/unittests/tools/test_fortify_parser.py
+++ b/unittests/tools/test_fortify_parser.py
@@ -154,3 +154,27 @@ def test_fortify_fpr_suppressed_finding(self):
self.assertFalse(finding.active)
self.assertTrue(finding.false_p)
self.assertEqual("Threaded Comments:\n2025-03-10T20:52:28.964+05:30 - (testuser): Not an issue. Handled in server config to refer to internal Artifactory\n", finding.impact)
+
+ def test_fortify_hello_world_fpr_rule_without_metainfo(self):
+ with (get_unit_tests_scans_path("fortify") / "hello_world_no_metainfo.fpr").open(encoding="utf-8") as testfile:
+ parser = FortifyParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(4, len(findings))
+ # for i in range(len(findings)):
+ # print(f"{i}: {findings[i]}: {findings[i].severity}")
+
+ with self.subTest(i=0):
+ finding = findings[0]
+ self.assertEqual("Password Management - HelloWorld.java: 5 (720E3A66-55AC-4D2D-8DB9-DC30E120A52F)", finding.title)
+ # Info as rule has no metainfo/impact
+ self.assertEqual("Informational", finding.severity)
+ self.assertEqual("A5338E223E737FF81F8A806C50A05969", finding.unique_id_from_tool)
+ self.assertEqual("src/main/java/hello/HelloWorld.java", finding.file_path)
+ self.assertEqual(5, finding.line)
+ with self.subTest(i=1):
+ finding = findings[1]
+ self.assertEqual("Password Management - HelloWorld.java: 13 (9C5BD1B5-C296-48d4-B5F5-5D2958661BC4)", finding.title)
+ self.assertEqual("High", finding.severity)
+ self.assertEqual("D3166922519EDD92D132761602EB71B4", finding.unique_id_from_tool)
+ self.assertEqual("src/main/java/hello/HelloWorld.java", finding.file_path)
+ self.assertEqual(13, finding.line)
diff --git a/unittests/tools/test_jfrog_xray_on_demand_binary_scan_parser.py b/unittests/tools/test_jfrog_xray_on_demand_binary_scan_parser.py
index 6793b392257..668cfed533e 100644
--- a/unittests/tools/test_jfrog_xray_on_demand_binary_scan_parser.py
+++ b/unittests/tools/test_jfrog_xray_on_demand_binary_scan_parser.py
@@ -19,6 +19,7 @@ def test_parse_file_with_one_vuln(self):
self.assertEqual("gav://test", item.component_name)
self.assertEqual("CVE-2014-0114", item.unsaved_vulnerability_ids[0])
self.assertEqual("High", item.severity)
+ self.assertEqual(True, item.fix_available)
def test_parse_file_with_many_vulns(self):
testfile = (get_unit_tests_scans_path("jfrog_xray_on_demand_binary_scan") / "many_vulns.json").open(encoding="utf-8")
diff --git a/unittests/tools/test_mend_parser.py b/unittests/tools/test_mend_parser.py
index f8dc883155b..136fb393f72 100644
--- a/unittests/tools/test_mend_parser.py
+++ b/unittests/tools/test_mend_parser.py
@@ -21,6 +21,7 @@ def test_parse_file_with_one_vuln_has_one_findings(self):
self.assertEqual("CVE-2019-9658", finding.unsaved_vulnerability_ids[0])
self.assertEqual("CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:N/A:N", finding.cvssv3)
self.assertEqual(5.3, finding.cvssv3_score)
+ self.assertEqual(True, finding.fix_available)
def test_parse_file_with_multiple_vuln_has_multiple_finding(self):
with (get_unit_tests_scans_path("mend") / "okhttp_many_vuln.json").open(encoding="utf-8") as testfile:
@@ -44,6 +45,7 @@ def test_parse_file_with_one_sca_vuln_finding(self):
finding = list(findings)[0]
self.assertEqual("**Locations Found**: D:\\MendRepo\\test-product\\test-project\\test-project-subcomponent\\path\\to\\the\\Java\\commons-codec-1.6_donotuse.jar", finding.steps_to_reproduce)
self.assertEqual("WS-2019-0379 | commons-codec-1.6.jar", finding.title)
+ self.assertEqual(True, finding.fix_available)
def test_parse_file_with_no_vuln_has_no_findings_platform(self):
with (get_unit_tests_scans_path("mend") / "mend-sca-platform-api3-no-findings.json").open(encoding="utf-8") as testfile:
diff --git a/unittests/tools/test_wpscan_parser.py b/unittests/tools/test_wpscan_parser.py
index 994a03b90fa..0aa376e0a48 100644
--- a/unittests/tools/test_wpscan_parser.py
+++ b/unittests/tools/test_wpscan_parser.py
@@ -107,6 +107,7 @@ def test_parse_file_with_multiple_vuln_in_version(self):
self.assertNotEqual("Info", finding.severity) # it is a vulnerability so not 'Info'
self.assertEqual("WordPress 2.8.1-4.7.2 - Control Characters in Redirect URL Validation", finding.title)
self.assertEqual("fixed in : 4.6.4", finding.mitigation)
+ self.assertEqual(True, finding.fix_available)
self.assertEqual("", finding.get_scanner_confidence_text()) # data are => 100%
def test_parse_file_issue5774(self):
@@ -123,6 +124,7 @@ def test_parse_file_issue5774(self):
self.assertNotEqual("Info", finding.severity)
self.assertEqual("All in One SEO Pack <= 2.9.1.1 - Authenticated Stored Cross-Site Scripting (XSS)", finding.title)
self.assertEqual("fixed in : 2.10", finding.mitigation)
+ self.assertEqual(True, finding.fix_available)
self.assertEqual(7, finding.scanner_confidence)
self.assertEqual("Tentative", finding.get_scanner_confidence_text()) # data are at 30%
with self.subTest(i=19):
@@ -137,6 +139,7 @@ def test_parse_file_issue5774(self):
self.assertNotEqual("Info", finding.severity)
self.assertEqual("All in One SEO Pack <= 2.9.1.1 - Authenticated Stored Cross-Site Scripting (XSS)", finding.title)
self.assertEqual("fixed in : 2.10", finding.mitigation)
+ self.assertEqual(True, finding.fix_available)
self.assertEqual("Tentative", finding.get_scanner_confidence_text()) # data are at 30%
with self.subTest(i=50):