Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 13 additions & 2 deletions .github/workflows/close-stale.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,17 @@ jobs:
close-stale:
runs-on: ubuntu-latest
steps:
- name: Close issues and PRs that are pending closure
uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
with:
# Disable automatic stale marking - only close manually labeled items
days-before-stale: -1
days-before-close: 7
stale-issue-label: 'pending-closure'
stale-pr-label: 'pending-closure'
close-issue-message: 'This issue has been automatically closed because it was manually labeled as pending closure. If you believe this was closed in error, please reopen it and remove the pending-closure label.'
close-pr-message: 'This PR has been automatically closed because it was manually labeled as pending closure. If you believe this was closed in error, please reopen it and remove the pending-closure label.'

- name: Close stale issues and PRs
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
with:
Expand All @@ -23,5 +34,5 @@ jobs:
days-before-close: 7
stale-issue-label: 'stale'
stale-pr-label: 'stale'
close-issue-message: 'This issue has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.'
close-pr-message: 'This PR has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.'
close-issue-message: 'This issue has been automatically closed because it was labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.'
close-pr-message: 'This PR has been automatically closed because it was labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.'
5 changes: 5 additions & 0 deletions docs/content/en/changelog/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ For Open Source release notes, please see the [Releases page on GitHub](https://

## Sept 2025: v2.50

### Sept 22, 2025: v2.50.4

* **(Pro UI)** Changes Engagement Deduplication form label and help text
* **(Pro UI)** Adds toggle for MCP (for superusers only)

### Sept 15, 2025: v2.50.3

* **(Pro UI)** Added support for [CVSSv4.0](https://www.first.org/cvss/v4-0/) vector strings.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
title: "Github Secrets Detection Report"
toc_hide: true
---
Import findings in JSON format from Github Secret Scanning REST API:
<https://docs.github.com/en/rest/secret-scanning/secret-scanning>

### Sample Scan Data
Sample Github Secrets Detection scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/github_secrets_detection_report_many_vul.json).
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,17 @@ Good example:
finding.cwe = data["mykey"]
```

```python
finding.cwe = data.get("mykey", 123)
```

```python
some_list = data.get("key_of_the_list") or []
```

The final example guards against cases where `key_of_the_list` is present, but `null`.


### Parsing of CVSS vectors

Data can have `CVSS` vectors or scores. Defect Dojo uses the `cvss` module provided by RedHat Security.
Expand Down
19 changes: 11 additions & 8 deletions dojo/api_v2/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1761,13 +1761,15 @@ def update(self, instance, validated_data):
if reporter_id := validated_data.get("reporter"):
instance.reporter = reporter_id

# Persist vulnerability IDs first so model save computes hash including them (if there is no hash yet)
# we can't pass unsaved_vulnerability_ids to super.update()
if parsed_vulnerability_ids:
save_vulnerability_ids(instance, parsed_vulnerability_ids)

instance = super().update(
instance, validated_data,
)

if parsed_vulnerability_ids:
save_vulnerability_ids(instance, parsed_vulnerability_ids)

if push_to_jira:
jira_helper.push_to_jira(instance)

Expand Down Expand Up @@ -1901,11 +1903,15 @@ def create(self, validated_data):
if (vulnerability_ids := validated_data.pop("vulnerability_id_set", None)):
logger.debug("VULNERABILITY_ID_SET: %s", vulnerability_ids)
parsed_vulnerability_ids.extend(vulnerability_id["vulnerability_id"] for vulnerability_id in vulnerability_ids)
logger.debug("PARSED_VULNERABILITY_IDST: %s", parsed_vulnerability_ids)
logger.debug("SETTING CVE FROM VULNERABILITY_ID_SET: %s", parsed_vulnerability_ids[0])
validated_data["cve"] = parsed_vulnerability_ids[0]
# validated_data["unsaved_vulnerability_ids"] = parsed_vulnerability_ids

new_finding = super().create(
validated_data)
# super.create() doesn't accept unsaved_vulnerability_ids or dedupe_option=False, so call save directly.
new_finding = Finding(**validated_data)
new_finding.unsaved_vulnerability_ids = parsed_vulnerability_ids or []
new_finding.save()

logger.debug(f"New finding CVE: {new_finding.cve}")

Expand All @@ -1918,9 +1924,6 @@ def create(self, validated_data):
new_finding.reviewers.set(reviewers)
if parsed_vulnerability_ids:
save_vulnerability_ids(new_finding, parsed_vulnerability_ids)
# can we avoid this extra save? the cve has already been set above in validated_data. but there are no tests for this
# on finding update nothing is done # with vulnerability_ids?
# new_finding.save()

if push_to_jira:
jira_helper.push_to_jira(new_finding)
Expand Down
2 changes: 1 addition & 1 deletion dojo/finding/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -1561,7 +1561,7 @@ def request_finding_review(request, fid):

create_notification(
event="review_requested", # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces
title="Finding review requested",
title=f"Finding review requested for Test created for {finding.test.engagement.product}: {finding.test.engagement.name}: {finding.test} - {finding.title}",
requested_by=user,
note=new_note,
finding=finding,
Expand Down
6 changes: 4 additions & 2 deletions dojo/search/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
from dojo.engagement.queries import get_authorized_engagements
from dojo.filters import FindingFilter, FindingFilterWithoutObjectLookups
from dojo.finding.queries import get_authorized_findings, get_authorized_vulnerability_ids, prefetch_for_findings
from dojo.forms import SimpleSearchForm
from dojo.forms import FindingBulkUpdateForm, SimpleSearchForm
from dojo.models import Engagement, Finding, Finding_Template, Languages, Product, Test
from dojo.product.queries import get_authorized_app_analysis, get_authorized_products
from dojo.test.queries import get_authorized_tests
Expand Down Expand Up @@ -390,7 +390,9 @@ def simple_search(request):
"form": form,
"activetab": activetab,
"show_product_column": True,
"generic": generic})
"generic": generic,
"bulk_edit_form": FindingBulkUpdateForm(request.GET),
})

if cookie:
response.set_cookie("highlight", value=keywords_query,
Expand Down
6 changes: 5 additions & 1 deletion dojo/settings/settings.dist.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,7 @@
DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL=(str, ""),
DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY=(str, ""),
DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET=(str, ""),
DD_SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL=(bool, True),
DD_SAML2_ENABLED=(bool, False),
# Allows to override default SAML authentication backend. Check https://djangosaml2.readthedocs.io/contents/setup.html#custom-user-attributes-processing
DD_SAML2_AUTHENTICATION_BACKENDS=(str, "djangosaml2.backends.Saml2Backend"),
Expand Down Expand Up @@ -577,7 +578,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param
SOCIAL_AUTH_STRATEGY = "social_django.strategy.DjangoStrategy"
SOCIAL_AUTH_STORAGE = "social_django.models.DjangoStorage"
SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ["username", "first_name", "last_name", "email"]
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True
SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = env("DD_SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL")

GOOGLE_OAUTH_ENABLED = env("DD_SOCIAL_AUTH_GOOGLE_OAUTH2_ENABLED")
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = env("DD_SOCIAL_AUTH_GOOGLE_OAUTH2_KEY")
Expand Down Expand Up @@ -1325,6 +1326,7 @@ def saml2_attrib_map_format(din):
"Scout Suite Scan": ["file_path", "vuln_id_from_tool"], # for now we use file_path as there is no attribute for "service"
"Meterian Scan": ["cwe", "component_name", "component_version", "description", "severity"],
"Github Vulnerability Scan": ["title", "severity", "component_name", "vulnerability_ids", "file_path"],
"Github Secrets Detection Report": ["title", "file_path", "line"],
"Solar Appscreener Scan": ["title", "file_path", "line", "severity"],
"pip-audit Scan": ["vuln_id_from_tool", "component_name", "component_version"],
"Rubocop Scan": ["vuln_id_from_tool", "file_path", "line"],
Expand Down Expand Up @@ -1570,6 +1572,7 @@ def saml2_attrib_map_format(din):
"AWS Security Hub Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
"Meterian Scan": DEDUPE_ALGO_HASH_CODE,
"Github Vulnerability Scan": DEDUPE_ALGO_HASH_CODE,
"Github Secrets Detection Report": DEDUPE_ALGO_HASH_CODE,
"Cloudsploit Scan": DEDUPE_ALGO_HASH_CODE,
"SARIF": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE,
"Azure Security Center Recommendations Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
Expand Down Expand Up @@ -1850,6 +1853,7 @@ def saml2_attrib_map_format(din):
"ALSA-": "https://osv.dev/vulnerability/", # e.g. https://osv.dev/vulnerability/ALSA-2024:0827
"ASA-": "https://security.archlinux.org/", # e.g. https://security.archlinux.org/ASA-202003-8
"AVD": "https://avd.aquasec.com/misconfig/", # e.g. https://avd.aquasec.com/misconfig/avd-ksv-01010
"AWS-": "https://aws.amazon.com/security/security-bulletins/", # e.g. https://aws.amazon.com/security/security-bulletins/AWS-2025-001
"BAM-": "https://jira.atlassian.com/browse/", # e.g. https://jira.atlassian.com/browse/BAM-25498
"BSERV-": "https://jira.atlassian.com/browse/", # e.g. https://jira.atlassian.com/browse/BSERV-19020
"C-": "https://hub.armosec.io/docs/", # e.g. https://hub.armosec.io/docs/c-0085
Expand Down
4 changes: 4 additions & 0 deletions dojo/templates/dojo/findings_list_snippet.html
Original file line number Diff line number Diff line change
Expand Up @@ -722,6 +722,9 @@ <h3 class="has-filters">
<td class="nowrap">
{% if finding.planned_remediation_date %}{{ finding.planned_remediation_date }}{% endif %}
</td>
<td class="nowrap">
{% if finding.planned_remediation_version %}{{ finding.planned_remediation_version }}{% endif %}
</td>
{% if filter_name != 'Closed' %}
<td class="nowrap">
{% if finding.reviewers %}
Expand Down Expand Up @@ -820,6 +823,7 @@ <h3 class="has-filters">
{% endif %}
{ "data": "service" },
{ "data": "planned_remediation_date" },
{ "data": "planned_remediation_version" },
{% if filter_name != 'Closed' %}
{ "data": "reviewers" },
{% endif %}
Expand Down
Empty file.
146 changes: 146 additions & 0 deletions dojo/tools/github_secrets_detection_report/parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
import json

from dojo.models import Finding


class GithubSecretsDetectionReportParser:

    """
    Parse secret-scanning alerts exported from the GitHub Secret Scanning
    REST API (https://docs.github.com/en/rest/secret-scanning) into
    DefectDojo findings. The report is expected to be a JSON list of alerts.
    """

    def get_scan_types(self):
        # Must match the scan-type key registered in settings
        # (HASHCODE_FIELDS_PER_SCANNER / DEDUPLICATION_ALGORITHM_PER_PARSER),
        # which use "Github Secrets Detection Report" without a trailing
        # " Scan"; otherwise the hash-code/dedupe configuration never applies.
        return ["Github Secrets Detection Report"]

    def get_label_for_scan_types(self, scan_type):
        return "Github Secrets Detection Report"

    def get_description_for_scan_types(self, scan_type):
        # The report is REST API output, not the result of a CLI "--json" option.
        return "Import findings in JSON format from the Github Secret Scanning REST API."

    def get_findings(self, file, test):
        """
        Return a list of Finding objects parsed from the uploaded report.

        :param file: file-like object containing the JSON report
        :param test: the Test the findings will be attached to
        :raises TypeError: when the top-level JSON document is not a list
        """
        data = json.load(file)

        if not isinstance(data, list):
            error_msg = "Invalid GitHub secrets detection report format, expected a JSON list of alerts."
            raise TypeError(error_msg)

        findings = []
        for alert in data:
            # Extract basic alert information; default to sensible values so
            # a sparse alert object still yields a usable finding.
            alert_number = alert.get("number")
            state = alert.get("state", "open")
            secret_type = alert.get("secret_type", "Unknown")
            secret_type_display_name = alert.get("secret_type_display_name", secret_type)
            html_url = alert.get("html_url", "")

            # Create title
            title = f"Exposed Secret Detected: {secret_type_display_name}"

            # Build a markdown description, section by section
            desc_lines = []
            if html_url:
                desc_lines.append(f"**GitHub Alert**: [{html_url}]({html_url})")

            desc_lines.extend([f"**Secret Type**: {secret_type_display_name}", f"**Alert State**: {state}"])

            # Add repository information
            repository = alert.get("repository", {})
            if repository:
                repo_full_name = repository.get("full_name")
                if repo_full_name:
                    desc_lines.append(f"**Repository**: {repo_full_name}")

            # Add location information (also reused below for
            # finding.file_path / finding.line)
            first_location = alert.get("first_location_detected", {})
            if first_location:
                file_path = first_location.get("path")
                start_line = first_location.get("start_line")
                end_line = first_location.get("end_line")

                if file_path:
                    desc_lines.append(f"**File Path**: {file_path}")
                if start_line:
                    if end_line and end_line != start_line:
                        desc_lines.append(f"**Lines**: {start_line}-{end_line}")
                    else:
                        desc_lines.append(f"**Line**: {start_line}")

            # Add resolution information
            resolution = alert.get("resolution")
            if resolution:
                desc_lines.append(f"**Resolution**: {resolution}")

            resolved_by = alert.get("resolved_by")
            if resolved_by:
                resolved_by_login = resolved_by.get("login", "Unknown")
                desc_lines.append(f"**Resolved By**: {resolved_by_login}")

            resolved_at = alert.get("resolved_at")
            if resolved_at:
                desc_lines.append(f"**Resolved At**: {resolved_at}")

            resolution_comment = alert.get("resolution_comment")
            if resolution_comment:
                desc_lines.append(f"**Resolution Comment**: {resolution_comment}")

            # Add push protection information
            push_protection_bypassed = alert.get("push_protection_bypassed", False)
            if push_protection_bypassed:
                desc_lines.append("**Push Protection Bypassed**: True")

                bypassed_by = alert.get("push_protection_bypassed_by")
                if bypassed_by:
                    bypassed_by_login = bypassed_by.get("login", "Unknown")
                    desc_lines.append(f"**Bypassed By**: {bypassed_by_login}")

                bypassed_at = alert.get("push_protection_bypassed_at")
                if bypassed_at:
                    desc_lines.append(f"**Bypassed At**: {bypassed_at}")
            else:
                desc_lines.append("**Push Protection Bypassed**: False")

            # Add additional metadata (validity / publicly_leaked also drive
            # the severity heuristic below)
            validity = alert.get("validity", "unknown")
            desc_lines.append(f"**Validity**: {validity}")

            publicly_leaked = alert.get("publicly_leaked", False)
            desc_lines.append(f"**Publicly Leaked**: {'Yes' if publicly_leaked else 'No'}")

            multi_repo = alert.get("multi_repo", False)
            desc_lines.append(f"**Multi-Repository**: {'Yes' if multi_repo else 'No'}")

            has_more_locations = alert.get("has_more_locations", False)
            if has_more_locations:
                desc_lines.append("**Note**: This secret has been detected in multiple locations")

            description = "\n\n".join(desc_lines)

            # Severity heuristic: resolved alerts are informational; an
            # active secret is High, or Critical when it is also publicly
            # leaked; anything else (unknown/inactive validity) is Medium.
            if state == "resolved":
                severity = "Info"
            elif validity == "active" and publicly_leaked:
                severity = "Critical"
            elif validity == "active":
                severity = "High"
            else:
                severity = "Medium"

            # Create finding
            finding = Finding(
                title=title,
                test=test,
                description=description,
                severity=severity,
                static_finding=True,
                dynamic_finding=False,
                # "is not None" so an alert numbered 0 is still recorded
                vuln_id_from_tool=str(alert_number) if alert_number is not None else None,
            )

            # Set file path and line information from the first location
            if first_location:
                finding.file_path = first_location.get("path")
                finding.line = first_location.get("start_line")

            # Link back to the alert on GitHub
            if html_url:
                finding.url = html_url

            findings.append(finding)

        return findings
Loading