Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 26 additions & 0 deletions dojo/api_v2/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from datetime import datetime
from pathlib import Path

import pghistory
import tagulous
from crum import get_current_user
from dateutil.relativedelta import relativedelta
Expand Down Expand Up @@ -2530,7 +2531,17 @@ def perform_create(self, serializer):
if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
push_to_jira = push_to_jira or jira_project.push_all_issues

# Add pghistory context for audit trail (adds to existing middleware context).
# /api/vue is the Pro UI
source = "import_vue" if "/api/vue/" in self.request.path else "import_api"
pghistory.context(
source=source,
scan_type=serializer.validated_data.get("scan_type"),
)
serializer.save(push_to_jira=push_to_jira)
# Add test_id to pghistory context now that test is created
if test_id := serializer.data.get("test"):
pghistory.context(test_id=test_id)

def get_queryset(self):
return get_authorized_tests(Permissions.Import_Scan_Result)
Expand Down Expand Up @@ -2678,7 +2689,22 @@ def perform_create(self, serializer):
if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
push_to_jira = push_to_jira or jira_project.push_all_issues
logger.debug("push_to_jira: %s", push_to_jira)
# Add pghistory context for audit trail (adds to existing middleware context)
# For reimport, test may already exist or be created during save
test_id = test.id if test else serializer.validated_data.get("test", {})
if hasattr(test_id, "id"):
test_id = test_id.id
# /api/vue is the Pro UI
source = "reimport_vue" if "/api/vue/" in self.request.path else "reimport_api"
pghistory.context(
source=source,
test_id=test_id if isinstance(test_id, int) else None,
scan_type=serializer.validated_data.get("scan_type"),
)
serializer.save(push_to_jira=push_to_jira)
# Update test_id if it wasn't available before save
if test_id_from_response := serializer.data.get("test"):
pghistory.context(test_id=test_id_from_response)


# Authorization: configuration
Expand Down
28 changes: 26 additions & 2 deletions dojo/celery.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import os
from logging.config import dictConfig

from celery import Celery
from celery import Celery, Task
from celery.signals import setup_logging
from django.conf import settings

Expand All @@ -11,7 +11,31 @@
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dojo.settings.settings")

app = Celery("dojo")

class PgHistoryTask(Task):

    """
    Celery base task that restores pghistory context on the worker side.

    When a task is dispatched via dojo_async_task, the caller's pghistory
    context is captured and smuggled through kwargs under "_pgh_context".
    Before the task body runs, this class pops that entry back out and
    re-enters the context, so every database event recorded by the task
    carries the same audit metadata as the originating request.
    """

    def __call__(self, *args, **kwargs):
        # Deferred import: pulling in dojo.pghistory_utils while Celery is
        # still bootstrapping would create a circular import.
        from dojo.pghistory_utils import get_pghistory_context_manager  # noqa: PLC0415

        # Strip the smuggled context so the wrapped task function never
        # receives an unexpected "_pgh_context" keyword argument.
        captured_context = kwargs.pop("_pgh_context", None)
        context_manager = get_pghistory_context_manager(captured_context)
        with context_manager:
            return super().__call__(*args, **kwargs)


app = Celery("dojo", task_cls=PgHistoryTask)

# Using a string here means the worker will not have to
# pickle the object when using Windows.
Expand Down
7 changes: 7 additions & 0 deletions dojo/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,10 +83,17 @@ def dojo_async_task(func=None, *, signature=False):
def decorator(func):
@wraps(func)
def __wrapper__(*args, **kwargs):
from dojo.pghistory_utils import get_serializable_pghistory_context # noqa: PLC0415 circular import
from dojo.utils import get_current_user # noqa: PLC0415 circular import

user = get_current_user()
kwargs["async_user"] = user

# Capture pghistory context to pass to Celery worker
# The PgHistoryTask base class will apply this context in the worker
if pgh_context := get_serializable_pghistory_context():
kwargs["_pgh_context"] = pgh_context

dojo_async_task_counter.incr(
func.__name__,
args=args,
Expand Down
9 changes: 9 additions & 0 deletions dojo/engagement/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from tempfile import NamedTemporaryFile
from time import strftime

import pghistory
from django.conf import settings
from django.contrib import messages
from django.contrib.admin.utils import NestedObjects
Expand Down Expand Up @@ -1138,10 +1139,18 @@ def post(
if form_error := self.process_form(request, context.get("form"), context):
add_error_message_to_response(form_error)
return self.failure_redirect(request, context)
# Add pghistory context for audit trail (adds to existing middleware context)
pghistory.context(
source="import",
scan_type=context.get("scan_type"),
)
# Kick off the import process
if import_error := self.import_findings(context):
add_error_message_to_response(import_error)
return self.failure_redirect(request, context)
# Add test_id to pghistory context now that test is created
if test := context.get("test"):
pghistory.context(test_id=test.id)
# Process the credential form
if form_error := self.process_credentials_form(request, context.get("cred_form"), context):
add_error_message_to_response(form_error)
Expand Down
6 changes: 6 additions & 0 deletions dojo/finding/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from itertools import chain
from pathlib import Path

import pghistory
from django.conf import settings
from django.contrib import messages
from django.core import serializers
Expand Down Expand Up @@ -2557,6 +2558,11 @@ def finding_bulk_update_all(request, pid=None):
logger.debug("bulk 20")

finding_to_update = request.POST.getlist("finding_to_update")
# Add pghistory context for audit trail (adds to existing middleware context)
pghistory.context(
source="bulk_edit",
finding_count=len(finding_to_update),
)
finds = Finding.objects.filter(id__in=finding_to_update).order_by("id")
total_find_count = finds.count()
prods = set(find.test.engagement.product for find in finds) # noqa: C401
Expand Down
164 changes: 89 additions & 75 deletions dojo/jira_link/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import re

# Third party imports
import pghistory
from django.contrib import messages
from django.contrib.admin.utils import NestedObjects
from django.core.exceptions import PermissionDenied
Expand Down Expand Up @@ -85,85 +86,98 @@ def webhook(request, secret=None):
if request.content_type != "application/json":
return webhook_responser_handler("debug", "only application/json supported")
# Time to process the request
# Parse the JSON first to get webhook event type for context
try:
parsed = json.loads(request.body.decode("utf-8"))
# Check if the events supplied are supported
if parsed.get("webhookEvent") not in {"comment_created", "jira:issue_updated"}:
return webhook_responser_handler("info", f"Unrecognized JIRA webhook event received: {parsed.get('webhookEvent')}")

if parsed.get("webhookEvent") == "jira:issue_updated":
# xml examples at the end of file
jid = parsed["issue"]["id"]
# This may raise a 404, but it will be handled in the exception response
try:
jissue = JIRA_Issue.objects.get(jira_id=jid)
except JIRA_Instance.DoesNotExist:
return webhook_responser_handler("info", f"JIRA issue {jid} is not linked to a DefectDojo Finding")
findings = None
# Determine what type of object we will be working with
if jissue.finding:
logger.debug(f"Received issue update for {jissue.jira_key} for finding {jissue.finding.id}")
findings = [jissue.finding]
elif jissue.finding_group:
logger.debug(f"Received issue update for {jissue.jira_key} for finding group {jissue.finding_group}")
findings = jissue.finding_group.findings.all()
elif jissue.engagement:
return webhook_responser_handler("debug", "Update for engagement ignored")
else:
return webhook_responser_handler("info", f"Received issue update for {jissue.jira_key} for unknown object")
# Process the assignee if present
assignee = parsed["issue"]["fields"].get("assignee")
assignee_name = "Jira User"
if assignee is not None:
# First look for the 'name' field. If not present, try 'displayName'. Else put None
assignee_name = assignee.get("name", assignee.get("displayName"))

# "resolution":{
# "self":"http://www.testjira.com/rest/api/2/resolution/11",
# "id":"11",
# "description":"Cancelled by the customer.",
# "name":"Cancelled"
# },

# or
# "resolution": null

# or
# "resolution": "None"

resolution = parsed["issue"]["fields"]["resolution"]
resolution = resolution if resolution and resolution != "None" else None
resolution_id = resolution["id"] if resolution else None
resolution_name = resolution["name"] if resolution else None
jira_now = parse_datetime(parsed["issue"]["fields"]["updated"])

if findings:
for finding in findings:
jira_helper.process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jissue, finding_group=jissue.finding_group)
# Check for any comment that could have come along with the resolution
if (error_response := check_for_and_create_comment(parsed)) is not None:
return error_response

if parsed.get("webhookEvent") == "comment_created":
if (error_response := check_for_and_create_comment(parsed)) is not None:
return error_response

except Exception as e:
# Check if the issue is originally a 404
if isinstance(e, Http404):
return webhook_responser_handler("debug", str(e))
# Try to get a little more information on the exact exception
return webhook_responser_handler("debug", f"Failed to parse JSON: {e}")

# Check if the events supplied are supported
if parsed.get("webhookEvent") not in {"comment_created", "jira:issue_updated"}:
return webhook_responser_handler("info", f"Unrecognized JIRA webhook event received: {parsed.get('webhookEvent')}")

# Wrap processing with pghistory context for audit trail
# JIRA webhooks don't have a user session, so we create a new context
with pghistory.context(
source="jira_webhook",
jira_event=parsed.get("webhookEvent"),
):
try:
message = (
f"Original Exception: {e}\n"
f"jira webhook body parsed:\n{json.dumps(parsed, indent=4)}"
)
except Exception:
message = (
f"Original Exception: {e}\n"
f"jira webhook body :\n{request.body.decode('utf-8')}"
)
return webhook_responser_handler("debug", message)
if parsed.get("webhookEvent") == "jira:issue_updated":
# xml examples at the end of file
jid = parsed["issue"]["id"]
# This may raise a 404, but it will be handled in the exception response
try:
jissue = JIRA_Issue.objects.get(jira_id=jid)
except JIRA_Instance.DoesNotExist:
return webhook_responser_handler("info", f"JIRA issue {jid} is not linked to a DefectDojo Finding")
# Add jira_key to context now that we have it
pghistory.context(jira_key=jissue.jira_key)
findings = None
# Determine what type of object we will be working with
if jissue.finding:
logger.debug(f"Received issue update for {jissue.jira_key} for finding {jissue.finding.id}")
findings = [jissue.finding]
elif jissue.finding_group:
logger.debug(f"Received issue update for {jissue.jira_key} for finding group {jissue.finding_group}")
findings = jissue.finding_group.findings.all()
elif jissue.engagement:
return webhook_responser_handler("debug", "Update for engagement ignored")
else:
return webhook_responser_handler("info", f"Received issue update for {jissue.jira_key} for unknown object")
# Process the assignee if present
assignee = parsed["issue"]["fields"].get("assignee")
assignee_name = "Jira User"
if assignee is not None:
# First look for the 'name' field. If not present, try 'displayName'. Else put None
assignee_name = assignee.get("name", assignee.get("displayName"))

# "resolution":{
# "self":"http://www.testjira.com/rest/api/2/resolution/11",
# "id":"11",
# "description":"Cancelled by the customer.",
# "name":"Cancelled"
# },

# or
# "resolution": null

# or
# "resolution": "None"

resolution = parsed["issue"]["fields"]["resolution"]
resolution = resolution if resolution and resolution != "None" else None
resolution_id = resolution["id"] if resolution else None
resolution_name = resolution["name"] if resolution else None
jira_now = parse_datetime(parsed["issue"]["fields"]["updated"])

if findings:
for finding in findings:
jira_helper.process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jissue, finding_group=jissue.finding_group)
# Check for any comment that could have come along with the resolution
if (error_response := check_for_and_create_comment(parsed)) is not None:
return error_response

if parsed.get("webhookEvent") == "comment_created":
if (error_response := check_for_and_create_comment(parsed)) is not None:
return error_response

except Exception as e:
# Check if the issue is originally a 404
if isinstance(e, Http404):
return webhook_responser_handler("debug", str(e))
# Try to get a little more information on the exact exception
try:
message = (
f"Original Exception: {e}\n"
f"jira webhook body parsed:\n{json.dumps(parsed, indent=4)}"
)
except Exception:
message = (
f"Original Exception: {e}\n"
f"jira webhook body :\n{request.body.decode('utf-8')}"
)
return webhook_responser_handler("debug", message)

return webhook_responser_handler("No logging here", "Success!")

Expand Down
16 changes: 16 additions & 0 deletions dojo/management/commands/dedupe.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging

import pghistory
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.models import Prefetch
Expand Down Expand Up @@ -64,6 +65,21 @@ def handle(self, *args, **options):
dedupe_sync = options["dedupe_sync"]
dedupe_batch_mode = options.get("dedupe_batch_mode", True) # Default to True (batch mode enabled)

# Wrap with pghistory context for audit trail
with pghistory.context(
source="dedupe_command",
dedupe_sync=dedupe_sync,
):
self._run_dedupe(
restrict_to_parsers=restrict_to_parsers,
hash_code_only=hash_code_only,
dedupe_only=dedupe_only,
dedupe_sync=dedupe_sync,
dedupe_batch_mode=dedupe_batch_mode,
)

def _run_dedupe(self, *, restrict_to_parsers, hash_code_only, dedupe_only, dedupe_sync, dedupe_batch_mode):
"""Internal method to run the dedupe logic within pghistory context."""
if restrict_to_parsers is not None:
findings = Finding.objects.filter(test__test_type__name__in=restrict_to_parsers).exclude(duplicate=True)
logger.info("######## Will process only parsers %s and %d findings ########", *restrict_to_parsers, findings.count())
Expand Down
14 changes: 7 additions & 7 deletions dojo/management/commands/jira_status_reconciliation.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging

import pghistory
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django.core.management.base import BaseCommand
Expand Down Expand Up @@ -216,10 +217,9 @@ def add_arguments(self, parser):
parser.add_argument("--dryrun", action="store_true", help="Only print actions to be performed, but make no modifications.")

def handle(self, *args, **options):
    """
    Entry point for the jira_status_reconciliation management command.

    Runs the reconciliation inside a pghistory context so that every
    database event it produces is attributed to this command (and the
    selected mode) in the audit trail.

    Returns whatever jira_status_reconciliation returns (a status string
    or None), forwarding all parsed command-line options to it.
    """
    # NOTE(review): the span previously carried leftover commented-out
    # option reads and a duplicate unconditional return from the old
    # implementation; only the context-wrapped call is kept.
    # Wrap with pghistory context for audit trail
    with pghistory.context(
        source="jira_reconciliation",
        mode=options.get("mode", "reconcile"),
    ):
        return jira_status_reconciliation(*args, **options)
Loading