diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 3f82aaa0dc3..ae0f0db498f 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,7 +26,7 @@ This checklist is for your information. - [ ] Bugfixes should be submitted against the `bugfix` branch. - [ ] Give a meaningful name to your PR, as it may end up being used in the release notes. - [ ] Your code is flake8 compliant. -- [ ] Your code is python 3.11 compliant. +- [ ] Your code is python 3.12 compliant. - [ ] If this is a new feature and not a bug fix, you've included the proper documentation in the docs at https://github.com/DefectDojo/django-DefectDojo/tree/dev/docs as part of this PR. - [ ] Model changes must include the necessary migrations in the dojo/db_migrations folder. - [ ] Add applicable tests to the unit tests. diff --git a/.github/workflows/close-stale.yml b/.github/workflows/close-stale.yml index 491ecb5b082..0b371f1cb60 100644 --- a/.github/workflows/close-stale.yml +++ b/.github/workflows/close-stale.yml @@ -15,13 +15,24 @@ jobs: close-stale: runs-on: ubuntu-latest steps: - - name: Close stale issues and PRs + - name: Close issues and PRs that are pending closure uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0 + with: + # Disable automatic stale marking - only close manually labeled items + days-before-stale: -1 + days-before-close: 7 + stale-issue-label: 'pending-closure' + stale-pr-label: 'pending-closure' + close-issue-message: 'This issue has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' + close-pr-message: 'This PR has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' + + - name: Close stale issues and PRs + uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0 with: # Disable automatic stale marking - only close manually labeled items days-before-stale: -1 days-before-close: 7 stale-issue-label: 'stale' stale-pr-label: 'stale' - close-issue-message: 'This issue has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' - close-pr-message: 'This PR has been automatically closed because it was manually labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' \ No newline at end of file + close-issue-message: 'This issue has been automatically closed because it was labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' + close-pr-message: 'This PR has been automatically closed because it was labeled as stale. If you believe this was closed in error, please reopen it and remove the stale label.' 
diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 301047bbefc..5a749e0946f 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -19,12 +19,12 @@ jobs: extended: true - name: Setup Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: - node-version: '22.19.0' + node-version: '22.20.0' - name: Cache dependencies - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 9802ad91d3b..784ee42b676 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -2,12 +2,18 @@ name: Integration tests on: workflow_call: + inputs: + auditlog_type: + type: string + default: "django-auditlog" jobs: integration_tests: # run tests with docker compose name: User Interface Tests runs-on: ubuntu-latest + env: + AUDITLOG_TYPE: ${{ inputs.auditlog_type }} strategy: matrix: test-case: [ diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml index f9e8cd38029..dc30f685793 100644 --- a/.github/workflows/k8s-tests.yml +++ b/.github/workflows/k8s-tests.yml @@ -27,7 +27,7 @@ jobs: # are tested (https://docs.aws.amazon.com/eks/latest/userguide/kubernetes-versions.html#available-versions) - databases: pgsql brokers: redis - k8s: 'v1.33.4' + k8s: 'v1.34.0' os: debian steps: - name: Checkout @@ -36,7 +36,7 @@ jobs: - name: Setup Minikube uses: manusa/actions-setup-minikube@b589f2d61bf96695c546929c72b38563e856059d # v2.14.0 with: - minikube version: 'v1.33.1' + minikube version: 'v1.37.0' kubernetes version: ${{ matrix.k8s }} driver: docker start args: '--addons=ingress --cni calico' @@ -108,43 +108,46 @@ jobs: echo "INFO: status:" kubectl get pods echo "INFO: logs:" - kubectl logs --selector=$3 --all-containers=true + kubectl logs --selector=$3 --all-containers=true exit 1 fi return ${?} } echo "Waiting for init job..." - to_complete "condition=Complete" job "defectdojo.org/component=initializer" + to_complete "condition=Complete" job "defectdojo.org/component=initializer" echo "Waiting for celery pods..." - to_complete "condition=ready" pod "defectdojo.org/component=celery" + to_complete "condition=ready" pod "defectdojo.org/component=celery" echo "Waiting for django pod..." 
- to_complete "condition=ready" pod "defectdojo.org/component=django" + to_complete "condition=ready" pod "defectdojo.org/component=django" echo "Pods up and ready to rumbole" kubectl get pods + + - name: Test login page + timeout-minutes: 10 + run: |- RETRY=0 while : do DJANGO_IP=$(kubectl get svc defectdojo-django -o jsonpath='{.spec.clusterIP}') OUT=$(kubectl run curl --quiet=true --image=curlimages/curl:8.15.0 \ - --overrides='{ "apiVersion": "v1" }' \ --restart=Never -i --rm -- \ --silent \ --max-time 20 \ --head \ --header "Host: $DD_HOSTNAME" \ - http://$DJANGO_IP/login?next=/) + "http://${DJANGO_IP}/login?next=/") echo $OUT - CR=`echo $OUT | egrep "^HTTP" | cut -d' ' -f2` + CR=$(echo $OUT | egrep "^HTTP" | cut -d' ' -f2) echo $CR if [[ $CR -ne 200 ]]; then echo $RETRY if [[ $RETRY -gt 2 ]]; then kubectl get pods - echo `kubectl logs --tail=30 -l defectdojo.org/component=django -c uwsgi` + echo $(kubectl logs --tail=30 -l defectdojo.org/component=django -c uwsgi) echo "ERROR: cannot display login screen; got HTTP code $CR" exit 1 else - ((RETRY++)) + RETRY=$((RETRY+1)) echo "Attempt $RETRY to get login page" sleep 5 fi @@ -153,29 +156,51 @@ jobs: break fi done + + - name: Test API auth call + timeout-minutes: 10 + run: |- ADMIN_PASS=$(kubectl get secret/defectdojo -o jsonpath='{.data.DD_ADMIN_PASSWORD}' | base64 -d) echo "Simple API check" DJANGO_IP=$(kubectl get svc defectdojo-django -o jsonpath='{.spec.clusterIP}') - CR=$(kubectl run curl --quiet=true --image=curlimages/curl:8.15.0 \ - --overrides='{ "apiVersion": "v1" }' \ - --restart=Never -i --rm -- \ - --silent \ - --max-time 20 \ - --header "Host: $DD_HOSTNAME" \ - --data-raw "username=admin&password=$ADMIN_PASS" \ - --output /dev/null \ - --write-out "%{http_code}\n" \ - http://$DJANGO_IP/api/v2/api-token-auth/) - echo $CR - if [[ $CR -ne 200 ]]; then - echo "ERROR: login is not possible; got HTTP code $CR" - exit 1 - else - echo "Result received" - fi + RETRY=0 + while : + do + OUT=$(kubectl run curl --quiet=true --image=curlimages/curl:8.15.0 \ + --restart=Never -i --rm -- \ + --dump-header - \ + --no-progress-meter \ + --max-time 20 \ + --header "Host: $DD_HOSTNAME" \ + --data-raw "username=admin&password=$ADMIN_PASS" \ + "http://${DJANGO_IP}/api/v2/api-token-auth/") + CR=$(echo $OUT | egrep "^HTTP" | cut -d' ' -f2) + echo "Return code $CR" + if [[ $CR -ne 200 ]]; then + echo "Retry: $RETRY" + if [[ $RETRY -gt 2 ]]; then + kubectl get pods + echo $(kubectl logs --tail=30 -l defectdojo.org/component=django -c uwsgi) + echo "ERROR: cannot perform API login; got HTTP code $CR; Full response:" + echo $OUT + exit 1 + else + RETRY=$((RETRY+1)) + echo "Attempt $RETRY to perform API login" + sleep 5 + fi + else + echo "Result received" + break + fi + done + + - name: Check of logs + timeout-minutes: 10 + run: |- echo "Final Check of components" - errors=`kubectl get pods | grep Error | awk '{print $1}'` - if [[ ! -z $errors ]]; then + errors=$(kubectl get pods | grep Error | awk '{print $1}') + if [[ ! 
-z $errors ]]; then echo "Few pods with errors" for line in $errors; do echo "Dumping log from $line" @@ -185,3 +210,11 @@ jobs: else echo "DD K8S successfully deployed" fi + + - name: Failed Logs + if: failure() + run: |- + echo "ERROR: Here are logs from deployment/defectdojo-django containers:" + kubectl logs deployment/defectdojo-django --all-pods=true --all-containers=true --tail=100 + echo "And all pod status one more time" + kubectl get pods diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml index cde6795db05..824e7c14f44 100644 --- a/.github/workflows/pr-labeler.yml +++ b/.github/workflows/pr-labeler.yml @@ -15,7 +15,7 @@ jobs: name: "Autolabeler" runs-on: ubuntu-latest steps: - - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0 + - uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" sync-labels: true diff --git a/.github/workflows/release-1-create-pr.yml b/.github/workflows/release-1-create-pr.yml index dfab71f7ab3..4e4b710400f 100644 --- a/.github/workflows/release-1-create-pr.yml +++ b/.github/workflows/release-1-create-pr.yml @@ -80,6 +80,11 @@ jobs: sed -ri "0,/version/s/version: \S+/$NEW_CHART_VERSION/" helm/defectdojo/Chart.yaml fi + - name: Update values in HELM chart + run: | + yq -i '.annotations."artifacthub.io/prerelease" = "false"' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/changes" += "- kind: changed\n description: Bump DefectDojo to ${{ inputs.release_number }}\n"' helm/defectdojo/Chart.yaml + - name: Check version numbers run: | grep -H version dojo/__init__.py @@ -87,6 +92,11 @@ jobs: grep -H appVersion helm/defectdojo/Chart.yaml grep -H version helm/defectdojo/Chart.yaml + - name: Run helm-docs + uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + with: + chart-search-root: "helm/defectdojo" + - name: Push version changes uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 with: @@ -97,7 +107,7 @@ jobs: branch: ${{ env.NEW_BRANCH }} - name: Create Pull Request - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/release-3-master-into-dev.yml b/.github/workflows/release-3-master-into-dev.yml index bfca2a82091..d13ce0a9323 100644 --- a/.github/workflows/release-3-master-into-dev.yml +++ b/.github/workflows/release-3-master-into-dev.yml @@ -74,6 +74,17 @@ jobs: git add docs/content/en/open_source/upgrading/$minorv.md if: endsWith(inputs.release_number_new, '.0') && endsWith(inputs.release_number_dev, '.0-dev') + - name: Update values in HELM chart + run: | + yq -i '.annotations = {}' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/prerelease" = "true"' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/changes" = ""' helm/defectdojo/Chart.yaml + + - name: Run helm-docs + uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + with: + chart-search-root: "helm/defectdojo" + - name: Push version changes uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 with: @@ -84,7 +95,7 @@ jobs: branch: ${{ env.NEW_BRANCH }} - name: Create Pull Request - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -139,6 +150,17 @@ jobs: grep appVersion helm/defectdojo/Chart.yaml grep version components/package.json + - name: Update values in HELM chart + run: | + yq -i '.annotations = {}' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/prerelease" = "true"' helm/defectdojo/Chart.yaml + yq -i '.annotations."artifacthub.io/changes" = ""' helm/defectdojo/Chart.yaml + + - name: Run helm-docs + uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + with: + chart-search-root: "helm/defectdojo" + - name: Push version changes uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # v6.0.1 with: @@ -149,7 +171,7 @@ jobs: branch: ${{ env.NEW_BRANCH }} - name: Create Pull Request - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/release-x-manual-docker-containers.yml b/.github/workflows/release-x-manual-docker-containers.yml index 779db6daf99..a492bed7518 100644 --- a/.github/workflows/release-x-manual-docker-containers.yml +++ b/.github/workflows/release-x-manual-docker-containers.yml @@ -52,7 +52,7 @@ jobs: run: echo "DOCKER_ORG=$(echo ${GITHUB_REPOSITORY%%/*} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - name: Login to DockerHub - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/release-x-manual-helm-chart.yml b/.github/workflows/release-x-manual-helm-chart.yml index fc096eca65b..bd09c558bf9 100644 --- a/.github/workflows/release-x-manual-helm-chart.yml +++ b/.github/workflows/release-x-manual-helm-chart.yml @@ -66,12 +66,11 @@ jobs: - name: Configure HELM repos run: |- - helm repo add bitnami https://charts.bitnami.com/bitnami helm dependency list ./helm/defectdojo helm dependency update ./helm/defectdojo - name: Add yq - uses: mikefarah/yq@f03c9dc599c37bfcaf533427211d05e51e6fee64 # v4.47.1 + uses: mikefarah/yq@6251e95af8df3505def48c71f3119836701495d6 # v4.47.2 - name: Pin version docker version id: pin_image @@ -88,7 +87,7 @@ jobs: echo "chart_version=$(ls build | cut -d '-' -f 2,3 | sed 's|\.tgz||')" >> $GITHUB_ENV - name: Create release ${{ inputs.release_number }} - uses: softprops/action-gh-release@72f2c25fcb47643c292f7107632f7a47c1df5cd8 # v2.3.2 + uses: softprops/action-gh-release@62c96d0c4e8a889135c1f3a25910db8dbe0e85f7 # v2.3.4 with: name: '${{ inputs.release_number }} 🌈' tag_name: ${{ inputs.release_number }} diff --git a/.github/workflows/release-x-manual-merge-container-digests.yml b/.github/workflows/release-x-manual-merge-container-digests.yml index 54ca05b8be3..65abfdb7e08 100644 --- a/.github/workflows/release-x-manual-merge-container-digests.yml +++ b/.github/workflows/release-x-manual-merge-container-digests.yml @@ -48,7 +48,7 @@ jobs: merge-multiple: true - name: Login to DockerHub - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git 
a/.github/workflows/release-x-manual-tag-as-latest.yml b/.github/workflows/release-x-manual-tag-as-latest.yml index 61237863780..0a3d447edd1 100644 --- a/.github/workflows/release-x-manual-tag-as-latest.yml +++ b/.github/workflows/release-x-manual-tag-as-latest.yml @@ -37,7 +37,7 @@ jobs: run: echo "DOCKER_ORG=$(echo ${GITHUB_REPOSITORY%%/*} | tr '[:upper:]' '[:lower:]')" >> $GITHUB_ENV - name: Login to DockerHub - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0 + uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index 0b222fed842..5df066ec486 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -6,11 +6,16 @@ on: platform: type: string default: "linux/amd64" + auditlog_type: + type: string + default: "django-auditlog" jobs: unit_tests: name: Rest Framework Unit Tests runs-on: ${{ inputs.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }} + env: + AUDITLOG_TYPE: ${{ inputs.auditlog_type }} strategy: matrix: diff --git a/.github/workflows/test-helm-chart.yml b/.github/workflows/test-helm-chart.yml index 91a14845391..7e89d2ac7fd 100644 --- a/.github/workflows/test-helm-chart.yml +++ b/.github/workflows/test-helm-chart.yml @@ -10,7 +10,7 @@ on: jobs: lint: - name: Lint chart + name: Lint chart (version) runs-on: ubuntu-latest steps: - name: Checkout @@ -22,13 +22,12 @@ jobs: - name: Set up Helm uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4.3.1 - - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: python-version: 3.13 - name: Configure Helm repos run: |- - helm repo add bitnami https://charts.bitnami.com/bitnami helm dependency list ./helm/defectdojo helm dependency update ./helm/defectdojo @@ -66,6 +65,26 @@ jobs: run: ct lint --config ct.yaml --target-branch ${{ env.ct-branch }} --check-version-increment=false if: env.changed == 'true' + - name: Check update of "artifacthub.io/changes" HELM annotation + if: env.changed == 'true' + run: | + target_branch=${{ env.ct-branch }} + + echo "Checking Chart.yaml annotation changes" + + # Get current branch annotation + current_annotation=$(yq e '.annotations."artifacthub.io/changes"' "helm/defectdojo/Chart.yaml") + + # Get target branch version of Chart.yaml annotation + target_annotation=$(git show "${{ env.ct-branch }}:helm/defectdojo/Chart.yaml" | yq e '.annotations."artifacthub.io/changes"' -) + + if [[ "$current_annotation" == "$target_annotation" ]]; then + echo "::error file=helm/defectdojo/Chart.yaml::The 'artifacthub.io/changes' annotation has not been updated compared to ${{ env.ct-branch }}" + exit 1 + fi + + echo "'artifacthub.io/changes' annotation updated in helm/defectdojo" + # - name: Create kind cluster # uses: helm/kind-action@v1.1.0 # if: env.changed == 'true' @@ -73,3 +92,75 @@ jobs: # - name: Run chart-testing (install) # run: ct install --config ct.yaml --target-branch ${{ env.ct-branch }} --helm-extra-args '--set createSecret=true --set createRabbitMqSecret=true --set createPostgresqlSecret=true --set timeout=900' # if: env.changed == 'true' + + docs_generation: + name: Update documentation + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + # Documentation provided in the README file needs to contain the latest information from `values.yaml` and all other related assets. + # If this step fails, install https://github.com/norwoodj/helm-docs and run locally `helm-docs --chart-search-root helm/defectdojo` before committing your changes. + # The helm-docs documentation will be generated for you. + - name: Run helm-docs + uses: losisin/helm-docs-github-action@a57fae5676e4c55a228ea654a1bcaec8dd3cf5b5 # v1.6.2 + with: + fail-on-diff: true + chart-search-root: "helm/defectdojo" + + generate_schema: + name: Update schema + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + + # The HELM structure supports the existence of a `values.schema.json` file. This file is used to validate all values provided by the user before Helm starts rendering templates. + # The chart needs to have a `values.schema.json` file that is compatible with the default `values.yaml` file. + # If this step fails, install https://github.com/losisin/helm-values-schema-json and run locally `helm schema --use-helm-docs` in `helm/defectdojo` before committing your changes. + # The helm schema will be generated for you. + - name: Generate values schema json + uses: losisin/helm-values-schema-json-action@d5847286fa04322702c4f8d45031974798c83ac7 # v2.3.0 + with: + fail-on-diff: true + working-directory: "helm/defectdojo" + useHelmDocs: true + values: values.yaml + + lint_format: + name: Lint chart (format) + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + fetch-depth: 0 + + - name: Set up Helm + uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # v4.3.1 + + - name: Configure Helm repos + run: |- + helm dependency list ./helm/defectdojo + helm dependency update ./helm/defectdojo + + - name: Lint + run: |- + helm lint ./helm/defectdojo --strict + + artifacthub_linter: + name: Artifacthub Lint + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Run ah lint + working-directory: ./helm/defectdojo + run: |- + docker run --rm \ + -v ${{ github.workspace }}/helm/defectdojo:/workspace \ + -w /workspace \ + artifacthub/ah:v1.21.0@sha256:511818fa90ce87d7132c6214e51ea6dd62eea030f5d2271ce073f948b3060972 \ + ah lint diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index e16990520df..cbda2b40caf 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -25,18 +25,26 @@ jobs: strategy: matrix: platform: ['linux/amd64', 'linux/arm64'] + auditlog_type: ['django-auditlog', 'django-pghistory'] fail-fast: false needs: build-docker-containers uses: ./.github/workflows/rest-framework-tests.yml secrets: inherit with: platform: ${{ matrix.platform}} + auditlog_type: ${{ matrix.auditlog_type }} # only run integration tests for linux/amd64 (default) test-user-interface: needs: build-docker-containers uses: ./.github/workflows/integration-tests.yml secrets: inherit + strategy: + matrix: + auditlog_type: ['django-auditlog', 'django-pghistory'] + fail-fast: false + with: + auditlog_type: ${{ matrix.auditlog_type }} # only run k8s tests for linux/amd64 (default) test-k8s: diff --git a/.github/workflows/validate_docs_build.yml b/.github/workflows/validate_docs_build.yml index 
8fb3c8c8fca..223fa2a2a0c 100644 --- a/.github/workflows/validate_docs_build.yml +++ b/.github/workflows/validate_docs_build.yml @@ -16,12 +16,12 @@ jobs: extended: true - name: Setup Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0 + uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: - node-version: '22.19.0' + node-version: '22.20.0' - name: Cache dependencies - uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4.2.4 + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 with: path: ~/.npm key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} diff --git a/Dockerfile.django-alpine b/Dockerfile.django-alpine index 79731e55d0d..010017b0f50 100644 --- a/Dockerfile.django-alpine +++ b/Dockerfile.django-alpine @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.13-alpine3.22@sha256:8d8c6d3808243160605925c2a7ab2dc5c72d0e75651699b0639143613e0855b8 AS base +FROM python:3.12.11-alpine3.22@sha256:02a73ead8397e904cea6d17e18516f1df3590e05dc8823bd5b1c7f849227d272 AS base FROM base AS build WORKDIR /app RUN \ @@ -29,9 +29,10 @@ RUN \ COPY requirements.txt ./ # CPUCOUNT=1 is needed, otherwise the wheel for uwsgi won't always be build succesfully # https://github.com/unbit/uwsgi/issues/1318#issuecomment-542238096 -RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt +RUN export PYCURL_SSL_LIBRARY=openssl && \ + CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt -FROM base AS django +FROM base AS release WORKDIR /app ARG uid=1001 ARG gid=1337 @@ -55,10 +56,10 @@ RUN \ && \ rm -rf /var/cache/apk/* && \ true -COPY --from=build /tmp/wheels /tmp/wheels -COPY requirements.txt ./ -RUN export PYCURL_SSL_LIBRARY=openssl && \ - pip3 install \ +RUN \ + --mount=from=build,src=/tmp/wheels,target=/tmp/wheels \ + --mount=from=build,src=/app/requirements.txt,target=/app/requirements.txt \ + pip3 install \ --no-cache-dir \ --no-index \ --find-links=/tmp/wheels \ @@ -136,5 +137,11 @@ ENV \ DD_UWSGI_NUM_OF_THREADS="4" ENTRYPOINT ["/entrypoint-uwsgi.sh"] -FROM django AS django-unittests +FROM release AS development +USER root +COPY requirements-dev.txt ./ +RUN pip3 install --no-cache-dir -r requirements-dev.txt +USER ${uid} + +FROM development AS django-unittests COPY unittests/ ./unittests/ diff --git a/Dockerfile.django-debian b/Dockerfile.django-debian index 2a263e1bf48..b8077bb0b77 100644 --- a/Dockerfile.django-debian +++ b/Dockerfile.django-debian @@ -5,7 +5,7 @@ # Dockerfile.nginx to use the caching mechanism of Docker. 
# Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.11-slim-bookworm@sha256:42420f737ba91d509fc60d5ed65ed0492678a90c561e1fa08786ae8ba8b52eda AS base +FROM python:3.12.11-slim-trixie@sha256:d67a7b66b989ad6b6d6b10d428dcc5e0bfc3e5f88906e67d490c4d3daac57047 AS base FROM base AS build WORKDIR /app RUN \ @@ -28,9 +28,10 @@ RUN \ COPY requirements.txt ./ # CPUCOUNT=1 is needed, otherwise the wheel for uwsgi won't always be build succesfully # https://github.com/unbit/uwsgi/issues/1318#issuecomment-542238096 -RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt +RUN export PYCURL_SSL_LIBRARY=openssl && \ + CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt -FROM base AS django +FROM base AS release WORKDIR /app ARG uid=1001 ARG gid=1337 @@ -58,10 +59,10 @@ RUN \ apt-get clean && \ rm -rf /var/lib/apt/lists && \ true -COPY --from=build /tmp/wheels /tmp/wheels -COPY requirements.txt ./ -RUN export PYCURL_SSL_LIBRARY=openssl && \ - pip3 install \ +RUN \ + --mount=from=build,src=/tmp/wheels,target=/tmp/wheels \ + --mount=from=build,src=/app/requirements.txt,target=/app/requirements.txt \ + pip3 install \ --no-cache-dir \ --no-index \ --find-links=/tmp/wheels \ @@ -139,5 +140,11 @@ ENV \ DD_UWSGI_NUM_OF_THREADS="4" ENTRYPOINT ["/entrypoint-uwsgi.sh"] -FROM django AS django-unittests +FROM release AS development +USER root +COPY requirements-dev.txt ./ +RUN pip3 install --no-cache-dir -r requirements-dev.txt +USER ${uid} + +FROM development AS django-unittests COPY unittests/ ./unittests/ diff --git a/Dockerfile.integration-tests-debian b/Dockerfile.integration-tests-debian index ed843d95416..95398cb6e8e 100644 --- a/Dockerfile.integration-tests-debian +++ b/Dockerfile.integration-tests-debian @@ -1,9 +1,9 @@ # code: language=Dockerfile -FROM openapitools/openapi-generator-cli:v7.15.0@sha256:509f01c3c7eee9d1ad286506a7b6aa4624a95b410be9a238a306d209e900621f AS openapitools +FROM openapitools/openapi-generator-cli:v7.16.0@sha256:e56372add5e038753fb91aa1bbb470724ef58382fdfc35082bf1b3e079ce353c AS openapitools # currently only supports x64, no arm yet due to chrome and selenium dependencies -FROM python:3.11.11-slim-bookworm@sha256:42420f737ba91d509fc60d5ed65ed0492678a90c561e1fa08786ae8ba8b52eda AS build +FROM python:3.12.11-slim-trixie@sha256:d67a7b66b989ad6b6d6b10d428dcc5e0bfc3e5f88906e67d490c4d3daac57047 AS build WORKDIR /app RUN \ apt-get -y update && \ @@ -47,11 +47,11 @@ RUN \ apt-get -y install $missing_chrome_deps # Install a suggested list of additional packages (https://stackoverflow.com/a/76734752) -RUN apt-get install -y libxi6 libgconf-2-4 jq libjq1 libonig5 libxkbcommon0 libxss1 libglib2.0-0 libnss3 \ - libfontconfig1 libatk-bridge2.0-0 libatspi2.0-0 libgtk-3-0 libpango-1.0-0 libgdk-pixbuf2.0-0 libxcomposite1 \ +RUN apt-get install -y libxi6 jq libjq1 libonig5 libxkbcommon0 libxss1 libglib2.0-0 libnss3 \ + libfontconfig1 libatk-bridge2.0-0 libatspi2.0-0 libgtk-3-0 libpango-1.0-0 libxcomposite1 \ libxcursor1 libxdamage1 libxtst6 libappindicator3-1 libasound2 libatk1.0-0 libc6 libcairo2 libcups2 libxfixes3 \ libdbus-1-3 libexpat1 libgcc1 libnspr4 libgbm1 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxext6 \ - libxrandr2 libxrender1 gconf-service ca-certificates fonts-liberation libappindicator1 lsb-release xdg-utils + libxrandr2 libxrender1 ca-certificates fonts-liberation lsb-release xdg-utils # Installing the latest stable Google Chrome driver release WORKDIR /opt/chrome-driver diff --git a/Dockerfile.nginx-alpine 
b/Dockerfile.nginx-alpine index 8bd948ee65d..fd50cb9e472 100644 --- a/Dockerfile.nginx-alpine +++ b/Dockerfile.nginx-alpine @@ -5,8 +5,7 @@ # Dockerfile.django-alpine to use the caching mechanism of Docker. # Ref: https://devguide.python.org/#branchstatus -FROM python:3.11.13-alpine3.22@sha256:8d8c6d3808243160605925c2a7ab2dc5c72d0e75651699b0639143613e0855b8 AS base - +FROM python:3.12.11-alpine3.22@sha256:02a73ead8397e904cea6d17e18516f1df3590e05dc8823bd5b1c7f849227d272 AS base FROM base AS build WORKDIR /app RUN \ @@ -27,14 +26,15 @@ RUN \ && \ rm -rf /var/cache/apk/* && \ true -COPY requirements.txt ./ +COPY requirements.txt requirements-dev.txt ./ # CPUCOUNT=1 is needed, otherwise the wheel for uwsgi won't always be build succesfully # https://github.com/unbit/uwsgi/issues/1318#issuecomment-542238096 RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements.txt +# needed for static files debug toolbar +RUN CPUCOUNT=1 pip3 wheel --wheel-dir=/tmp/wheels -r ./requirements-dev.txt FROM build AS collectstatic -ARG COLLECT_DJANGO_DEBUG_TOOLBAR_STATIC=false RUN apk add nodejs npm RUN npm install -g yarn --force @@ -46,6 +46,13 @@ RUN pip3 install \ --find-links=/tmp/wheels \ -r ./requirements.txt +# needed for static files debug toolbar +RUN pip3 install \ + --no-cache-dir \ + --no-index \ + --find-links=/tmp/wheels \ + -r ./requirements-dev.txt + # generate static files COPY components/ ./components/ RUN \ @@ -53,7 +60,8 @@ RUN \ yarn COPY manage.py ./ COPY dojo/ ./dojo/ -RUN env DD_SECRET_KEY='.' DD_DJANGO_DEBUG_TOOLBAR_ENABLED=${COLLECT_DJANGO_DEBUG_TOOLBAR_STATIC} python3 manage.py collectstatic --noinput --verbosity=2 && true +# always collect static for debug toolbar as we can't make it dependant on env variables or build arguments without breaking docker layer caching +RUN env DD_SECRET_KEY='.' 
DD_DJANGO_DEBUG_TOOLBAR_ENABLED=True python3 manage.py collectstatic --noinput --verbosity=2 && true FROM nginx:1.29.1-alpine3.22@sha256:42a516af16b852e33b7682d5ef8acbd5d13fe08fecadc7ed98605ba5e3b26ab8 ARG uid=1001 diff --git a/components/package.json b/components/package.json index 91f406d22f5..09954b463c9 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.50.4", + "version": "2.51.0", "license" : "BSD-3-Clause", "private": true, "dependencies": { @@ -12,8 +12,8 @@ "chosen-bootstrap": "https://github.com/dbtek/chosen-bootstrap", "chosen-js": "^1.8.7", "clipboard": "^2.0.11", - "datatables.net": "^2.3.3", - "datatables.net-buttons-bs": "^3.2.4", + "datatables.net": "^2.3.4", + "datatables.net-buttons-bs": "^3.2.5", "datatables.net-colreorder": "^2.1.1", "drmonty-datatables-plugins": "^1.0.0", "drmonty-datatables-responsive": "^1.0.0", diff --git a/components/yarn.lock b/components/yarn.lock index 5a8534dccb8..78aa6e5e86e 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -187,19 +187,19 @@ datatables.net-bs@^2: datatables.net "2.3.2" jquery ">=1.7" -datatables.net-buttons-bs@^3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/datatables.net-buttons-bs/-/datatables.net-buttons-bs-3.2.4.tgz#7a883c3ee8c6428fb99e6e6e56c39d0051386039" - integrity sha512-wOljUlsJ4sU5pABim+cwbO61ZFRv4aak1PkNL812i/qFwIEcsji7uz59PAx1ZoP1YdNtetj4Vn7D5oTU+Ijedw== +datatables.net-buttons-bs@^3.2.5: + version "3.2.5" + resolved "https://registry.yarnpkg.com/datatables.net-buttons-bs/-/datatables.net-buttons-bs-3.2.5.tgz#fe9a8085a66cabd723833834f29e68e91f28ea14" + integrity sha512-7fXOIue+2jpWPWcIrAXWH3BjEhMUD8L2pInT0tqfoEcl/3T+CH0Q6dHJRI5RiYmYKO/HLjpCQ5yqYAL5DT7iHA== dependencies: datatables.net-bs "^2" - datatables.net-buttons "3.2.4" + datatables.net-buttons "3.2.5" jquery ">=1.7" -datatables.net-buttons@3.2.4: - version "3.2.4" - resolved "https://registry.yarnpkg.com/datatables.net-buttons/-/datatables.net-buttons-3.2.4.tgz#c58cc0bb518da8738bec6e64a54c1135dc257141" - integrity sha512-anA39/R0kpHA2DOwqEHy/ZMXD5vf4tWmyNO0BnO0kJG7AFNvGTUCWBnBifXYg3G64U6JYpYY+MuTFKIB1/ZMTQ== +datatables.net-buttons@3.2.5: + version "3.2.5" + resolved "https://registry.yarnpkg.com/datatables.net-buttons/-/datatables.net-buttons-3.2.5.tgz#e37fc4f06743e057e8e3e4abfda60c988e7c16da" + integrity sha512-OSTl7evbfe0SMee11lyzu5iv/z8Yp05eh3s1QBte/FNqHcoXN8hlAVSSGpYgk5pj8zwHPYIu6fHeMEue4ARUNg== dependencies: datatables.net "^2" jquery ">=1.7" @@ -219,10 +219,10 @@ datatables.net@2.3.2: dependencies: jquery ">=1.7" -datatables.net@^2, datatables.net@^2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-2.3.3.tgz#fe4f96bdbc4cf47c8d11162a7af525ca6a3683d2" - integrity sha512-SWL3za6nheY6gdoiLgCc++tYmxbwrmv2bjrEiII9rXBWXXSbOZct6pjR3FueMVRM5jmt7pQcXiGovfuFDnutQg== +datatables.net@^2, datatables.net@^2.3.4: + version "2.3.4" + resolved "https://registry.yarnpkg.com/datatables.net/-/datatables.net-2.3.4.tgz#8cf69f2e6cb8d271be3d5c4f75a479684d20f253" + integrity sha512-fKuRlrBIdpAl2uIFgl9enKecHB41QmFd/2nN9LBbOvItV/JalAxLcyqdZXex7wX4ZXjnJQEnv6xeS9veOpKzSw== dependencies: jquery ">=1.7" diff --git a/docker-compose.override.dev.yml b/docker-compose.override.dev.yml index 650d8b14bb6..65b39e350ec 100644 --- a/docker-compose.override.dev.yml +++ b/docker-compose.override.dev.yml @@ -1,13 +1,17 @@ --- services: uwsgi: + build: + context: . 
+ dockerfile: Dockerfile.django-debian + target: development entrypoint: ['/wait-for-it.sh', '${DD_DATABASE_HOST:-postgres}:${DD_DATABASE_PORT:-5432}', '-t', '30', '--', '/entrypoint-uwsgi-dev.sh'] volumes: - '.:/app:z' environment: PYTHONWARNINGS: error # We are strict about Warnings during development DD_DEBUG: 'True' - DD_DJANGO_DEBUG_TOOLBAR_ENABLED: 'True' + DD_DJANGO_DEBUG_TOOLBAR_ENABLED: "${DD_DJANGO_DEBUG_TOOLBAR_ENABLED:-False}" DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}" DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}" DD_EMAIL_URL: "smtp://mailhog:1025" @@ -34,11 +38,6 @@ services: DD_ADMIN_USER: "${DD_ADMIN_USER:-admin}" DD_ADMIN_PASSWORD: "${DD_ADMIN_PASSWORD:-admin}" nginx: - build: - args: - COLLECT_DJANGO_DEBUG_TOOLBAR_STATIC: 'True' - environment: - DD_DJANGO_DEBUG_TOOLBAR_ENABLED: 'True' volumes: - './dojo/static/dojo:/usr/share/nginx/html/static/dojo' postgres: diff --git a/docker-compose.override.unit_tests.yml b/docker-compose.override.unit_tests.yml index b5eb939356e..d1b90f57fdd 100644 --- a/docker-compose.override.unit_tests.yml +++ b/docker-compose.override.unit_tests.yml @@ -1,10 +1,6 @@ --- services: - nginx: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'nginx'] - volumes: - - defectdojo_media_unit_tests:/usr/share/nginx/html/media + nginx: !reset uwsgi: build: target: django-unittests @@ -12,6 +8,9 @@ services: volumes: - '.:/app:z' - "defectdojo_media_unit_tests:${DD_MEDIA_ROOT:-/app/media}" + depends_on: !override + postgres: + condition: service_started environment: PYTHONWARNINGS: error # We are strict about Warnings during testing DD_DEBUG: 'True' @@ -30,15 +29,9 @@ services: DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite' DD_CELERY_BROKER_PARAMS: '' DD_JIRA_EXTRA_ISSUE_TYPES: 'Vulnerability' # Shouldn't trigger a migration error - celerybeat: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'celery beat'] - celeryworker: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'celery worker'] - initializer: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'initializer'] + celerybeat: !reset + celeryworker: !reset + initializer: !reset postgres: ports: - target: ${DD_DATABASE_PORT:-5432} @@ -49,9 +42,7 @@ services: POSTGRES_DB: ${DD_TEST_DATABASE_NAME:-test_defectdojo} volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data - redis: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'redis'] + redis: !reset "webhook.endpoint": image: mccutchen/go-httpbin:2.18.3@sha256:3992f3763e9ce5a4307eae0a869a78b4df3931dc8feba74ab823dd2444af6a6b volumes: diff --git a/docker-compose.override.unit_tests_cicd.yml b/docker-compose.override.unit_tests_cicd.yml index 62f59d13769..8d6eec1701c 100644 --- a/docker-compose.override.unit_tests_cicd.yml +++ b/docker-compose.override.unit_tests_cicd.yml @@ -1,10 +1,6 @@ --- services: - nginx: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'nginx'] - volumes: - - defectdojo_media_unit_tests:/usr/share/nginx/html/media + nginx: !reset uwsgi: build: 
target: django-unittests @@ -12,6 +8,9 @@ services: volumes: - '.:/app:z' - "defectdojo_media_unit_tests:${DD_MEDIA_ROOT:-/app/media}" + depends_on: !override + postgres: + condition: service_started environment: PYTHONWARNINGS: error # We are strict about Warnings during testing DD_DEBUG: 'True' @@ -29,15 +28,9 @@ services: DD_CELERY_BROKER_PATH: '/dojo.celerydb.sqlite' DD_CELERY_BROKER_PARAMS: '' DD_JIRA_EXTRA_ISSUE_TYPES: 'Vulnerability' # Shouldn't trigger a migration error - celerybeat: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'celery beat'] - celeryworker: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'celery worker'] - initializer: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'initializer'] + celerybeat: !reset + celeryworker: !reset + initializer: !reset postgres: ports: - target: ${DD_DATABASE_PORT:-5432} @@ -48,9 +41,7 @@ services: POSTGRES_DB: ${DD_TEST_DATABASE_NAME:-test_defectdojo} volumes: - defectdojo_postgres_unit_tests:/var/lib/postgresql/data - redis: - image: busybox:1.37.0-musl@sha256:254e6134b1bf813b34e920bc4235864a54079057d51ae6db9a4f2328f261c2ad - entrypoint: ['echo', 'skipping', 'redis'] + redis: !reset "webhook.endpoint": image: mccutchen/go-httpbin:2.18.3@sha256:3992f3763e9ce5a4307eae0a869a78b4df3931dc8feba74ab823dd2444af6a6b volumes: diff --git a/docker-compose.yml b/docker-compose.yml index 727492e474d..f18651fa52e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -34,7 +34,7 @@ services: build: context: ./ dockerfile: "Dockerfile.django-${DEFECT_DOJO_OS:-debian}" - target: django + target: release image: "defectdojo/defectdojo-django:${DJANGO_VERSION:-latest}" depends_on: initializer: @@ -120,7 +120,7 @@ services: source: ./docker/extra_settings target: /app/docker/extra_settings postgres: - image: postgres:17.6-alpine@sha256:3406990b6e4c7192317b6fdc5680498744f6142f01f0287f4ee0420d8c74063c + image: postgres:18.0-alpine@sha256:70b32afe0c274b4d93098fd724fcdaab3aba47270a4f1e63cbf9cc69d7bf1be4 environment: POSTGRES_DB: ${DD_DATABASE_NAME:-defectdojo} POSTGRES_USER: ${DD_DATABASE_USER:-defectdojo} @@ -129,7 +129,7 @@ services: - defectdojo_postgres:/var/lib/postgresql/data redis: # Pinning to this version due to licensing constraints - image: redis:7.2.10-alpine@sha256:395ccd7ee4db0867de0d0410f4712a9e0331cff9fdbd864f71ec0f7982d3ffe6 + image: redis:7.2.11-alpine@sha256:7632e82373929f39cdbead93f2e45d8b3cd295072c4755e00e7e6b19d56cc512 volumes: - defectdojo_redis:/data volumes: diff --git a/docker/entrypoint-initializer.sh b/docker/entrypoint-initializer.sh index 52650b036bf..ec193ef6f06 100755 --- a/docker/entrypoint-initializer.sh +++ b/docker/entrypoint-initializer.sh @@ -110,6 +110,8 @@ python3 manage.py makemigrations --no-input --check --dry-run --verbosity 3 || { cat <<-EOF ******************************************************************************** +WARNING: Missing Database Migrations Detected +******************************************************************************** You made changes to the models without creating a DB migration for them. 
@@ -119,15 +121,25 @@ If you're not familiar with migrations in Django, please read the great documentation thoroughly: https://docs.djangoproject.com/en/5.0/topics/migrations/ +This is now a WARNING and the container will continue to start. +However, you should create the necessary migrations as soon as possible using: +docker compose exec uwsgi bash -c 'python manage.py makemigrations -v2' + ******************************************************************************** EOF - exit 1 + echo "WARNING: Continuing startup despite missing migrations..." } echo "Migrating" python3 manage.py migrate +echo "Configuring pghistory triggers based on audit settings" +cat < + +### Sample Scan Data +Sample Github SAST scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/github_secrets_detection_report_many_vul.json). \ No newline at end of file diff --git a/docs/content/en/connecting_your_tools/parsers/file/openvas.md b/docs/content/en/connecting_your_tools/parsers/file/openvas.md index b0153900161..78596cd1188 100644 --- a/docs/content/en/connecting_your_tools/parsers/file/openvas.md +++ b/docs/content/en/connecting_your_tools/parsers/file/openvas.md @@ -2,16 +2,39 @@ title: "OpenVAS Parser" toc_hide: true --- -You can either upload the exported results of an OpenVAS Scan in a .csv or .xml format. +You can upload the results of an OpenVAS/Greenbone report in either .csv or .xml format. ### Sample Scan Data Sample OpenVAS scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/openvas). -### Default Deduplication Hashcode Fields -By default, DefectDojo identifies duplicate Findings using these [hashcode fields](https://docs.defectdojo.com/en/working_with_findings/finding_deduplication/about_deduplication/): +### Parser versions +The OpenVAS parser has two versions: Version 2 and the legacy version. Only version 2 should be used going forward. This documentation assumes Version 2. + +Version 2 comes with a number of improvements: +- Use of a hash code algorithm for deduplication +- Increased consistency in parsing between the XML and CSV parsers. +- Combined findings where the only differences are in fields that cannot be rehashed due to inconsistent values between scans (e.g. fields containing timestamps or packet IDs). This prevents duplicates if the vulnerability is found multiple times on the same endpoint. +- Increased parser value coverage +- Heuristic for fix_available detection +- Updated mapping to DefectDojo fields compared to version 1. + +### Deduplication Algorithm +Default Deduplication Hashcode Fields: +By default, DefectDojo Parser V2 identifies duplicate findings using the following [hashcode fields](https://docs.defectdojo.com/en/working_with_findings/finding_deduplication/about_deduplication/): - title -- cwe -- line -- file path -- description +- severity +- vuln_id_from_tool +- endpoints + +The legacy version (version 1) uses the legacy deduplication algorithm. + +### CSV and XML differences and similarities +The parser attempts to parse XML and CSV files in a similar way. However, this is not always possible. The following lists the differences between the parsers: + +- EPSS scores and percentiles are only available in CSV format. +- CVSS vectors are only available in the XML format. +- The CVSS score will always be reported as CVSS v3 in the CSV parser. +- The references in the CSV parser will never contain URLs.
+ +If no supported CVSS version is detected, the score (if present) is registered as a CVSS v3 score, even if this is incorrect. diff --git a/docs/content/en/connecting_your_tools/parsers/file/snyk_issue_api.md b/docs/content/en/connecting_your_tools/parsers/file/snyk_issue_api.md index 6de9cd61f94..4306110de71 100644 --- a/docs/content/en/connecting_your_tools/parsers/file/snyk_issue_api.md +++ b/docs/content/en/connecting_your_tools/parsers/file/snyk_issue_api.md @@ -2,7 +2,11 @@ title: "Snyk Issue API" toc_hide: true --- -The Snyk Issue API parser supports importing vulnerability data from the Snyk Issue API in JSON format. Currently only parsing issues of type `code` is supported. Samples of ther issue types are welcome. +The Snyk Issue API parser supports importing vulnerability data from the Snyk Issue API in JSON format. + +Currently, parsing issues of type `code` (SAST) and `package_vulnerability` (SCA) is supported. + +Samples of other issue types are welcome. For more information about the Snyk Issue API, refer to the [official Snyk API documentation](https://docs.snyk.io/snyk-api/reference/issues#get-orgs-org_id-issues). diff --git a/docs/content/en/customize_dojo/notifications/email_slack_teams.md b/docs/content/en/customize_dojo/notifications/email_slack_teams.md index 617d9cbe76d..248defa7b70 100644 --- a/docs/content/en/customize_dojo/notifications/email_slack_teams.md +++ b/docs/content/en/customize_dojo/notifications/email_slack_teams.md @@ -99,7 +99,7 @@ If you want to apply RBAC\-based filtering to your Slack messages, enabling pers If your team has a Slack integration enabled (through the above process), individual users can also configure notifications to send directly to your personal Slackbot channel. -1. Start by navigating to your personal Profile page on DefectDojo. Find this by clicking the 👤 **icon** in the top\-right corner. Select your DefectDojo Username from the list. (👤 **paul** in our example) +1. Start by navigating to your personal Profile page on DefectDojo. Find this by clicking the 👤 **icon** in the top\-right corner. Select your DefectDojo Username from the list. (👤 **paul** in our example) ​ ![image](images/Configure_a_Slack_Integration_4.png) @@ -111,14 +111,16 @@ You can now [set specific notifications](../about_notifications/) to be sent to Microsoft Teams can receive notifications to a specific channel. To do this, you will need to **set up an incoming webhook** on the channel where you wish to receive messages. -1. Complete the process listed in the **[Microsoft Teams Documentation](https://learn.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook?tabs=dotnet)** for creating a new Incoming Webhook. Keep your unique webhook.office.com link handy as you will need it in subsequent steps. +Please note that the old [Office Connector webhooks](https://learn.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook?tabs=newteams%2Cdotnet) will be retired by Microsoft; use a new Power Automate Workflow-based webhook as documented below. + +1. Complete the process listed in the **[Microsoft Teams Documentation](https://support.microsoft.com/en-us/office/create-incoming-webhooks-with-workflows-for-microsoft-teams-8ae491c7-0394-4861-ba59-055e33f75498)** for creating a new Incoming Webhook. Keep your unique logic.azure.com link handy as you will need it in subsequent steps. You can create a webhook for a channel or for a specific chat.
​ ![image](images/Configure_a_Microsoft_Teams_Integration.png) 2. In DefectDojo, navigate to **Configuration \> System Settings** from the sidebar. (In the Pro UI, this form is located under **Enterprise Settings > System Settings**.) -3. Check the **Enable Microsoft Teams notifications** box. This will open a hidden section of the form, labeled **‘Msteams url**’. +3. Check the **Enable Microsoft Teams notifications** box. This will open a hidden section of the form, labeled **‘Msteams url**’. ​ ![image](images/Configure_a_Microsoft_Teams_Integration_2.png) -4. Paste the webhook.office.com URL (created in Step 1\) in the **Msteams url** box. Your Teams app will now listen to incoming Notifications from DefectDojo and post them to the channel you selected. +4. Paste the logic.azure.com URL (created in Step 1\) in the **Msteams url** box. Your Teams app will now listen to incoming Notifications from DefectDojo and post them to the channel you selected. ### Notes on the Teams integration diff --git a/docs/content/en/open_source/contributing/how-to-write-a-parser.md b/docs/content/en/open_source/contributing/how-to-write-a-parser.md index 3c0fcfe53a9..029e6b47a4e 100644 --- a/docs/content/en/open_source/contributing/how-to-write-a-parser.md +++ b/docs/content/en/open_source/contributing/how-to-write-a-parser.md @@ -166,6 +166,17 @@ Good example: finding.cwe = data["mykey"] ``` +```python + finding.cwe = data.get("mykey", 123) +``` + +```python + some_list = data.get("key_of_the_list") or [] +``` + +The final example guards against cases where `key_of_the_list` is present but `null`. + + ### Parsing of CVSS vectors Data can have `CVSS` vectors or scores. Defect Dojo use the `cvss` module provided by RedHat Security. diff --git a/docs/content/en/open_source/upgrading/2.0.md b/docs/content/en/open_source/upgrading/2.0.md index 052ade05c26..f9fe4e44c59 100644 --- a/docs/content/en/open_source/upgrading/2.0.md +++ b/docs/content/en/open_source/upgrading/2.0.md @@ -8,6 +8,7 @@ exclude_search: true Follow the usual steps to upgrade as described above. BEFORE UPGRADING +- If you are upgrading from a version before 1.11, first do an upgrade to 1.15.1. Then come back to this guide. - If you are using SAML2 checkout the new [documentaion](https://documentation.defectdojo.com/integrations/social-authentication/#saml-20) and update you settings following the migration section. We replaced [django-saml2-auth](https://github.com/fangli/django-saml2-auth) with [djangosaml2](https://github.com/IdentityPython/djangosaml2). AFTER UPGRADING diff --git a/docs/content/en/open_source/upgrading/2.51.md b/docs/content/en/open_source/upgrading/2.51.md new file mode 100644 index 00000000000..e3cf71186cc --- /dev/null +++ b/docs/content/en/open_source/upgrading/2.51.md @@ -0,0 +1,151 @@ +--- +title: "Upgrading to DefectDojo Version 2.51.x" +toc_hide: true +weight: -20250902 +description: Helm chart changes and Postgres major version updates. +--- + +## Performance improvements + +This release includes multiple improvements aimed at making DefectDojo faster, more scalable, and lighter on your database and workers. + +- Import and reimport are significantly more efficient: product grading is now orchestrated in batches using Celery chords, reducing the number of background tasks and database churn during large scans. This means faster imports and smoother post-processing on busy systems. See [PR 12914](https://github.com/DefectDojo/django-DefectDojo/pull/12914).
+- Query-count reductions and importer hot-path tuning: we trimmed unnecessary ORM calls and optimized how findings/endpoints are updated during (re)import. You should see noticeably quicker runs out of the box. See [PR 13182](https://github.com/DefectDojo/django-DefectDojo/pull/13182) and [PR 13152](https://github.com/DefectDojo/django-DefectDojo/pull/13152). +- Smarter background task orchestration for product grading: less duplicate work and better scheduling during heavy operations, keeping the UI responsive while long jobs run. See [PR 12900](https://github.com/DefectDojo/django-DefectDojo/pull/12900). +- Bulk tag addition for large batches: adds an internal method to add tags to many findings at once, performing tagging in batches (default 1,000) with only a few queries per batch. This replaces ~3 queries per finding with ~3 queries per batch, significantly reducing DB load during imports, reimports, and bulk edit. On a ~10k-findings sample, import time dropped from ~372s to ~190s. See [PR 13285](https://github.com/DefectDojo/django-DefectDojo/pull/13285). +- Preparations for our switch to `django-pghistory`, which provides more features and better performance compared to `django-auditlog`. See [PR 13169](https://github.com/DefectDojo/django-DefectDojo/pull/13169). + +No configuration changes are required—gains are automatic after upgrading. + +## Helm Chart Changes + +This release introduces several important changes to the Helm chart configuration: + +### Breaking changes + +#### Volume Management Improvements + +- **Streamlined volume configuration**: The existing volume logic has been removed and replaced with more flexible `extraVolumes` and `extraVolumeMounts` options that provide deployment-agnostic volume management. + +> The previous volume implementation prevented mounting projected volumes (such as secret mounts with renamed key names) and per-container volume mounts (like nginx emptyDir when readOnlyRootFs is enforced). +> The new approach resolves these limitations. + +#### Moved values + +The following Helm chart values have been modified in this release (an illustrative before/after `values.yaml` sketch follows these upgrade notes): + +- `redis.transportEncryption.enabled` → `redis.tls.enabled` (aligned with upstream Helm chart) +- `redis.scheme` → `redis.sentinel.enabled` (controls deployment mode and aligns with upstream chart) +- `redis.redisServer` → `redisServer` (prevents potential schema conflicts with upstream chart) +- `redis.transportEncryption.params` → `redisParams` (prevents potential schema conflicts with upstream chart) +- `postgresql.postgresServer` → `postgresServer` (prevents potential schema conflicts with upstream chart) + +### New features + +#### Container and Environment Enhancements + +- **Added extraInitContainers support**: Both Celery and Django deployments now support additional init containers through the `extraInitContainers` configuration option. +- **Enhanced probe configuration for Celery**: Added support for customizing liveness, readiness, and startup probes in both Celery beat and worker deployments. +- **Enhanced environment variable management**: All deployments now include `extraEnv` support for adding custom environment variables. For backwards compatibility, `.Values.extraEnv` can be used to inject common environment variables into all workloads. + +### Other changes + +- **Celery pod annotations**: Annotations can now be added to Celery beat and worker pods separately.
+- **Flexible secret deployment**: Added the capability to deploy secrets as regular (non-hooked) resources to address compatibility issues encountered with CI/CD tools (such as ArgoCD). +- **Optional secret references**: Some secret references are now optional, allowing the chart to function even when certain secrets are not created. +- **Fixed secret mounting**: Resolved issues with optional secret mounts and references. +- **Improved code organization**: Minor Helm chart refactoring to enhance readability and maintainability. + +## PostgreSQL Major Version Upgrade in Docker Compose + +This release incorporates a major upgrade of Postgres. When using the default docker compose setup, you'll need to upgrade the Postgres data folder before you can use Defect Dojo 2.51.0. + +There are lots of online guides to be found, such as https://hub.docker.com/r/tianon/postgres-upgrade or https://github.com/pgautoupgrade/docker-pgautoupgrade. + +There's also the [official documentation on `pg_upgrade`](https://www.postgresql.org/docs/current/pgupgrade.html), but this doesn't work out of the box when using Docker containers. + +Sometimes it's easier to just perform the upgrade manually, which would look something like the steps below. +It may need some tuning to your specific needs and docker compose setup. The guide is loosely based on https://simplebackups.com/blog/docker-postgres-backup-restore-guide-with-examples. +If you already have a valid backup of the postgres 17 database, you can start at step 4. + +### 0. Backup + +Always back up your data before starting and save it somewhere. +Make sure the backup and restore are tested before continuing with the steps below, where the docker volume containing the database will be removed. + +### 1. Start the Old Postgres Container + +If you've accidentally already updated your docker-compose.yml to the new versions, downgrade to postgres 17 for now: + +Edit your `docker-compose.yml` to use the old Postgres version (e.g., `postgres:17.6-alpine`): + +```yaml +postgres: + image: postgres:17.6-alpine + ... +``` + +Start only the Postgres container which will now be 17.6: + +```bash +docker compose up -d postgres +``` + +### 2. Dump Your Database + +```bash +docker compose exec -t postgres pg_dump -U defectdojo -Fc defectdojo -f /tmp/defectdojo.dump +docker cp <postgres_container_name>:/tmp/defectdojo.dump defectdojo.dump +``` + +You can find the postgres_container_name via `docker container ls` or `docker ps`. + +### 3. Stop Containers and Remove the Old Volume + +You can find the volume name via `docker volume ls`. + +```bash +docker compose down +docker volume rm <volume_name> +``` + +### 4. Switch to the New Postgres Version + +Edit your `docker-compose.yml` to use the new version (e.g., `postgres:18-alpine`): + +```yaml +postgres: + image: postgres:18-alpine + ... +``` + +### 5. Start the New Postgres Container + +```bash +docker compose up -d postgres +``` + +### 6. Restore Your Database + +**Copy the dump file into the new container:** + +```bash +docker cp defectdojo.dump <postgres_container_name>:/defectdojo.dump +``` + +**Restore inside the container:** + +```bash +docker exec -it <postgres_container_name> bash +pg_restore -U defectdojo -d defectdojo /defectdojo.dump +``` + +### 7. Start the Rest of Your Services + +```bash +docker compose up -d +``` + +--- + +For any other instructions for upgrading to 2.51.x, check the [Release Notes](https://github.com/DefectDojo/django-DefectDojo/releases/tag/2.51.0) for the contents of the release.
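For illustration, here is a minimal before/after `values.yaml` sketch of the renamed chart values listed under "Moved values" in the 2.51 upgrade notes above. Only the key renames themselves come from those notes; the hostnames, the parameter string, and the boolean values are placeholders, and the `redis.scheme` → `redis.sentinel.enabled` line assumes that a plain (non-sentinel) Redis setup maps to `enabled: false`.

```yaml
# Before: pre-2.51 values.yaml (placeholder values, keys as listed in the upgrade notes)
redis:
  redisServer: defectdojo-redis.example.svc        # placeholder hostname
  scheme: redis
  transportEncryption:
    enabled: true
    params: "ssl_cert_reqs=CERT_REQUIRED"          # placeholder parameter string
postgresql:
  postgresServer: defectdojo-postgres.example.svc  # placeholder hostname
---
# After: 2.51.x values.yaml expressing the same settings with the renamed keys
redisServer: defectdojo-redis.example.svc          # was redis.redisServer
redisParams: "ssl_cert_reqs=CERT_REQUIRED"         # was redis.transportEncryption.params
redis:
  sentinel:
    enabled: false    # replaces redis.scheme; assumed mapping for a non-sentinel setup
  tls:
    enabled: true     # was redis.transportEncryption.enabled
postgresServer: defectdojo-postgres.example.svc    # was postgresql.postgresServer
```

Keeping `redisServer`, `redisParams`, and `postgresServer` at the top level is what avoids clashes with the schemas of the upstream Redis and PostgreSQL subcharts, which is the reason the notes give for these renames.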
diff --git a/docs/package-lock.json b/docs/package-lock.json index bd26efc195b..cdd0561b267 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,9 +9,9 @@ "version": "0.0.0", "license": "MIT", "dependencies": { - "@docsearch/css": "3.9.0", - "@docsearch/js": "3.9.0", - "@tabler/icons": "3.34.1", + "@docsearch/css": "4.1.0", + "@docsearch/js": "4.1.0", + "@tabler/icons": "3.35.0", "@thulite/doks-core": "1.8.0", "@thulite/images": "3.3.0", "@thulite/inline-svg": "1.2.1", @@ -20,250 +20,10 @@ }, "devDependencies": { "prettier": "3.6.2", - "vite": "7.1.5" + "vite": "7.1.9" }, "engines": { - "node": "22.19.0" - } - }, - "node_modules/@algolia/abtesting": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@algolia/abtesting/-/abtesting-1.1.0.tgz", - "integrity": "sha512-sEyWjw28a/9iluA37KLGu8vjxEIlb60uxznfTUmXImy7H5NvbpSO6yYgmgH5KiD7j+zTUUihiST0jEP12IoXow==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/autocomplete-core": { - "version": "1.17.9", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.17.9.tgz", - "integrity": "sha512-O7BxrpLDPJWWHv/DLA9DRFWs+iY1uOJZkqUwjS5HSZAGcl0hIVCQ97LTLewiZmZ402JYUrun+8NqFP+hCknlbQ==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-plugin-algolia-insights": "1.17.9", - "@algolia/autocomplete-shared": "1.17.9" - } - }, - "node_modules/@algolia/autocomplete-plugin-algolia-insights": { - "version": "1.17.9", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.17.9.tgz", - "integrity": "sha512-u1fEHkCbWF92DBeB/KHeMacsjsoI0wFhjZtlCq2ddZbAehshbZST6Hs0Avkc0s+4UyBGbMDnSuXHLuvRWK5iDQ==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-shared": "1.17.9" - }, - "peerDependencies": { - "search-insights": ">= 1 < 3" - } - }, - "node_modules/@algolia/autocomplete-preset-algolia": { - "version": "1.17.9", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.17.9.tgz", - "integrity": "sha512-Na1OuceSJeg8j7ZWn5ssMu/Ax3amtOwk76u4h5J4eK2Nx2KB5qt0Z4cOapCsxot9VcEN11ADV5aUSlQF4RhGjQ==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-shared": "1.17.9" - }, - "peerDependencies": { - "@algolia/client-search": ">= 4.9.1 < 6", - "algoliasearch": ">= 4.9.1 < 6" - } - }, - "node_modules/@algolia/autocomplete-shared": { - "version": "1.17.9", - "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.17.9.tgz", - "integrity": "sha512-iDf05JDQ7I0b7JEA/9IektxN/80a2MZ1ToohfmNS3rfeuQnIKI3IJlIafD0xu4StbtQTghx9T3Maa97ytkXenQ==", - "license": "MIT", - "peerDependencies": { - "@algolia/client-search": ">= 4.9.1 < 6", - "algoliasearch": ">= 4.9.1 < 6" - } - }, - "node_modules/@algolia/client-abtesting": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-abtesting/-/client-abtesting-5.35.0.tgz", - "integrity": "sha512-uUdHxbfHdoppDVflCHMxRlj49/IllPwwQ2cQ8DLC4LXr3kY96AHBpW0dMyi6ygkn2MtFCc6BxXCzr668ZRhLBQ==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 
14.0.0" - } - }, - "node_modules/@algolia/client-analytics": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-5.35.0.tgz", - "integrity": "sha512-SunAgwa9CamLcRCPnPHx1V2uxdQwJGqb1crYrRWktWUdld0+B2KyakNEeVn5lln4VyeNtW17Ia7V7qBWyM/Skw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-common": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-5.35.0.tgz", - "integrity": "sha512-ipE0IuvHu/bg7TjT2s+187kz/E3h5ssfTtjpg1LbWMgxlgiaZIgTTbyynM7NfpSJSKsgQvCQxWjGUO51WSCu7w==", - "license": "MIT", - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-insights": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-insights/-/client-insights-5.35.0.tgz", - "integrity": "sha512-UNbCXcBpqtzUucxExwTSfAe8gknAJ485NfPN6o1ziHm6nnxx97piIbcBQ3edw823Tej2Wxu1C0xBY06KgeZ7gA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-personalization": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-5.35.0.tgz", - "integrity": "sha512-/KWjttZ6UCStt4QnWoDAJ12cKlQ+fkpMtyPmBgSS2WThJQdSV/4UWcqCUqGH7YLbwlj3JjNirCu3Y7uRTClxvA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-query-suggestions": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-query-suggestions/-/client-query-suggestions-5.35.0.tgz", - "integrity": "sha512-8oCuJCFf/71IYyvQQC+iu4kgViTODbXDk3m7yMctEncRSRV+u2RtDVlpGGfPlJQOrAY7OONwJlSHkmbbm2Kp/w==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/client-search": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-5.35.0.tgz", - "integrity": "sha512-FfmdHTrXhIduWyyuko1YTcGLuicVbhUyRjO3HbXE4aP655yKZgdTIfMhZ/V5VY9bHuxv/fGEh3Od1Lvv2ODNTg==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/ingestion": { - "version": "1.35.0", - "resolved": "https://registry.npmjs.org/@algolia/ingestion/-/ingestion-1.35.0.tgz", - "integrity": "sha512-gPzACem9IL1Co8mM1LKMhzn1aSJmp+Vp434An4C0OBY4uEJRcqsLN3uLBlY+bYvFg8C8ImwM9YRiKczJXRk0XA==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": 
"5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/monitoring": { - "version": "1.35.0", - "resolved": "https://registry.npmjs.org/@algolia/monitoring/-/monitoring-1.35.0.tgz", - "integrity": "sha512-w9MGFLB6ashI8BGcQoVt7iLgDIJNCn4OIu0Q0giE3M2ItNrssvb8C0xuwJQyTy1OFZnemG0EB1OvXhIHOvQwWw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/recommend": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-5.35.0.tgz", - "integrity": "sha512-AhrVgaaXAb8Ue0u2nuRWwugt0dL5UmRgS9LXe0Hhz493a8KFeZVUE56RGIV3hAa6tHzmAV7eIoqcWTQvxzlJeQ==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-browser-xhr": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.35.0.tgz", - "integrity": "sha512-diY415KLJZ6x1Kbwl9u96Jsz0OstE3asjXtJ9pmk1d+5gPuQ5jQyEsgC+WmEXzlec3iuVszm8AzNYYaqw6B+Zw==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-fetch": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-fetch/-/requester-fetch-5.35.0.tgz", - "integrity": "sha512-uydqnSmpAjrgo8bqhE9N1wgcB98psTRRQXcjc4izwMB7yRl9C8uuAQ/5YqRj04U0mMQ+fdu2fcNF6m9+Z1BzDQ==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, - "node_modules/@algolia/requester-node-http": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-5.35.0.tgz", - "integrity": "sha512-RgLX78ojYOrThJHrIiPzT4HW3yfQa0D7K+MQ81rhxqaNyNBu4F1r+72LNHYH/Z+y9I1Mrjrd/c/Ue5zfDgAEjQ==", - "license": "MIT", - "dependencies": { - "@algolia/client-common": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" + "node": "22.20.0" } }, "node_modules/@ampproject/remapping": { @@ -336,6 +96,7 @@ "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", "license": "MIT", + "peer": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.26.0", @@ -1746,52 +1507,16 @@ } }, "node_modules/@docsearch/css": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.9.0.tgz", - "integrity": "sha512-cQbnVbq0rrBwNAKegIac/t6a8nWoUAn8frnkLFW6YARaRmAQr5/Eoe6Ln2fqkUCZ40KpdrKbpSAmgrkviOxuWA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-4.1.0.tgz", + "integrity": "sha512-nuNKGjHj/FQeWgE9t+i83QD/V67QiaAmGY7xS9TVCRUiCqSljOgIKlsLoQZKKVwEG8f+OWKdznzZkJxGZ7d06A==", "license": "MIT" }, "node_modules/@docsearch/js": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.9.0.tgz", - "integrity": "sha512-4bKHcye6EkLgRE8ze0vcdshmEqxeiJM77M0JXjef7lrYZfSlMunrDOCqyLjiZyo1+c0BhUqA2QpFartIjuHIjw==", - "license": "MIT", - "dependencies": { - "@docsearch/react": "3.9.0", - 
"preact": "^10.0.0" - } - }, - "node_modules/@docsearch/react": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.9.0.tgz", - "integrity": "sha512-mb5FOZYZIkRQ6s/NWnM98k879vu5pscWqTLubLFBO87igYYT4VzVazh4h5o/zCvTIZgEt3PvsCOMOswOUo9yHQ==", - "license": "MIT", - "dependencies": { - "@algolia/autocomplete-core": "1.17.9", - "@algolia/autocomplete-preset-algolia": "1.17.9", - "@docsearch/css": "3.9.0", - "algoliasearch": "^5.14.2" - }, - "peerDependencies": { - "@types/react": ">= 16.8.0 < 20.0.0", - "react": ">= 16.8.0 < 20.0.0", - "react-dom": ">= 16.8.0 < 20.0.0", - "search-insights": ">= 1 < 3" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "react": { - "optional": true - }, - "react-dom": { - "optional": true - }, - "search-insights": { - "optional": true - } - } + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-4.1.0.tgz", + "integrity": "sha512-49+CzeGfOiwG85k+dDvKfOsXLd9PQACoY/FLrZfFOKmpWv166u7bAHmBLdzvxlk8nJ289UgpGf0k6GQZtC85Fg==", + "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.1", @@ -2431,6 +2156,7 @@ "resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz", "integrity": "sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A==", "license": "MIT", + "peer": true, "funding": { "type": "opencollective", "url": "https://opencollective.com/popperjs" @@ -2729,9 +2455,9 @@ } }, "node_modules/@tabler/icons": { - "version": "3.34.1", - "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.34.1.tgz", - "integrity": "sha512-9gTnUvd7Fd/DmQgr3MKY+oJLa1RfNsQo8c/ir3TJAWghOuZXodbtbVp0QBY2DxWuuvrSZFys0HEbv1CoiI5y6A==", + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.35.0.tgz", + "integrity": "sha512-yYXe+gJ56xlZFiXwV9zVoe3FWCGuZ/D7/G4ZIlDtGxSx5CGQK110wrnT29gUj52kEZoxqF7oURTk97GQxELOFQ==", "license": "MIT", "funding": { "type": "github", @@ -2821,31 +2547,6 @@ "dev": true, "license": "MIT" }, - "node_modules/algoliasearch": { - "version": "5.35.0", - "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-5.35.0.tgz", - "integrity": "sha512-Y+moNhsqgLmvJdgTsO4GZNgsaDWv8AOGAaPeIeHKlDn/XunoAqYbA+XNpBd1dW8GOXAUDyxC9Rxc7AV4kpFcIg==", - "license": "MIT", - "dependencies": { - "@algolia/abtesting": "1.1.0", - "@algolia/client-abtesting": "5.35.0", - "@algolia/client-analytics": "5.35.0", - "@algolia/client-common": "5.35.0", - "@algolia/client-insights": "5.35.0", - "@algolia/client-personalization": "5.35.0", - "@algolia/client-query-suggestions": "5.35.0", - "@algolia/client-search": "5.35.0", - "@algolia/ingestion": "1.35.0", - "@algolia/monitoring": "1.35.0", - "@algolia/recommend": "5.35.0", - "@algolia/requester-browser-xhr": "5.35.0", - "@algolia/requester-fetch": "5.35.0", - "@algolia/requester-node-http": "5.35.0" - }, - "engines": { - "node": ">= 14.0.0" - } - }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -3037,6 +2738,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", @@ -4043,7 +3745,6 @@ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "license": "MIT", - "peer": true, "engines": { "node": ">=0.10.0" } @@ -4212,6 +3913,7 @@ } ], 
"license": "MIT", + "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -4346,16 +4048,6 @@ "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", "license": "MIT" }, - "node_modules/preact": { - "version": "10.27.0", - "resolved": "https://registry.npmjs.org/preact/-/preact-10.27.0.tgz", - "integrity": "sha512-/DTYoB6mwwgPytiqQTh/7SFRL98ZdiD8Sk8zIUVOxtwq4oWcwrcd1uno9fE/zZmUaUrFNYzbH14CPebOz9tZQw==", - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/preact" - } - }, "node_modules/prettier": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.6.2.tgz", @@ -4386,7 +4078,6 @@ "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", "license": "MIT", - "peer": true, "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", @@ -4541,8 +4232,7 @@ "version": "16.13.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/read-cache": { "version": "1.0.0", @@ -4773,7 +4463,6 @@ "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.19.1.tgz", "integrity": "sha512-n/zwRWRYSUj0/3g/otKDRPMh6qv2SYMWNq85IEa8iZyAv8od9zDYpGSnpBEjNgcMNq6Scbu5KfIPxNF72R/2EA==", "license": "MIT", - "peer": true, "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" @@ -4792,13 +4481,6 @@ "node": ">=6.0.0" } }, - "node_modules/search-insights": { - "version": "2.17.3", - "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.17.3.tgz", - "integrity": "sha512-RQPdCYTa8A68uM2jwxoY842xDhvx3E5LFL1LxvxCNMev4o5mLuokczhzjAgGwUZBAmOKZknArSxLKmXtIi2AxQ==", - "license": "MIT", - "peer": true - }, "node_modules/select": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/select/-/select-1.1.2.tgz", @@ -5011,6 +4693,7 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -5128,9 +4811,9 @@ "license": "MIT" }, "node_modules/vite": { - "version": "7.1.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.5.tgz", - "integrity": "sha512-4cKBO9wR75r0BeIWWWId9XK9Lj6La5X846Zw9dFfzMRw38IlTk2iCcUt6hsyiDRcPidc55ZParFYDXi0nXOeLQ==", + "version": "7.1.9", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.9.tgz", + "integrity": "sha512-4nVGliEpxmhCL8DslSAUdxlB6+SMrhB0a1v5ijlh1xB1nEPuy1mxaHxysVucLHuWryAxLWg6a5ei+U4TLn/rFg==", "dev": true, "license": "MIT", "dependencies": { @@ -5226,6 +4909,7 @@ "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, diff --git a/docs/package.json b/docs/package.json index de75ce7b2b6..007c3374468 100644 --- a/docs/package.json +++ b/docs/package.json @@ -12,20 +12,20 @@ "preview": "vite preview --outDir public" }, "dependencies": { - "@docsearch/css": "3.9.0", - "@docsearch/js": "3.9.0", + "@docsearch/css": "4.1.0", + "@docsearch/js": "4.1.0", "@thulite/doks-core": "1.8.0", "@thulite/images": "3.3.0", "@thulite/inline-svg": "1.2.1", "@thulite/seo": "2.4.2", - 
"@tabler/icons": "3.34.1", + "@tabler/icons": "3.35.0", "thulite": "2.5.0" }, "devDependencies": { "prettier": "3.6.2", - "vite": "7.1.5" + "vite": "7.1.9" }, "engines": { - "node": "22.19.0" + "node": "22.20.0" } } diff --git a/dojo/__init__.py b/dojo/__init__.py index 5737b3ff975..3ca651bd880 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa: F401 -__version__ = "2.50.4" +__version__ = "2.51.0" __url__ = "https://github.com/DefectDojo/django-DefectDojo" __docs__ = "https://documentation.defectdojo.com" diff --git a/dojo/admin.py b/dojo/admin.py index c40d39e3c23..c7a21b91019 100644 --- a/dojo/admin.py +++ b/dojo/admin.py @@ -1,5 +1,5 @@ -from auditlog.models import LogEntry from django.contrib import admin +from django.contrib.admin.sites import NotRegistered from polymorphic.admin import PolymorphicChildModelAdmin, PolymorphicParentModelAdmin from dojo.models import ( @@ -14,7 +14,13 @@ TextQuestion, ) -admin.site.unregister(LogEntry) +# Conditionally unregister LogEntry from auditlog if it's registered +try: + from auditlog.models import LogEntry + admin.site.unregister(LogEntry) +except (ImportError, NotRegistered): + # auditlog not available or LogEntry not registered + pass # ============================== # Defect Dojo Engaegment Surveys diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py index 1c393c0b066..e0aa2ce8dc4 100644 --- a/dojo/api_v2/serializers.py +++ b/dojo/api_v2/serializers.py @@ -22,6 +22,7 @@ from rest_framework.exceptions import ValidationError as RestFrameworkValidationError from rest_framework.fields import DictField, MultipleChoiceField +import dojo.finding.helper as finding_helper import dojo.jira_link.helper as jira_helper import dojo.risk_acceptance.helper as ra_helper from dojo.authorization.authorization import user_has_permission @@ -122,6 +123,7 @@ requires_file, requires_tool_type, ) +from dojo.user.queries import get_authorized_users from dojo.user.utils import get_configuration_permissions_codenames from dojo.utils import is_scan_file_too_large from dojo.validators import ImporterFileExtensionValidator, tag_validator @@ -219,7 +221,7 @@ def to_internal_value(self, data): except ValueError: self.fail("invalid_json") - logger.debug(f"data as json: {data}") + logger.debug("data as json: %s", data) if not isinstance(data, list): self.fail("not_a_list", input_type=type(data).__name__) @@ -238,7 +240,7 @@ def to_internal_value(self, data): tag_validator(sub, exception_class=RestFrameworkValidationError) data_safe.extend(substrings) - logger.debug(f"result after rendering tags: {data_safe}") + logger.debug("result after rendering tags: %s", data_safe) return data_safe def to_representation(self, value): @@ -1674,6 +1676,8 @@ class Meta: class FindingSerializer(serializers.ModelSerializer): + mitigated = serializers.DateTimeField(required=False, allow_null=True) + mitigated_by = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=User.objects.all()) tags = TagListSerializerField(required=False) request_response = serializers.SerializerMethodField() accepted_risks = RiskAcceptanceSerializer( @@ -1757,19 +1761,36 @@ def update(self, instance, validated_data): if reporter_id := validated_data.get("reporter"): instance.reporter = reporter_id + # Persist vulnerability IDs first so model save computes hash including them (if there is no hash yet) + # we can't pass unsaved_vulnerabilitiy_ids to super.update() 
+ if parsed_vulnerability_ids: + save_vulnerability_ids(instance, parsed_vulnerability_ids) + instance = super().update( instance, validated_data, ) - if parsed_vulnerability_ids: - save_vulnerability_ids(instance, parsed_vulnerability_ids) - if push_to_jira: jira_helper.push_to_jira(instance) return instance def validate(self, data): + # Enforce mitigated metadata editability (only when non-null values are provided) + attempting_to_set_mitigated = any( + (field in data) and (data.get(field) is not None) + for field in ["mitigated", "mitigated_by"] + ) + user = getattr(self.context.get("request", None), "user", None) + if attempting_to_set_mitigated and not finding_helper.can_edit_mitigated_data(user): + errors = {} + if ("mitigated" in data) and (data.get("mitigated") is not None): + errors["mitigated"] = ["Editing mitigated timestamp is disabled (EDITABLE_MITIGATED_DATA=false)"] + if ("mitigated_by" in data) and (data.get("mitigated_by") is not None): + errors["mitigated_by"] = ["Editing mitigated_by is disabled (EDITABLE_MITIGATED_DATA=false)"] + if errors: + raise serializers.ValidationError(errors) + if self.context["request"].method == "PATCH": is_active = data.get("active", self.instance.active) is_verified = data.get("verified", self.instance.verified) @@ -1839,6 +1860,8 @@ def get_request_response(self, obj): class FindingCreateSerializer(serializers.ModelSerializer): + mitigated = serializers.DateTimeField(required=False, allow_null=True) + mitigated_by = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=User.objects.all()) notes = serializers.PrimaryKeyRelatedField( read_only=True, allow_null=True, required=False, many=True, ) @@ -1870,7 +1893,7 @@ class Meta: # Overriding this to push add Push to JIRA functionality def create(self, validated_data): - logger.debug(f"Creating finding with validated data: {validated_data}") + logger.debug("Creating finding with validated data: %s", validated_data) push_to_jira = validated_data.pop("push_to_jira", False) notes = validated_data.pop("notes", None) found_by = validated_data.pop("found_by", None) @@ -1880,11 +1903,15 @@ def create(self, validated_data): if (vulnerability_ids := validated_data.pop("vulnerability_id_set", None)): logger.debug("VULNERABILITY_ID_SET: %s", vulnerability_ids) parsed_vulnerability_ids.extend(vulnerability_id["vulnerability_id"] for vulnerability_id in vulnerability_ids) + logger.debug("PARSED_VULNERABILITY_IDST: %s", parsed_vulnerability_ids) logger.debug("SETTING CVE FROM VULNERABILITY_ID_SET: %s", parsed_vulnerability_ids[0]) validated_data["cve"] = parsed_vulnerability_ids[0] + # validated_data["unsaved_vulnerability_ids"] = parsed_vulnerability_ids - new_finding = super().create( - validated_data) + # super.create() doesn't accept unsaved_vulnerability_ids or dedupe_option=False, so call save directly. + new_finding = Finding(**validated_data) + new_finding.unsaved_vulnerability_ids = parsed_vulnerability_ids or [] + new_finding.save() logger.debug(f"New finding CVE: {new_finding.cve}") @@ -1897,9 +1924,6 @@ def create(self, validated_data): new_finding.reviewers.set(reviewers) if parsed_vulnerability_ids: save_vulnerability_ids(new_finding, parsed_vulnerability_ids) - # can we avoid this extra save? the cve has already been set above in validated_data. but there are no tests for this - # on finding update nothing is done # with vulnerability_ids? 
- # new_finding.save() if push_to_jira: jira_helper.push_to_jira(new_finding) @@ -1907,6 +1931,21 @@ def create(self, validated_data): return new_finding def validate(self, data): + # Ensure mitigated fields are only set when editable is enabled (ignore nulls) + attempting_to_set_mitigated = any( + (field in data) and (data.get(field) is not None) + for field in ["mitigated", "mitigated_by"] + ) + user = getattr(getattr(self.context, "request", None), "user", None) + if attempting_to_set_mitigated and not finding_helper.can_edit_mitigated_data(user): + errors = {} + if ("mitigated" in data) and (data.get("mitigated") is not None): + errors["mitigated"] = ["Editing mitigated timestamp is disabled (EDITABLE_MITIGATED_DATA=false)"] + if ("mitigated_by" in data) and (data.get("mitigated_by") is not None): + errors["mitigated_by"] = ["Editing mitigated_by is disabled (EDITABLE_MITIGATED_DATA=false)"] + if errors: + raise serializers.ValidationError(errors) + if "reporter" not in data: request = self.context["request"] data["reporter"] = request.user @@ -2102,8 +2141,14 @@ class CommonImportScanSerializer(serializers.Serializer): required=False, validators=[ImporterFileExtensionValidator()], ) - product_type_name = serializers.CharField(required=False) - product_name = serializers.CharField(required=False) + product_type_name = serializers.CharField( + required=False, + help_text=_("Also referred to as 'Organization' name."), + ) + product_name = serializers.CharField( + required=False, + help_text=_("Also referred to as 'Asset' name."), + ) engagement_name = serializers.CharField(required=False) engagement_end_date = serializers.DateField( required=False, @@ -2158,8 +2203,14 @@ class CommonImportScanSerializer(serializers.Serializer): # confused test_id = serializers.IntegerField(read_only=True) engagement_id = serializers.IntegerField(read_only=True) - product_id = serializers.IntegerField(read_only=True) - product_type_id = serializers.IntegerField(read_only=True) + product_id = serializers.IntegerField( + read_only=True, + help_text=_("Also referred to as 'Asset' ID."), + ) + product_type_id = serializers.IntegerField( + read_only=True, + help_text=_("Also referred to as 'Organization' ID."), + ) statistics = ImportStatisticsSerializer(read_only=True, required=False) pro = serializers.ListField(read_only=True, required=False) apply_tags_to_findings = serializers.BooleanField( @@ -2314,14 +2365,16 @@ class ImportScanSerializer(CommonImportScanSerializer): required=False, default=False, help_text="Old findings no longer present in the new report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed. " + "If service has been set, only the findings for this service will be closed; " + "if no service is set, only findings without a service will be closed. " "This only affects findings within the same engagement.", ) close_old_findings_product_scope = serializers.BooleanField( required=False, default=False, help_text="Old findings no longer present in the new report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed. " + "If service has been set, only the findings for this service will be closed; " + "if no service is set, only findings without a service will be closed. " "This only affects findings within the same product." 
"By default, it is false meaning that only old findings of the same type in the engagement are in scope.", ) @@ -2396,7 +2449,8 @@ class ReImportScanSerializer(CommonImportScanSerializer): required=False, default=True, help_text="Old findings no longer present in the new report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed. " + "If service has been set, only the findings for this service will be closed; " + "if no service is set, only findings without a service will be closed. " "This only affects findings within the same test.", ) close_old_findings_product_scope = serializers.BooleanField( @@ -2694,6 +2748,9 @@ class FindingCloseSerializer(serializers.ModelSerializer): false_p = serializers.BooleanField(required=False) out_of_scope = serializers.BooleanField(required=False) duplicate = serializers.BooleanField(required=False) + mitigated_by = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Dojo_User.objects.all()) + note = serializers.CharField(required=False, allow_blank=True) + note_type = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Note_Type.objects.all()) class Meta: model = Finding @@ -2703,8 +2760,34 @@ class Meta: "false_p", "out_of_scope", "duplicate", + "mitigated_by", + "note", + "note_type", ) + def validate(self, data): + request = self.context.get("request") + request_user = getattr(request, "user", None) + + mitigated_by_user = data.get("mitigated_by") + if mitigated_by_user is not None: + # Require permission to edit mitigated metadata + if not (request_user and finding_helper.can_edit_mitigated_data(request_user)): + raise serializers.ValidationError({ + "mitigated_by": ["Not allowed to set mitigated_by."], + }) + + # Ensure selected user is authorized (Finding_Edit) + authorized_users = get_authorized_users(Permissions.Finding_Edit, user=request_user) + if not authorized_users.filter(id=mitigated_by_user.id).exists(): + raise serializers.ValidationError({ + "mitigated_by": [ + "Selected user is not authorized to be set as mitigated_by.", + ], + }) + + return data + class ReportGenerateOptionSerializer(serializers.Serializer): include_finding_notes = serializers.BooleanField(default=False) diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py index f8cda3e8413..65591bfb6af 100644 --- a/dojo/api_v2/views.py +++ b/dojo/api_v2/views.py @@ -31,6 +31,7 @@ from rest_framework.permissions import DjangoModelPermissions, IsAuthenticated from rest_framework.response import Response +import dojo.finding.helper as finding_helper import dojo.jira_link.helper as jira_helper from dojo.api_v2 import ( mixins as dojo_mixins, @@ -40,6 +41,7 @@ prefetch, serializers, ) +from dojo.api_v2.prefetch.prefetcher import _Prefetcher from dojo.authorization.roles_permissions import Permissions from dojo.cred.queries import get_authorized_cred_mappings from dojo.endpoint.queries import ( @@ -83,6 +85,7 @@ get_authorized_jira_issues, get_authorized_jira_projects, ) +from dojo.labels import get_labels from dojo.models import ( Announcement, Answer, @@ -177,6 +180,9 @@ logger = logging.getLogger(__name__) +labels = get_labels() + + def schema_with_prefetch() -> dict: return { "list": extend_schema( @@ -584,8 +590,6 @@ def files(self, request, pk=None): ) @action(detail=True, methods=["get", "post"]) def complete_checklist(self, request, pk=None): - from dojo.api_v2.prefetch.prefetcher import _Prefetcher - engagement = self.get_object() check_lists = 
Check_List.objects.filter(engagement=engagement) if request.method == "POST": @@ -925,49 +929,27 @@ def close(self, request, pk=None): if request.method == "POST": finding_close = serializers.FindingCloseSerializer( data=request.data, + context={"request": request}, ) if finding_close.is_valid(): - finding.is_mitigated = finding_close.validated_data[ - "is_mitigated" - ] - if settings.EDITABLE_MITIGATED_DATA: - finding.mitigated = ( - finding_close.validated_data["mitigated"] - or timezone.now() - ) - else: - finding.mitigated = timezone.now() - finding.mitigated_by = request.user - finding.active = False - finding.false_p = finding_close.validated_data.get( - "false_p", False, - ) - finding.duplicate = finding_close.validated_data.get( - "duplicate", False, - ) - finding.out_of_scope = finding_close.validated_data.get( - "out_of_scope", False, + # Use shared helper to perform close operations + finding_helper.close_finding( + finding=finding, + user=request.user, + is_mitigated=finding_close.validated_data["is_mitigated"], + mitigated=(finding_close.validated_data.get("mitigated") if finding_helper.can_edit_mitigated_data(request.user) else timezone.now()), + mitigated_by=finding_close.validated_data.get("mitigated_by") or (request.user if not finding_helper.can_edit_mitigated_data(request.user) else None), + false_p=finding_close.validated_data.get("false_p", False), + out_of_scope=finding_close.validated_data.get("out_of_scope", False), + duplicate=finding_close.validated_data.get("duplicate", False), + note_entry=finding_close.validated_data.get("note"), + note_type=finding_close.validated_data.get("note_type"), ) - - endpoints_status = finding.status_finding.all() - for e_status in endpoints_status: - e_status.mitigated_by = request.user - if settings.EDITABLE_MITIGATED_DATA: - e_status.mitigated_time = ( - finding_close.validated_data["mitigated"] - or timezone.now() - ) - else: - e_status.mitigated_time = timezone.now() - e_status.mitigated = True - e_status.last_modified = timezone.now() - e_status.save() - finding.save() else: return Response( finding_close.errors, status=status.HTTP_400_BAD_REQUEST, ) - serialized_finding = serializers.FindingCloseSerializer(finding) + serialized_finding = serializers.FindingCloseSerializer(finding, context={"request": request}) return Response(serialized_finding.data) @extend_schema( @@ -2648,7 +2630,7 @@ def perform_create(self, serializer): jira_driver = test or (engagement or (product or None)) if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None): push_to_jira = push_to_jira or jira_project.push_all_issues - logger.debug(f"push_to_jira: {push_to_jira}") + logger.debug("push_to_jira: %s", push_to_jira) serializer.save(push_to_jira=push_to_jira) @@ -2747,7 +2729,7 @@ def report_generate(request, obj, options): if type(obj).__name__ == "Product_Type": product_type = obj - report_name = "Product Type Report: " + str(product_type) + report_name = labels.ORG_REPORT_WITH_NAME_TITLE % {"name": str(product_type)} findings = report_finding_filter_class( request.GET, @@ -2776,7 +2758,7 @@ def report_generate(request, obj, options): elif type(obj).__name__ == "Product": product = obj - report_name = "Product Report: " + str(product) + report_name = labels.ASSET_REPORT_WITH_NAME_TITLE % {"name": str(product)} findings = report_finding_filter_class( request.GET, diff --git a/dojo/apps.py b/dojo/apps.py index c1831b6a06e..f47eb5184f2 100644 --- a/dojo/apps.py +++ b/dojo/apps.py @@ -5,6 +5,7 @@ from django.db import models 
from watson import search as watson +from dojo.auditlog import configure_audit_system, register_django_pghistory_models from dojo.checks import check_configuration_deduplication logger = logging.getLogger(__name__) @@ -71,21 +72,28 @@ def ready(self): # Load any signals here that will be ready for runtime # Importing the signals file is good enough if using the reciever decorator - import dojo.announcement.signals - import dojo.benchmark.signals - import dojo.cred.signals - import dojo.endpoint.signals - import dojo.engagement.signals - import dojo.file_uploads.signals - import dojo.finding_group.signals - import dojo.notes.signals - import dojo.product.signals - import dojo.product_type.signals - import dojo.risk_acceptance.signals - import dojo.sla_config.helpers - import dojo.tags_signals - import dojo.test.signals - import dojo.tool_product.signals # noqa: F401 + import dojo.announcement.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.benchmark.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.cred.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.endpoint.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.engagement.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.file_uploads.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.finding_group.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.notes.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.product.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.product_type.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.risk_acceptance.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.sla_config.helpers # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.tags_signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.test.signals # noqa: PLC0415 raised: AppRegistryNotReady + import dojo.tool_product.signals # noqa: F401,PLC0415 raised: AppRegistryNotReady + + # Configure audit system after all models are loaded + # This must be done in ready() to avoid "Models aren't loaded yet" errors + # Note: pghistory models are registered here (no database access), but trigger + # enabling is handled via management command to avoid database access warnings + register_django_pghistory_models() + configure_audit_system() def get_model_fields_with_extra(model, extra_fields=()): diff --git a/dojo/asset/__init__.py b/dojo/asset/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/asset/api/__init__.py b/dojo/asset/api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/asset/api/filters.py b/dojo/asset/api/filters.py new file mode 100644 index 00000000000..991fd329ac8 --- /dev/null +++ b/dojo/asset/api/filters.py @@ -0,0 +1,121 @@ +from django_filters import BooleanFilter, CharFilter, NumberFilter, OrderingFilter +from django_filters.rest_framework import FilterSet +from drf_spectacular.types import OpenApiTypes +from drf_spectacular.utils import extend_schema_field + +from dojo.filters import ( + CharFieldFilterANDExpression, + CharFieldInFilter, + DateRangeFilter, + DojoFilter, + NumberInFilter, + ProductSLAFilter, + custom_filter, +) +from dojo.labels import get_labels +from dojo.models import ( + Product_API_Scan_Configuration, + Product_Group, + Product_Member, +) + +labels = get_labels() + + +class AssetAPIScanConfigurationFilterSet(FilterSet): + asset = NumberFilter(field_name="product") + + class Meta: + 
model = Product_API_Scan_Configuration + fields = ("id", "tool_configuration", "service_key_1", "service_key_2", "service_key_3") + + +class ApiAssetFilter(DojoFilter): + # BooleanFilter + external_audience = BooleanFilter(field_name="external_audience") + internet_accessible = BooleanFilter(field_name="internet_accessible") + # CharFilter + name = CharFilter(lookup_expr="icontains") + name_exact = CharFilter(field_name="name", lookup_expr="iexact") + description = CharFilter(lookup_expr="icontains") + business_criticality = CharFilter(method=custom_filter, field_name="business_criticality") + platform = CharFilter(method=custom_filter, field_name="platform") + lifecycle = CharFilter(method=custom_filter, field_name="lifecycle") + origin = CharFilter(method=custom_filter, field_name="origin") + # NumberInFilter + id = NumberInFilter(field_name="id", lookup_expr="in") + asset_manager = NumberInFilter(field_name="product_manager", lookup_expr="in") + technical_contact = NumberInFilter(field_name="technical_contact", lookup_expr="in") + team_manager = NumberInFilter(field_name="team_manager", lookup_expr="in") + prod_type = NumberInFilter(field_name="prod_type", lookup_expr="in") + tid = NumberInFilter(field_name="tid", lookup_expr="in") + prod_numeric_grade = NumberInFilter(field_name="prod_numeric_grade", lookup_expr="in") + user_records = NumberInFilter(field_name="user_records", lookup_expr="in") + regulations = NumberInFilter(field_name="regulations", lookup_expr="in") + + tag = CharFilter(field_name="tags__name", lookup_expr="icontains", label="Tag name contains") + tags = CharFieldInFilter( + field_name="tags__name", + lookup_expr="in", + help_text="Comma separated list of exact tags (uses OR for multiple values)") + tags__and = CharFieldFilterANDExpression( + field_name="tags__name", + help_text="Comma separated list of exact tags to match with an AND expression") + not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", help_text="Not Tag name contains", exclude="True") + not_tags = CharFieldInFilter(field_name="tags__name", lookup_expr="in", + help_text=labels.ASSET_FILTERS_CSV_TAGS_NOT_HELP, exclude="True") + has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") + outside_of_sla = extend_schema_field(OpenApiTypes.NUMBER)(ProductSLAFilter()) + + # DateRangeFilter + created = DateRangeFilter() + updated = DateRangeFilter() + # NumberFilter + revenue = NumberFilter() + + o = OrderingFilter( + # tuple-mapping retains order + fields=( + ("id", "id"), + ("tid", "tid"), + ("name", "name"), + ("created", "created"), + ("prod_numeric_grade", "prod_numeric_grade"), + ("business_criticality", "business_criticality"), + ("platform", "platform"), + ("lifecycle", "lifecycle"), + ("origin", "origin"), + ("revenue", "revenue"), + ("external_audience", "external_audience"), + ("internet_accessible", "internet_accessible"), + ("product_manager", "asset_manager"), + ("product_manager__first_name", "asset_manager__first_name"), + ("product_manager__last_name", "asset_manager__last_name"), + ("technical_contact", "technical_contact"), + ("technical_contact__first_name", "technical_contact__first_name"), + ("technical_contact__last_name", "technical_contact__last_name"), + ("team_manager", "team_manager"), + ("team_manager__first_name", "team_manager__first_name"), + ("team_manager__last_name", "team_manager__last_name"), + ("prod_type", "prod_type"), + ("prod_type__name", "prod_type__name"), + ("updated", "updated"), + ("user_records", 
"user_records"), + ), + ) + + +class AssetMemberFilterSet(FilterSet): + asset_id = NumberFilter(field_name="product_id") + + class Meta: + model = Product_Member + fields = ("id", "user_id") + + +class AssetGroupFilterSet(FilterSet): + asset_id = NumberFilter(field_name="product_id") + + class Meta: + model = Product_Group + fields = ("id", "group_id") diff --git a/dojo/asset/api/serializers.py b/dojo/asset/api/serializers.py new file mode 100644 index 00000000000..688d772ce9b --- /dev/null +++ b/dojo/asset/api/serializers.py @@ -0,0 +1,160 @@ +from rest_framework import serializers +from rest_framework.exceptions import PermissionDenied, ValidationError + +from dojo.api_v2.serializers import ProductMetaSerializer, TagListSerializerField +from dojo.authorization.authorization import user_has_permission +from dojo.authorization.roles_permissions import Permissions +from dojo.models import ( + Dojo_User, + Product, + Product_API_Scan_Configuration, + Product_Group, + Product_Member, +) +from dojo.organization.api.serializers import RelatedOrganizationField +from dojo.product.queries import get_authorized_products + + +class RelatedAssetField(serializers.PrimaryKeyRelatedField): + def get_queryset(self): + return get_authorized_products(Permissions.Product_View) + + +class AssetAPIScanConfigurationSerializer(serializers.ModelSerializer): + asset = RelatedAssetField(source="product") + + class Meta: + model = Product_API_Scan_Configuration + exclude = ("product",) + + +class AssetSerializer(serializers.ModelSerializer): + findings_count = serializers.SerializerMethodField() + findings_list = serializers.SerializerMethodField() + + tags = TagListSerializerField(required=False) + + # V3 fields + asset_meta = ProductMetaSerializer(source="product_meta", read_only=True, many=True) + organization = RelatedOrganizationField(source="prod_type") + asset_numeric_grade = serializers.IntegerField(source="prod_numeric_grade") + enable_asset_tag_inheritance = serializers.BooleanField(source="enable_product_tag_inheritance") + asset_managers = serializers.PrimaryKeyRelatedField( + source="product_manager", + queryset=Dojo_User.objects.exclude(is_active=False)) + + class Meta: + model = Product + exclude = ( + "tid", + "updated", + "async_updating", + # Below here excluded for V3 migration + "prod_type", + "prod_numeric_grade", + "enable_product_tag_inheritance", + "product_manager", + ) + + def validate(self, data): + async_updating = getattr(self.instance, "async_updating", None) + if async_updating: + new_sla_config = data.get("sla_configuration", None) + old_sla_config = getattr(self.instance, "sla_configuration", None) + if new_sla_config and old_sla_config and new_sla_config != old_sla_config: + msg = "Finding SLA expiration dates are currently being recalculated. The SLA configuration for this asset cannot be changed until the calculation is complete." + raise serializers.ValidationError(msg) + return data + + def get_findings_count(self, obj) -> int: + return obj.findings_count + + # TODO: maybe extend_schema_field is needed here? 
+ def get_findings_list(self, obj) -> list[int]: + return obj.open_findings_list() + + +class AssetMemberSerializer(serializers.ModelSerializer): + asset = RelatedAssetField(source="product") + + class Meta: + model = Product_Member + exclude = ("product",) + + def validate(self, data): + if ( + self.instance is not None + and data.get("asset") != self.instance.product + and not user_has_permission( + self.context["request"].user, + data.get("asset"), + Permissions.Product_Manage_Members, + ) + ): + msg = "You are not permitted to add a member to this Asset" + raise PermissionDenied(msg) + + if ( + self.instance is None + or data.get("asset") != self.instance.product + or data.get("user") != self.instance.user + ): + members = Product_Member.objects.filter( + product=data.get("asset"), user=data.get("user"), + ) + if members.count() > 0: + msg = "Asset Member already exists" + raise ValidationError(msg) + + if data.get("role").is_owner and not user_has_permission( + self.context["request"].user, + data.get("asset"), + Permissions.Product_Member_Add_Owner, + ): + msg = "You are not permitted to add a member as Owner to this Asset" + raise PermissionDenied(msg) + + return data + + +class AssetGroupSerializer(serializers.ModelSerializer): + asset = RelatedAssetField(source="product") + + class Meta: + model = Product_Group + exclude = ("product",) + + def validate(self, data): + if ( + self.instance is not None + and data.get("asset") != self.instance.product + and not user_has_permission( + self.context["request"].user, + data.get("asset"), + Permissions.Product_Group_Add, + ) + ): + msg = "You are not permitted to add a group to this Asset" + raise PermissionDenied(msg) + + if ( + self.instance is None + or data.get("asset") != self.instance.product + or data.get("group") != self.instance.group + ): + members = Product_Group.objects.filter( + product=data.get("asset"), group=data.get("group"), + ) + if members.count() > 0: + msg = "Asset Group already exists" + raise ValidationError(msg) + + if data.get("role").is_owner and not user_has_permission( + self.context["request"].user, + data.get("asset"), + Permissions.Product_Group_Add_Owner, + ): + msg = "You are not permitted to add a group as Owner to this Asset" + raise PermissionDenied(msg) + + return data diff --git a/dojo/asset/api/urls.py b/dojo/asset/api/urls.py new file mode 100644 index 00000000000..706996ea27e --- /dev/null +++ b/dojo/asset/api/urls.py @@ -0,0 +1,15 @@ +from dojo.asset.api.views import ( + AssetAPIScanConfigurationViewSet, + AssetGroupViewSet, + AssetMemberViewSet, + AssetViewSet, +) + + +def add_asset_urls(router): + router.register(r"assets", AssetViewSet, basename="asset") + router.register(r"asset_api_scan_configurations", AssetAPIScanConfigurationViewSet, + basename="asset_api_scan_configuration") + router.register(r"asset_groups", AssetGroupViewSet, basename="asset_group") + router.register(r"asset_members", AssetMemberViewSet, basename="asset_member") + return router diff --git a/dojo/asset/api/views.py b/dojo/asset/api/views.py new file mode 100644 index 00000000000..d3a873f97da --- /dev/null +++ b/dojo/asset/api/views.py @@ -0,0 +1,183 @@ +from django_filters.rest_framework import DjangoFilterBackend +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework import mixins, status, viewsets +from rest_framework.decorators import action +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response + +import dojo.api_v2.mixins as 
dojo_mixins +from dojo.api_v2 import permissions, prefetch +from dojo.api_v2.serializers import ReportGenerateOptionSerializer, ReportGenerateSerializer +from dojo.api_v2.views import PrefetchDojoModelViewSet, report_generate, schema_with_prefetch +from dojo.asset.api import serializers +from dojo.asset.api.filters import ( + ApiAssetFilter, + AssetAPIScanConfigurationFilterSet, + AssetGroupFilterSet, + AssetMemberFilterSet, +) +from dojo.authorization.roles_permissions import Permissions +from dojo.models import ( + Product, + Product_API_Scan_Configuration, + Product_Group, + Product_Member, +) +from dojo.product.queries import ( + get_authorized_product_api_scan_configurations, + get_authorized_product_groups, + get_authorized_product_members, + get_authorized_products, +) +from dojo.utils import async_delete, get_setting + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class AssetAPIScanConfigurationViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.AssetAPIScanConfigurationSerializer + queryset = Product_API_Scan_Configuration.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = AssetAPIScanConfigurationFilterSet + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductAPIScanConfigurationPermission, + ) + + def get_queryset(self): + return get_authorized_product_api_scan_configurations( + Permissions.Product_API_Scan_Configuration_View, + ) + + +@extend_schema_view(**schema_with_prefetch()) +class AssetViewSet( + prefetch.PrefetchListMixin, + prefetch.PrefetchRetrieveMixin, + mixins.CreateModelMixin, + mixins.DestroyModelMixin, + mixins.UpdateModelMixin, + viewsets.GenericViewSet, + dojo_mixins.DeletePreviewModelMixin, +): + serializer_class = serializers.AssetSerializer + queryset = Product.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = ApiAssetFilter + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductPermission, + ) + + def get_queryset(self): + return get_authorized_products(Permissions.Product_View).distinct() + + def destroy(self, request, *args, **kwargs): + instance = self.get_object() + if get_setting("ASYNC_OBJECT_DELETE"): + async_del = async_delete() + async_del.delete(instance) + else: + instance.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + # def list(self, request): + # # Note the use of `get_queryset()` instead of `self.queryset` + # queryset = self.get_queryset() + # serializer = self.serializer_class(queryset, many=True) + # return Response(serializer.data) + + @extend_schema( + request=ReportGenerateOptionSerializer, + responses={status.HTTP_200_OK: ReportGenerateSerializer}, + ) + @action( + detail=True, methods=["post"], permission_classes=[IsAuthenticated], + ) + def generate_report(self, request, pk=None): + product = self.get_object() + + options = {} + # prepare post data + report_options = ReportGenerateOptionSerializer( + data=request.data, + ) + if report_options.is_valid(): + options["include_finding_notes"] = report_options.validated_data[ + "include_finding_notes" + ] + options["include_finding_images"] = report_options.validated_data[ + "include_finding_images" + ] + options[ + "include_executive_summary" + ] = report_options.validated_data["include_executive_summary"] + options[ + "include_table_of_contents" + ] = report_options.validated_data["include_table_of_contents"] + else: + return Response( + report_options.errors, status=status.HTTP_400_BAD_REQUEST, + ) + + data = 
report_generate(request, product, options) + report = ReportGenerateSerializer(data) + return Response(report.data) + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class AssetMemberViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.AssetMemberSerializer + queryset = Product_Member.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = AssetMemberFilterSet + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductMemberPermission, + ) + + def get_queryset(self): + return get_authorized_product_members( + Permissions.Product_View, + ).distinct() + + @extend_schema( + exclude=True, + ) + def partial_update(self, request, pk=None): + # Object authorization won't work if not all data is provided + response = {"message": "Patch function is not offered in this path."} + return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED) + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class AssetGroupViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.AssetGroupSerializer + queryset = Product_Group.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = AssetGroupFilterSet + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductGroupPermission, + ) + + def get_queryset(self): + return get_authorized_product_groups( + Permissions.Product_Group_View, + ).distinct() + + @extend_schema( + exclude=True, + ) + def partial_update(self, request, pk=None): + # Object authorization won't work if not all data is provided + response = {"message": "Patch function is not offered in this path."} + return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/dojo/asset/labels.py b/dojo/asset/labels.py new file mode 100644 index 00000000000..9061d6b05bf --- /dev/null +++ b/dojo/asset/labels.py @@ -0,0 +1,317 @@ +from django.conf import settings +from django.utils.translation import gettext_lazy as _ + + +class AssetLabelsKeys: + + """Directory of text copy used by the Asset model.""" + + ASSET_LABEL = "asset.label" + ASSET_PLURAL_LABEL = "asset.plural_label" + ASSET_ALL_LABEL = "asset.all_label" + ASSET_WITH_NAME_LABEL = "asset.with_name_label" + ASSET_NONE_FOUND_MESSAGE = "asset.none_found_label" + ASSET_MANAGER_LABEL = "asset.manager_label" + ASSET_GLOBAL_ROLE_HELP = "asset.global_role_help" + ASSET_NOTIFICATIONS_HELP = "asset.notifications_help" + ASSET_OPTIONS_LABEL = "asset.options_label" + ASSET_OPTIONS_MENU_LABEL = "asset.options_menu_label" + ASSET_COUNT_LABEL = "asset.count_label" + ASSET_ENGAGEMENTS_BY_LABEL = "asset.engagements_by_label" + ASSET_LIFECYCLE_LABEL = "asset.lifecycle_label" + ASSET_TAG_LABEL = "asset.tag_label" + ASSET_METRICS_TAG_COUNTS_LABEL = "asset.metrics.tag_counts_label" + ASSET_METRICS_TAG_COUNTS_ERROR_MESSAGE = "asset.metrics.tag_counts_error_message" + ASSET_METRICS_CRITICAL_LABEL = "asset.metrics.critical_label" + ASSET_METRICS_NO_CRITICAL_ERROR_MESSAGE = "asset.metrics.no_critical_error_message" + ASSET_METRICS_TOP_TEN_BY_SEVERITY_LABEL = "asset.metrics.top_by_severity_label" + ASSET_NOTIFICATION_WITH_NAME_CREATED_MESSAGE = "asset.notification_with_name_created_message" + ASSET_REPORT_LABEL = "asset.report_label" + ASSET_REPORT_TITLE = "asset.report_title" + ASSET_REPORT_WITH_NAME_TITLE = "asset.report_with_name_title" + ASSET_TRACKED_FILES_ADD_LABEL = "asset.tracked_files.add_label" + ASSET_TRACKED_FILES_ADD_SUCCESS_MESSAGE = "asset.tracked_files.add_success_message" + 
ASSET_TRACKED_FILES_ID_MISMATCH_ERROR_MESSAGE = "asset.tracked_files.id_mismatch_error_message" + ASSET_FINDINGS_CLOSE_LABEL = "asset.findings_close_label" + ASSET_FINDINGS_CLOSE_HELP = "asset.findings_close_help" + ASSET_TAG_INHERITANCE_ENABLE_LABEL = "asset.tag_inheritance_enable_label" + ASSET_TAG_INHERITANCE_ENABLE_HELP = "asset.tag_inheritance_enable_help" + ASSET_ENDPOINT_HELP = "asset.endpoint_help" + ASSET_CREATE_LABEL = "asset.create.label" + ASSET_CREATE_SUCCESS_MESSAGE = "asset.create.success_message" + ASSET_READ_LIST_LABEL = "asset.read.list_label" + ASSET_UPDATE_LABEL = "asset.update.label" + ASSET_UPDATE_SUCCESS_MESSAGE = "asset.update.success_message" + ASSET_UPDATE_SLA_CHANGED_MESSAGE = "asset.update.sla_changed_message" + ASSET_DELETE_LABEL = "asset.delete.label" + ASSET_DELETE_WITH_NAME_LABEL = "asset.delete.with_name_label" + ASSET_DELETE_CONFIRM_MESSAGE = "asset.delete.confirm_message" + ASSET_DELETE_SUCCESS_MESSAGE = "asset.delete.success_message" + ASSET_DELETE_SUCCESS_ASYNC_MESSAGE = "asset.delete.success_async_message" + ASSET_DELETE_WITH_NAME_SUCCESS_MESSAGE = "asset.delete.with_name_success_message" + ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE = "asset.delete.with_name_with_user_success_message" + ASSET_FILTERS_LABEL = "asset.filters.label" + ASSET_FILTERS_NAME_LABEL = "asset.filters.name_label" + ASSET_FILTERS_NAME_HELP = "asset.filters.name_help" + ASSET_FILTERS_NAME_EXACT_LABEL = "asset.filters.name_exact_label" + ASSET_FILTERS_NAME_CONTAINS_LABEL = "asset.filters.name_contains_label" + ASSET_FILTERS_NAME_CONTAINS_HELP = "asset.filters.name_contains_help" + ASSET_FILTERS_TAGS_LABEL = "asset.filters.tags_label" + ASSET_FILTERS_TAGS_HELP = "asset.filters.tags_help" + ASSET_FILTERS_NOT_TAGS_HELP = "asset.filters.not_tags_help" + ASSET_FILTERS_ASSETS_WITHOUT_TAGS_LABEL = "asset.filters.assets_without_tags_label" + ASSET_FILTERS_ASSETS_WITHOUT_TAGS_HELP = "asset.filters.assets_without_tags_help" + ASSET_FILTERS_TAGS_FILTER_LABEL = "asset.filters.tags_filter_label" + ASSET_FILTERS_TAGS_FILTER_HELP = "asset.filters.tags_filter_help" + ASSET_FILTERS_CSV_TAGS_OR_HELP = "asset.filters.csv_tags_or_help" + ASSET_FILTERS_CSV_TAGS_AND_HELP = "asset.filters.csv_tags_and_help" + ASSET_FILTERS_CSV_TAGS_NOT_HELP = "asset.filters.csv_tags_not_help" + ASSET_FILTERS_CSV_LIFECYCLES_LABEL = "asset.filters.csv_lifecycles_label" + ASSET_FILTERS_TAGS_ASSET_LABEL = "asset.filters.tags_asset_label" + ASSET_FILTERS_TAG_ASSET_LABEL = "asset.filters.tag_asset_label" + ASSET_FILTERS_TAG_ASSET_HELP = "asset.filters.tag_asset_help" + ASSET_FILTERS_NOT_TAGS_ASSET_LABEL = "asset.filters.not_tags_asset_label" + ASSET_FILTERS_WITHOUT_TAGS_LABEL = "asset.filters.without_tags_label" + ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL = "asset.filters.tag_asset_contains_label" + ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP = "asset.filters.tag_asset_contains_help" + ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL = "asset.filters.tag_not_contain_label" + ASSET_FILTERS_TAG_NOT_CONTAIN_HELP = "asset.filters.tag_not_contain_help" + ASSET_FILTERS_TAG_NOT_LABEL = "asset.filters.tag_not_label" + ASSET_FILTERS_TAG_NOT_HELP = "asset.filters.tag_not_help" + ASSET_USERS_ACCESS_LABEL = "asset.users.access_label" + ASSET_USERS_NO_ACCESS_MESSAGE = "asset.users.no_access_message" + ASSET_USERS_ADD_LABEL = "asset.users.add_label" + ASSET_USERS_USERS_ADD_LABEL = "asset.users.users_add_label" + ASSET_USERS_MEMBER_LABEL = "asset.users.member_label" + ASSET_USERS_MEMBER_ADD_LABEL = "asset.users.member_add_label" + 
ASSET_USERS_MEMBER_ADD_SUCCESS_MESSAGE = "asset.users.member_add_success_message" + ASSET_USERS_MEMBER_UPDATE_LABEL = "asset.users.member_update_label" + ASSET_USERS_MEMBER_UPDATE_SUCCESS_MESSAGE = "asset.users.member_update_success_message" + ASSET_USERS_MEMBER_DELETE_LABEL = "asset.users.member_delete_label" + ASSET_USERS_MEMBER_DELETE_SUCCESS_MESSAGE = "asset.users.member_delete_success_message" + ASSET_GROUPS_ACCESS_LABEL = "asset.groups.access_label" + ASSET_GROUPS_NO_ACCESS_MESSAGE = "asset.groups.no_access_message" + ASSET_GROUPS_MEMBER_LABEL = "asset.groups.member_label" + ASSET_GROUPS_ADD_LABEL = "asset.groups.add_label" + ASSET_GROUPS_ADD_SUCCESS_MESSAGE = "asset.groups.add_success_message" + ASSET_GROUPS_UPDATE_LABEL = "asset.groups.update_label" + ASSET_GROUPS_UPDATE_SUCCESS_MESSAGE = "asset.groups.update_success_message" + ASSET_GROUPS_DELETE_LABEL = "asset.groups.delete_label" + ASSET_GROUPS_DELETE_SUCCESS_MESSAGE = "asset.groups.delete_success_message" + ASSET_GROUPS_ADD_ASSETS_LABEL = "asset.groups.add_assets_label" + ASSET_GROUPS_NUM_ASSETS_LABEL = "asset.groups.num_assets_label" + + +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + labels = { + AssetLabelsKeys.ASSET_LABEL: _("Asset"), + AssetLabelsKeys.ASSET_PLURAL_LABEL: _("Assets"), + AssetLabelsKeys.ASSET_ALL_LABEL: _("All Assets"), + AssetLabelsKeys.ASSET_WITH_NAME_LABEL: _("Asset '%(name)s'"), + AssetLabelsKeys.ASSET_NONE_FOUND_MESSAGE: _("No Assets found."), + AssetLabelsKeys.ASSET_MANAGER_LABEL: _("Asset Manager"), + AssetLabelsKeys.ASSET_GLOBAL_ROLE_HELP: _("The global role will be applied to all Organizations and Assets."), + AssetLabelsKeys.ASSET_NOTIFICATIONS_HELP: _("These are your personal settings for this Asset."), + AssetLabelsKeys.ASSET_OPTIONS_LABEL: _("Asset Options"), + AssetLabelsKeys.ASSET_OPTIONS_MENU_LABEL: _("Asset Options Menu"), + AssetLabelsKeys.ASSET_COUNT_LABEL: _("Asset Count"), + AssetLabelsKeys.ASSET_ENGAGEMENTS_BY_LABEL: _("Engagements by Asset"), + AssetLabelsKeys.ASSET_LIFECYCLE_LABEL: _("Asset Lifecycle"), + AssetLabelsKeys.ASSET_TAG_LABEL: _("Asset Tag"), + AssetLabelsKeys.ASSET_METRICS_TAG_COUNTS_LABEL: _("Asset Tag Counts"), + AssetLabelsKeys.ASSET_METRICS_TAG_COUNTS_ERROR_MESSAGE: _("Please choose month and year and the Asset Tag."), + AssetLabelsKeys.ASSET_METRICS_CRITICAL_LABEL: _("Critical Asset Metrics"), + AssetLabelsKeys.ASSET_METRICS_NO_CRITICAL_ERROR_MESSAGE: _("No Critical Assets registered"), + AssetLabelsKeys.ASSET_METRICS_TOP_TEN_BY_SEVERITY_LABEL: _("Top 10 Assets by bug severity"), + AssetLabelsKeys.ASSET_NOTIFICATION_WITH_NAME_CREATED_MESSAGE: _("Asset %(name)s has been created successfully."), + AssetLabelsKeys.ASSET_REPORT_LABEL: _("Asset Report"), + AssetLabelsKeys.ASSET_REPORT_TITLE: _("Asset Report"), + AssetLabelsKeys.ASSET_REPORT_WITH_NAME_TITLE: _("Asset Report: %(name)s"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ADD_LABEL: _("Add Tracked Files to an Asset"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ADD_SUCCESS_MESSAGE: _("Added Tracked File to an Asset"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ID_MISMATCH_ERROR_MESSAGE: _( + "Asset %(asset_id)s does not match Asset of Object %(object_asset_id)s"), + AssetLabelsKeys.ASSET_FINDINGS_CLOSE_LABEL: _("Close old findings within this Asset"), + AssetLabelsKeys.ASSET_FINDINGS_CLOSE_HELP: _( + "Old findings no longer present in the new report get closed as mitigated when importing. 
If service has been set, only the findings for this service will be closed; if no service is set, only findings without a service will be closed. This affects findings within the same Asset."), + AssetLabelsKeys.ASSET_TAG_INHERITANCE_ENABLE_LABEL: _("Enable Asset Tag Inheritance"), + AssetLabelsKeys.ASSET_TAG_INHERITANCE_ENABLE_HELP: _( + "Enables Asset tag inheritance. Any tags added on an Asset will automatically be added to all Engagements, Tests, and Findings."), + AssetLabelsKeys.ASSET_ENDPOINT_HELP: _("The Asset this Endpoint should be associated with."), + AssetLabelsKeys.ASSET_CREATE_LABEL: _("Add Asset"), + AssetLabelsKeys.ASSET_CREATE_SUCCESS_MESSAGE: _("Asset added successfully."), + AssetLabelsKeys.ASSET_READ_LIST_LABEL: _("Asset List"), + AssetLabelsKeys.ASSET_UPDATE_LABEL: _("Edit Asset"), + AssetLabelsKeys.ASSET_UPDATE_SUCCESS_MESSAGE: _("Asset updated successfully."), + AssetLabelsKeys.ASSET_UPDATE_SLA_CHANGED_MESSAGE: _( + "All SLA expiration dates for Findings within this Asset will be recalculated asynchronously for the newly assigned SLA configuration."), + AssetLabelsKeys.ASSET_DELETE_LABEL: _("Delete Asset"), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_LABEL: _("Delete Asset %(name)s"), + AssetLabelsKeys.ASSET_DELETE_CONFIRM_MESSAGE: _( + "Deleting this Asset will remove any related objects associated with it. These relationships are listed below: "), + AssetLabelsKeys.ASSET_DELETE_SUCCESS_MESSAGE: _("Asset and relationships removed."), + AssetLabelsKeys.ASSET_DELETE_SUCCESS_ASYNC_MESSAGE: _("Asset and relationships will be removed in the background."), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_SUCCESS_MESSAGE: _('The Asset "%(name)s" was deleted'), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE: _('The Asset "%(name)s" was deleted by %(user)s'), + AssetLabelsKeys.ASSET_FILTERS_LABEL: _("Asset"), + AssetLabelsKeys.ASSET_FILTERS_NAME_LABEL: _("Asset Name"), + AssetLabelsKeys.ASSET_FILTERS_NAME_HELP: _("Search for Asset names that are an exact match"), + AssetLabelsKeys.ASSET_FILTERS_NAME_EXACT_LABEL: _("Exact Asset Name"), + AssetLabelsKeys.ASSET_FILTERS_NAME_CONTAINS_LABEL: _("Asset Name Contains"), + AssetLabelsKeys.ASSET_FILTERS_NAME_CONTAINS_HELP: _("Search for Asset names that contain a given pattern"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_LABEL: _("Tags (Asset)"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_HELP: _("Filter for Assets with the given tags"), + AssetLabelsKeys.ASSET_FILTERS_NOT_TAGS_HELP: _("Filter for Assets that do not have the given tags"), + AssetLabelsKeys.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_LABEL: _("Assets without tags"), + AssetLabelsKeys.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_HELP: _( + "Search for tags on an Asset that contain a given pattern, and exclude them"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_FILTER_LABEL: _("Asset with tags"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_FILTER_HELP: _("Filter Assets by the selected tags"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_OR_HELP: _( + "Comma separated list of exact tags present on Asset (uses OR for multiple values)"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_AND_HELP: _( + "Comma separated list of exact tags to match with an AND expression present on Asset"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_NOT_HELP: _("Comma separated list of exact tags not present on Asset"), + AssetLabelsKeys.ASSET_FILTERS_CSV_LIFECYCLES_LABEL: _("Comma separated list of exact Asset lifecycles"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_ASSET_LABEL: _("Asset Tags"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_LABEL: 
_("Asset Tag"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_HELP: _("Search for tags on an Asset that are an exact match"), + AssetLabelsKeys.ASSET_FILTERS_NOT_TAGS_ASSET_LABEL: _("Not Asset Tags"), + AssetLabelsKeys.ASSET_FILTERS_WITHOUT_TAGS_LABEL: _("Asset without tags"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL: _("Asset Tag Contains"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP: _("Search for tags on an Asset that contain a given pattern"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL: _("Asset Tag Does Not Contain"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_CONTAIN_HELP: _( + "Search for tags on an Asset that contain a given pattern, and exclude them"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_LABEL: _("Not Asset Tag"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_HELP: _("Search for tags on an Asset that are an exact match, and exclude them"), + AssetLabelsKeys.ASSET_USERS_ACCESS_LABEL: _("Assets this User can access"), + AssetLabelsKeys.ASSET_USERS_NO_ACCESS_MESSAGE: _("This User is not assigned to any Assets."), + AssetLabelsKeys.ASSET_USERS_ADD_LABEL: _("Add Assets"), + AssetLabelsKeys.ASSET_USERS_USERS_ADD_LABEL: _("Add Users"), + AssetLabelsKeys.ASSET_USERS_MEMBER_LABEL: _("Asset Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_ADD_LABEL: _("Add Asset Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_ADD_SUCCESS_MESSAGE: _("Asset members added successfully."), + AssetLabelsKeys.ASSET_USERS_MEMBER_UPDATE_LABEL: _("Edit Asset Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_UPDATE_SUCCESS_MESSAGE: _("Asset member updated successfully."), + AssetLabelsKeys.ASSET_USERS_MEMBER_DELETE_LABEL: _("Delete Asset Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_DELETE_SUCCESS_MESSAGE: _("Asset member deleted successfully."), + AssetLabelsKeys.ASSET_GROUPS_ACCESS_LABEL: _("Assets this Group can access"), + AssetLabelsKeys.ASSET_GROUPS_NO_ACCESS_MESSAGE: _("This Group cannot access any Assets."), + AssetLabelsKeys.ASSET_GROUPS_MEMBER_LABEL: _("Asset Group"), + AssetLabelsKeys.ASSET_GROUPS_ADD_LABEL: _("Add Asset Group"), + AssetLabelsKeys.ASSET_GROUPS_ADD_SUCCESS_MESSAGE: _("Asset groups added successfully."), + AssetLabelsKeys.ASSET_GROUPS_UPDATE_LABEL: _("Edit Asset Group"), + AssetLabelsKeys.ASSET_GROUPS_UPDATE_SUCCESS_MESSAGE: _("Asset group updated successfully."), + AssetLabelsKeys.ASSET_GROUPS_DELETE_LABEL: _("Delete Asset Group"), + AssetLabelsKeys.ASSET_GROUPS_DELETE_SUCCESS_MESSAGE: _("Asset group deleted successfully."), + AssetLabelsKeys.ASSET_GROUPS_ADD_ASSETS_LABEL: _("Add Assets"), + AssetLabelsKeys.ASSET_GROUPS_NUM_ASSETS_LABEL: _("Number of Assets"), + } +else: + labels = { + AssetLabelsKeys.ASSET_LABEL: _("Product"), + AssetLabelsKeys.ASSET_PLURAL_LABEL: _("Products"), + AssetLabelsKeys.ASSET_ALL_LABEL: _("All Products"), + AssetLabelsKeys.ASSET_WITH_NAME_LABEL: _("Product '%(name)s'"), + AssetLabelsKeys.ASSET_NONE_FOUND_MESSAGE: _("No Products found."), + AssetLabelsKeys.ASSET_MANAGER_LABEL: _("Product Manager"), + AssetLabelsKeys.ASSET_GLOBAL_ROLE_HELP: _("The global role will be applied to all Product Types and Products."), + AssetLabelsKeys.ASSET_NOTIFICATIONS_HELP: _("These are your personal settings for this Product."), + AssetLabelsKeys.ASSET_OPTIONS_LABEL: _("Product Options"), + AssetLabelsKeys.ASSET_OPTIONS_MENU_LABEL: _("Product Options Menu"), + AssetLabelsKeys.ASSET_COUNT_LABEL: _("Product Count"), + AssetLabelsKeys.ASSET_ENGAGEMENTS_BY_LABEL: _("Engagements by Product"), + AssetLabelsKeys.ASSET_LIFECYCLE_LABEL: _("Product Lifecycle"), + 
AssetLabelsKeys.ASSET_TAG_LABEL: _("Product Tag"), + AssetLabelsKeys.ASSET_METRICS_TAG_COUNTS_LABEL: _("Product Tag Counts"), + AssetLabelsKeys.ASSET_METRICS_TAG_COUNTS_ERROR_MESSAGE: _("Please choose month and year and the Product Tag."), + AssetLabelsKeys.ASSET_METRICS_CRITICAL_LABEL: _("Critical Product Metrics"), + AssetLabelsKeys.ASSET_METRICS_NO_CRITICAL_ERROR_MESSAGE: _("No Critical Products registered"), + AssetLabelsKeys.ASSET_METRICS_TOP_TEN_BY_SEVERITY_LABEL: _("Top 10 Products by bug severity"), + AssetLabelsKeys.ASSET_NOTIFICATION_WITH_NAME_CREATED_MESSAGE: _("Product %(name)s has been created successfully."), + AssetLabelsKeys.ASSET_REPORT_LABEL: _("Product Report"), + AssetLabelsKeys.ASSET_REPORT_TITLE: _("Product Report"), + AssetLabelsKeys.ASSET_REPORT_WITH_NAME_TITLE: _("Product Report: %(name)s"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ADD_LABEL: _("Add Tracked Files to a Product"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ADD_SUCCESS_MESSAGE: _("Added Tracked File to a Product"), + AssetLabelsKeys.ASSET_TRACKED_FILES_ID_MISMATCH_ERROR_MESSAGE: _( + "Product %(asset_id)s does not match Product of Object %(object_asset_id)s"), + AssetLabelsKeys.ASSET_FINDINGS_CLOSE_LABEL: _("Close old findings within this Product"), + AssetLabelsKeys.ASSET_FINDINGS_CLOSE_HELP: _( + "Old findings no longer present in the new report get closed as mitigated when importing. If service has been set, only the findings for this service will be closed; if no service is set, only findings without a service will be closed. This affects findings within the same product."), + AssetLabelsKeys.ASSET_TAG_INHERITANCE_ENABLE_LABEL: _("Enable Product Tag Inheritance"), + AssetLabelsKeys.ASSET_TAG_INHERITANCE_ENABLE_HELP: _( + "Enables Product tag inheritance. Any tags added on an Product will automatically be added to all Engagements, Tests, and Findings."), + AssetLabelsKeys.ASSET_ENDPOINT_HELP: _("The Product this Endpoint should be associated with."), + AssetLabelsKeys.ASSET_CREATE_LABEL: _("Add Product"), + AssetLabelsKeys.ASSET_CREATE_SUCCESS_MESSAGE: _("Product added successfully."), + AssetLabelsKeys.ASSET_READ_LIST_LABEL: _("Product List"), + AssetLabelsKeys.ASSET_UPDATE_LABEL: _("Edit Product"), + AssetLabelsKeys.ASSET_UPDATE_SUCCESS_MESSAGE: _("Product updated successfully."), + AssetLabelsKeys.ASSET_UPDATE_SLA_CHANGED_MESSAGE: _( + "All SLA expiration dates for Findings within this Product will be recalculated asynchronously for the newly assigned SLA configuration."), + AssetLabelsKeys.ASSET_DELETE_LABEL: _("Delete Product"), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_LABEL: _("Delete Product %(name)s"), + AssetLabelsKeys.ASSET_DELETE_CONFIRM_MESSAGE: _( + "Deleting this Product will remove any related objects associated with it. 
These relationships are listed below: "), + AssetLabelsKeys.ASSET_DELETE_SUCCESS_MESSAGE: _("Product and relationships removed."), + AssetLabelsKeys.ASSET_DELETE_SUCCESS_ASYNC_MESSAGE: _("Product and relationships will be removed in the background."), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_SUCCESS_MESSAGE: _('The product "%(name)s" was deleted'), + AssetLabelsKeys.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE: _('The product "%(name)s" was deleted by %(user)s'), + AssetLabelsKeys.ASSET_FILTERS_LABEL: _("Product"), + AssetLabelsKeys.ASSET_FILTERS_NAME_LABEL: _("Product Name"), + AssetLabelsKeys.ASSET_FILTERS_NAME_HELP: _("Search for Product names that are an exact match"), + AssetLabelsKeys.ASSET_FILTERS_NAME_EXACT_LABEL: _("Exact Product Name"), + AssetLabelsKeys.ASSET_FILTERS_NAME_CONTAINS_LABEL: _("Product Name Contains"), + AssetLabelsKeys.ASSET_FILTERS_NAME_CONTAINS_HELP: _("Search for Product names that contain a given pattern"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_LABEL: _("Tags (Product)"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_HELP: _("Filter for Products with the given tags"), + AssetLabelsKeys.ASSET_FILTERS_NOT_TAGS_HELP: _("Filter for Products that do not have the given tags"), + AssetLabelsKeys.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_LABEL: _("Products without tags"), + AssetLabelsKeys.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_HELP: _( + "Search for tags on an Product that contain a given pattern, and exclude them"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_FILTER_LABEL: _("Product with tags"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_FILTER_HELP: _("Filter Products by the selected tags"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_OR_HELP: _( + "Comma separated list of exact tags present on Product (uses OR for multiple values)"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_AND_HELP: _( + "Comma separated list of exact tags to match with an AND expression present on Product"), + AssetLabelsKeys.ASSET_FILTERS_CSV_TAGS_NOT_HELP: _("Comma separated list of exact tags not present on Product"), + AssetLabelsKeys.ASSET_FILTERS_CSV_LIFECYCLES_LABEL: _("Comma separated list of exact Product lifecycles"), + AssetLabelsKeys.ASSET_FILTERS_TAGS_ASSET_LABEL: _("Product Tags"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_LABEL: _("Product Tag"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_HELP: _("Search for tags on an Product that are an exact match"), + AssetLabelsKeys.ASSET_FILTERS_NOT_TAGS_ASSET_LABEL: _("Not Product Tags"), + AssetLabelsKeys.ASSET_FILTERS_WITHOUT_TAGS_LABEL: _("Product without tags"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL: _("Product Tag Contains"), + AssetLabelsKeys.ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP: _("Search for tags on an Product that contain a given pattern"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL: _("Product Tag Does Not Contain"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_CONTAIN_HELP: _( + "Search for tags on an Product that contain a given pattern, and exclude them"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_LABEL: _("Not Product Tag"), + AssetLabelsKeys.ASSET_FILTERS_TAG_NOT_HELP: _("Search for tags on an Product that are an exact match, and exclude them"), + AssetLabelsKeys.ASSET_USERS_ACCESS_LABEL: _("Products this User can access"), + AssetLabelsKeys.ASSET_USERS_NO_ACCESS_MESSAGE: _("This User is not assigned to any Products."), + AssetLabelsKeys.ASSET_USERS_ADD_LABEL: _("Add Products"), + AssetLabelsKeys.ASSET_USERS_USERS_ADD_LABEL: _("Add Users"), + AssetLabelsKeys.ASSET_USERS_MEMBER_LABEL: _("Product Member"), + 
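Several of these entries carry %(name)s / %(user)s style placeholders rather than finished sentences. A small sketch of how such a label would typically be filled in at a call site; the function and variable names here are stand-ins, not code from this PR.

    from dojo.asset.labels import AssetLabelsKeys, labels

    def deletion_banner(asset_name: str, username: str) -> str:
        template = labels[AssetLabelsKeys.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE]
        # Force the lazy translation to a plain string before interpolating
        # the named placeholders it contains.
        return str(template) % {"name": asset_name, "user": username}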
AssetLabelsKeys.ASSET_USERS_MEMBER_ADD_LABEL: _("Add Product Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_ADD_SUCCESS_MESSAGE: _("Product members added successfully."), + AssetLabelsKeys.ASSET_USERS_MEMBER_UPDATE_LABEL: _("Edit Product Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_UPDATE_SUCCESS_MESSAGE: _("Product member updated successfully."), + AssetLabelsKeys.ASSET_USERS_MEMBER_DELETE_LABEL: _("Delete Product Member"), + AssetLabelsKeys.ASSET_USERS_MEMBER_DELETE_SUCCESS_MESSAGE: _("Product member deleted successfully."), + AssetLabelsKeys.ASSET_GROUPS_ACCESS_LABEL: _("Products this Group can access"), + AssetLabelsKeys.ASSET_GROUPS_NO_ACCESS_MESSAGE: _("This Group cannot access any Products."), + AssetLabelsKeys.ASSET_GROUPS_MEMBER_LABEL: _("Product Group"), + AssetLabelsKeys.ASSET_GROUPS_ADD_LABEL: _("Add Product Group"), + AssetLabelsKeys.ASSET_GROUPS_ADD_SUCCESS_MESSAGE: _("Product groups added successfully."), + AssetLabelsKeys.ASSET_GROUPS_UPDATE_LABEL: _("Edit Product Group"), + AssetLabelsKeys.ASSET_GROUPS_UPDATE_SUCCESS_MESSAGE: _("Product group updated successfully."), + AssetLabelsKeys.ASSET_GROUPS_DELETE_LABEL: _("Delete Product Group"), + AssetLabelsKeys.ASSET_GROUPS_DELETE_SUCCESS_MESSAGE: _("Product group deleted successfully."), + AssetLabelsKeys.ASSET_GROUPS_ADD_ASSETS_LABEL: _("Add Products"), + AssetLabelsKeys.ASSET_GROUPS_NUM_ASSETS_LABEL: _("Number of Products"), + } diff --git a/dojo/asset/urls.py b/dojo/asset/urls.py new file mode 100644 index 00000000000..e248348b74b --- /dev/null +++ b/dojo/asset/urls.py @@ -0,0 +1,317 @@ +from django.conf import settings +from django.urls import re_path + +from dojo.engagement import views as dojo_engagement_views +from dojo.product import views +from dojo.utils import redirect_view + +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + urlpatterns = [ + re_path( + r"^asset$", + views.product, + name="product", + ), + re_path( + r"^asset/(?P\d+)$", + views.view_product, + name="view_product", + ), + re_path( + r"^asset/(?P\d+)/components$", + views.view_product_components, + name="view_product_components", + ), + re_path( + r"^asset/(?P\d+)/engagements$", + views.view_engagements, + name="view_engagements", + ), + re_path( + r"^asset/(?P\d+)/import_scan_results$", + dojo_engagement_views.ImportScanResultsView.as_view(), + name="import_scan_results_prod", + ), + re_path( + r"^asset/(?P\d+)/metrics$", + views.view_product_metrics, + name="view_product_metrics", + ), + re_path( + r"^asset/(?P\d+)/async_burndown_metrics$", + views.async_burndown_metrics, + name="async_burndown_metrics", + ), + re_path( + r"^asset/(?P\d+)/edit$", + views.edit_product, + name="edit_product", + ), + re_path( + r"^asset/(?P\d+)/delete$", + views.delete_product, + name="delete_product", + ), + re_path( + r"^asset/add", + views.new_product, + name="new_product", + ), + re_path( + r"^asset/(?P\d+)/new_engagement$", + views.new_eng_for_app, + name="new_eng_for_prod", + ), + re_path( + r"^asset/(?P\d+)/new_technology$", + views.new_tech_for_prod, + name="new_tech_for_prod", + ), + re_path( + r"^technology/(?P\d+)/edit$", + views.edit_technology, + name="edit_technology", + ), + re_path( + r"^technology/(?P\d+)/delete$", + views.delete_technology, + name="delete_technology", + ), + re_path( + r"^asset/(?P\d+)/new_engagement/cicd$", + views.new_eng_for_app_cicd, + name="new_eng_for_prod_cicd", + ), + re_path( + r"^asset/(?P\d+)/add_meta_data$", + views.add_meta_data, + name="add_meta_data", + 
), + re_path( + r"^asset/(?P\d+)/edit_notifications$", + views.edit_notifications, + name="edit_notifications", + ), + re_path( + r"^asset/(?P\d+)/edit_meta_data$", + views.edit_meta_data, + name="edit_meta_data", + ), + re_path( + r"^asset/(?P\d+)/ad_hoc_finding$", + views.AdHocFindingView.as_view(), + name="ad_hoc_finding", + ), + re_path( + r"^asset/(?P\d+)/engagement_presets$", + views.engagement_presets, + name="engagement_presets", + ), + re_path( + r"^asset/(?P\d+)/engagement_presets/(?P\d+)/edit$", + views.edit_engagement_presets, + name="edit_engagement_presets", + ), + re_path( + r"^asset/(?P\d+)/engagement_presets/add$", + views.add_engagement_presets, + name="add_engagement_presets", + ), + re_path( + r"^asset/(?P\d+)/engagement_presets/(?P\d+)/delete$", + views.delete_engagement_presets, + name="delete_engagement_presets", + ), + re_path( + r"^asset/(?P\d+)/add_member$", + views.add_product_member, + name="add_product_member", + ), + re_path( + r"^asset/member/(?P\d+)/edit$", + views.edit_product_member, + name="edit_product_member", + ), + re_path( + r"^asset/member/(?P\d+)/delete$", + views.delete_product_member, + name="delete_product_member", + ), + re_path( + r"^asset/(?P\d+)/add_api_scan_configuration$", + views.add_api_scan_configuration, + name="add_api_scan_configuration", + ), + re_path( + r"^asset/(?P\d+)/view_api_scan_configurations$", + views.view_api_scan_configurations, + name="view_api_scan_configurations", + ), + re_path( + r"^asset/(?P\d+)/edit_api_scan_configuration/(?P\d+)$", + views.edit_api_scan_configuration, + name="edit_api_scan_configuration", + ), + re_path( + r"^asset/(?P\d+)/delete_api_scan_configuration/(?P\d+)$", + views.delete_api_scan_configuration, + name="delete_api_scan_configuration", + ), + re_path( + r"^asset/(?P\d+)/add_group$", + views.add_product_group, + name="add_product_group", + ), + re_path( + r"^asset/group/(?P\d+)/edit$", + views.edit_product_group, + name="edit_product_group", + ), + re_path( + r"^asset/group/(?P\d+)/delete$", + views.delete_product_group, + name="delete_product_group", + ), + # TODO: Backwards compatibility; remove after v3 migration is complete + re_path(r"^product$", redirect_view("product")), + re_path(r"^product/(?P\d+)$", redirect_view("view_product")), + re_path(r"^product/(?P\d+)/components$", redirect_view("view_product_components")), + re_path(r"^product/(?P\d+)/engagements$", redirect_view("view_engagements")), + re_path(r"^product/(?P\d+)/import_scan_results$", redirect_view("import_scan_results_prod")), + re_path(r"^product/(?P\d+)/metrics$", redirect_view("view_product_metrics")), + re_path(r"^product/(?P\d+)/async_burndown_metrics$", redirect_view("async_burndown_metrics")), + re_path(r"^product/(?P\d+)/edit$", redirect_view("edit_product")), + re_path(r"^product/(?P\d+)/delete$", redirect_view("delete_product")), + re_path(r"^product/add", redirect_view("new_product")), + re_path(r"^product/(?P\d+)/new_engagement$", redirect_view("new_eng_for_prod")), + re_path(r"^product/(?P\d+)/new_technology$", redirect_view("new_tech_for_prod")), + re_path(r"^product/(?P\d+)/new_engagement/cicd$", redirect_view("new_eng_for_prod_cicd")), + re_path(r"^product/(?P\d+)/add_meta_data$", redirect_view("add_meta_data")), + re_path(r"^product/(?P\d+)/edit_notifications$", redirect_view("edit_notifications")), + re_path(r"^product/(?P\d+)/edit_meta_data$", redirect_view("edit_meta_data")), + re_path(r"^product/(?P\d+)/ad_hoc_finding$", redirect_view("ad_hoc_finding")), + 
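The redirect_view entries in this backwards-compatibility block map the legacy /product/... URLs onto the new named routes. The helper itself comes from dojo.utils and is not shown in this diff, so the following is only a plausible minimal sketch of such a factory, offered as an assumption rather than the actual DefectDojo implementation.

    from django.shortcuts import redirect

    def redirect_view(route_name):
        # Return a tiny view that forwards any captured URL arguments to the named route.
        def _view(request, *args, **kwargs):
            response = redirect(route_name, *args, **kwargs)
            # Keep query strings such as ?page=2 intact across the redirect.
            if request.META.get("QUERY_STRING"):
                response["Location"] += "?" + request.META["QUERY_STRING"]
            return response
        return _view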
re_path(r"^product/(?P\d+)/engagement_presets$", redirect_view("engagement_presets")), + re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/edit$", redirect_view("edit_engagement_presets")), + re_path(r"^product/(?P\d+)/engagement_presets/add$", redirect_view("add_engagement_presets")), + re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/delete$", redirect_view("delete_engagement_presets")), + re_path(r"^product/(?P\d+)/add_member$", redirect_view("add_product_member")), + re_path(r"^product/member/(?P\d+)/edit$", redirect_view("edit_product_member")), + re_path(r"^product/member/(?P\d+)/delete$", redirect_view("delete_product_member")), + re_path(r"^product/(?P\d+)/add_api_scan_configuration$", redirect_view("add_api_scan_configuration")), + re_path(r"^product/(?P\d+)/view_api_scan_configurations$", redirect_view("view_api_scan_configurations")), + re_path(r"^product/(?P\d+)/edit_api_scan_configuration/(?P\d+)$", redirect_view("edit_api_scan_configuration")), + re_path(r"^product/(?P\d+)/delete_api_scan_configuration/(?P\d+)$", redirect_view("delete_api_scan_configuration")), + re_path(r"^product/(?P\d+)/add_group$", redirect_view("add_product_group")), + re_path(r"^product/group/(?P\d+)/edit$", redirect_view("edit_product_group")), + re_path(r"^product/group/(?P\d+)/delete$", redirect_view("delete_product_group")), + ] +else: + urlpatterns = [ + # product + re_path(r"^product$", views.product, name="product"), + re_path(r"^product/(?P\d+)$", views.view_product, + name="view_product"), + re_path(r"^product/(?P\d+)/components$", views.view_product_components, + name="view_product_components"), + re_path(r"^product/(?P\d+)/engagements$", views.view_engagements, + name="view_engagements"), + re_path( + r"^product/(?P\d+)/import_scan_results$", + dojo_engagement_views.ImportScanResultsView.as_view(), + name="import_scan_results_prod"), + re_path(r"^product/(?P\d+)/metrics$", views.view_product_metrics, + name="view_product_metrics"), + re_path(r"^product/(?P\d+)/async_burndown_metrics$", views.async_burndown_metrics, + name="async_burndown_metrics"), + re_path(r"^product/(?P\d+)/edit$", views.edit_product, + name="edit_product"), + re_path(r"^product/(?P\d+)/delete$", views.delete_product, + name="delete_product"), + re_path(r"^product/add", views.new_product, name="new_product"), + re_path(r"^product/(?P\d+)/new_engagement$", views.new_eng_for_app, + name="new_eng_for_prod"), + re_path(r"^product/(?P\d+)/new_technology$", views.new_tech_for_prod, + name="new_tech_for_prod"), + re_path(r"^technology/(?P\d+)/edit$", views.edit_technology, + name="edit_technology"), + re_path(r"^technology/(?P\d+)/delete$", views.delete_technology, + name="delete_technology"), + re_path(r"^product/(?P\d+)/new_engagement/cicd$", views.new_eng_for_app_cicd, + name="new_eng_for_prod_cicd"), + re_path(r"^product/(?P\d+)/add_meta_data$", views.add_meta_data, + name="add_meta_data"), + re_path(r"^product/(?P\d+)/edit_notifications$", views.edit_notifications, + name="edit_notifications"), + re_path(r"^product/(?P\d+)/edit_meta_data$", views.edit_meta_data, + name="edit_meta_data"), + re_path( + r"^product/(?P\d+)/ad_hoc_finding$", + views.AdHocFindingView.as_view(), + name="ad_hoc_finding"), + re_path(r"^product/(?P\d+)/engagement_presets$", views.engagement_presets, + name="engagement_presets"), + re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/edit$", views.edit_engagement_presets, + name="edit_engagement_presets"), + re_path(r"^product/(?P\d+)/engagement_presets/add$", views.add_engagement_presets, + 
name="add_engagement_presets"), + re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/delete$", views.delete_engagement_presets, + name="delete_engagement_presets"), + re_path(r"^product/(?P\d+)/add_member$", views.add_product_member, + name="add_product_member"), + re_path(r"^product/member/(?P\d+)/edit$", views.edit_product_member, + name="edit_product_member"), + re_path(r"^product/member/(?P\d+)/delete$", views.delete_product_member, + name="delete_product_member"), + re_path(r"^product/(?P\d+)/add_api_scan_configuration$", views.add_api_scan_configuration, + name="add_api_scan_configuration"), + re_path(r"^product/(?P\d+)/view_api_scan_configurations$", views.view_api_scan_configurations, + name="view_api_scan_configurations"), + re_path(r"^product/(?P\d+)/edit_api_scan_configuration/(?P\d+)$", + views.edit_api_scan_configuration, + name="edit_api_scan_configuration"), + re_path(r"^product/(?P\d+)/delete_api_scan_configuration/(?P\d+)$", + views.delete_api_scan_configuration, + name="delete_api_scan_configuration"), + re_path(r"^product/(?P\d+)/add_group$", views.add_product_group, + name="add_product_group"), + re_path(r"^product/group/(?P\d+)/edit$", views.edit_product_group, + name="edit_product_group"), + re_path(r"^product/group/(?P\d+)/delete$", views.delete_product_group, + name="delete_product_group"), + # Forward compatibility + re_path(r"^asset$", redirect_view("product")), + re_path(r"^asset/(?P\d+)$", redirect_view("view_product")), + re_path(r"^asset/(?P\d+)/components$", redirect_view("view_product_components")), + re_path(r"^asset/(?P\d+)/engagements$", redirect_view("view_engagements")), + re_path(r"^asset/(?P\d+)/import_scan_results$", redirect_view("import_scan_results_prod")), + re_path(r"^asset/(?P\d+)/metrics$", redirect_view("view_product_metrics")), + re_path(r"^asset/(?P\d+)/async_burndown_metrics$", redirect_view("async_burndown_metrics")), + re_path(r"^asset/(?P\d+)/edit$", redirect_view("edit_product")), + re_path(r"^asset/(?P\d+)/delete$", redirect_view("delete_product")), + re_path(r"^asset/add", redirect_view("new_product")), + re_path(r"^asset/(?P\d+)/new_engagement$", redirect_view("new_eng_for_prod")), + re_path(r"^asset/(?P\d+)/new_technology$", redirect_view("new_tech_for_prod")), + re_path(r"^asset/(?P\d+)/new_engagement/cicd$", redirect_view("new_eng_for_prod_cicd")), + re_path(r"^asset/(?P\d+)/add_meta_data$", redirect_view("add_meta_data")), + re_path(r"^asset/(?P\d+)/edit_notifications$", redirect_view("edit_notifications")), + re_path(r"^asset/(?P\d+)/edit_meta_data$", redirect_view("edit_meta_data")), + re_path(r"^asset/(?P\d+)/ad_hoc_finding$", redirect_view("ad_hoc_finding")), + re_path(r"^asset/(?P\d+)/engagement_presets$", redirect_view("engagement_presets")), + re_path(r"^asset/(?P\d+)/engagement_presets/(?P\d+)/edit$", redirect_view("edit_engagement_presets")), + re_path(r"^asset/(?P\d+)/engagement_presets/add$", redirect_view("add_engagement_presets")), + re_path(r"^asset/(?P\d+)/engagement_presets/(?P\d+)/delete$", + redirect_view("delete_engagement_presets")), + re_path(r"^asset/(?P\d+)/add_member$", redirect_view("add_product_member")), + re_path(r"^asset/member/(?P\d+)/edit$", redirect_view("edit_product_member")), + re_path(r"^asset/member/(?P\d+)/delete$", redirect_view("delete_product_member")), + re_path(r"^asset/(?P\d+)/add_api_scan_configuration$", redirect_view("add_api_scan_configuration")), + re_path(r"^asset/(?P\d+)/view_api_scan_configurations$", redirect_view("view_api_scan_configurations")), + 
re_path(r"^asset/(?P\d+)/edit_api_scan_configuration/(?P\d+)$", + redirect_view("edit_api_scan_configuration")), + re_path(r"^asset/(?P\d+)/delete_api_scan_configuration/(?P\d+)$", + redirect_view("delete_api_scan_configuration")), + re_path(r"^asset/(?P\d+)/add_group$", redirect_view("add_product_group")), + re_path(r"^asset/group/(?P\d+)/edit$", redirect_view("edit_product_group")), + re_path(r"^asset/group/(?P\d+)/delete$", redirect_view("delete_product_group")), + ] diff --git a/dojo/auditlog.py b/dojo/auditlog.py new file mode 100644 index 00000000000..1996fc1cca4 --- /dev/null +++ b/dojo/auditlog.py @@ -0,0 +1,509 @@ +""" +Audit logging configuration for DefectDojo. + +This module handles conditional registration of models with either django-auditlog +or django-pghistory based on the DD_AUDITLOG_TYPE setting. +""" +import contextlib +import logging +import sys + +import pghistory +from dateutil.relativedelta import relativedelta +from django.apps import apps +from django.conf import settings +from django.core.management import call_command +from django.db import models +from django.utils import timezone + +logger = logging.getLogger(__name__) + + +def _flush_models_in_batches(models_to_flush, timestamp_field: str, retention_period: int, batch_size: int, max_batches: int, *, dry_run: bool = False) -> tuple[int, int, bool]: + """ + Generic batched deletion by timestamp for a set of models. + + Returns (deleted_or_would_delete_total, batches_done_or_needed, reached_limit) + """ + # Use a timestamp and not a date. this allows for efficient databse index use. + cutoff_dt = timezone.now() - relativedelta(months=retention_period) + logger.info("Audit flush cutoff datetime: %s (retention_period=%s months)", cutoff_dt, retention_period) + + total_deleted = 0 + total_batches = 0 + reached_any_limit = False + + for Model in models_to_flush: + deleted_total = 0 + batches_done = 0 + filter_kwargs = {f"{timestamp_field}__lt": cutoff_dt} + last_pk = None + verb = "Would delete" if dry_run else "Deleted" + + while batches_done < max_batches: + batch_qs = Model.objects.filter(**filter_kwargs) + if last_pk is not None: + batch_qs = batch_qs.filter(pk__gt=last_pk) + batch_qs = batch_qs.order_by("pk") + + pks = list(batch_qs.values_list("pk", flat=True)[:batch_size]) + if not pks: + if batches_done == 0: + logger.info("No outdated %s entries found", Model._meta.object_name) + break + + if dry_run: + deleted_count = len(pks) + else: + qs = Model.objects.filter(pk__in=pks) + deleted_count = int(qs._raw_delete(qs.db)) + + deleted_total += deleted_count + batches_done += 1 + last_pk = pks[-1] + + logger.info( + "%s %s batch %s (size ~%s), total %s: %s", + verb, + Model._meta.object_name, + batches_done, + batch_size, + verb.lower(), + deleted_total, + ) + + total_deleted += deleted_total + total_batches += batches_done + if batches_done >= max_batches: + reached_any_limit = True + + return total_deleted, total_batches, reached_any_limit + + +def _flush_django_auditlog(retention_period: int, batch_size: int, max_batches: int, *, dry_run: bool = False) -> tuple[int, int, bool]: + # Import inside to avoid model import issues at startup + from auditlog.models import LogEntry # noqa: PLC0415 + + return _flush_models_in_batches([LogEntry], "timestamp", retention_period, batch_size, max_batches, dry_run=dry_run) + + +def _iter_pghistory_event_models(): + """Yield pghistory Event models registered under the dojo app.""" + for model in apps.get_app_config("dojo").get_models(): + if 
model._meta.object_name.endswith("Event"): + # Ensure the model has pgh_created_at field + if any(f.name == "pgh_created_at" for f in model._meta.fields): + yield model + + +def _flush_pghistory_events(retention_period: int, batch_size: int, max_batches: int, *, dry_run: bool = False) -> tuple[int, int, bool]: + models_to_flush = list(_iter_pghistory_event_models()) + return _flush_models_in_batches(models_to_flush, "pgh_created_at", retention_period, batch_size, max_batches, dry_run=dry_run) + + +def run_flush_auditlog(retention_period: int | None = None, + batch_size: int | None = None, + max_batches: int | None = None, + *, + dry_run: bool = False) -> tuple[int, int, bool]: + """ + Deletes audit entries older than the configured retention from both + django-auditlog and django-pghistory log entries. + + Returns a tuple of (deleted_total, batches_done, reached_limit). + """ + retention_period = retention_period if retention_period is not None else getattr(settings, "AUDITLOG_FLUSH_RETENTION_PERIOD", -1) + if retention_period < 0: + logger.info("Flushing audit logs is disabled") + return 0, 0, False + + batch_size = batch_size if batch_size is not None else getattr(settings, "AUDITLOG_FLUSH_BATCH_SIZE", 1000) + max_batches = max_batches if max_batches is not None else getattr(settings, "AUDITLOG_FLUSH_MAX_BATCHES", 100) + + phase = "DRY RUN" if dry_run else "Cleanup" + logger.info("Running %s for django-auditlog entries with %d Months retention across all backends", phase, retention_period) + d_deleted, d_batches, d_limit = _flush_django_auditlog(retention_period, batch_size, max_batches, dry_run=dry_run) + logger.info("Running %s for django-pghistory entries with %d Months retention across all backends", phase, retention_period) + p_deleted, p_batches, p_limit = _flush_pghistory_events(retention_period, batch_size, max_batches, dry_run=dry_run) + + total_deleted = d_deleted + p_deleted + total_batches = d_batches + p_batches + reached_limit = bool(d_limit or p_limit) + + verb = "would delete" if dry_run else "deleted" + logger.info("Audit flush summary: django-auditlog %s=%s batches=%s; pghistory %s=%s batches=%s; total_%s=%s total_batches=%s", + verb, d_deleted, d_batches, verb, p_deleted, p_batches, verb.replace(" ", "_"), total_deleted, total_batches) + + return total_deleted, total_batches, reached_limit + + +def enable_django_auditlog(): + """Enable django-auditlog by registering models.""" + # Import inside function to avoid AppRegistryNotReady errors + from auditlog.registry import auditlog # noqa: PLC0415 + + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + logger.info("Enabling django-auditlog: Registering models") + auditlog.register(Dojo_User, exclude_fields=["password"]) + auditlog.register(Endpoint) + auditlog.register(Engagement) + auditlog.register(Finding, m2m_fields={"reviewers"}) + auditlog.register(Finding_Group) + auditlog.register(Product_Type) + auditlog.register(Product) + auditlog.register(Test) + auditlog.register(Risk_Acceptance) + auditlog.register(Finding_Template) + auditlog.register(Cred_User, exclude_fields=["password"]) + auditlog.register(Notification_Webhooks, exclude_fields=["header_name", "header_value"]) + logger.info("Successfully enabled django-auditlog") + + +def disable_django_auditlog(): + """Disable django-auditlog by unregistering models.""" + # Import inside function 
to avoid AppRegistryNotReady errors + from auditlog.registry import auditlog # noqa: PLC0415 + + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Django-auditlog disabled - unregistering models") + + # Unregister all models from auditlog + models_to_unregister = [ + Dojo_User, Endpoint, Engagement, Finding, Finding_Group, + Product_Type, Product, Test, Risk_Acceptance, Finding_Template, + Cred_User, Notification_Webhooks, + ] + + for model in models_to_unregister: + with contextlib.suppress(Exception): + # Model might not be registered, ignore the error + auditlog.unregister(model) + + +def register_django_pghistory_models(): + """ + Register models with django-pghistory (always called to avoid migrations). + + Note: This function is always called regardless of audit logging settings because: + 1. Django migrations are generated based on model registration at import time + 2. If pghistory models are not registered, Django will try to create migrations + to remove the pghistory tables when the models are not found + 3. This would cause migration conflicts and database inconsistencies + 4. By always registering the models, we ensure the database schema remains + stable while controlling audit behavior through trigger enable/disable + So we always register the models and make migrations for them. + Then we control the enabling/disabling by enabling/disabling the underlying database + triggers. + """ + # Import models inside function to avoid AppRegistryNotReady errors + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Registering models with django-pghistory") + + # Register models with pghistory for tracking changes + # Using pghistory.track() as a decorator function (correct syntax) + # The function returns a decorator that should be applied to the model class + + # Track Dojo_User with excluded fields + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["password"], + # add some indexes manually so we don't have to define a customer phistory Event model with overridden fields. 
+ meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Dojo_User) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Endpoint) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Engagement) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding_Group) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Product_Type) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Product) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Test) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Risk_Acceptance) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding_Template) + + pghistory.track( + 
pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["password"], + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Cred_User) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["header_name", "header_value"], + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Notification_Webhooks) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Successfully registered models with django-pghistory") + + +def enable_django_pghistory(): + """Enable django-pghistory by enabling triggers.""" + logger.info("Enabling django-pghistory: Enabling triggers") + + # Enable pghistory triggers + try: + call_command("pgtrigger", "enable") + logger.info("Successfully enabled pghistory triggers") + except Exception as e: + logger.warning(f"Failed to enable pgtrigger triggers: {e}") + # Don't raise the exception as this shouldn't prevent Django from starting + + +def disable_django_pghistory(): + """Disable django-pghistory by disabling triggers.""" + logger.info("Disabling django-pghistory: Disabling triggers") + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.warning(f"Failed to disable pgtrigger triggers: {e}") + # Don't raise the exception as this shouldn't prevent Django from starting + + +def configure_pghistory_triggers(): + """ + Configure pghistory triggers based on audit settings. + + This function should be called after Django startup and migrations to properly + enable/disable pghistory triggers without database access warnings. + """ + if not settings.ENABLE_AUDITLOG: + logger.info("Audit logging disabled - disabling pghistory triggers") + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to disable pghistory triggers: {e}") + raise + elif settings.AUDITLOG_TYPE == "django-pghistory": + try: + call_command("pgtrigger", "enable") + logger.info("Successfully enabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to enable pghistory triggers: {e}") + raise + else: + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to disable pghistory triggers: {e}") + raise + + +def configure_audit_system(): + """ + Configure the audit system based on settings. + + Note: This function only handles auditlog registration. pghistory model registration + is handled in apps.py, and trigger management should be done via the + configure_pghistory_triggers() function to avoid database access during initialization. 
+ """ + # Only log during actual application startup, not during shell commands + log_enabled = "shell" not in sys.argv + + if not settings.ENABLE_AUDITLOG: + if log_enabled: + logger.info("Audit logging disabled") + disable_django_auditlog() + return + + if settings.AUDITLOG_TYPE == "django-auditlog": + if log_enabled: + logger.info("Configuring audit system: django-auditlog enabled") + enable_django_auditlog() + else: + if log_enabled: + logger.info("django-auditlog disabled (pghistory or other audit type selected)") + disable_django_auditlog() diff --git a/dojo/celery.py b/dojo/celery.py index d747ee7a4cd..5f2935b4460 100644 --- a/dojo/celery.py +++ b/dojo/celery.py @@ -1,5 +1,6 @@ import logging import os +from logging.config import dictConfig from celery import Celery from celery.signals import setup_logging @@ -26,7 +27,6 @@ def debug_task(self): @setup_logging.connect def config_loggers(*args, **kwags): - from logging.config import dictConfig dictConfig(settings.LOGGING) diff --git a/dojo/context_processors.py b/dojo/context_processors.py index 39385ef3440..409851e2458 100644 --- a/dojo/context_processors.py +++ b/dojo/context_processors.py @@ -1,8 +1,12 @@ import contextlib +import time # import the settings file from django.conf import settings +from dojo.labels import get_labels +from dojo.models import Alerts, System_Settings, UserAnnouncement + def globalize_vars(request): # return the value you want as a dictionnary. you may add multiple values in there. @@ -35,14 +39,11 @@ def globalize_vars(request): def bind_system_settings(request): - from dojo.models import System_Settings - return {"system_settings": System_Settings.objects.get()} def bind_alert_count(request): if not settings.DISABLE_ALERT_COUNTER: - from dojo.models import Alerts if hasattr(request, "user") and request.user.is_authenticated: return {"alert_count": Alerts.objects.filter(user_id=request.user).count()} @@ -50,8 +51,6 @@ def bind_alert_count(request): def bind_announcement(request): - from dojo.models import UserAnnouncement - with contextlib.suppress(Exception): # TODO: this should be replaced with more meaningful exception if request.user.is_authenticated: user_announcement = UserAnnouncement.objects.select_related( @@ -62,8 +61,6 @@ def bind_announcement(request): def session_expiry_notification(request): - import time - try: if request.user.is_authenticated: last_activity = request.session.get("_last_activity", time.time()) @@ -78,3 +75,9 @@ def session_expiry_notification(request): return { "session_notify_time": notify_time, } + + +def labels(request): + return { + "labels": get_labels(), + } diff --git a/dojo/db_migrations/0066_django_tagulous.py b/dojo/db_migrations/0066_django_tagulous.py index 7727e8179b1..5c1f860cb77 100644 --- a/dojo/db_migrations/0066_django_tagulous.py +++ b/dojo/db_migrations/0066_django_tagulous.py @@ -2,7 +2,6 @@ from django.db import migrations, models # import django.db.models.deletion -from tagging.registry import register as tag_register from django.forms.models import model_to_dict import tagulous.models.fields import tagulous.models.models @@ -12,69 +11,6 @@ class Migration(migrations.Migration): - - def copy_existing_tags_to_tags_from_django_tagging_field(apps, schema_editor): - # We can't import the models directly as it may be a newer - # version than this migration expects. We use the historical version. - logger.info('Migrating tags from django-tagging to django-tagulous step1. 
Enable DEBUG logging to find out more.') - import tagulous.utils - # for model_name in ['Product']: - for model_name in ['Product', 'test', 'finding', 'engagement', 'endpoint', 'finding_template', 'app_Analysis', 'objects']: - model_class = apps.get_model('dojo', model_name) - # the get_model returns a fake class proxy, which is not registered with django-tagging - tag_register(model_class) - - for obj in model_class.objects.all(): - # logger.debug('%s:%s:%s', model_class, obj.id, obj) - if obj.tags: - tags_as_string = tagulous.utils.render_tags(obj.tags.all()) - logger.debug('%s:%s:%s: found tags: %s', model_class, obj.id, obj, tags_as_string) - obj.tags_from_django_tagging = tags_as_string - # obj.description = tags_as_string - # finding.save() doesn't look at push_all_jira_issue, so we should be good - # if model_name == 'finding2': - # obj.save(dedupe_option=False, rules_option=False, issue_updater_option=False, push_to_jira=False) - # else: - try: - if hasattr(obj, 'prod_type_id') and obj.prod_type_id == 0: - logger.warning('product found without product type (prod_type==0), changing to: "_tag migration lost and found" product type') - Product_Type_Model = apps.get_model('dojo', 'Product_Type') - prod_type_lost_and_found, created = Product_Type_Model.objects.get_or_create(name='_tag migration lost and found') - obj.prod_type = prod_type_lost_and_found - obj.save() - logger.warning('product type successfully changed to %i', prod_type_lost_and_found.id) - - obj.save() - except Exception as e: - logger.error('Error saving old existing django-tagging tags to new string field') - logger.error('Details of object:') - logger.error(vars(obj)) - logger.error('Model to dict:') - logger.error(model_to_dict(obj)) - - def copy_tags_from_django_tagging_field_to_new_tagulous_tags_field(apps, schema_editor): - # We can't import the models directly as it may be a newer - # version than this migration expects. We use the historical version. - logger.info('Migrating tags from django-tagging to django-tagulous step2. 
Enable DEBUG logging to find out more.') - # for model_name in ['Product']: - for model_name in ['Product', 'test', 'finding', 'engagement', 'endpoint', 'finding_template', 'app_Analysis', 'objects_product']: - model_class = apps.get_model('dojo', model_name) - - for obj in model_class.objects.all(): - # logger.debug('%s:%s:%s', model_class, obj.id, obj) - if obj.tags_from_django_tagging: - logger.debug('%s:%s:%s: found tags: %s', model_class, obj.id, obj, obj.tags_from_django_tagging) - obj.tags = obj.tags_from_django_tagging - - try: - obj.save() - except Exception as e: - logger.error('Error saving tags to new tagulous m2m field') - logger.error('Details of object:') - logger.error(vars(obj)) - logger.error('Model to dict:') - logger.error(model_to_dict(obj)) - dependencies = [ ('dojo', '0065_delete_empty_jira_project_configs'), ] @@ -121,8 +57,6 @@ def copy_tags_from_django_tagging_field_to_new_tagulous_tags_field(apps, schema_ field=models.TextField(blank=True, editable=False, help_text='Temporary archive with tags from the previous tagging library we used'), ), - migrations.RunPython(copy_existing_tags_to_tags_from_django_tagging_field, migrations.RunPython.noop), - migrations.RenameModel('Objects', 'Objects_Product'), migrations.CreateModel( @@ -309,6 +243,4 @@ def copy_tags_from_django_tagging_field_to_new_tagulous_tags_field(apps, schema_ name='match_field', field=models.CharField(choices=[('id', 'id'), ('title', 'title'), ('date', 'date'), ('cwe', 'cwe'), ('cve', 'cve'), ('cvssv3', 'cvssv3'), ('url', 'url'), ('severity', 'severity'), ('description', 'description'), ('mitigation', 'mitigation'), ('impact', 'impact'), ('steps_to_reproduce', 'steps_to_reproduce'), ('severity_justification', 'severity_justification'), ('references', 'references'), ('test', 'test'), ('is_template', 'is_template'), ('active', 'active'), ('verified', 'verified'), ('false_p', 'false_p'), ('duplicate', 'duplicate'), ('duplicate_finding', 'duplicate_finding'), ('out_of_scope', 'out_of_scope'), ('under_review', 'under_review'), ('review_requested_by', 'review_requested_by'), ('under_defect_review', 'under_defect_review'), ('defect_review_requested_by', 'defect_review_requested_by'), ('is_Mitigated', 'is_Mitigated'), ('thread_id', 'thread_id'), ('mitigated', 'mitigated'), ('mitigated_by', 'mitigated_by'), ('reporter', 'reporter'), ('numerical_severity', 'numerical_severity'), ('last_reviewed', 'last_reviewed'), ('last_reviewed_by', 'last_reviewed_by'), ('line_number', 'line_number'), ('sourcefilepath', 'sourcefilepath'), ('sourcefile', 'sourcefile'), ('param', 'param'), ('payload', 'payload'), ('hash_code', 'hash_code'), ('line', 'line'), ('file_path', 'file_path'), ('component_name', 'component_name'), ('component_version', 'component_version'), ('static_finding', 'static_finding'), ('dynamic_finding', 'dynamic_finding'), ('created', 'created'), ('scanner_confidence', 'scanner_confidence'), ('sonarqube_issue', 'sonarqube_issue'), ('unique_id_from_tool', 'unique_id_from_tool'), ('vuln_id_from_tool', 'vuln_id_from_tool'), ('sast_source_object', 'sast_source_object'), ('sast_sink_object', 'sast_sink_object'), ('sast_source_line', 'sast_source_line'), ('sast_source_file_path', 'sast_source_file_path'), ('nb_occurences', 'nb_occurences')], max_length=200), ), - - migrations.RunPython(copy_tags_from_django_tagging_field_to_new_tagulous_tags_field, migrations.RunPython.noop), ] diff --git a/dojo/db_migrations/0243_pghistory_models.py b/dojo/db_migrations/0243_pghistory_models.py new file mode 100644 index 
00000000000..57d3c723eab --- /dev/null +++ b/dojo/db_migrations/0243_pghistory_models.py @@ -0,0 +1,938 @@ +# Generated by Django 5.1.11 on 2025-09-12 16:57 + +import django.contrib.auth.validators +import django.core.validators +import django.db.models.deletion +import django.db.models.manager +import django.utils.timezone +import django_extensions.db.fields +import dojo.models +import dojo.validators +import pgtrigger.compiler +import pgtrigger.migrations +from decimal import Decimal +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0242_file_upload_cleanup'), + ('pghistory', '0007_auto_20250421_0444'), + ] + + operations = [ + migrations.CreateModel( + name='Cred_UserEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=200)), + ('username', models.CharField(max_length=200)), + ('role', models.CharField(max_length=200)), + ('authentication', models.CharField(choices=[('Form', 'Form Authentication'), ('SSO', 'SSO Redirect')], default='Form', max_length=15)), + ('http_authentication', models.CharField(blank=True, choices=[('Basic', 'Basic'), ('NTLM', 'NTLM')], max_length=15, null=True)), + ('description', models.CharField(blank=True, max_length=2000, null=True)), + ('url', models.URLField(max_length=2000)), + ('login_regex', models.CharField(blank=True, max_length=200, null=True)), + ('logout_regex', models.CharField(blank=True, max_length=200, null=True)), + ('is_valid', models.BooleanField(default=True, verbose_name='Login is valid')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Dojo_UserEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. 
Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='EndpointEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('protocol', models.CharField(blank=True, help_text="The communication protocol/scheme such as 'http', 'ftp', 'dns', etc.", max_length=20, null=True)), + ('userinfo', models.CharField(blank=True, help_text="User info as 'alice', 'bob', etc.", max_length=500, null=True)), + ('host', models.CharField(blank=True, help_text="The host name or IP address. It must not include the port number. For example '127.0.0.1', 'localhost', 'yourdomain.com'.", max_length=500, null=True)), + ('port', models.IntegerField(blank=True, help_text='The network port associated with the endpoint.', null=True)), + ('path', models.CharField(blank=True, help_text="The location of the resource, it must not start with a '/'. For example endpoint/420/edit", max_length=500, null=True)), + ('query', models.CharField(blank=True, help_text="The query string, the question mark should be omitted.For example 'group=4&team=8'", max_length=1000, null=True)), + ('fragment', models.CharField(blank=True, help_text="The fragment identifier which follows the hash mark. The hash mark should be omitted. For example 'section-13', 'paragraph-2'.", max_length=500, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='EngagementEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(blank=True, max_length=300, null=True)), + ('description', models.CharField(blank=True, max_length=2000, null=True)), + ('version', models.CharField(blank=True, help_text='Version of the product the engagement tested.', max_length=100, null=True)), + ('first_contacted', models.DateField(blank=True, null=True)), + ('target_start', models.DateField()), + ('target_end', models.DateField()), + ('reason', models.CharField(blank=True, max_length=2000, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('active', models.BooleanField(default=True, editable=False)), + ('tracker', models.URLField(blank=True, help_text='Link to epic or ticket system with changes to version.', null=True)), + ('test_strategy', models.URLField(blank=True, null=True)), + ('threat_model', models.BooleanField(default=True)), + ('api_test', models.BooleanField(default=True)), + ('pen_test', models.BooleanField(default=True)), + ('check_list', models.BooleanField(default=True)), + ('status', models.CharField(choices=[('Not Started', 'Not Started'), ('Blocked', 'Blocked'), ('Cancelled', 'Cancelled'), ('Completed', 'Completed'), ('In Progress', 'In Progress'), ('On Hold', 'On Hold'), ('Waiting for Resource', 'Waiting for Resource')], default='', max_length=2000, null=True)), + ('progress', models.CharField(default='threat_model', editable=False, max_length=100)), + ('tmodel_path', models.CharField(blank=True, default='none', editable=False, 
max_length=1000, null=True)), + ('done_testing', models.BooleanField(default=False, editable=False)), + ('engagement_type', models.CharField(choices=[('Interactive', 'Interactive'), ('CI/CD', 'CI/CD')], default='Interactive', max_length=30, null=True)), + ('build_id', models.CharField(blank=True, help_text='Build ID of the product the engagement tested.', max_length=150, null=True, verbose_name='Build ID')), + ('commit_hash', models.CharField(blank=True, help_text='Commit hash from repo', max_length=150, null=True, verbose_name='Commit Hash')), + ('branch_tag', models.CharField(blank=True, help_text='Tag or branch of the product the engagement tested.', max_length=150, null=True, verbose_name='Branch/Tag')), + ('source_code_management_uri', models.URLField(blank=True, help_text='Resource link to source code', max_length=600, null=True, verbose_name='Repo')), + ('deduplication_on_engagement', models.BooleanField(default=False, help_text='If enabled deduplication will only mark a finding in this engagement as duplicate of another finding if both findings are in this engagement. If disabled, deduplication is on the product level.', verbose_name='Deduplication within this engagement only')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Finding_GroupEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('name', models.CharField(max_length=255)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Finding_TemplateEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('title', models.TextField(max_length=1000)), + ('cwe', models.IntegerField(blank=True, default=None, null=True)), + ('cve', models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id')), + ('cvssv3', models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSSv3) score associated with this finding.', max_length=117, null=True, validators=[dojo.validators.cvss3_validator], verbose_name='CVSS v3 vector')), + ('severity', models.CharField(blank=True, max_length=200, null=True)), + ('description', models.TextField(blank=True, null=True)), + ('mitigation', models.TextField(blank=True, null=True)), + ('impact', models.TextField(blank=True, null=True)), + ('references', models.TextField(blank=True, db_column='refs', null=True)), + ('last_used', models.DateTimeField(editable=False, null=True)), + ('numerical_severity', models.CharField(blank=True, editable=False, max_length=4, null=True)), + ('template_match', models.BooleanField(default=False, help_text='Enables this template for matching remediation advice. 
Match will be applied to all active, verified findings by CWE.', verbose_name='Template Match Enabled')), + ('template_match_title', models.BooleanField(default=False, help_text='Matches by title text (contains search) and CWE.', verbose_name='Match Template by Title and CWE')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FindingEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('title', models.CharField(help_text='A short description of the flaw.', max_length=511, verbose_name='Title')), + ('date', models.DateField(default=dojo.models.get_current_date, help_text='The date the flaw was discovered.', verbose_name='Date')), + ('sla_start_date', models.DateField(blank=True, help_text="(readonly)The date used as start date for SLA calculation. Set by expiring risk acceptances. Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Start Date')), + ('sla_expiration_date', models.DateField(blank=True, help_text="(readonly)The date SLA expires for this finding. Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Expiration Date')), + ('cwe', models.IntegerField(blank=True, default=0, help_text='The CWE number associated with this flaw.', null=True, verbose_name='CWE')), + ('cve', models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id')), + ('epss_score', models.FloatField(blank=True, default=None, help_text='EPSS score for the CVE. Describes how likely it is the vulnerability will be exploited in the next 30 days.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(1.0)], verbose_name='EPSS Score')), + ('epss_percentile', models.FloatField(blank=True, default=None, help_text='EPSS percentile for the CVE. Describes how many CVEs are scored at or below this one.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(1.0)], verbose_name='EPSS percentile')), + ('known_exploited', models.BooleanField(default=False, help_text='Whether this vulnerability is known to have been exploited in the wild.', verbose_name='Known Exploited')), + ('ransomware_used', models.BooleanField(default=False, help_text='Whether this vulnerability is known to have been leveraged as part of a ransomware campaign.', verbose_name='Used in Ransomware')), + ('kev_date', models.DateField(blank=True, help_text='The date the vulnerability was added to the KEV catalog.', null=True, validators=[django.core.validators.MaxValueValidator(dojo.models.tomorrow)], verbose_name='KEV Date Added')), + ('cvssv3', models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSS3) score associated with this finding.', max_length=117, null=True, validators=[dojo.validators.cvss3_validator], verbose_name='CVSS3 Vector')), + ('cvssv3_score', models.FloatField(blank=True, help_text='Numerical CVSSv3 score for the vulnerability. If the vector is given, the score is updated while saving the finding. 
The value must be between 0-10.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(10.0)], verbose_name='CVSS3 Score')), + ('cvssv4', models.TextField(help_text='Common Vulnerability Scoring System version 4 (CVSS4) score associated with this finding.', max_length=255, null=True, validators=[dojo.validators.cvss4_validator], verbose_name='CVSS4 vector')), + ('cvssv4_score', models.FloatField(blank=True, help_text='Numerical CVSSv4 score for the vulnerability. If the vector is given, the score is updated while saving the finding. The value must be between 0-10.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(10.0)], verbose_name='CVSSv4 Score')), + ('url', models.TextField(blank=True, editable=False, help_text='External reference that provides more information about this flaw.', null=True, verbose_name='URL')), + ('severity', models.CharField(help_text='The severity level of this flaw (Critical, High, Medium, Low, Info).', max_length=200, verbose_name='Severity')), + ('description', models.TextField(help_text='Longer more descriptive information about the flaw.', verbose_name='Description')), + ('mitigation', models.TextField(blank=True, help_text='Text describing how to best fix the flaw.', null=True, verbose_name='Mitigation')), + ('fix_available', models.BooleanField(default=None, help_text='Denotes if there is a fix available for this flaw.', null=True, verbose_name='Fix Available')), + ('impact', models.TextField(blank=True, help_text='Text describing the impact this flaw has on systems, products, enterprise, etc.', null=True, verbose_name='Impact')), + ('steps_to_reproduce', models.TextField(blank=True, help_text='Text describing the steps that must be followed in order to reproduce the flaw / bug.', null=True, verbose_name='Steps to Reproduce')), + ('severity_justification', models.TextField(blank=True, help_text='Text describing why a certain severity was associated with this flaw.', null=True, verbose_name='Severity Justification')), + ('references', models.TextField(blank=True, db_column='refs', help_text='The external documentation available for this flaw.', null=True, verbose_name='References')), + ('active', models.BooleanField(default=True, help_text='Denotes if this flaw is active or not.', verbose_name='Active')), + ('verified', models.BooleanField(default=False, help_text='Denotes if this flaw has been manually verified by the tester.', verbose_name='Verified')), + ('false_p', models.BooleanField(default=False, help_text='Denotes if this flaw has been deemed a false positive by the tester.', verbose_name='False Positive')), + ('duplicate', models.BooleanField(default=False, help_text='Denotes if this flaw is a duplicate of other flaws reported.', verbose_name='Duplicate')), + ('out_of_scope', models.BooleanField(default=False, help_text='Denotes if this flaw falls outside the scope of the test and/or engagement.', verbose_name='Out Of Scope')), + ('risk_accepted', models.BooleanField(default=False, help_text='Denotes if this finding has been marked as an accepted risk.', verbose_name='Risk Accepted')), + ('under_review', models.BooleanField(default=False, help_text='Denotes is this flaw is currently being reviewed.', verbose_name='Under Review')), + ('last_status_update', models.DateTimeField(auto_now_add=True, help_text='Timestamp of latest status update (change in status related fields).', null=True, verbose_name='Last Status Update')), + 
('under_defect_review', models.BooleanField(default=False, help_text='Denotes if this finding is under defect review.', verbose_name='Under Defect Review')), + ('is_mitigated', models.BooleanField(default=False, help_text='Denotes if this flaw has been fixed.', verbose_name='Is Mitigated')), + ('thread_id', models.IntegerField(default=0, editable=False, verbose_name='Thread ID')), + ('mitigated', models.DateTimeField(blank=True, editable=False, help_text='Denotes if this flaw has been fixed by storing the date it was fixed.', null=True, verbose_name='Mitigated')), + ('numerical_severity', models.CharField(help_text='The numerical representation of the severity (S0, S1, S2, S3, S4).', max_length=4, verbose_name='Numerical Severity')), + ('last_reviewed', models.DateTimeField(editable=False, help_text="Provides the date the flaw was last 'touched' by a tester.", null=True, verbose_name='Last Reviewed')), + ('param', models.TextField(blank=True, editable=False, help_text='Parameter used to trigger the issue (DAST).', null=True, verbose_name='Parameter')), + ('payload', models.TextField(blank=True, editable=False, help_text='Payload used to attack the service / application and trigger the bug / problem.', null=True, verbose_name='Payload')), + ('hash_code', models.CharField(blank=True, editable=False, help_text='A hash over a configurable set of fields that is used for findings deduplication.', max_length=64, null=True, verbose_name='Hash Code')), + ('line', models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='Line number')), + ('file_path', models.CharField(blank=True, help_text='Identified file(s) containing the flaw.', max_length=4000, null=True, verbose_name='File path')), + ('component_name', models.CharField(blank=True, help_text='Name of the affected component (library name, part of a system, ...).', max_length=500, null=True, verbose_name='Component name')), + ('component_version', models.CharField(blank=True, help_text='Version of the affected component.', max_length=100, null=True, verbose_name='Component version')), + ('static_finding', models.BooleanField(default=False, help_text='Flaw has been detected from a Static Application Security Testing tool (SAST).', verbose_name='Static finding (SAST)')), + ('dynamic_finding', models.BooleanField(default=True, help_text='Flaw has been detected from a Dynamic Application Security Testing tool (DAST).', verbose_name='Dynamic finding (DAST)')), + ('created', models.DateTimeField(auto_now_add=True, help_text='The date the finding was created inside DefectDojo.', null=True, verbose_name='Created')), + ('scanner_confidence', models.IntegerField(blank=True, default=None, editable=False, help_text='Confidence level of vulnerability which is supplied by the scanner.', null=True, verbose_name='Scanner confidence')), + ('unique_id_from_tool', models.CharField(blank=True, help_text='Vulnerability technical id from the source tool. Allows to track unique vulnerabilities over time across subsequent scans.', max_length=500, null=True, verbose_name='Unique ID from tool')), + ('vuln_id_from_tool', models.CharField(blank=True, help_text='Non-unique technical id from the source tool associated with the vulnerability type.', max_length=500, null=True, verbose_name='Vulnerability ID from tool')), + ('sast_source_object', models.CharField(blank=True, help_text='Source object (variable, function...) 
of the attack vector.', max_length=500, null=True, verbose_name='SAST Source Object')), + ('sast_sink_object', models.CharField(blank=True, help_text='Sink object (variable, function...) of the attack vector.', max_length=500, null=True, verbose_name='SAST Sink Object')), + ('sast_source_line', models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='SAST Source Line number')), + ('sast_source_file_path', models.CharField(blank=True, help_text='Source file path of the attack vector.', max_length=4000, null=True, verbose_name='SAST Source File Path')), + ('nb_occurences', models.IntegerField(blank=True, help_text='Number of occurences in the source tool when several vulnerabilites were found and aggregated by the scanner.', null=True, verbose_name='Number of occurences')), + ('publish_date', models.DateField(blank=True, help_text='Date when this vulnerability was made publicly available.', null=True, verbose_name='Publish date')), + ('service', models.CharField(blank=True, help_text='A service is a self-contained piece of functionality within a Product. This is an optional field which is used in deduplication of findings when set.', max_length=200, null=True, verbose_name='Service')), + ('planned_remediation_date', models.DateField(help_text='The date the flaw is expected to be remediated.', null=True, verbose_name='Planned Remediation Date')), + ('planned_remediation_version', models.CharField(blank=True, help_text='The target version when the vulnerability should be fixed / remediated', max_length=99, null=True, verbose_name='Planned remediation version')), + ('effort_for_fixing', models.CharField(blank=True, help_text='Effort for fixing / remediating the vulnerability (Low, Medium, High)', max_length=99, null=True, verbose_name='Effort for fixing')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Notification_WebhooksEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(default='', help_text='Name of the incoming webhook', max_length=100)), + ('url', models.URLField(default='', help_text='The full URL of the incoming webhook')), + ('status', models.CharField(choices=[('active', 'Active'), ('active_tmp', 'Active but 5xx (or similar) error detected'), ('inactive_tmp', 'Temporary inactive because of 5xx (or similar) error'), ('inactive_permanent', 'Permanently inactive')], default='active', editable=False, help_text='Status of the incoming webhook', max_length=20)), + ('first_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened first time', null=True)), + ('last_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened last time', null=True)), + ('note', models.CharField(blank=True, default='', editable=False, help_text='Description of the latest error', max_length=1000, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Product_TypeEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=255)), + 
('description', models.CharField(blank=True, max_length=4000, null=True)), + ('critical_product', models.BooleanField(default=False)), + ('key_product', models.BooleanField(default=False)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ProductEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=255)), + ('description', models.CharField(max_length=4000)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('tid', models.IntegerField(default=0, editable=False)), + ('prod_numeric_grade', models.IntegerField(blank=True, null=True)), + ('business_criticality', models.CharField(blank=True, choices=[('very high', 'Very High'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('very low', 'Very Low'), ('none', 'None')], max_length=9, null=True)), + ('platform', models.CharField(blank=True, choices=[('web service', 'API'), ('desktop', 'Desktop'), ('iot', 'Internet of Things'), ('mobile', 'Mobile'), ('web', 'Web')], max_length=11, null=True)), + ('lifecycle', models.CharField(blank=True, choices=[('construction', 'Construction'), ('production', 'Production'), ('retirement', 'Retirement')], max_length=12, null=True)), + ('origin', models.CharField(blank=True, choices=[('third party library', 'Third Party Library'), ('purchased', 'Purchased'), ('contractor', 'Contractor Developed'), ('internal', 'Internally Developed'), ('open source', 'Open Source'), ('outsourced', 'Outsourced')], max_length=19, null=True)), + ('user_records', models.PositiveIntegerField(blank=True, help_text='Estimate the number of user records within the application.', null=True)), + ('revenue', models.DecimalField(blank=True, decimal_places=2, help_text="Estimate the application's revenue.", max_digits=15, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0.00'))])), + ('external_audience', models.BooleanField(default=False, help_text='Specify if the application is used by people outside the organization.')), + ('internet_accessible', models.BooleanField(default=False, help_text='Specify if the application is accessible from the public internet.')), + ('enable_product_tag_inheritance', models.BooleanField(default=False, help_text='Enables product tag inheritance. 
Any tags added on a product will automatically be added to all Engagements, Tests, and Findings', verbose_name='Enable Product Tag Inheritance')), + ('enable_simple_risk_acceptance', models.BooleanField(default=False, help_text='Allows simple risk acceptance by checking/unchecking a checkbox.')), + ('enable_full_risk_acceptance', models.BooleanField(default=True, help_text='Allows full risk acceptance using a risk acceptance form, expiration date, uploaded proof, etc.')), + ('disable_sla_breach_notifications', models.BooleanField(default=False, help_text='Disable SLA breach notifications if configured in the global settings', verbose_name='Disable SLA breach notifications')), + ('async_updating', models.BooleanField(default=False, help_text='Findings under this Product or SLA configuration are asynchronously being updated')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Risk_AcceptanceEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(help_text='Descriptive name which in the future may also be used to group risk acceptances together across engagements and products', max_length=300)), + ('recommendation', models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='F', help_text='Recommendation from the security team.', max_length=2, verbose_name='Security Recommendation')), + ('recommendation_details', models.TextField(blank=True, help_text='Explanation of security recommendation', null=True, verbose_name='Security Recommendation Details')), + ('decision', models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='A', help_text='Risk treatment decision by risk owner', max_length=2)), + ('decision_details', models.TextField(blank=True, default=None, help_text='If a compensating control exists to mitigate the finding or reduce risk, then list the compensating control(s).', null=True)), + ('accepted_by', models.CharField(blank=True, default=None, help_text='The person that accepts the risk, can be outside of DefectDojo.', max_length=200, null=True, verbose_name='Accepted By')), + ('path', models.FileField(blank=True, null=True, upload_to='risk/%Y/%m/%d', verbose_name='Proof')), + ('expiration_date', models.DateTimeField(blank=True, default=None, help_text='When the risk acceptance expires, the findings will be reactivated (unless disabled below).', null=True)), + ('expiration_date_warned', models.DateTimeField(blank=True, default=None, help_text='(readonly) Date at which notice about the risk acceptance expiration was sent.', null=True)), + ('expiration_date_handled', models.DateTimeField(blank=True, default=None, help_text='(readonly) When the risk acceptance expiration was handled (manually or by the daily job).', null=True)), + ('reactivate_expired', 
models.BooleanField(default=True, help_text='Reactivate findings when risk acceptance expires?', verbose_name='Reactivate findings on expiration')), + ('restart_sla_expired', models.BooleanField(default=False, help_text='When enabled, the SLA for findings is restarted when the risk acceptance expires.', verbose_name='Restart SLA on expiration')), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('scan_type', models.TextField(null=True)), + ('title', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.TextField(blank=True, null=True)), + ('target_start', models.DateTimeField()), + ('target_end', models.DateTimeField()), + ('percent_complete', models.IntegerField(blank=True, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('version', models.CharField(blank=True, max_length=100, null=True)), + ('build_id', models.CharField(blank=True, help_text='Build ID that was tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Build ID')), + ('commit_hash', models.CharField(blank=True, help_text='Commit hash tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Commit Hash')), + ('branch_tag', models.CharField(blank=True, help_text='Tag or branch that was tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Branch/Tag')), + ], + options={ + 'abstract': False, + }, + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (NEW."authentication", NEW."description", NEW."environment_id", NEW."http_authentication", NEW."id", NEW."is_valid", NEW."login_regex", NEW."logout_regex", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."role", NEW."url", NEW."username"); RETURN NULL;', hash='43f52f9845d27e920508439dbfbd3ec6e9597d25', operation='INSERT', pgid='pgtrigger_insert_insert_318ed', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."authentication" IS DISTINCT FROM (NEW."authentication") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."environment_id" IS DISTINCT FROM (NEW."environment_id") OR OLD."http_authentication" IS DISTINCT FROM (NEW."http_authentication") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."is_valid" IS DISTINCT FROM (NEW."is_valid") OR OLD."login_regex" IS DISTINCT FROM (NEW."login_regex") OR OLD."logout_regex" IS DISTINCT FROM (NEW."logout_regex") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."role" IS DISTINCT FROM (NEW."role") OR OLD."url" IS DISTINCT FROM (NEW."url") OR OLD."username" IS DISTINCT 
FROM (NEW."username"))', func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (NEW."authentication", NEW."description", NEW."environment_id", NEW."http_authentication", NEW."id", NEW."is_valid", NEW."login_regex", NEW."logout_regex", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."role", NEW."url", NEW."username"); RETURN NULL;', hash='630b91cc38735df1a354209f05519c55538abd7a', operation='UPDATE', pgid='pgtrigger_update_update_3889b', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (OLD."authentication", OLD."description", OLD."environment_id", OLD."http_authentication", OLD."id", OLD."is_valid", OLD."login_regex", OLD."logout_regex", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."role", OLD."url", OLD."username"); RETURN NULL;', hash='4947f3c486b6f7922a82690473cc5166506d79fc', operation='DELETE', pgid='pgtrigger_delete_delete_d8a8c', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (NEW."date_joined", NEW."email", NEW."first_name", NEW."id", NEW."is_active", NEW."is_staff", NEW."is_superuser", NEW."last_login", NEW."last_name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."username"); RETURN NULL;', hash='f997de9e21fa9d350c04039bc6aa7ac450d72c3d', operation='INSERT', pgid='pgtrigger_insert_insert_f69f3', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."date_joined" IS DISTINCT FROM (NEW."date_joined") OR OLD."email" IS DISTINCT FROM (NEW."email") OR OLD."first_name" IS DISTINCT FROM (NEW."first_name") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."is_active" IS DISTINCT FROM (NEW."is_active") OR OLD."is_staff" IS DISTINCT FROM (NEW."is_staff") OR OLD."is_superuser" IS DISTINCT FROM (NEW."is_superuser") OR OLD."last_login" IS DISTINCT FROM (NEW."last_login") OR OLD."last_name" IS DISTINCT FROM (NEW."last_name") OR OLD."username" IS DISTINCT FROM (NEW."username"))', func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (NEW."date_joined", NEW."email", NEW."first_name", NEW."id", NEW."is_active", NEW."is_staff", NEW."is_superuser", NEW."last_login", NEW."last_name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."username"); RETURN NULL;', 
hash='4791d2880e30b42d862087ba0ad17e2f5f350fc3', operation='UPDATE', pgid='pgtrigger_update_update_1c25d', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (OLD."date_joined", OLD."email", OLD."first_name", OLD."id", OLD."is_active", OLD."is_staff", OLD."is_superuser", OLD."last_login", OLD."last_name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."username"); RETURN NULL;', hash='0fc44952f13a80dfc9661150ebe99cdb865bf305', operation='DELETE', pgid='pgtrigger_delete_delete_37974', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (NEW."fragment", NEW."host", NEW."id", NEW."path", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."port", NEW."product_id", NEW."protocol", NEW."query", NEW."userinfo"); RETURN NULL;', hash='68e589c514e437831f4da3e802ca18e4cced175d', operation='INSERT', pgid='pgtrigger_insert_insert_c7973', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (NEW."fragment", NEW."host", NEW."id", NEW."path", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."port", NEW."product_id", NEW."protocol", NEW."query", NEW."userinfo"); RETURN NULL;', hash='fce28bcbd64850a383950a720a72f134da7989d9', operation='UPDATE', pgid='pgtrigger_update_update_2b19a', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (OLD."fragment", OLD."host", OLD."id", OLD."path", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."port", OLD."product_id", OLD."protocol", OLD."query", OLD."userinfo"); RETURN NULL;', hash='5e88416165d3e62cc6717b7d7f5d82933929de32', operation='DELETE', pgid='pgtrigger_delete_delete_dd1f9', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", "deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", 
"pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (NEW."active", NEW."api_test", NEW."branch_tag", NEW."build_id", NEW."build_server_id", NEW."check_list", NEW."commit_hash", NEW."created", NEW."deduplication_on_engagement", NEW."description", NEW."done_testing", NEW."engagement_type", NEW."first_contacted", NEW."id", NEW."lead_id", NEW."name", NEW."orchestration_engine_id", NEW."pen_test", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."preset_id", NEW."product_id", NEW."progress", NEW."reason", NEW."report_type_id", NEW."requester_id", NEW."source_code_management_server_id", NEW."source_code_management_uri", NEW."status", NEW."target_end", NEW."target_start", NEW."test_strategy", NEW."threat_model", NEW."tmodel_path", NEW."tracker", NEW."updated", NEW."version"); RETURN NULL;', hash='4155b326b45cff0de61bd509545845d4a9fa6a18', operation='INSERT', pgid='pgtrigger_insert_insert_125f1', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."active" IS DISTINCT FROM (NEW."active") OR OLD."api_test" IS DISTINCT FROM (NEW."api_test") OR OLD."branch_tag" IS DISTINCT FROM (NEW."branch_tag") OR OLD."build_id" IS DISTINCT FROM (NEW."build_id") OR OLD."build_server_id" IS DISTINCT FROM (NEW."build_server_id") OR OLD."check_list" IS DISTINCT FROM (NEW."check_list") OR OLD."commit_hash" IS DISTINCT FROM (NEW."commit_hash") OR OLD."deduplication_on_engagement" IS DISTINCT FROM (NEW."deduplication_on_engagement") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."done_testing" IS DISTINCT FROM (NEW."done_testing") OR OLD."engagement_type" IS DISTINCT FROM (NEW."engagement_type") OR OLD."first_contacted" IS DISTINCT FROM (NEW."first_contacted") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."lead_id" IS DISTINCT FROM (NEW."lead_id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."orchestration_engine_id" IS DISTINCT FROM (NEW."orchestration_engine_id") OR OLD."pen_test" IS DISTINCT FROM (NEW."pen_test") OR OLD."preset_id" IS DISTINCT FROM (NEW."preset_id") OR OLD."product_id" IS DISTINCT FROM (NEW."product_id") OR OLD."progress" IS DISTINCT FROM (NEW."progress") OR OLD."reason" IS DISTINCT FROM (NEW."reason") OR OLD."report_type_id" IS DISTINCT FROM (NEW."report_type_id") OR OLD."requester_id" IS DISTINCT FROM (NEW."requester_id") OR OLD."source_code_management_server_id" IS DISTINCT FROM (NEW."source_code_management_server_id") OR OLD."source_code_management_uri" IS DISTINCT FROM (NEW."source_code_management_uri") OR OLD."status" IS DISTINCT FROM (NEW."status") OR OLD."target_end" IS DISTINCT FROM (NEW."target_end") OR OLD."target_start" IS DISTINCT FROM (NEW."target_start") OR OLD."test_strategy" IS DISTINCT FROM (NEW."test_strategy") OR OLD."threat_model" IS DISTINCT FROM (NEW."threat_model") OR OLD."tmodel_path" IS DISTINCT FROM (NEW."tmodel_path") OR OLD."tracker" IS DISTINCT FROM (NEW."tracker") OR OLD."version" IS DISTINCT FROM (NEW."version"))', func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", 
"deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (NEW."active", NEW."api_test", NEW."branch_tag", NEW."build_id", NEW."build_server_id", NEW."check_list", NEW."commit_hash", NEW."created", NEW."deduplication_on_engagement", NEW."description", NEW."done_testing", NEW."engagement_type", NEW."first_contacted", NEW."id", NEW."lead_id", NEW."name", NEW."orchestration_engine_id", NEW."pen_test", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."preset_id", NEW."product_id", NEW."progress", NEW."reason", NEW."report_type_id", NEW."requester_id", NEW."source_code_management_server_id", NEW."source_code_management_uri", NEW."status", NEW."target_end", NEW."target_start", NEW."test_strategy", NEW."threat_model", NEW."tmodel_path", NEW."tracker", NEW."updated", NEW."version"); RETURN NULL;', hash='ef3645585741c02419e463ccd066ab5daf10eddd', operation='UPDATE', pgid='pgtrigger_update_update_65136', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", "deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (OLD."active", OLD."api_test", OLD."branch_tag", OLD."build_id", OLD."build_server_id", OLD."check_list", OLD."commit_hash", OLD."created", OLD."deduplication_on_engagement", OLD."description", OLD."done_testing", OLD."engagement_type", OLD."first_contacted", OLD."id", OLD."lead_id", OLD."name", OLD."orchestration_engine_id", OLD."pen_test", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."preset_id", OLD."product_id", OLD."progress", OLD."reason", OLD."report_type_id", OLD."requester_id", OLD."source_code_management_server_id", OLD."source_code_management_uri", OLD."status", OLD."target_end", OLD."target_start", OLD."test_strategy", OLD."threat_model", OLD."tmodel_path", OLD."tracker", OLD."updated", OLD."version"); RETURN NULL;', hash='7c91af532c0625d121388fb6d3fff7a0321d06b5', operation='DELETE', pgid='pgtrigger_delete_delete_9f4df', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", "cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", 
"dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", "numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (NEW."active", NEW."component_name", NEW."component_version", NEW."created", NEW."cve", NEW."cvssv3", NEW."cvssv3_score", NEW."cvssv4", NEW."cvssv4_score", NEW."cwe", NEW."date", NEW."defect_review_requested_by_id", NEW."description", NEW."duplicate", NEW."duplicate_finding_id", NEW."dynamic_finding", NEW."effort_for_fixing", NEW."epss_percentile", NEW."epss_score", NEW."false_p", NEW."file_path", NEW."fix_available", NEW."hash_code", NEW."id", NEW."impact", NEW."is_mitigated", NEW."kev_date", NEW."known_exploited", NEW."last_reviewed", NEW."last_reviewed_by_id", NEW."last_status_update", NEW."line", NEW."mitigated", NEW."mitigated_by_id", NEW."mitigation", NEW."nb_occurences", NEW."numerical_severity", NEW."out_of_scope", NEW."param", NEW."payload", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."planned_remediation_date", NEW."planned_remediation_version", NEW."publish_date", NEW."ransomware_used", NEW."refs", NEW."reporter_id", NEW."review_requested_by_id", NEW."risk_accepted", NEW."sast_sink_object", NEW."sast_source_file_path", NEW."sast_source_line", NEW."sast_source_object", NEW."scanner_confidence", NEW."service", NEW."severity", NEW."severity_justification", NEW."sla_expiration_date", NEW."sla_start_date", NEW."sonarqube_issue_id", NEW."static_finding", NEW."steps_to_reproduce", NEW."test_id", NEW."thread_id", NEW."title", NEW."under_defect_review", NEW."under_review", NEW."unique_id_from_tool", NEW."url", NEW."verified", NEW."vuln_id_from_tool"); RETURN NULL;', hash='4feb4f6a7e26a63edec0aed0646e539d83151bad', operation='INSERT', pgid='pgtrigger_insert_insert_2fbbb', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."active" IS DISTINCT FROM (NEW."active") OR OLD."component_name" IS DISTINCT FROM (NEW."component_name") OR OLD."component_version" IS DISTINCT FROM (NEW."component_version") OR OLD."cve" IS DISTINCT FROM (NEW."cve") OR OLD."cvssv3" IS DISTINCT FROM (NEW."cvssv3") OR OLD."cvssv3_score" IS DISTINCT FROM (NEW."cvssv3_score") OR OLD."cvssv4" IS DISTINCT FROM (NEW."cvssv4") OR OLD."cvssv4_score" IS DISTINCT FROM (NEW."cvssv4_score") OR OLD."cwe" IS DISTINCT FROM (NEW."cwe") OR OLD."date" IS DISTINCT FROM (NEW."date") OR OLD."defect_review_requested_by_id" IS DISTINCT FROM (NEW."defect_review_requested_by_id") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."duplicate" IS DISTINCT FROM 
(NEW."duplicate") OR OLD."duplicate_finding_id" IS DISTINCT FROM (NEW."duplicate_finding_id") OR OLD."dynamic_finding" IS DISTINCT FROM (NEW."dynamic_finding") OR OLD."effort_for_fixing" IS DISTINCT FROM (NEW."effort_for_fixing") OR OLD."epss_percentile" IS DISTINCT FROM (NEW."epss_percentile") OR OLD."epss_score" IS DISTINCT FROM (NEW."epss_score") OR OLD."false_p" IS DISTINCT FROM (NEW."false_p") OR OLD."file_path" IS DISTINCT FROM (NEW."file_path") OR OLD."fix_available" IS DISTINCT FROM (NEW."fix_available") OR OLD."hash_code" IS DISTINCT FROM (NEW."hash_code") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."impact" IS DISTINCT FROM (NEW."impact") OR OLD."is_mitigated" IS DISTINCT FROM (NEW."is_mitigated") OR OLD."kev_date" IS DISTINCT FROM (NEW."kev_date") OR OLD."known_exploited" IS DISTINCT FROM (NEW."known_exploited") OR OLD."last_reviewed" IS DISTINCT FROM (NEW."last_reviewed") OR OLD."last_reviewed_by_id" IS DISTINCT FROM (NEW."last_reviewed_by_id") OR OLD."line" IS DISTINCT FROM (NEW."line") OR OLD."mitigated" IS DISTINCT FROM (NEW."mitigated") OR OLD."mitigated_by_id" IS DISTINCT FROM (NEW."mitigated_by_id") OR OLD."mitigation" IS DISTINCT FROM (NEW."mitigation") OR OLD."nb_occurences" IS DISTINCT FROM (NEW."nb_occurences") OR OLD."numerical_severity" IS DISTINCT FROM (NEW."numerical_severity") OR OLD."out_of_scope" IS DISTINCT FROM (NEW."out_of_scope") OR OLD."param" IS DISTINCT FROM (NEW."param") OR OLD."payload" IS DISTINCT FROM (NEW."payload") OR OLD."planned_remediation_date" IS DISTINCT FROM (NEW."planned_remediation_date") OR OLD."planned_remediation_version" IS DISTINCT FROM (NEW."planned_remediation_version") OR OLD."publish_date" IS DISTINCT FROM (NEW."publish_date") OR OLD."ransomware_used" IS DISTINCT FROM (NEW."ransomware_used") OR OLD."refs" IS DISTINCT FROM (NEW."refs") OR OLD."reporter_id" IS DISTINCT FROM (NEW."reporter_id") OR OLD."review_requested_by_id" IS DISTINCT FROM (NEW."review_requested_by_id") OR OLD."risk_accepted" IS DISTINCT FROM (NEW."risk_accepted") OR OLD."sast_sink_object" IS DISTINCT FROM (NEW."sast_sink_object") OR OLD."sast_source_file_path" IS DISTINCT FROM (NEW."sast_source_file_path") OR OLD."sast_source_line" IS DISTINCT FROM (NEW."sast_source_line") OR OLD."sast_source_object" IS DISTINCT FROM (NEW."sast_source_object") OR OLD."scanner_confidence" IS DISTINCT FROM (NEW."scanner_confidence") OR OLD."service" IS DISTINCT FROM (NEW."service") OR OLD."severity" IS DISTINCT FROM (NEW."severity") OR OLD."severity_justification" IS DISTINCT FROM (NEW."severity_justification") OR OLD."sla_expiration_date" IS DISTINCT FROM (NEW."sla_expiration_date") OR OLD."sla_start_date" IS DISTINCT FROM (NEW."sla_start_date") OR OLD."sonarqube_issue_id" IS DISTINCT FROM (NEW."sonarqube_issue_id") OR OLD."static_finding" IS DISTINCT FROM (NEW."static_finding") OR OLD."steps_to_reproduce" IS DISTINCT FROM (NEW."steps_to_reproduce") OR OLD."test_id" IS DISTINCT FROM (NEW."test_id") OR OLD."thread_id" IS DISTINCT FROM (NEW."thread_id") OR OLD."title" IS DISTINCT FROM (NEW."title") OR OLD."under_defect_review" IS DISTINCT FROM (NEW."under_defect_review") OR OLD."under_review" IS DISTINCT FROM (NEW."under_review") OR OLD."unique_id_from_tool" IS DISTINCT FROM (NEW."unique_id_from_tool") OR OLD."url" IS DISTINCT FROM (NEW."url") OR OLD."verified" IS DISTINCT FROM (NEW."verified") OR OLD."vuln_id_from_tool" IS DISTINCT FROM (NEW."vuln_id_from_tool"))', func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", 
"cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", "dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", "numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (NEW."active", NEW."component_name", NEW."component_version", NEW."created", NEW."cve", NEW."cvssv3", NEW."cvssv3_score", NEW."cvssv4", NEW."cvssv4_score", NEW."cwe", NEW."date", NEW."defect_review_requested_by_id", NEW."description", NEW."duplicate", NEW."duplicate_finding_id", NEW."dynamic_finding", NEW."effort_for_fixing", NEW."epss_percentile", NEW."epss_score", NEW."false_p", NEW."file_path", NEW."fix_available", NEW."hash_code", NEW."id", NEW."impact", NEW."is_mitigated", NEW."kev_date", NEW."known_exploited", NEW."last_reviewed", NEW."last_reviewed_by_id", NEW."last_status_update", NEW."line", NEW."mitigated", NEW."mitigated_by_id", NEW."mitigation", NEW."nb_occurences", NEW."numerical_severity", NEW."out_of_scope", NEW."param", NEW."payload", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."planned_remediation_date", NEW."planned_remediation_version", NEW."publish_date", NEW."ransomware_used", NEW."refs", NEW."reporter_id", NEW."review_requested_by_id", NEW."risk_accepted", NEW."sast_sink_object", NEW."sast_source_file_path", NEW."sast_source_line", NEW."sast_source_object", NEW."scanner_confidence", NEW."service", NEW."severity", NEW."severity_justification", NEW."sla_expiration_date", NEW."sla_start_date", NEW."sonarqube_issue_id", NEW."static_finding", NEW."steps_to_reproduce", NEW."test_id", NEW."thread_id", NEW."title", NEW."under_defect_review", NEW."under_review", NEW."unique_id_from_tool", NEW."url", NEW."verified", NEW."vuln_id_from_tool"); RETURN NULL;', hash='e3a33f1fd38ee7d34a56dfaf4ca3706f986b5953', operation='UPDATE', pgid='pgtrigger_update_update_92175', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", "cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", "dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", 
"numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (OLD."active", OLD."component_name", OLD."component_version", OLD."created", OLD."cve", OLD."cvssv3", OLD."cvssv3_score", OLD."cvssv4", OLD."cvssv4_score", OLD."cwe", OLD."date", OLD."defect_review_requested_by_id", OLD."description", OLD."duplicate", OLD."duplicate_finding_id", OLD."dynamic_finding", OLD."effort_for_fixing", OLD."epss_percentile", OLD."epss_score", OLD."false_p", OLD."file_path", OLD."fix_available", OLD."hash_code", OLD."id", OLD."impact", OLD."is_mitigated", OLD."kev_date", OLD."known_exploited", OLD."last_reviewed", OLD."last_reviewed_by_id", OLD."last_status_update", OLD."line", OLD."mitigated", OLD."mitigated_by_id", OLD."mitigation", OLD."nb_occurences", OLD."numerical_severity", OLD."out_of_scope", OLD."param", OLD."payload", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."planned_remediation_date", OLD."planned_remediation_version", OLD."publish_date", OLD."ransomware_used", OLD."refs", OLD."reporter_id", OLD."review_requested_by_id", OLD."risk_accepted", OLD."sast_sink_object", OLD."sast_source_file_path", OLD."sast_source_line", OLD."sast_source_object", OLD."scanner_confidence", OLD."service", OLD."severity", OLD."severity_justification", OLD."sla_expiration_date", OLD."sla_start_date", OLD."sonarqube_issue_id", OLD."static_finding", OLD."steps_to_reproduce", OLD."test_id", OLD."thread_id", OLD."title", OLD."under_defect_review", OLD."under_review", OLD."unique_id_from_tool", OLD."url", OLD."verified", OLD."vuln_id_from_tool"); RETURN NULL;', hash='af149137c005baecd86b57ceea9f19ca5cacb8b2', operation='DELETE', pgid='pgtrigger_delete_delete_72933', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (NEW."created", NEW."creator_id", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."test_id"); RETURN NULL;', hash='ccce37d431a0be588c20aa39b570893c63be9b4b', operation='INSERT', pgid='pgtrigger_insert_insert_94bd2', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."creator_id" IS DISTINCT FROM (NEW."creator_id") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."test_id" IS DISTINCT FROM (NEW."test_id"))', func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (NEW."created", 
NEW."creator_id", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."test_id"); RETURN NULL;', hash='d5d94d19b54079f6c284d9448666eb408b20f245', operation='UPDATE', pgid='pgtrigger_update_update_37b5f', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (OLD."created", OLD."creator_id", OLD."id", OLD."modified", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."test_id"); RETURN NULL;', hash='4d7241855d22804d01d455fb4228e750c5b11497', operation='DELETE', pgid='pgtrigger_delete_delete_24b41', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (NEW."cve", NEW."cvssv3", NEW."cwe", NEW."description", NEW."id", NEW."impact", NEW."last_used", NEW."mitigation", NEW."numerical_severity", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."refs", NEW."severity", NEW."template_match", NEW."template_match_title", NEW."title"); RETURN NULL;', hash='1e871e8199f43721385ad52a22ab150158f7ee6e', operation='INSERT', pgid='pgtrigger_insert_insert_59092', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (NEW."cve", NEW."cvssv3", NEW."cwe", NEW."description", NEW."id", NEW."impact", NEW."last_used", NEW."mitigation", NEW."numerical_severity", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."refs", NEW."severity", NEW."template_match", NEW."template_match_title", NEW."title"); RETURN NULL;', hash='c59aaa841042474f2c9b84f779ce466d4f3f93bc', operation='UPDATE', pgid='pgtrigger_update_update_43036', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (OLD."cve", OLD."cvssv3", OLD."cwe", OLD."description", OLD."id", OLD."impact", OLD."last_used", OLD."mitigation", OLD."numerical_severity", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."refs", OLD."severity", OLD."template_match", 
OLD."template_match_title", OLD."title"); RETURN NULL;', hash='75d1cfac63c77fa4b5edd4f0e2ed83b316713e3a', operation='DELETE', pgid='pgtrigger_delete_delete_3f3a6', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (NEW."first_error", NEW."id", NEW."last_error", NEW."name", NEW."note", NEW."owner_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."status", NEW."url"); RETURN NULL;', hash='6e06e90d2d601262224f9a53d1965a0ddd65115e', operation='INSERT', pgid='pgtrigger_insert_insert_e0fa8', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."first_error" IS DISTINCT FROM (NEW."first_error") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."last_error" IS DISTINCT FROM (NEW."last_error") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."note" IS DISTINCT FROM (NEW."note") OR OLD."owner_id" IS DISTINCT FROM (NEW."owner_id") OR OLD."status" IS DISTINCT FROM (NEW."status") OR OLD."url" IS DISTINCT FROM (NEW."url"))', func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (NEW."first_error", NEW."id", NEW."last_error", NEW."name", NEW."note", NEW."owner_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."status", NEW."url"); RETURN NULL;', hash='0474c577e4fb71352b9675dc736e9b6cd3075acd', operation='UPDATE', pgid='pgtrigger_update_update_6e00f', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (OLD."first_error", OLD."id", OLD."last_error", OLD."name", OLD."note", OLD."owner_id", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."status", OLD."url"); RETURN NULL;', hash='6348aaeba50ec158b4baca1b33611221e0e7b7cd', operation='DELETE', pgid='pgtrigger_delete_delete_21b9f', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (NEW."async_updating", 
NEW."business_criticality", NEW."created", NEW."description", NEW."disable_sla_breach_notifications", NEW."enable_full_risk_acceptance", NEW."enable_product_tag_inheritance", NEW."enable_simple_risk_acceptance", NEW."external_audience", NEW."id", NEW."internet_accessible", NEW."lifecycle", NEW."name", NEW."origin", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."platform", NEW."prod_numeric_grade", NEW."prod_type_id", NEW."product_manager_id", NEW."revenue", NEW."sla_configuration_id", NEW."team_manager_id", NEW."technical_contact_id", NEW."tid", NEW."updated", NEW."user_records"); RETURN NULL;', hash='71f5c7cfbba0f755e995508e10ef1bd8822667e1', operation='INSERT', pgid='pgtrigger_insert_insert_d5d32', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."async_updating" IS DISTINCT FROM (NEW."async_updating") OR OLD."business_criticality" IS DISTINCT FROM (NEW."business_criticality") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."disable_sla_breach_notifications" IS DISTINCT FROM (NEW."disable_sla_breach_notifications") OR OLD."enable_full_risk_acceptance" IS DISTINCT FROM (NEW."enable_full_risk_acceptance") OR OLD."enable_product_tag_inheritance" IS DISTINCT FROM (NEW."enable_product_tag_inheritance") OR OLD."enable_simple_risk_acceptance" IS DISTINCT FROM (NEW."enable_simple_risk_acceptance") OR OLD."external_audience" IS DISTINCT FROM (NEW."external_audience") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."internet_accessible" IS DISTINCT FROM (NEW."internet_accessible") OR OLD."lifecycle" IS DISTINCT FROM (NEW."lifecycle") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."origin" IS DISTINCT FROM (NEW."origin") OR OLD."platform" IS DISTINCT FROM (NEW."platform") OR OLD."prod_numeric_grade" IS DISTINCT FROM (NEW."prod_numeric_grade") OR OLD."prod_type_id" IS DISTINCT FROM (NEW."prod_type_id") OR OLD."product_manager_id" IS DISTINCT FROM (NEW."product_manager_id") OR OLD."revenue" IS DISTINCT FROM (NEW."revenue") OR OLD."sla_configuration_id" IS DISTINCT FROM (NEW."sla_configuration_id") OR OLD."team_manager_id" IS DISTINCT FROM (NEW."team_manager_id") OR OLD."technical_contact_id" IS DISTINCT FROM (NEW."technical_contact_id") OR OLD."tid" IS DISTINCT FROM (NEW."tid") OR OLD."user_records" IS DISTINCT FROM (NEW."user_records"))', func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (NEW."async_updating", NEW."business_criticality", NEW."created", NEW."description", NEW."disable_sla_breach_notifications", NEW."enable_full_risk_acceptance", NEW."enable_product_tag_inheritance", NEW."enable_simple_risk_acceptance", NEW."external_audience", NEW."id", NEW."internet_accessible", NEW."lifecycle", NEW."name", NEW."origin", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."platform", NEW."prod_numeric_grade", NEW."prod_type_id", NEW."product_manager_id", NEW."revenue", 
NEW."sla_configuration_id", NEW."team_manager_id", NEW."technical_contact_id", NEW."tid", NEW."updated", NEW."user_records"); RETURN NULL;', hash='ac04dd898b94200f9795a19fcf097d74b493aa51', operation='UPDATE', pgid='pgtrigger_update_update_e7040', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (OLD."async_updating", OLD."business_criticality", OLD."created", OLD."description", OLD."disable_sla_breach_notifications", OLD."enable_full_risk_acceptance", OLD."enable_product_tag_inheritance", OLD."enable_simple_risk_acceptance", OLD."external_audience", OLD."id", OLD."internet_accessible", OLD."lifecycle", OLD."name", OLD."origin", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."platform", OLD."prod_numeric_grade", OLD."prod_type_id", OLD."product_manager_id", OLD."revenue", OLD."sla_configuration_id", OLD."team_manager_id", OLD."technical_contact_id", OLD."tid", OLD."updated", OLD."user_records"); RETURN NULL;', hash='7c403d993524408760d5430e6e8b5b6fd86753db', operation='DELETE', pgid='pgtrigger_delete_delete_064dd', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (NEW."created", NEW."critical_product", NEW."description", NEW."id", NEW."key_product", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."updated"); RETURN NULL;', hash='3f3d8e8a68a63bc86ff3557f8fc5f54d950e9d6d', operation='INSERT', pgid='pgtrigger_insert_insert_2d109', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."critical_product" IS DISTINCT FROM (NEW."critical_product") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."key_product" IS DISTINCT FROM (NEW."key_product") OR OLD."name" IS DISTINCT FROM (NEW."name"))', func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (NEW."created", NEW."critical_product", NEW."description", NEW."id", NEW."key_product", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."updated"); RETURN NULL;', hash='97c18814aec2e06d73b89e3eb65e0cfbf832dda5', operation='UPDATE', pgid='pgtrigger_update_update_a0136', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + 
model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (OLD."created", OLD."critical_product", OLD."description", OLD."id", OLD."key_product", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."updated"); RETURN NULL;', hash='1b3bb470de18c3270ba2a1d4453d276b34f650dc', operation='DELETE', pgid='pgtrigger_delete_delete_66b18', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (NEW."accepted_by", NEW."created", NEW."decision", NEW."decision_details", NEW."expiration_date", NEW."expiration_date_handled", NEW."expiration_date_warned", NEW."id", NEW."name", NEW."owner_id", NEW."path", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."reactivate_expired", NEW."recommendation", NEW."recommendation_details", NEW."restart_sla_expired", NEW."updated"); RETURN NULL;', hash='5b2ded87a2593823805cc232abdcc9aecea43e09', operation='INSERT', pgid='pgtrigger_insert_insert_d29bd', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."accepted_by" IS DISTINCT FROM (NEW."accepted_by") OR OLD."decision" IS DISTINCT FROM (NEW."decision") OR OLD."decision_details" IS DISTINCT FROM (NEW."decision_details") OR OLD."expiration_date" IS DISTINCT FROM (NEW."expiration_date") OR OLD."expiration_date_handled" IS DISTINCT FROM (NEW."expiration_date_handled") OR OLD."expiration_date_warned" IS DISTINCT FROM (NEW."expiration_date_warned") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."owner_id" IS DISTINCT FROM (NEW."owner_id") OR OLD."path" IS DISTINCT FROM (NEW."path") OR OLD."reactivate_expired" IS DISTINCT FROM (NEW."reactivate_expired") OR OLD."recommendation" IS DISTINCT FROM (NEW."recommendation") OR OLD."recommendation_details" IS DISTINCT FROM (NEW."recommendation_details") OR OLD."restart_sla_expired" IS DISTINCT FROM (NEW."restart_sla_expired"))', func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (NEW."accepted_by", NEW."created", NEW."decision", NEW."decision_details", NEW."expiration_date", NEW."expiration_date_handled", NEW."expiration_date_warned", NEW."id", NEW."name", NEW."owner_id", NEW."path", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."reactivate_expired", NEW."recommendation", NEW."recommendation_details", 
NEW."restart_sla_expired", NEW."updated"); RETURN NULL;', hash='dc733156f0b9f5e70e3f64c07afaeaf511088cc9', operation='UPDATE', pgid='pgtrigger_update_update_55e64', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (OLD."accepted_by", OLD."created", OLD."decision", OLD."decision_details", OLD."expiration_date", OLD."expiration_date_handled", OLD."expiration_date_warned", OLD."id", OLD."name", OLD."owner_id", OLD."path", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."reactivate_expired", OLD."recommendation", OLD."recommendation_details", OLD."restart_sla_expired", OLD."updated"); RETURN NULL;', hash='6ab6fabb2607af135635fa216cd8980bbdd38d66', operation='DELETE', pgid='pgtrigger_delete_delete_7d103', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", "build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (NEW."api_scan_configuration_id", NEW."branch_tag", NEW."build_id", NEW."commit_hash", NEW."created", NEW."description", NEW."engagement_id", NEW."environment_id", NEW."id", NEW."lead_id", NEW."percent_complete", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."scan_type", NEW."target_end", NEW."target_start", NEW."test_type_id", NEW."title", NEW."updated", NEW."version"); RETURN NULL;', hash='0b6ec21ca35b61b1abcc0b2f8629cb4d1cc92930', operation='INSERT', pgid='pgtrigger_insert_insert_ecfc1', table='dojo_test', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."api_scan_configuration_id" IS DISTINCT FROM (NEW."api_scan_configuration_id") OR OLD."branch_tag" IS DISTINCT FROM (NEW."branch_tag") OR OLD."build_id" IS DISTINCT FROM (NEW."build_id") OR OLD."commit_hash" IS DISTINCT FROM (NEW."commit_hash") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."engagement_id" IS DISTINCT FROM (NEW."engagement_id") OR OLD."environment_id" IS DISTINCT FROM (NEW."environment_id") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."lead_id" IS DISTINCT FROM (NEW."lead_id") OR OLD."percent_complete" IS DISTINCT FROM (NEW."percent_complete") OR OLD."scan_type" IS DISTINCT FROM (NEW."scan_type") OR OLD."target_end" IS DISTINCT FROM (NEW."target_end") OR OLD."target_start" IS DISTINCT FROM (NEW."target_start") OR OLD."test_type_id" IS DISTINCT FROM (NEW."test_type_id") OR OLD."title" IS DISTINCT FROM (NEW."title") OR OLD."version" IS DISTINCT FROM (NEW."version"))', func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", 
"build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (NEW."api_scan_configuration_id", NEW."branch_tag", NEW."build_id", NEW."commit_hash", NEW."created", NEW."description", NEW."engagement_id", NEW."environment_id", NEW."id", NEW."lead_id", NEW."percent_complete", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."scan_type", NEW."target_end", NEW."target_start", NEW."test_type_id", NEW."title", NEW."updated", NEW."version"); RETURN NULL;', hash='777c92a16d48f7e590e50cb8fb6c0d77c9afa1b6', operation='UPDATE', pgid='pgtrigger_update_update_c40f8', table='dojo_test', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", "build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (OLD."api_scan_configuration_id", OLD."branch_tag", OLD."build_id", OLD."commit_hash", OLD."created", OLD."description", OLD."engagement_id", OLD."environment_id", OLD."id", OLD."lead_id", OLD."percent_complete", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."scan_type", OLD."target_end", OLD."target_start", OLD."test_type_id", OLD."title", OLD."updated", OLD."version"); RETURN NULL;', hash='51bce27193221308adc41e62f1faff5122bbbceb', operation='DELETE', pgid='pgtrigger_delete_delete_66d18', table='dojo_test', when='AFTER')), + ), + migrations.AddField( + model_name='cred_userevent', + name='environment', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.development_environment'), + ), + migrations.AddField( + model_name='cred_userevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='cred_userevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.cred_user'), + ), + migrations.AddField( + model_name='dojo_userevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='dojo_userevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='endpointevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='endpointevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.endpoint'), + ), + migrations.AddField( + 
model_name='endpointevent', + name='product', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product'), + ), + migrations.AddField( + model_name='engagementevent', + name='build_server', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Build server responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='Build Server'), + ), + migrations.AddField( + model_name='engagementevent', + name='lead', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='engagementevent', + name='orchestration_engine', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Orchestration service responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='Orchestration Engine'), + ), + migrations.AddField( + model_name='engagementevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='engagementevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.engagement'), + ), + migrations.AddField( + model_name='engagementevent', + name='preset', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Settings and notes for performing this engagement.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.engagement_presets'), + ), + migrations.AddField( + model_name='engagementevent', + name='product', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product'), + ), + migrations.AddField( + model_name='engagementevent', + name='report_type', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.report_type'), + ), + migrations.AddField( + model_name='engagementevent', + name='requester', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.contact'), + ), + migrations.AddField( + model_name='engagementevent', + name='source_code_management_server', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Source code server for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='SCM Server'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='creator', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + 
migrations.AddField( + model_name='finding_groupevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding_group'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='test', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test'), + ), + migrations.AddField( + model_name='finding_templateevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='finding_templateevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding_template'), + ), + migrations.AddField( + model_name='findingevent', + name='defect_review_requested_by', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Documents who requested a defect review for this flaw.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Defect Review Requested By'), + ), + migrations.AddField( + model_name='findingevent', + name='duplicate_finding', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, editable=False, help_text='Link to the original finding if this finding is a duplicate.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.finding', verbose_name='Duplicate Finding'), + ), + migrations.AddField( + model_name='findingevent', + name='last_reviewed_by', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='Provides the person who last reviewed the flaw.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Last Reviewed By'), + ), + migrations.AddField( + model_name='findingevent', + name='mitigated_by', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='Documents who has marked this flaw as fixed.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Mitigated By'), + ), + migrations.AddField( + model_name='findingevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='findingevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding'), + ), + migrations.AddField( + model_name='findingevent', + name='reporter', + field=models.ForeignKey(db_constraint=False, db_index=False, default=1, editable=False, help_text='Documents who reported the flaw.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Reporter'), + ), + migrations.AddField( + 
model_name='findingevent', + name='review_requested_by', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Documents who requested a review for this finding.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Review Requested By'), + ), + migrations.AddField( + model_name='findingevent', + name='sonarqube_issue', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='The SonarQube issue associated with this finding.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.sonarqube_issue', verbose_name='SonarQube issue'), + ), + migrations.AddField( + model_name='findingevent', + name='test', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='The test that is associated with this flaw.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test', verbose_name='Test'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='owner', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Owner/receiver of notification, if empty processed as system notification', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.notification_webhooks'), + ), + migrations.AddField( + model_name='product_typeevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='product_typeevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.product_type'), + ), + migrations.AddField( + model_name='productevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='productevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.product'), + ), + migrations.AddField( + model_name='productevent', + name='prod_type', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product_type'), + ), + migrations.AddField( + model_name='productevent', + name='product_manager', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='productevent', + name='sla_configuration', + field=models.ForeignKey(db_constraint=False, db_index=False, default=1, 
on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.sla_configuration'), + ), + migrations.AddField( + model_name='productevent', + name='team_manager', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='productevent', + name='technical_contact', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='owner', + field=models.ForeignKey(db_constraint=False, db_index=False, help_text='User in DefectDojo owning this acceptance. Only the owner and staff users can edit the risk acceptance.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.risk_acceptance'), + ), + migrations.AddField( + model_name='testevent', + name='api_scan_configuration', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product_api_scan_configuration', verbose_name='API Scan Configuration'), + ), + migrations.AddField( + model_name='testevent', + name='engagement', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.engagement'), + ), + migrations.AddField( + model_name='testevent', + name='environment', + field=models.ForeignKey(db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.development_environment'), + ), + migrations.AddField( + model_name='testevent', + name='lead', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='testevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='testevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.test'), + ), + migrations.AddField( + model_name='testevent', + name='test_type', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test_type'), + ), + migrations.AddIndex( + model_name='cred_userevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_cred_u_pgh_cre_675d71_idx'), + ), + migrations.AddIndex( + 
model_name='cred_userevent', + index=models.Index(fields=['pgh_label'], name='dojo_cred_u_pgh_lab_7842ac_idx'), + ), + migrations.AddIndex( + model_name='cred_userevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_cred_u_pgh_con_9ac3a9_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_dojo_u_pgh_cre_dd25e9_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_label'], name='dojo_dojo_u_pgh_lab_5e3d06_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_dojo_u_pgh_con_29c3a1_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_endpoi_pgh_cre_b3a7c8_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_label'], name='dojo_endpoi_pgh_lab_7d4c97_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_endpoi_pgh_con_5d9640_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_engage_pgh_cre_9e6148_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_label'], name='dojo_engage_pgh_lab_bac3f6_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_engage_pgh_con_a73738_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_7acb9c_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_1a7554_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_d97c84_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_397616_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_751cf8_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_9aac28_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_b4aed3_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_3d7ed7_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_807d35_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_notifi_pgh_cre_14aea2_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_label'], name='dojo_notifi_pgh_lab_0abf77_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_notifi_pgh_con_359cd6_idx'), + ), + migrations.AddIndex( + model_name='product_typeevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_produc_pgh_cre_ff015d_idx'), + ), + migrations.AddIndex( + 
model_name='product_typeevent', + index=models.Index(fields=['pgh_label'], name='dojo_produc_pgh_lab_07c954_idx'), + ), + migrations.AddIndex( + model_name='product_typeevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_produc_pgh_con_c3c1ea_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_produc_pgh_cre_e3146f_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_label'], name='dojo_produc_pgh_lab_5f11db_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_produc_pgh_con_cdffb4_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_risk_a_pgh_cre_c97aae_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_label'], name='dojo_risk_a_pgh_lab_9a6ce2_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_risk_a_pgh_con_5de681_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_testev_pgh_cre_377964_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_label'], name='dojo_testev_pgh_lab_b15edd_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_testev_pgh_con_e18502_idx'), + ), + # DojoEvents proxy model for structured context field access + migrations.CreateModel( + name='DojoEvents', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + 'constraints': [], + }, + bases=('pghistory.events',), + managers=[ + ('no_objects', django.db.models.manager.Manager()), + ], + ), + ] diff --git a/dojo/db_migrations/0244_pghistory_indices.py b/dojo/db_migrations/0244_pghistory_indices.py new file mode 100644 index 00000000000..73eed4455bd --- /dev/null +++ b/dojo/db_migrations/0244_pghistory_indices.py @@ -0,0 +1,44 @@ +# Generated manually for pghistory performance indexes + +from django.db import migrations + + +class Migration(migrations.Migration): + # Mark as atomic=False to allow CONCURRENTLY operations + atomic = False + + dependencies = [ + ('dojo', '0243_pghistory_models'), + ] + + operations = [ + migrations.RunSQL( + # Forward migration - add indexes with CONCURRENTLY to avoid table locks + # Note: pghistory stores context as JSON in the 'metadata' column + sql=[ + # GIN index on the entire JSON metadata field - supports general JSON queries + # This is excellent for @>, ?, ?&, ?| operators and general JSON containment + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_metadata_gin_idx" ON "pghistory_context" USING GIN ("metadata");', + + # Specific expression indexes for common filtering patterns + # These complement the GIN index for exact value lookups + + # Index on user field from JSON - most selective for exact user filtering + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_user_idx" ON "pghistory_context" ((metadata->>\'user\'));', + + # Index on remote_addr field from JSON - for IP address filtering (supports icontains) + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_remote_addr_idx" ON "pghistory_context" ((metadata->>\'remote_addr\'));', + + # Index on url field from JSON - for URL filtering (helps with icontains queries) + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS 
"pghistory_context_url_idx" ON "pghistory_context" ((metadata->>\'url\'));', + ], + # Reverse migration - drop indexes safely + reverse_sql=[ + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_metadata_gin_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_user_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_remote_addr_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_url_idx";', + ], + ), + ] + diff --git a/dojo/decorators.py b/dojo/decorators.py index 40174524fb7..b7b84d59430 100644 --- a/dojo/decorators.py +++ b/dojo/decorators.py @@ -55,8 +55,7 @@ def get_tasks(self): def we_want_async(*args, func=None, **kwargs): - from dojo.models import Dojo_User - from dojo.utils import get_current_user + from dojo.utils import get_current_user # noqa: PLC0415 circular import sync = kwargs.get("sync", False) if sync: @@ -64,37 +63,50 @@ def we_want_async(*args, func=None, **kwargs): return False user = kwargs.get("async_user", get_current_user()) - logger.debug("user: %s", user) + logger.debug("async user: %s", user) + + if not user: + logger.debug("dojo_async_task %s: no current user, running task in the background", func) + return True if Dojo_User.wants_block_execution(user): logger.debug("dojo_async_task %s: running task in the foreground as block_execution is set to True for %s", func, user) return False - logger.debug("dojo_async_task %s: no current user, running task in the background", func) + logger.debug("dojo_async_task %s: running task in the background as user has not set block_execution to True for %s", func, user) return True # Defect Dojo performs all tasks asynchrnonously using celery # *unless* the user initiating the task has set block_execution to True in their usercontactinfo profile -def dojo_async_task(func): - @wraps(func) - def __wrapper__(*args, **kwargs): - from dojo.utils import get_current_user - user = get_current_user() - kwargs["async_user"] = user - - dojo_async_task_counter.incr( - func.__name__, - args=args, - kwargs=kwargs, - ) - - countdown = kwargs.pop("countdown", 0) - if we_want_async(*args, func=func, **kwargs): - return func.apply_async(args=args, kwargs=kwargs, countdown=countdown) - return func(*args, **kwargs) - - return __wrapper__ +def dojo_async_task(func=None, *, signature=False): + def decorator(func): + @wraps(func) + def __wrapper__(*args, **kwargs): + from dojo.utils import get_current_user # noqa: PLC0415 circular import + user = get_current_user() + kwargs["async_user"] = user + + dojo_async_task_counter.incr( + func.__name__, + args=args, + kwargs=kwargs, + ) + + if signature: + return func.si(*args, **kwargs) + + countdown = kwargs.pop("countdown", 0) + if we_want_async(*args, func=func, **kwargs): + # Return a signature for use in chord/group if requested + # Execute the task + return func.apply_async(args=args, kwargs=kwargs, countdown=countdown) + return func(*args, **kwargs) + return __wrapper__ + + if func is None: + return decorator + return decorator(func) # decorator with parameters needs another wrapper layer diff --git a/dojo/endpoint/signals.py b/dojo/endpoint/signals.py index 6bdfe8e7a40..50251c5a80a 100644 --- a/dojo/endpoint/signals.py +++ b/dojo/endpoint/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete from django.dispatch import receiver @@ -10,6 +11,7 @@ from dojo.models import 
Endpoint from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_delete, sender=Endpoint) @@ -18,14 +20,38 @@ def endpoint_post_delete(sender, instance, using, origin, **kwargs): with contextlib.suppress(sender.DoesNotExist): if instance == origin: description = _('The endpoint "%(name)s" was deleted') % {"name": str(instance)} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="endpoint"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific endpoint instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Endpoint", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="endpoint"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The endpoint "%(name)s" was deleted by %(user)s') % { - "name": str(instance), "user": le.actor} + "name": str(instance), "user": user} create_notification(event="endpoint_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": str(instance)}, description=description, diff --git a/dojo/engagement/signals.py b/dojo/engagement/signals.py index 77cd4ca6d1f..144094a3264 100644 --- a/dojo/engagement/signals.py +++ b/dojo/engagement/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save, pre_delete, pre_save from django.dispatch import receiver @@ -12,6 +13,7 @@ from dojo.models import Engagement, Product from dojo.notes.helper import delete_related_notes from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_save, sender=Engagement) @@ -45,14 +47,38 @@ def engagement_post_delete(sender, instance, using, origin, **kwargs): with contextlib.suppress(sender.DoesNotExist, Product.DoesNotExist): if instance == origin: description = _('The engagement "%(name)s" was deleted') % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="engagement"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific engagement instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Engagement", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + 
latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="engagement"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The engagement "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + "name": instance.name, "user": user} create_notification(event="engagement_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/engagement/views.py b/dojo/engagement/views.py index a06f32e8441..7ae3e758ead 100644 --- a/dojo/engagement/views.py +++ b/dojo/engagement/views.py @@ -961,6 +961,7 @@ def process_form( "active": None, "verified": None, "scan_type": request.POST.get("scan_type"), + "test_title": form.cleaned_data.get("test_title"), "tags": form.cleaned_data.get("tags"), "version": form.cleaned_data.get("version"), "branch_tag": form.cleaned_data.get("branch_tag", None), diff --git a/dojo/filters.py b/dojo/filters.py index c79bf952f21..55e4978d6c4 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -54,6 +54,7 @@ ) from dojo.finding.queries import get_authorized_findings from dojo.finding_group.queries import get_authorized_finding_groups +from dojo.labels import get_labels from dojo.models import ( EFFORT_FOR_FIXING_CHOICES, ENGAGEMENT_STATUS_CHOICES, @@ -96,6 +97,8 @@ logger = logging.getLogger(__name__) +labels = get_labels() + BOOLEAN_CHOICES = (("false", "No"), ("true", "Yes")) EARLIEST_FINDING = None @@ -367,6 +370,10 @@ def get_tags_model_from_field_name(field): def get_tags_label_from_model(model): if model: + if model is Product_Type: + return labels.ORG_FILTERS_TAGS_LABEL + if model is Product: + return labels.ASSET_FILTERS_TAGS_LABEL return f"Tags ({model.__name__.title()})" return "Tags (Unknown)" @@ -512,7 +519,8 @@ class FindingTagFilter(DojoFilter): field_name="test__engagement__product__tags__name", to_field_name="name", queryset=Product.tags.tag_model.objects.all().order_by("name"), - help_text="Filter Products by the selected tags") + label=labels.ASSET_FILTERS_TAGS_FILTER_LABEL, + help_text=labels.ASSET_FILTERS_TAGS_FILTER_HELP) not_tags = ModelMultipleChoiceFilter( field_name="tags__name", @@ -537,9 +545,9 @@ class FindingTagFilter(DojoFilter): not_test__engagement__product__tags = ModelMultipleChoiceFilter( field_name="test__engagement__product__tags__name", to_field_name="name", - label="Product without tags", + label=labels.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_LABEL, queryset=Product.tags.tag_model.objects.all().order_by("name"), - help_text="Search for tags on a Product that contain a given pattern, and exclude them", + help_text=labels.ASSET_FILTERS_ASSETS_WITHOUT_TAGS_HELP, exclude=True) def __init__(self, *args, **kwargs): @@ -578,15 +586,15 @@ class FindingTagStringFilter(FilterSet): lookup_expr="iexact", help_text="Search for tags on a Finding that are an exact match") test__engagement__product__tags_contains = CharFilter( - label="Product Tag Contains", + 
label=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL, field_name="test__engagement__product__tags__name", lookup_expr="icontains", - help_text="Search for tags on a Finding that contain a given pattern") + help_text=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP) test__engagement__product__tags = CharFilter( - label="Product Tag", + label=labels.ASSET_FILTERS_TAG_ASSET_LABEL, field_name="test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Finding that are an exact match") + help_text=labels.ASSET_FILTERS_TAG_ASSET_HELP) not_tags_contains = CharFilter( label="Finding Tag Does Not Contain", @@ -625,16 +633,16 @@ class FindingTagStringFilter(FilterSet): help_text="Search for tags on a Engagement that are an exact match, and exclude them", exclude=True) not_test__engagement__product__tags_contains = CharFilter( - label="Product Tag Does Not Contain", + label=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL, field_name="test__engagement__product__tags__name", lookup_expr="icontains", - help_text="Search for tags on a Product that contain a given pattern, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_HELP, exclude=True) not_test__engagement__product__tags = CharFilter( - label="Not Product Tag", + label=labels.ASSET_FILTERS_TAG_NOT_LABEL, field_name="test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Product that are an exact match, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_HELP, exclude=True) def delete_tags_from_form(self, tag_list: list): @@ -919,32 +927,32 @@ class ComponentFilterWithoutObjectLookups(ProductComponentFilter): test__engagement__product__prod_type__name = CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + help_text=labels.ORG_FILTERS_NAME_HELP) test__engagement__product__prod_type__name_contains = CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) test__engagement__product__name = CharFilter( field_name="test__engagement__product__name", lookup_expr="iexact", - label="Product Name", - help_text="Search for Product names that are an exact match") + label=labels.ASSET_FILTERS_NAME_LABEL, + help_text=labels.ASSET_FILTERS_NAME_HELP) test__engagement__product__name_contains = CharFilter( field_name="test__engagement__product__name", lookup_expr="icontains", - label="Product Name Contains", - help_text="Search for Product names that contain a given pattern") + label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ASSET_FILTERS_NAME_CONTAINS_HELP) class ComponentFilter(ProductComponentFilter): test__engagement__product__prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) test__engagement__product = ModelMultipleChoiceFilter( queryset=Product.objects.none(), - label="Product") + label=labels.ASSET_FILTERS_LABEL) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -958,7 +966,7 @@ class EngagementDirectFilterHelper(FilterSet): name = CharFilter(lookup_expr="icontains", label="Engagement name contains") version = 
CharFilter(field_name="version", lookup_expr="icontains", label="Engagement version") test__version = CharFilter(field_name="test__version", lookup_expr="icontains", label="Test version") - product__name = CharFilter(lookup_expr="icontains", label="Product name contains") + product__name = CharFilter(lookup_expr="icontains", label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL) status = MultipleChoiceFilter(choices=ENGAGEMENT_STATUS_CHOICES, label="Status") tag = CharFilter(field_name="tags__name", lookup_expr="icontains", label="Tag name contains") not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", label="Not tag name contains", exclude=True) @@ -967,7 +975,7 @@ class EngagementDirectFilterHelper(FilterSet): target_end = DateRangeFilter() test__engagement__product__lifecycle = MultipleChoiceFilter( choices=Product.LIFECYCLE_CHOICES, - label="Product lifecycle", + label=labels.ASSET_LIFECYCLE_LABEL, null_label="Empty") o = OrderingFilter( # tuple-mapping retains order @@ -981,8 +989,8 @@ class EngagementDirectFilterHelper(FilterSet): field_labels={ "target_start": "Start date", "name": "Engagement", - "product__name": "Product Name", - "product__prod_type__name": "Product Type", + "product__name": labels.ASSET_FILTERS_NAME_LABEL, + "product__prod_type__name": labels.ORG_FILTERS_LABEL, "lead__first_name": "Lead", }, ) @@ -992,7 +1000,7 @@ class EngagementDirectFilter(EngagementDirectFilterHelper, DojoFilter): lead = ModelChoiceFilter(queryset=Dojo_User.objects.none(), label="Lead") product__prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) tags = ModelMultipleChoiceFilter( field_name="tags__name", to_field_name="name", @@ -1028,13 +1036,13 @@ class EngagementDirectFilterWithoutObjectLookups(EngagementDirectFilterHelper): product__prod_type__name = CharFilter( field_name="product__prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + help_text=labels.ORG_FILTERS_NAME_HELP) product__prod_type__name_contains = CharFilter( field_name="product__prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) class Meta: model = Engagement @@ -1042,7 +1050,7 @@ class Meta: class EngagementFilterHelper(FilterSet): - name = CharFilter(lookup_expr="icontains", label="Product name contains") + name = CharFilter(lookup_expr="icontains", label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL) tag = CharFilter(field_name="tags__name", lookup_expr="icontains", label="Tag name contains") not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", label="Not tag name contains", exclude=True) has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") @@ -1051,7 +1059,7 @@ class EngagementFilterHelper(FilterSet): engagement__test__version = CharFilter(field_name="engagement__test__version", lookup_expr="icontains", label="Test version") engagement__product__lifecycle = MultipleChoiceFilter( choices=Product.LIFECYCLE_CHOICES, - label="Product lifecycle", + label=labels.ASSET_LIFECYCLE_LABEL, null_label="Empty") engagement__status = MultipleChoiceFilter( choices=ENGAGEMENT_STATUS_CHOICES, @@ -1063,8 +1071,8 @@ class EngagementFilterHelper(FilterSet): 
("prod_type__name", "prod_type__name"), ), field_labels={ - "name": "Product Name", - "prod_type__name": "Product Type", + "name": labels.ASSET_FILTERS_NAME_LABEL, + "prod_type__name": labels.ORG_FILTERS_LABEL, }, ) @@ -1075,7 +1083,7 @@ class EngagementFilter(EngagementFilterHelper, DojoFilter): label="Lead") prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) tags = ModelMultipleChoiceFilter( field_name="tags__name", to_field_name="name", @@ -1091,6 +1099,8 @@ def __init__(self, *args, **kwargs): self.form.fields["prod_type"].queryset = get_authorized_product_types(Permissions.Product_Type_View) self.form.fields["engagement__lead"].queryset = get_authorized_users(Permissions.Product_Type_View) \ .filter(engagement__lead__isnull=False).distinct() + self.form.fields["tags"].help_text = labels.ASSET_FILTERS_TAGS_HELP + self.form.fields["not_tags"].help_text = labels.ASSET_FILTERS_NOT_TAGS_HELP class Meta: model = Product @@ -1137,13 +1147,13 @@ class EngagementFilterWithoutObjectLookups(EngagementFilterHelper): prod_type__name = CharFilter( field_name="prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_LABEL, + help_text=labels.ORG_FILTERS_LABEL_HELP) prod_type__name_contains = CharFilter( field_name="prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) class Meta: model = Product @@ -1223,17 +1233,17 @@ class ApiEngagementFilter(DojoFilter): product__tags = CharFieldInFilter( field_name="product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags present on product (uses OR for multiple values)") + help_text=labels.ASSET_FILTERS_CSV_TAGS_OR_HELP) product__tags__and = CharFieldFilterANDExpression( field_name="product__tags__name", - help_text="Comma separated list of exact tags to match with an AND expression present on product") + help_text=labels.ASSET_FILTERS_CSV_TAGS_AND_HELP) not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", help_text="Not Tag name contains", exclude="True") not_tags = CharFieldInFilter(field_name="tags__name", lookup_expr="in", help_text="Comma separated list of exact tags not present on model", exclude="True") not_product__tags = CharFieldInFilter(field_name="product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags not present on product", + help_text=labels.ASSET_FILTERS_CSV_TAGS_NOT_HELP, exclude="True") has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") @@ -1264,8 +1274,8 @@ class Meta: class ProductFilterHelper(FilterSet): - name = CharFilter(lookup_expr="icontains", label="Product Name") - name_exact = CharFilter(field_name="name", lookup_expr="iexact", label="Exact Product Name") + name = CharFilter(lookup_expr="icontains", label=labels.ASSET_FILTERS_NAME_LABEL) + name_exact = CharFilter(field_name="name", lookup_expr="iexact", label=labels.ASSET_FILTERS_NAME_EXACT_LABEL) business_criticality = MultipleChoiceFilter(choices=Product.BUSINESS_CRITICALITY_CHOICES, null_label="Empty") platform = MultipleChoiceFilter(choices=Product.PLATFORM_CHOICES, null_label="Empty") lifecycle = MultipleChoiceFilter(choices=Product.LIFECYCLE_CHOICES, 
null_label="Empty") @@ -1291,9 +1301,9 @@ class ProductFilterHelper(FilterSet): ("findings_count", "findings_count"), ), field_labels={ - "name": "Product Name", - "name_exact": "Exact Product Name", - "prod_type__name": "Product Type", + "name": labels.ASSET_FILTERS_NAME_LABEL, + "name_exact": labels.ASSET_FILTERS_NAME_EXACT_LABEL, + "prod_type__name": labels.ORG_FILTERS_LABEL, "business_criticality": "Business Criticality", "platform": "Platform ", "lifecycle": "Lifecycle ", @@ -1308,7 +1318,7 @@ class ProductFilterHelper(FilterSet): class ProductFilter(ProductFilterHelper, DojoFilter): prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) tags = ModelMultipleChoiceFilter( field_name="tags__name", to_field_name="name", @@ -1325,6 +1335,8 @@ def __init__(self, *args, **kwargs): self.user = kwargs.pop("user") super().__init__(*args, **kwargs) self.form.fields["prod_type"].queryset = get_authorized_product_types(Permissions.Product_Type_View) + self.form.fields["tags"].help_text = labels.ASSET_FILTERS_TAGS_HELP + self.form.fields["not_tags"].help_text = labels.ASSET_FILTERS_NOT_TAGS_HELP class Meta: model = Product @@ -1339,13 +1351,13 @@ class ProductFilterWithoutObjectLookups(ProductFilterHelper): prod_type__name = CharFilter( field_name="prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + help_text=labels.ORG_FILTERS_NAME_HELP) prod_type__name_contains = CharFilter( field_name="prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) def __init__(self, *args, **kwargs): kwargs.pop("user", None) @@ -1408,7 +1420,7 @@ class ApiProductFilter(DojoFilter): help_text="Comma separated list of exact tags to match with an AND expression") not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", help_text="Not Tag name contains", exclude="True") not_tags = CharFieldInFilter(field_name="tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags not present on product", exclude="True") + help_text=labels.ASSET_FILTERS_CSV_TAGS_NOT_HELP, exclude="True") has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") outside_of_sla = extend_schema_field(OpenApiTypes.NUMBER)(ProductSLAFilter()) @@ -1491,10 +1503,10 @@ class ApiFindingFilter(DojoFilter): steps_to_reproduce = CharFilter(lookup_expr="icontains") unique_id_from_tool = CharFilter(lookup_expr="icontains") title = CharFilter(lookup_expr="icontains") - product_name = CharFilter(lookup_expr="engagement__product__name__iexact", field_name="test", label="exact product name") - product_name_contains = CharFilter(lookup_expr="engagement__product__name__icontains", field_name="test", label="exact product name") + product_name = CharFilter(lookup_expr="engagement__product__name__iexact", field_name="test", label=labels.ASSET_FILTERS_NAME_EXACT_LABEL) + product_name_contains = CharFilter(lookup_expr="engagement__product__name__icontains", field_name="test", label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL) product_lifecycle = CharFilter(method=custom_filter, lookup_expr="engagement__product__lifecycle", - field_name="test__engagement__product__lifecycle", label="Comma separated list of 
exact product lifecycles") + field_name="test__engagement__product__lifecycle", label=labels.ASSET_FILTERS_CSV_LIFECYCLES_LABEL) # DateRangeFilter created = DateRangeFilter() date = DateRangeFilter() @@ -1574,10 +1586,10 @@ class ApiFindingFilter(DojoFilter): test__engagement__product__tags = CharFieldInFilter( field_name="test__engagement__product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags present on product (uses OR for multiple values)") + help_text=labels.ASSET_FILTERS_CSV_TAGS_OR_HELP) test__engagement__product__tags__and = CharFieldFilterANDExpression( field_name="test__engagement__product__tags__name", - help_text="Comma separated list of exact tags to match with an AND expression present on product") + help_text=labels.ASSET_FILTERS_CSV_TAGS_AND_HELP) not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", help_text="Not Tag name contains", exclude="True") not_tags = CharFieldInFilter(field_name="tags__name", lookup_expr="in", help_text="Comma separated list of exact tags not present on model", exclude="True") @@ -1588,7 +1600,7 @@ class ApiFindingFilter(DojoFilter): not_test__engagement__product__tags = CharFieldInFilter( field_name="test__engagement__product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags not present on product", + help_text=labels.ASSET_FILTERS_CSV_TAGS_NOT_HELP, exclude="True") has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") outside_of_sla = extend_schema_field(OpenApiTypes.NUMBER)(FindingSLAFilter()) @@ -1612,7 +1624,9 @@ class ApiFindingFilter(DojoFilter): ("is_mitigated", "is_mitigated"), ("numerical_severity", "numerical_severity"), ("out_of_scope", "out_of_scope"), + ("planned_remediation_date", "planned_remediation_date"), ("severity", "severity"), + ("sla_expiration_date", "sla_expiration_date"), ("reviewers", "reviewers"), ("static_finding", "static_finding"), ("test__engagement__product__name", "test__engagement__product__name"), @@ -1697,7 +1711,7 @@ class FindingFilterHelper(FilterSet): status = FindingStatusFilter(label="Status") test__engagement__product__lifecycle = MultipleChoiceFilter( choices=Product.LIFECYCLE_CHOICES, - label="Product lifecycle") + label=labels.ASSET_LIFECYCLE_LABEL) has_component = BooleanFilter( field_name="component_name", @@ -1770,10 +1784,12 @@ class FindingFilterHelper(FilterSet): ("risk_acceptance__created__date", "risk_acceptance__created__date"), ("last_reviewed", "last_reviewed"), + ("planned_remediation_date", "planned_remediation_date"), ("title", "title"), ("test__engagement__product__name", "test__engagement__product__name"), ("service", "service"), + ("sla_age_days", "sla_age_days"), ("epss_score", "epss_score"), ("epss_percentile", "epss_percentile"), ("known_exploited", "known_exploited"), @@ -1787,12 +1803,14 @@ class FindingFilterHelper(FilterSet): "mitigated": "Mitigated Date", "fix_available": "Fix Available", "title": "Finding Name", - "test__engagement__product__name": "Product Name", + "test__engagement__product__name": labels.ASSET_FILTERS_NAME_LABEL, "epss_score": "EPSS Score", "epss_percentile": "EPSS Percentile", "known_exploited": "Known Exploited", "ransomware_used": "Ransomware Used", "kev_date": "Date added to KEV", + "sla_age_days": "SLA age (days)", + "planned_remediation_date": "Planned Remediation", }, ) @@ -1853,23 +1871,23 @@ class FindingFilterWithoutObjectLookups(FindingFilterHelper, FindingTagStringFil test__engagement__product__prod_type__name = 
CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + help_text=labels.ORG_FILTERS_NAME_HELP) test__engagement__product__prod_type__name_contains = CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) test__engagement__product__name = CharFilter( field_name="test__engagement__product__name", lookup_expr="iexact", - label="Product Name", - help_text="Search for Product names that are an exact match") + label=labels.ASSET_FILTERS_NAME_LABEL, + help_text=labels.ASSET_FILTERS_NAME_HELP) test__engagement__product__name_contains = CharFilter( field_name="test__engagement__product__name", lookup_expr="icontains", - label="Product name Contains", - help_text="Search for Product Typ names that contain a given pattern") + label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ASSET_FILTERS_NAME_CONTAINS_HELP) test__engagement__name = CharFilter( field_name="test__engagement__name", lookup_expr="iexact", @@ -1942,10 +1960,10 @@ class FindingFilter(FindingFilterHelper, FindingTagFilter): reviewers = ModelMultipleChoiceFilter(queryset=Dojo_User.objects.none()) test__engagement__product__prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) test__engagement__product = ModelMultipleChoiceFilter( queryset=Product.objects.none(), - label="Product") + label=labels.ASSET_FILTERS_LABEL) test__engagement = ModelMultipleChoiceFilter( queryset=Engagement.objects.none(), label="Engagement") @@ -2021,7 +2039,7 @@ class FindingGroupsFilter(FilterSet): label="Min Severity", ) engagement = ModelMultipleChoiceFilter(queryset=Engagement.objects.none(), label="Engagement") - product = ModelMultipleChoiceFilter(queryset=Product.objects.none(), label="Product") + product = ModelMultipleChoiceFilter(queryset=Product.objects.none(), label=labels.ASSET_LABEL) class Meta: model = Finding @@ -2214,7 +2232,7 @@ class Meta: field_name="test__engagement__product__tags__name", to_field_name="name", exclude=True, - label="Product without tags", + label=labels.ASSET_FILTERS_WITHOUT_TAGS_LABEL, queryset=Product.tags.tag_model.objects.all().order_by("name"), # label='tags', # doesn't work with tagulous, need to set in __init__ below ) @@ -2269,7 +2287,7 @@ class MetricsFindingFilter(FindingFilter): def __init__(self, *args, **kwargs): if args[0]: - if args[0].get("start_date", "") != "" or args[0].get("end_date", "") != "": + if args[0].get("start_date", "") or args[0].get("end_date", ""): args[0]._mutable = True args[0]["date"] = 8 args[0]._mutable = False @@ -2299,7 +2317,7 @@ class MetricsFindingFilterWithoutObjectLookups(FindingFilterWithoutObjectLookups def __init__(self, *args, **kwargs): if args[0]: - if args[0].get("start_date", "") != "" or args[0].get("end_date", "") != "": + if args[0].get("start_date", "") or args[0].get("end_date", ""): args[0]._mutable = True args[0]["date"] = 8 args[0]._mutable = False @@ -2326,7 +2344,7 @@ class MetricsEndpointFilterHelper(FilterSet): class MetricsEndpointFilter(MetricsEndpointFilterHelper): finding__test__engagement__product__prod_type = ModelMultipleChoiceFilter( 
queryset=Product_Type.objects.none(), - label="Product Type") + label=labels.ORG_FILTERS_LABEL) finding__test__engagement = ModelMultipleChoiceFilter( queryset=Engagement.objects.none(), label="Engagement") @@ -2353,7 +2371,7 @@ class MetricsEndpointFilter(MetricsEndpointFilterHelper): finding__test__engagement__product__tags = ModelMultipleChoiceFilter( field_name="finding__test__engagement__product__tags__name", to_field_name="name", - label="Product tags", + label=labels.ASSET_FILTERS_TAGS_ASSET_LABEL, queryset=Product.tags.tag_model.objects.all().order_by("name")) not_endpoint__tags = ModelMultipleChoiceFilter( field_name="endpoint__tags__name", @@ -2383,12 +2401,12 @@ class MetricsEndpointFilter(MetricsEndpointFilterHelper): field_name="finding__test__engagement__product__tags__name", to_field_name="name", exclude=True, - label="Product without tags", + label=labels.ASSET_FILTERS_WITHOUT_TAGS_LABEL, queryset=Product.tags.tag_model.objects.all().order_by("name")) def __init__(self, *args, **kwargs): if args[0]: - if args[0].get("start_date", "") != "" or args[0].get("end_date", "") != "": + if args[0].get("start_date", "") or args[0].get("end_date", ""): args[0]._mutable = True args[0]["date"] = 8 args[0]._mutable = False @@ -2419,13 +2437,13 @@ class MetricsEndpointFilterWithoutObjectLookups(MetricsEndpointFilterHelper, Fin finding__test__engagement__product__prod_type = CharFilter( field_name="finding__test__engagement__product__prod_type", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + help_text=labels.ORG_FILTERS_NAME_HELP) finding__test__engagement__product__prod_type_contains = CharFilter( field_name="finding__test__engagement__product__prod_type", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) finding__test__engagement = CharFilter( field_name="finding__test__engagement", lookup_expr="iexact", @@ -2477,15 +2495,15 @@ class MetricsEndpointFilterWithoutObjectLookups(MetricsEndpointFilterHelper, Fin lookup_expr="iexact", help_text="Search for tags on a Finding that are an exact match") finding__test__engagement__product__tags_contains = CharFilter( - label="Product Tag Contains", + label=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL, field_name="finding__test__engagement__product__tags__name", lookup_expr="icontains", - help_text="Search for tags on a Finding that contain a given pattern") + help_text=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP) finding__test__engagement__product__tags = CharFilter( - label="Product Tag", + label=labels.ASSET_FILTERS_TAG_ASSET_LABEL, field_name="finding__test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Finding that are an exact match") + help_text=labels.ASSET_FILTERS_TAG_ASSET_HELP) not_endpoint__tags_contains = CharFilter( label="Endpoint Tag Does Not Contain", @@ -2536,21 +2554,21 @@ class MetricsEndpointFilterWithoutObjectLookups(MetricsEndpointFilterHelper, Fin help_text="Search for tags on a Engagement that are an exact match, and exclude them", exclude=True) not_finding__test__engagement__product__tags_contains = CharFilter( - label="Product Tag Does Not Contain", + label=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL, field_name="finding__test__engagement__product__tags__name", 
lookup_expr="icontains", - help_text="Search for tags on a Product that contain a given pattern, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_HELP, exclude=True) not_finding__test__engagement__product__tags = CharFilter( - label="Not Product Tag", + label=labels.ASSET_FILTERS_TAG_NOT_LABEL, field_name="finding__test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Product that are an exact match, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_HELP, exclude=True) def __init__(self, *args, **kwargs): if args[0]: - if args[0].get("start_date", "") != "" or args[0].get("end_date", "") != "": + if args[0].get("start_date", "") or args[0].get("end_date", ""): args[0]._mutable = True args[0]["date"] = 8 args[0]._mutable = False @@ -2590,7 +2608,7 @@ class EndpointFilterHelper(FilterSet): class EndpointFilter(EndpointFilterHelper, DojoFilter): product = ModelMultipleChoiceFilter( queryset=Product.objects.none(), - label="Product") + label=labels.ASSET_FILTERS_LABEL) tags = ModelMultipleChoiceFilter( field_name="tags__name", to_field_name="name", @@ -2614,7 +2632,7 @@ class EndpointFilter(EndpointFilterHelper, DojoFilter): findings__test__engagement__product__tags = ModelMultipleChoiceFilter( field_name="findings__test__engagement__product__tags__name", to_field_name="name", - label="Product Tags", + label=labels.ASSET_FILTERS_TAGS_ASSET_LABEL, queryset=Product.tags.tag_model.objects.all().order_by("name")) not_tags = ModelMultipleChoiceFilter( field_name="tags__name", @@ -2643,7 +2661,7 @@ class EndpointFilter(EndpointFilterHelper, DojoFilter): not_findings__test__engagement__product__tags = ModelMultipleChoiceFilter( field_name="findings__test__engagement__product__tags__name", to_field_name="name", - label="Not Product Tags", + label=labels.ASSET_FILTERS_NOT_TAGS_ASSET_LABEL, exclude=True, queryset=Product.tags.tag_model.objects.all().order_by("name")) @@ -2669,13 +2687,13 @@ class EndpointFilterWithoutObjectLookups(EndpointFilterHelper): product__name = CharFilter( field_name="product__name", lookup_expr="iexact", - label="Product Name", - help_text="Search for Product names that are an exact match") + label=labels.ASSET_FILTERS_NAME_LABEL, + help_text=labels.ASSET_FILTERS_NAME_HELP) product__name_contains = CharFilter( field_name="product__name", lookup_expr="icontains", - label="Product Name Contains", - help_text="Search for Product names that contain a given pattern") + label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ASSET_FILTERS_NAME_CONTAINS_HELP) tags_contains = CharFilter( label="Endpoint Tag Contains", @@ -2718,15 +2736,15 @@ class EndpointFilterWithoutObjectLookups(EndpointFilterHelper): lookup_expr="iexact", help_text="Search for tags on a Finding that are an exact match") findings__test__engagement__product__tags_contains = CharFilter( - label="Product Tag Contains", + label=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_LABEL, field_name="findings__test__engagement__product__tags__name", lookup_expr="icontains", - help_text="Search for tags on a Finding that contain a given pattern") + help_text=labels.ASSET_FILTERS_TAG_ASSET_CONTAINS_HELP) findings__test__engagement__product__tags = CharFilter( - label="Product Tag", + label=labels.ASSET_FILTERS_TAG_ASSET_LABEL, field_name="findings__test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Finding that are an exact match") + help_text=labels.ASSET_FILTERS_TAG_ASSET_HELP) not_tags_contains = CharFilter( 
label="Endpoint Tag Does Not Contain", @@ -2777,16 +2795,16 @@ class EndpointFilterWithoutObjectLookups(EndpointFilterHelper): help_text="Search for tags on a Engagement that are an exact match, and exclude them", exclude=True) not_findings__test__engagement__product__tags_contains = CharFilter( - label="Product Tag Does Not Contain", + label=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_LABEL, field_name="findings__test__engagement__product__tags__name", lookup_expr="icontains", - help_text="Search for tags on a Product that contain a given pattern, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_CONTAIN_HELP, exclude=True) not_findings__test__engagement__product__tags = CharFilter( - label="Not Product Tag", + label=labels.ASSET_FILTERS_TAG_NOT_LABEL, field_name="findings__test__engagement__product__tags__name", lookup_expr="iexact", - help_text="Search for tags on a Product that are an exact match, and exclude them", + help_text=labels.ASSET_FILTERS_TAG_NOT_HELP, exclude=True) def __init__(self, *args, **kwargs): @@ -2984,10 +3002,10 @@ class ApiTestFilter(DojoFilter): engagement__product__tags = CharFieldInFilter( field_name="engagement__product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags present on product (uses OR for multiple values)") + help_text=labels.ASSET_FILTERS_CSV_TAGS_OR_HELP) engagement__product__tags__and = CharFieldFilterANDExpression( field_name="engagement__product__tags__name", - help_text="Comma separated list of exact tags to match with an AND expression present on product") + help_text=labels.ASSET_FILTERS_CSV_TAGS_AND_HELP) not_tag = CharFilter(field_name="tags__name", lookup_expr="icontains", help_text="Not Tag name contains", exclude="True") not_tags = CharFieldInFilter(field_name="tags__name", lookup_expr="in", @@ -2997,7 +3015,7 @@ class ApiTestFilter(DojoFilter): exclude="True") not_engagement__product__tags = CharFieldInFilter(field_name="engagement__product__tags__name", lookup_expr="in", - help_text="Comma separated list of exact tags not present on product", + help_text=labels.ASSET_FILTERS_CSV_TAGS_NOT_HELP, exclude="True") has_tags = BooleanFilter(field_name="tags", lookup_expr="isnull", exclude=True, label="Has tags") @@ -3151,11 +3169,11 @@ def qs(self): class ReportFindingFilter(ReportFindingFilterHelper, FindingTagFilter): test__engagement__product = ModelMultipleChoiceFilter( - queryset=Product.objects.none(), label="Product") + queryset=Product.objects.none(), label=labels.ASSET_FILTERS_LABEL) test__engagement__product__prod_type = ModelMultipleChoiceFilter( queryset=Product_Type.objects.none(), - label="Product Type") - test__engagement__product__lifecycle = MultipleChoiceFilter(choices=Product.LIFECYCLE_CHOICES, label="Product Lifecycle") + label=labels.ORG_FILTERS_LABEL) + test__engagement__product__lifecycle = MultipleChoiceFilter(choices=Product.LIFECYCLE_CHOICES, label=labels.ASSET_LIFECYCLE_LABEL) test__engagement = ModelMultipleChoiceFilter(queryset=Engagement.objects.none(), label="Engagement") duplicate_finding = ModelChoiceFilter(queryset=Finding.objects.filter(original_finding__isnull=False).distinct()) @@ -3266,23 +3284,23 @@ class ReportFindingFilterWithoutObjectLookups(ReportFindingFilterHelper, Finding test__engagement__product__prod_type__name = CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="iexact", - label="Product Type Name", - help_text="Search for Product Type names that are an exact match") + label=labels.ORG_FILTERS_NAME_LABEL, + 
help_text=labels.ORG_FILTERS_NAME_HELP) test__engagement__product__prod_type__name_contains = CharFilter( field_name="test__engagement__product__prod_type__name", lookup_expr="icontains", - label="Product Type Name Contains", - help_text="Search for Product Type names that contain a given pattern") + label=labels.ORG_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ORG_FILTERS_NAME_CONTAINS_HELP) test__engagement__product__name = CharFilter( field_name="test__engagement__product__name", lookup_expr="iexact", - label="Product Name", - help_text="Search for Product names that are an exact match") + label=labels.ASSET_FILTERS_NAME_LABEL, + help_text=labels.ASSET_FILTERS_NAME_HELP) test__engagement__product__name_contains = CharFilter( field_name="test__engagement__product__name", lookup_expr="icontains", - label="Product name Contains", - help_text="Search for Product names that contain a given pattern") + label=labels.ASSET_FILTERS_NAME_CONTAINS_LABEL, + help_text=labels.ASSET_FILTERS_NAME_CONTAINS_HELP) test__engagement__name = CharFilter( field_name="test__engagement__name", lookup_expr="iexact", @@ -3465,7 +3483,6 @@ class Meta: class LogEntryFilter(DojoFilter): - from auditlog.models import LogEntry action = MultipleChoiceFilter(choices=LogEntry.Action.choices) actor = ModelMultipleChoiceFilter(queryset=Dojo_User.objects.none()) @@ -3489,6 +3506,81 @@ class Meta: } +class PgHistoryFilter(DojoFilter): + + """ + Filter for django-pghistory audit entries. + + This filter works with pghistory event tables that have: + - pgh_created_at: timestamp of the event + - pgh_label: event type (insert/update/delete) + - user: user ID from context + - url: URL from context + - remote_addr: IP address from context + """ + + # Filter by event creation time (equivalent to auditlog timestamp) + pgh_created_at = DateRangeFilter(field_name="pgh_created_at", label="Timestamp") + + # Filter by event type/label + pgh_label = ChoiceFilter( + field_name="pgh_label", + label="Event Type", + choices=[ + ("", "All"), + ("insert", "Insert"), + ("update", "Update"), + ("delete", "Delete"), + ("initial_import", "Initial Import"), + ], + ) + + # Filter by user (from context) + user = ModelChoiceFilter( + field_name="user", + queryset=Dojo_User.objects.none(), + label="User", + empty_label="All Users", + ) + + # Filter by IP address (from context) + remote_addr = CharFilter( + field_name="remote_addr", + lookup_expr="icontains", + label="IP Address Contains", + ) + + # Filter by changes/diff field (JSON field containing what changed) + pgh_diff = CharFilter( + method="filter_pgh_diff_contains", + label="Changes Contains", + help_text="Search for field names or values in the changes (optimized for JSONB, but can be slow)", + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.form.fields["user"].queryset = get_authorized_users(Permissions.Product_View) + + def filter_pgh_diff_contains(self, queryset, name, value): + """ + Custom filter for pgh_diff that uses efficient JSONB operations. + Searches both keys and values in the JSONB field. 
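As an aside, the JSONB lookups this new filter relies on can also be exercised directly against the pghistory event queryset. The snippet below is an illustrative sketch only (it assumes a configured DefectDojo/Django environment) and uses the DojoEvents proxy that this patch imports elsewhere; it shows the same has_key / contains lookups implemented just below.

from django.db.models import Q

from dojo.pghistory_models import DojoEvents

# Events whose diff touched the "severity" key, or whose diff contains the
# JSON fragment "High" -- the same lookups filter_pgh_diff_contains uses.
recent_changes = DojoEvents.objects.filter(
    Q(pgh_diff__has_key="severity") | Q(pgh_diff__contains='"High"'),
).order_by("-pgh_created_at")[:10]

for event in recent_changes:
    print(event.pgh_created_at, event.pgh_label, event.pgh_obj_id, event.pgh_diff)
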
+ """ + if not value: + return queryset + + # Search in both keys and values using JSONB operators + return queryset.filter( + Q(pgh_diff__has_key=value) | # Search in keys: {"severity": [...]} + Q(pgh_diff__has_any_keys=[value]) | # Alternative key search + Q(pgh_diff__contains=f'"{value}"'), # Search in values: ["severity", "other"] + ) + + class Meta: + fields = ["pgh_created_at", "pgh_label", "user", "url", "remote_addr", "pgh_diff"] + exclude = [] + + class ProductTypeFilter(DojoFilter): name = CharFilter(lookup_expr="icontains") diff --git a/dojo/finding/helper.py b/dojo/finding/helper.py index 3f6a597a542..dd78ccbce69 100644 --- a/dojo/finding/helper.py +++ b/dojo/finding/helper.py @@ -7,10 +7,12 @@ from django.db.models.signals import post_delete, pre_delete from django.db.utils import IntegrityError from django.dispatch.dispatcher import receiver +from django.urls import reverse from django.utils import timezone from fieldsignals import pre_save_changed import dojo.jira_link.helper as jira_helper +import dojo.risk_acceptance.helper as ra_helper from dojo.celery import app from dojo.decorators import dojo_async_task, dojo_model_from_id, dojo_model_to_id from dojo.endpoint.utils import save_endpoints_to_add @@ -21,13 +23,24 @@ Engagement, Finding, Finding_Group, + Notes, System_Settings, Test, Vulnerability_Id, Vulnerability_Id_Template, ) from dojo.notes.helper import delete_related_notes -from dojo.utils import get_current_user, mass_model_updater, to_str_typed +from dojo.notifications.helper import create_notification +from dojo.tools import tool_issue_updater +from dojo.utils import ( + calculate_grade, + close_external_issue, + do_dedupe_finding, + do_false_positive_history, + get_current_user, + mass_model_updater, + to_str_typed, +) logger = logging.getLogger(__name__) deduplicationLogger = logging.getLogger("dojo.specific-loggers.deduplication") @@ -116,10 +129,13 @@ def update_finding_status(new_state_finding, user, changed_fields=None): new_state_finding.mitigated = None new_state_finding.mitigated_by = None - # people may try to remove mitigated/mitigated_by by accident + # Ensure mitigated metadata is present for mitigated findings + # If values are provided (including custom ones), keep them; if missing, set defaults if new_state_finding.is_mitigated: - new_state_finding.mitigated = new_state_finding.mitigated or now - new_state_finding.mitigated_by = new_state_finding.mitigated_by or user + if not new_state_finding.mitigated: + new_state_finding.mitigated = now + if not new_state_finding.mitigated_by: + new_state_finding.mitigated_by = user if is_new_finding or "active" in changed_fields: # finding is being (re)activated @@ -153,8 +169,26 @@ def update_finding_status(new_state_finding, user, changed_fields=None): new_state_finding.last_status_update = now +def filter_findings_by_existence(findings): + """ + Return only findings that still exist in the database (by id). + + Centralized helper used by importers to avoid FK violations during + bulk_create. 
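A usage sketch for this existence helper, not taken from the patch itself: an importer-style caller that drops findings deleted in the meantime before bulk-creating dependent rows. Vulnerability_Id is used purely as an example of a finding-related model.

from dojo.finding.helper import filter_findings_by_existence
from dojo.models import Vulnerability_Id

def bulk_attach_vulnerability_id(findings, vulnerability_id):
    # Keep only findings whose ids are still present in the database,
    # so the bulk_create below cannot hit a foreign-key violation.
    surviving = filter_findings_by_existence(findings)
    Vulnerability_Id.objects.bulk_create(
        [Vulnerability_Id(finding=finding, vulnerability_id=vulnerability_id) for finding in surviving],
    )
    return surviving
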
+ """ + if not findings: + return [] + candidate_ids = [finding.id for finding in findings if getattr(finding, "id", None)] + if not candidate_ids: + return [] + existing_ids = set( + Finding.objects.filter(id__in=candidate_ids).values_list("id", flat=True), + ) + return [finding for finding in findings if finding.id in existing_ids] + + def can_edit_mitigated_data(user): - return settings.EDITABLE_MITIGATED_DATA and user.is_superuser + return settings.EDITABLE_MITIGATED_DATA and user and getattr(user, "is_superuser", False) def create_finding_group(finds, finding_group_name): @@ -263,7 +297,6 @@ def get_group_by_group_name(finding, finding_group_by_option): else: msg = f"Invalid group_by option {finding_group_by_option}" raise ValueError(msg) - if group_name: return f"Findings in: {group_name}" @@ -349,6 +382,21 @@ def add_findings_to_auto_group(name, findings, group_by, *, create_finding_group finding_group.findings.add(*findings) +@dojo_model_to_id +@dojo_async_task(signature=True) +@app.task +@dojo_model_from_id +def post_process_finding_save_signature(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002 + issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed + """ + Returns a task signature for post-processing a finding. This is useful for creating task signatures + that can be used in chords or groups or to await results. We need this extra method because of our dojo_async decorator. + If we use more of these celery features, we should probably move away from that decorator. + """ + return post_process_finding_save_internal(finding, dedupe_option, rules_option, product_grading_option, + issue_updater_option, push_to_jira, user, *args, **kwargs) + + @dojo_model_to_id @dojo_async_task @app.task @@ -356,6 +404,13 @@ def add_findings_to_auto_group(name, findings, group_by, *, create_finding_group def post_process_finding_save(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002 issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed + return post_process_finding_save_internal(finding, dedupe_option, rules_option, product_grading_option, + issue_updater_option, push_to_jira, user, *args, **kwargs) + + +def post_process_finding_save_internal(finding, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002 + issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed + if not finding: logger.warning("post_process_finding_save called with finding==None, skipping post processing") return @@ -366,7 +421,6 @@ def post_process_finding_save(finding, dedupe_option=True, rules_option=True, pr if dedupe_option: if finding.hash_code is not None: if system_settings.enable_deduplication: - from dojo.utils import do_dedupe_finding do_dedupe_finding(finding, *args, **kwargs) else: deduplicationLogger.debug("skipping dedupe because it's disabled in system settings") @@ -378,17 +432,14 @@ def post_process_finding_save(finding, dedupe_option=True, rules_option=True, pr if system_settings.enable_deduplication: deduplicationLogger.warning("skipping false positive history because deduplication is also enabled") else: - from dojo.utils import do_false_positive_history do_false_positive_history(finding, *args, **kwargs) # STEP 2 run 
all non-status changing tasks as celery tasks in the background if issue_updater_option: - from dojo.tools import tool_issue_updater tool_issue_updater.async_tool_issue_update(finding) if product_grading_option: if system_settings.enable_product_grade: - from dojo.utils import calculate_grade calculate_grade(finding.test.engagement.product) else: deduplicationLogger.debug("skipping product grading because it's disabled in system settings") @@ -396,7 +447,6 @@ def post_process_finding_save(finding, dedupe_option=True, rules_option=True, pr # Adding a snippet here for push to JIRA so that it's in one place if push_to_jira: logger.debug("pushing finding %s to jira from finding.save()", finding.pk) - import dojo.jira_link.helper as jira_helper # current approach is that whenever a finding is in a group, the group will be pushed to JIRA # based on feedback we could introduct another push_group_to_jira boolean everywhere @@ -452,7 +502,6 @@ def finding_post_delete(sender, instance, **kwargs): # Catch instances in async delete where a single object is deleted more than once with suppress(Finding.DoesNotExist): logger.debug("finding post_delete, sender: %s instance: %s", to_str_typed(sender), to_str_typed(instance)) - # calculate_grade(instance.test.engagement.product) def reset_duplicate_before_delete(dupe): @@ -686,3 +735,91 @@ def save_vulnerability_ids_template(finding_template, vulnerability_ids): finding_template.cve = vulnerability_ids[0] else: finding_template.cve = None + + +def close_finding( + *, + finding, + user, + is_mitigated, + mitigated, + mitigated_by, + false_p, + out_of_scope, + duplicate, + note_entry=None, + note_type=None, +) -> None: + """ + Shared close logic used by UI and API. + + Handles status updates, endpoint statuses, risk acceptance, external issues, + JIRA sync, and notification. 
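For reference, a short sketch of calling this shared helper from a non-UI code path; it mirrors the keyword-only signature above and the UI call further down in this patch, with placeholder variable names (data, user).

from dojo.finding.helper import close_finding

def close_from_api(finding, user, data):
    # data is a placeholder for validated serializer input.
    close_finding(
        finding=finding,
        user=user,
        is_mitigated=True,
        mitigated=data.get("mitigated"),  # helper defaults this to now() when empty
        mitigated_by=data.get("mitigated_by") or user,
        false_p=data.get("false_p", False),
        out_of_scope=data.get("out_of_scope", False),
        duplicate=data.get("duplicate", False),
        note_entry=data.get("entry"),
        note_type=data.get("note_type"),
    )
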
+ """ + # Core status updates + finding.is_mitigated = is_mitigated + now = timezone.now() + finding.mitigated = mitigated or now + finding.mitigated_by = mitigated_by or user + finding.active = False + finding.false_p = bool(false_p) + finding.out_of_scope = bool(out_of_scope) + finding.duplicate = bool(duplicate) + finding.under_review = False + finding.last_reviewed = finding.mitigated + finding.last_reviewed_by = user + + # Create note if provided + new_note = None + if note_entry: + new_note = Notes.objects.create( + entry=note_entry, + author=user, + note_type=note_type, + date=finding.mitigated, + ) + finding.notes.add(new_note) + + # Endpoint statuses + for status in finding.status_finding.all(): + status.mitigated_by = finding.mitigated_by + status.mitigated_time = finding.mitigated + status.mitigated = True + status.last_modified = timezone.now() + status.save() + + # Risk acceptance + ra_helper.risk_unaccept(user, finding, perform_save=False) + + # External issues (best effort) + close_external_issue(finding, "Closed by defectdojo", "github") + + # JIRA sync + push_to_jira = False + finding_in_group = finding.has_finding_group + jira_issue_exists = finding.has_jira_issue or ( + finding.finding_group and finding.finding_group.has_jira_issue + ) + jira_instance = jira_helper.get_jira_instance(finding) + jira_project = jira_helper.get_jira_project(finding) + if jira_issue_exists: + push_to_jira = ( + jira_helper.is_push_all_issues(finding) + or (jira_instance and jira_instance.finding_jira_sync) + ) + if new_note and (getattr(jira_project, "push_notes", False) or push_to_jira) and not finding_in_group: + jira_helper.add_comment(finding, new_note, force_push=True) + + # Persist and push JIRA if applicable + finding.save(push_to_jira=(push_to_jira and not finding_in_group)) + if push_to_jira and finding_in_group: + jira_helper.push_to_jira(finding.finding_group) + + # Notification + create_notification( + event="finding_closed", + title=f"Closing of {finding.title}", + finding=finding, + description=f'The finding "{finding.title}" was closed by {user}', + url=reverse("view_finding", args=(finding.id,)), + ) diff --git a/dojo/finding/views.py b/dojo/finding/views.py index 7483a47c2ce..9a944dccb60 100644 --- a/dojo/finding/views.py +++ b/dojo/finding/views.py @@ -14,8 +14,8 @@ from django.core import serializers from django.core.exceptions import PermissionDenied, ValidationError from django.db import models -from django.db.models import QuerySet -from django.db.models.functions import Length +from django.db.models import F, QuerySet +from django.db.models.functions import Coalesce, ExtractDay, Length, TruncDate from django.db.models.query import Prefetch from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect, JsonResponse, StreamingHttpResponse from django.shortcuts import get_object_or_404, render @@ -100,7 +100,9 @@ Vulnerability_Id_Template, ) from dojo.notifications.helper import create_notification +from dojo.tag_utils import bulk_add_tags_to_instances from dojo.test.queries import get_authorized_tests +from dojo.tools import tool_issue_updater from dojo.utils import ( FileIterWrapper, Product_Tab, @@ -111,7 +113,6 @@ add_success_message_to_response, apply_cwe_to_template, calculate_grade, - close_external_issue, do_false_positive_history, get_page_items, get_page_items_and_count, @@ -273,7 +274,13 @@ def filter_findings_by_form(self, request: HttpRequest, findings: QuerySet[Findi ) def get_filtered_findings(self): - findings = 
get_authorized_findings(Permissions.Finding_View).order_by(self.get_order_by()) + findings = get_authorized_findings(Permissions.Finding_View) + # Annotate computed SLA age in days: sla_expiration_date - (sla_start_date or date) + findings = findings.annotate( + sla_age_days=ExtractDay( + F("sla_expiration_date") - Coalesce(F("sla_start_date"), TruncDate("created")), + ), + ).order_by(self.get_order_by()) findings = self.filter_findings_by_object(findings) return self.filter_findings_by_filter_name(findings) @@ -323,7 +330,7 @@ def add_breadcrumbs(self, request: HttpRequest, context: dict): # show custom breadcrumb if user has filtered by exactly 1 endpoint if "endpoints" in request.GET: endpoint_ids = request.GET.getlist("endpoints", []) - if len(endpoint_ids) == 1 and endpoint_ids[0] != "": + if len(endpoint_ids) == 1 and endpoint_ids[0]: endpoint_id = endpoint_ids[0] endpoint = get_object_or_404(Endpoint, id=endpoint_id) context["filter_name"] = "Vulnerable Endpoints" @@ -486,7 +493,7 @@ def get_request_response(self, finding: Finding): burp_request = base64.b64decode(request_response.burpRequestBase64) burp_response = base64.b64decode(request_response.burpResponseBase64) except Exception as e: - logger.debug(f"unsuspected error: {e}") + logger.debug("unsuspected error: %s", e) return { "burp_request": burp_request, @@ -538,7 +545,9 @@ def get_similar_findings(self, request: HttpRequest, finding: Finding): finding_filter_class = SimilarFindingFilterWithoutObjectLookups if filter_string_matching else SimilarFindingFilter similar_findings_filter = finding_filter_class( request.GET, - queryset=get_authorized_findings(Permissions.Finding_View), + queryset=get_authorized_findings(Permissions.Finding_View) + .filter(test__engagement__product=finding.test.engagement.product) + .exclude(id=finding.id), user=request.user, finding=finding, ) @@ -1135,72 +1144,33 @@ def close_finding(request, fid): # we can do this with a Note note_type_activation = Note_Type.objects.filter(is_active=True) missing_note_types = get_missing_mandatory_notetypes(finding) if len(note_type_activation) else note_type_activation - form = CloseFindingForm(missing_note_types=missing_note_types) + form = CloseFindingForm( + missing_note_types=missing_note_types, + can_edit_mitigated_data=finding_helper.can_edit_mitigated_data(request.user), + ) if request.method == "POST": - form = CloseFindingForm(request.POST, missing_note_types=missing_note_types) - - close_external_issue(finding, "Closed by defectdojo", "github") + form = CloseFindingForm( + request.POST, + missing_note_types=missing_note_types, + can_edit_mitigated_data=finding_helper.can_edit_mitigated_data(request.user), + ) if form.is_valid(): - now = timezone.now() - new_note = form.save(commit=False) - new_note.author = request.user - new_note.date = form.cleaned_data.get("mitigated") or now - new_note.save() - finding.notes.add(new_note) - - messages.add_message( - request, messages.SUCCESS, "Note Saved.", extra_tags="alert-success", - ) + messages.add_message(request, messages.SUCCESS, "Note Saved.", extra_tags="alert-success") if len(missing_note_types) <= 1: - finding.active = False - now = timezone.now() - finding.mitigated = form.cleaned_data.get("mitigated") or now - finding.mitigated_by = ( - form.cleaned_data.get("mitigated_by") or request.user + finding_helper.close_finding( + finding=finding, + user=request.user, + is_mitigated=True, + mitigated=form.cleaned_data.get("mitigated"), + mitigated_by=form.cleaned_data.get("mitigated_by") or request.user, + 
false_p=form.cleaned_data.get("false_p", False), + out_of_scope=form.cleaned_data.get("out_of_scope", False), + duplicate=form.cleaned_data.get("duplicate", False), + note_entry=form.cleaned_data.get("entry"), + note_type=form.cleaned_data.get("note_type"), ) - finding.is_mitigated = True - finding.under_review = False - finding.last_reviewed = finding.mitigated - finding.last_reviewed_by = request.user - finding.false_p = form.cleaned_data.get("false_p", False) - finding.out_of_scope = form.cleaned_data.get("out_of_scope", False) - finding.duplicate = form.cleaned_data.get("duplicate", False) - endpoint_status = finding.status_finding.all() - for status in endpoint_status: - status.mitigated_by = ( - form.cleaned_data.get("mitigated_by") or request.user - ) - status.mitigated_time = form.cleaned_data.get("mitigated") or now - status.mitigated = True - status.last_modified = timezone.now() - status.save() - # Clear the risk acceptance, if present - ra_helper.risk_unaccept(request.user, finding) - - # Manage the jira status changes - push_to_jira = False - # Determine if the finding is in a group. if so, not push to jira - finding_in_group = finding.has_finding_group - # Check if there is a jira issue that needs to be updated - jira_issue_exists = finding.has_jira_issue or (finding.finding_group and finding.finding_group.has_jira_issue) - # fetch the project - jira_instance = jira_helper.get_jira_instance(finding) - jira_project = jira_helper.get_jira_project(finding) - # Only push if the finding is not in a group - if jira_issue_exists: - # Determine if any automatic sync should occur - push_to_jira = jira_helper.is_push_all_issues(finding) or jira_instance.finding_jira_sync - # Add the closing note - if (jira_project.push_notes or push_to_jira) and not finding_in_group: - jira_helper.add_comment(finding, new_note, force_push=True) - # Save the finding - finding.save(push_to_jira=(push_to_jira and not finding_in_group)) - # we only push the group after saving the finding to make sure - # the updated data of the finding is pushed as part of the group - if push_to_jira and finding_in_group: - jira_helper.push_to_jira(finding.finding_group) messages.add_message( request, @@ -1209,17 +1179,7 @@ def close_finding(request, fid): extra_tags="alert-success", ) - # Note: this notification has not be moved to "@receiver(pre_save, sender=Finding)" method as many other notifications - # Because it could generate too much noise, we keep it here only for findings created by hand in WebUI - # TODO: but same should be implemented for API endpoint - - create_notification( - event="finding_closed", - title=_("Closing of %s") % finding.title, - finding=finding, - description=f'The finding "{finding.title}" was closed by {request.user}', - url=reverse("view_finding", args=(finding.id,)), - ) + # Notification sent by helper return HttpResponseRedirect( reverse("view_test", args=(finding.test.id,)), ) @@ -1597,11 +1557,11 @@ def request_finding_review(request, fid): reviewers = Dojo_User.objects.filter(id__in=form.cleaned_data["reviewers"]) reviewers_string = ", ".join([f"{user} ({user.id})" for user in reviewers]) reviewers_usernames = [user.username for user in reviewers] - logger.debug(f"Asking {reviewers_string} for review") + logger.debug("Asking %s for review", reviewers_string) create_notification( event="review_requested", # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both 
interfaces - title="Finding review requested", + title=f"Finding review requested for Test created for {finding.test.engagement.product}: {finding.test.engagement.name}: {finding.test} - {finding.title}", requested_by=user, note=new_note, finding=finding, @@ -2470,7 +2430,7 @@ def merge_finding_product(request, pid): finding.tags.add("merged-inactive") # Update the finding to merge into - if finding_descriptions != "": + if finding_descriptions: finding_to_merge_into.description = f"{finding_to_merge_into.description}\n\n{finding_descriptions}" if finding_to_merge_into.static_finding: @@ -2479,7 +2439,7 @@ def merge_finding_product(request, pid): if finding_to_merge_into.dynamic_finding: dynamic = finding.dynamic_finding - if finding_references != "": + if finding_references: finding_to_merge_into.references = f"{finding_to_merge_into.references}\n{finding_references}" finding_to_merge_into.static_finding = static @@ -2865,17 +2825,10 @@ def finding_bulk_update_all(request, pid=None): finding.save() if form.cleaned_data["tags"]: - for finding in finds: - tags = form.cleaned_data["tags"] - logger.debug( - "bulk_edit: setting tags for: %i %s %s", - finding.id, - finding, - tags, - ) - # currently bulk edit overwrites existing tags - finding.tags = tags - finding.save() + tags = form.cleaned_data["tags"] + logger.debug("bulk_edit: adding tags to %d findings: %s", finds.count(), tags) + # Delegate parsing and handling of strings/iterables to helper + bulk_add_tags_to_instances(tag_or_tags=tags, instances=finds, tag_field_name="tags") error_counts = defaultdict(lambda: 0) success_count = 0 @@ -2919,8 +2872,6 @@ def finding_bulk_update_all(request, pid=None): error_counts = defaultdict(lambda: 0) success_count = 0 for finding in finds: - from dojo.tools import tool_issue_updater - tool_issue_updater.async_tool_issue_update(finding) # not sure yet if we want to support bulk unlink, so leave as commented out for now @@ -3028,7 +2979,10 @@ def get_missing_mandatory_notetypes(finding): def mark_finding_duplicate(request, original_id, duplicate_id): original = get_object_or_404(Finding, id=original_id) - duplicate = get_object_or_404(Finding, id=duplicate_id) + duplicate = get_object_or_404( + Finding.objects.filter(test__engagement__product=original.test.engagement.product), + id=duplicate_id, + ) if original.test.engagement != duplicate.test.engagement: if (original.test.engagement.deduplication_on_engagement @@ -3113,7 +3067,10 @@ def reset_finding_duplicate_status(request, duplicate_id): def set_finding_as_original_internal(user, finding_id, new_original_id): finding = get_object_or_404(Finding, id=finding_id) - new_original = get_object_or_404(Finding, id=new_original_id) + new_original = get_object_or_404( + Finding.objects.filter(test__engagement__product=finding.test.engagement.product), + id=new_original_id, + ) if finding.test.engagement != new_original.test.engagement: if (finding.test.engagement.deduplication_on_engagement diff --git a/dojo/finding_group/signals.py b/dojo/finding_group/signals.py index 1e2d771b557..3e7ffe7c7b7 100644 --- a/dojo/finding_group/signals.py +++ b/dojo/finding_group/signals.py @@ -1,5 +1,8 @@ +import contextlib + from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete from django.dispatch import receiver @@ -8,20 +11,45 @@ from dojo.models import Finding_Group from dojo.notifications.helper 
import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_delete, sender=Finding_Group) def finding_group_post_delete(sender, instance, using, origin, **kwargs): if instance == origin: description = _('The finding group "%(name)s" was deleted') % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="finding_group"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific finding_group instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Finding_Group", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="finding_group"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The finding group "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + "name": instance.name, "user": user} create_notification(event="finding_group_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/fixtures/questionnaire_testdata.json b/dojo/fixtures/questionnaire_testdata.json index c95278c83ac..2e75807c2ac 100644 --- a/dojo/fixtures/questionnaire_testdata.json +++ b/dojo/fixtures/questionnaire_testdata.json @@ -1,52 +1,4 @@ [ - { - "fields": { - "model": "question", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 65 - }, - { - "fields": { - "model": "answer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 68 - }, - { - "fields": { - "model": "textquestion", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 66 - }, - { - "fields": { - "model": "textanswer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 69 - }, - { - "fields": { - "model": "choicequestion", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 71 - }, - { - "fields": { - "model": "choiceanswer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 70 - }, { "pk": 1, "model": "auth.user", @@ -211,7 +163,7 @@ "model": "dojo.question", "pk": 14, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:57:22Z", "modified": "2015-03-30T19:57:22Z", "order": 1, @@ -223,7 +175,7 @@ "model": "dojo.question", "pk": 15, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:57:34Z", "modified": "2015-03-30T19:57:34Z", "order": 1, @@ -235,7 +187,7 @@ "model": "dojo.question", "pk": 16, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": 
"2015-03-30T19:57:55Z", "modified": "2015-03-30T19:57:55Z", "order": 1, @@ -247,7 +199,7 @@ "model": "dojo.question", "pk": 17, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:58:36Z", "modified": "2015-03-30T19:58:36Z", "order": 1, @@ -259,7 +211,7 @@ "model": "dojo.question", "pk": 18, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:35Z", "modified": "2015-03-30T20:00:35Z", "order": 1, @@ -271,7 +223,7 @@ "model": "dojo.question", "pk": 19, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:46Z", "modified": "2015-03-30T20:00:46Z", "order": 1, @@ -283,7 +235,7 @@ "model": "dojo.question", "pk": 20, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:58Z", "modified": "2015-03-30T20:00:58Z", "order": 1, @@ -295,7 +247,7 @@ "model": "dojo.question", "pk": 44, "fields": { - "polymorphic_ctype": 71, + "polymorphic_ctype": ["dojo", "choicequestion"], "created": "2023-03-02T17:58:59.698Z", "modified": "2023-03-02T17:58:59.737Z", "order": 1, @@ -458,7 +410,7 @@ "model": "dojo.answer", "pk": 1, "fields": { - "polymorphic_ctype": 70, + "polymorphic_ctype": ["dojo", "choiceanswer"], "created": "2023-03-02T19:07:55.430Z", "modified": "2023-03-02T19:07:55.447Z", "question": 44, @@ -469,7 +421,7 @@ "model": "dojo.answer", "pk": 2, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.816Z", "modified": "2023-03-02T19:14:07.822Z", "question": 14, @@ -480,7 +432,7 @@ "model": "dojo.answer", "pk": 3, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.829Z", "modified": "2023-03-02T19:14:07.833Z", "question": 15, @@ -491,7 +443,7 @@ "model": "dojo.answer", "pk": 4, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.838Z", "modified": "2023-03-02T19:14:07.841Z", "question": 16, @@ -502,7 +454,7 @@ "model": "dojo.answer", "pk": 5, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.845Z", "modified": "2023-03-02T19:14:07.848Z", "question": 17, @@ -513,7 +465,7 @@ "model": "dojo.answer", "pk": 6, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.853Z", "modified": "2023-03-02T19:14:07.856Z", "question": 19, @@ -524,7 +476,7 @@ "model": "dojo.answer", "pk": 7, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.861Z", "modified": "2023-03-02T19:14:07.864Z", "question": 20, @@ -535,7 +487,7 @@ "model": "dojo.answer", "pk": 8, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.868Z", "modified": "2023-03-02T19:14:07.871Z", "question": 18, diff --git a/dojo/forms.py b/dojo/forms.py index eae48e4e937..3ed2961c170 100644 --- a/dojo/forms.py +++ b/dojo/forms.py @@ -18,6 +18,7 @@ from django.contrib.auth.password_validation import validate_password from django.core import validators from django.core.exceptions import ValidationError +from django.core.validators import URLValidator from django.db.models import Count, Q from django.forms import modelformset_factory from django.forms.widgets import 
Select, Widget @@ -36,6 +37,7 @@ from dojo.engagement.queries import get_authorized_engagements from dojo.finding.queries import get_authorized_findings from dojo.group.queries import get_authorized_groups, get_group_member_roles +from dojo.labels import get_labels from dojo.models import ( EFFORT_FOR_FIXING_CHOICES, SEVERITY_CHOICES, @@ -117,6 +119,8 @@ logger = logging.getLogger(__name__) +labels = get_labels() + RE_DATE = re.compile(r"(\d{4})-(\d\d?)-(\d\d?)$") FINDING_STATUS = (("verified", "Verified"), @@ -243,6 +247,11 @@ class Product_TypeForm(forms.ModelForm): description = forms.CharField(widget=forms.Textarea(attrs={}), required=False) + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fields["critical_product"].label = labels.ORG_CRITICAL_PRODUCT_LABEL + self.fields["key_product"].label = labels.ORG_KEY_PRODUCT_LABEL + class Meta: model = Product_Type fields = ["name", "description", "critical_product", "key_product"] @@ -279,6 +288,7 @@ def __init__(self, *args, **kwargs): self.fields["users"].queryset = Dojo_User.objects.exclude( Q(is_superuser=True) | Q(id__in=current_members)).exclude(is_active=False).order_by("first_name", "last_name") + self.fields["product_type"].label = labels.ORG_LABEL self.fields["product_type"].disabled = True class Meta: @@ -287,7 +297,8 @@ class Meta: class Add_Product_Type_Member_UserForm(forms.ModelForm): - product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, label="Product Types") + product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, + label=labels.ORG_PLURAL_LABEL) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -305,6 +316,7 @@ class Delete_Product_Type_MemberForm(Edit_Product_Type_MemberForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["role"].disabled = True + self.fields["product_type"].label = labels.ORG_LABEL class Test_TypeForm(forms.ModelForm): @@ -330,7 +342,7 @@ class ProductForm(forms.ModelForm): description = forms.CharField(widget=forms.Textarea(attrs={}), required=True) - prod_type = forms.ModelChoiceField(label="Product Type", + prod_type = forms.ModelChoiceField(label=labels.ORG_LABEL, queryset=Product_Type.objects.none(), required=True) @@ -339,13 +351,16 @@ class ProductForm(forms.ModelForm): required=True, initial="Default") - product_manager = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) + product_manager = forms.ModelChoiceField(label=labels.ASSET_MANAGER_LABEL, + queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) technical_contact = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) team_manager = forms.ModelChoiceField(queryset=Dojo_User.objects.exclude(is_active=False).order_by("first_name", "last_name"), required=False) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["prod_type"].queryset = get_authorized_product_types(Permissions.Product_Type_Add_Product) + self.fields["enable_product_tag_inheritance"].label = labels.ASSET_TAG_INHERITANCE_ENABLE_LABEL + self.fields["enable_product_tag_inheritance"].help_text = labels.ASSET_TAG_INHERITANCE_ENABLE_HELP if prod_type_id := kwargs.get("instance", Product()).prod_type_id: # we are editing existing instance self.fields["prod_type"].queryset |= 
Product_Type.objects.filter(pk=prod_type_id) # even if user does not have permission for any other ProdType we need to add at least assign ProdType to make form submittable (otherwise empty list was here which generated invalid form) @@ -384,8 +399,6 @@ class EditFindingGroupForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - import dojo.jira_link.helper as jira_helper - self.fields["push_to_jira"] = forms.BooleanField() self.fields["push_to_jira"].required = False self.fields["push_to_jira"].help_text = "Checking this will overwrite content of your JIRA issue, or create one." @@ -416,6 +429,7 @@ class Edit_Product_MemberForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["product"].disabled = True + self.fields["product"].label = labels.ASSET_LABEL self.fields["user"].queryset = Dojo_User.objects.order_by("first_name", "last_name") self.fields["user"].disabled = True @@ -430,6 +444,7 @@ class Add_Product_MemberForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["product"].disabled = True + self.fields["product"].label = labels.ASSET_LABEL current_members = Product_Member.objects.filter(product=self.initial["product"]).values_list("user", flat=True) self.fields["users"].queryset = Dojo_User.objects.exclude( Q(is_superuser=True) @@ -441,7 +456,8 @@ class Meta: class Add_Product_Member_UserForm(forms.ModelForm): - products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, label="Products") + products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, + label=labels.ASSET_PLURAL_LABEL) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -514,6 +530,8 @@ class ImportScanForm(forms.Form): active_verified_choices = [("not_specified", "Not specified (default)"), ("force_to_true", "Force to True"), ("force_to_false", "Force to False")] + test_title = forms.CharField(max_length=255, required=False, label="Test Title", + help_text="Optional title for the Test to be created. If empty, the scan type is used.") scan_date = forms.DateTimeField( required=False, label="Scan Completion Date", @@ -560,15 +578,14 @@ class ImportScanForm(forms.Form): # Exposing the choice as two different check boxes. # If 'close_old_findings_product_scope' is selected, the backend will ensure that both flags are set. close_old_findings = forms.BooleanField(help_text="Old findings no longer present in the new report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed. " + "If service has been set, only the findings for this service will be closed; " + "if no service is set, only findings without a service will be closed. " "This affects findings within the same engagement by default.", label="Close old findings", required=False, initial=False) - close_old_findings_product_scope = forms.BooleanField(help_text="Old findings no longer present in the new report get closed as mitigated when importing. " - "If service has been set, only the findings for this service will be closed. 
" - "This affects findings within the same product.", - label="Close old findings within this product", + close_old_findings_product_scope = forms.BooleanField(help_text=labels.ASSET_FINDINGS_CLOSE_HELP, + label=labels.ASSET_FINDINGS_CLOSE_LABEL, required=False, initial=False) apply_tags_to_findings = forms.BooleanField( @@ -1002,9 +1019,9 @@ class EngForm(forms.ModelForm): )) description = forms.CharField(widget=forms.Textarea(attrs={}), required=False, help_text="Description of the engagement and details regarding the engagement.") - product = forms.ModelChoiceField(label="Product", - queryset=Product.objects.none(), - required=True) + product = forms.ModelChoiceField(label=labels.ASSET_LABEL, + queryset=Product.objects.none(), + required=True) target_start = forms.DateField(widget=forms.TextInput( attrs={"class": "datepicker", "autocomplete": "off"})) target_end = forms.DateField(widget=forms.TextInput( @@ -1469,7 +1486,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["endpoints"].queryset = Endpoint.objects.filter(product=self.instance.test.engagement.product) - self.fields["mitigated_by"].queryset = get_authorized_users(Permissions.Test_Edit) + self.fields["mitigated_by"].queryset = get_authorized_users(Permissions.Finding_Edit) # do not show checkbox if finding is not accepted and simple risk acceptance is disabled # if checked, always show to allow unaccept also with full risk acceptance enabled @@ -1776,8 +1793,8 @@ class AddEndpointForm(forms.Form): "Each must be valid.", widget=forms.widgets.Textarea(attrs={"rows": "15", "cols": "400"})) product = forms.CharField(required=True, - widget=forms.widgets.HiddenInput(), help_text="The product this endpoint should be " - "associated with.") + label=labels.ASSET_LABEL, help_text=labels.ASSET_ENDPOINT_HELP, + widget=forms.widgets.HiddenInput()) tags = TagField(required=False, help_text="Add tags that help describe this endpoint. " "Choose from the list or add new tags. 
Press Enter key to add.") @@ -1787,7 +1804,10 @@ def __init__(self, *args, **kwargs): if "product" in kwargs: product = kwargs.pop("product") super().__init__(*args, **kwargs) - self.fields["product"] = forms.ModelChoiceField(queryset=get_authorized_products(Permissions.Endpoint_Add)) + self.fields["product"] = forms.ModelChoiceField( + queryset=get_authorized_products(Permissions.Endpoint_Add), + label=labels.ASSET_LABEL, + help_text=labels.ASSET_ENDPOINT_HELP) if product is not None: self.fields["product"].initial = product.id @@ -1897,17 +1917,17 @@ class CloseFindingForm(forms.ModelForm): def __init__(self, *args, **kwargs): queryset = kwargs.pop("missing_note_types") + # must pop custom kwargs before calling parent __init__ to avoid unexpected kwarg errors + self.can_edit_mitigated_data = kwargs.pop("can_edit_mitigated_data") if "can_edit_mitigated_data" in kwargs \ + else False super().__init__(*args, **kwargs) if len(queryset) == 0: self.fields["note_type"].widget = forms.HiddenInput() else: self.fields["note_type"] = forms.ModelChoiceField(queryset=queryset, label="Note Type", required=True) - self.can_edit_mitigated_data = kwargs.pop("can_edit_mitigated_data") if "can_edit_mitigated_data" in kwargs \ - else False - if self.can_edit_mitigated_data: - self.fields["mitigated_by"].queryset = get_authorized_users(Permissions.Test_Edit) + self.fields["mitigated_by"].queryset = get_authorized_users(Permissions.Finding_Edit) self.fields["mitigated"].initial = self.instance.mitigated self.fields["mitigated_by"].initial = self.instance.mitigated_by if disclaimer := get_system_setting("disclaimer_notes"): @@ -2194,6 +2214,7 @@ class Add_Product_GroupForm(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["product"].disabled = True + self.fields["product"].label = labels.ASSET_LABEL current_groups = Product_Group.objects.filter(product=self.initial["product"]).values_list("group", flat=True) authorized_groups = get_authorized_groups(Permissions.Group_View) authorized_groups = authorized_groups.exclude(id__in=current_groups) @@ -2205,7 +2226,8 @@ class Meta: class Add_Product_Group_GroupForm(forms.ModelForm): - products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, label="Products") + products = forms.ModelMultipleChoiceField(queryset=Product.objects.none(), required=True, + label=labels.ASSET_PLURAL_LABEL) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -2224,6 +2246,7 @@ class Edit_Product_Group_Form(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["product"].disabled = True + self.fields["product"].label = labels.ASSET_LABEL self.fields["group"].disabled = True class Meta: @@ -2247,6 +2270,7 @@ def __init__(self, *args, **kwargs): authorized_groups = authorized_groups.exclude(id__in=current_groups) self.fields["groups"].queryset = authorized_groups self.fields["product_type"].disabled = True + self.fields["product_type"].label = labels.ORG_LABEL class Meta: model = Product_Type_Group @@ -2254,7 +2278,8 @@ class Meta: class Add_Product_Type_Group_GroupForm(forms.ModelForm): - product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, label="Product Types") + product_types = forms.ModelMultipleChoiceField(queryset=Product_Type.objects.none(), required=True, + label=labels.ORG_PLURAL_LABEL) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -2273,6 +2298,7 @@ class 
Edit_Product_Type_Group_Form(forms.ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["product_type"].disabled = True + self.fields["product_type"].label = labels.ORG_LABEL self.fields["group"].disabled = True class Meta: @@ -2413,6 +2439,7 @@ class Meta: def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) current_user = get_current_user() + self.fields["role"].help_text = labels.ASSET_GLOBAL_ROLE_HELP if not current_user.is_superuser: self.fields["role"].disabled = True @@ -2432,6 +2459,7 @@ class ProductCountsFormBase(forms.Form): class ProductTypeCountsForm(ProductCountsFormBase): product_type = forms.ModelChoiceField(required=True, queryset=Product_Type.objects.none(), + label=labels.ORG_LABEL, error_messages={ "required": "*"}) @@ -2443,6 +2471,7 @@ def __init__(self, *args, **kwargs): class ProductTagCountsForm(ProductCountsFormBase): product_tag = forms.ModelChoiceField(required=True, queryset=Product.tags.tag_model.objects.none().order_by("name"), + label=labels.ASSET_TAG_LABEL, error_messages={ "required": "*"}) @@ -2595,7 +2624,6 @@ class BaseJiraForm(forms.ModelForm): password = forms.CharField(widget=forms.PasswordInput, required=True, help_text=JIRA_Instance._meta.get_field("password").help_text, label=JIRA_Instance._meta.get_field("password").verbose_name) def test_jira_connection(self): - import dojo.jira_link.helper as jira_helper try: # Attempt to validate the credentials before moving forward jira_helper.get_jira_connection_raw(self.cleaned_data["url"], @@ -2664,13 +2692,6 @@ class Meta: fields = ["id"] -# class JIRA_ProjectForm(forms.ModelForm): - -# class Meta: -# model = JIRA_Project -# exclude = ['product'] - - class Product_API_Scan_ConfigurationForm(forms.ModelForm): def __init__(self, *args, **kwargs): @@ -2767,7 +2788,6 @@ class Meta: exclude = ["product"] def clean(self): - from django.core.validators import URLValidator form_data = self.cleaned_data try: @@ -2848,7 +2868,6 @@ class Meta: order = ["name"] def clean(self): - from django.core.validators import URLValidator form_data = self.cleaned_data try: @@ -2941,6 +2960,20 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields["default_group_role"].queryset = get_group_member_roles() + self.fields["enable_product_tracking_files"].label = labels.SETTINGS_TRACKED_FILES_ENABLE_LABEL + self.fields["enable_product_tracking_files"].help_text = labels.SETTINGS_TRACKED_FILES_ENABLE_HELP + + self.fields[ + "enforce_verified_status_product_grading"].label = labels.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_LABEL + self.fields[ + "enforce_verified_status_product_grading"].help_text = labels.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_HELP + + self.fields["enable_product_grade"].label = labels.SETTINGS_ASSET_GRADING_ENABLE_LABEL + self.fields["enable_product_grade"].help_text = labels.SETTINGS_ASSET_GRADING_ENABLE_HELP + + self.fields["enable_product_tag_inheritance"].label = labels.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_LABEL + self.fields["enable_product_tag_inheritance"].help_text = labels.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_HELP + def clean(self): cleaned_data = super().clean() enable_jira_value = cleaned_data.get("enable_jira") @@ -3060,7 +3093,6 @@ class Meta: fields = ["inherit_from_product", "jira_instance", "project_key", "issue_template_dir", "epic_issue_type_name", "component", "custom_fields", "jira_labels", "default_assignee", "enabled", "add_vulnerability_id_to_jira_label", "push_all_issues", 
"enable_engagement_epic_mapping", "push_notes", "product_jira_sla_notification", "risk_acceptance_expiration_notification"] def __init__(self, *args, **kwargs): - from dojo.jira_link import helper as jira_helper # if the form is shown for an engagement, we set a placeholder text around inherited settings from product self.target = kwargs.pop("target", "product") self.product = kwargs.pop("product", None) diff --git a/dojo/github.py b/dojo/github.py index 011a3feb28e..1f0f33c3313 100644 --- a/dojo/github.py +++ b/dojo/github.py @@ -16,7 +16,7 @@ def reopen_external_issue_github(find, note, prod, eng): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return @@ -47,7 +47,7 @@ def reopen_external_issue_github(find, note, prod, eng): def close_external_issue_github(find, note, prod, eng): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return @@ -78,7 +78,7 @@ def close_external_issue_github(find, note, prod, eng): def update_external_issue_github(find, prod, eng): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return @@ -106,7 +106,7 @@ def update_external_issue_github(find, prod, eng): def add_external_issue_github(find, prod, eng): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import if not get_system_setting("enable_github"): return diff --git a/dojo/group/views.py b/dojo/group/views.py index fa2fd1e65b1..df1e6e815b2 100644 --- a/dojo/group/views.py +++ b/dojo/group/views.py @@ -39,6 +39,7 @@ get_product_type_groups_for_group, ) from dojo.group.utils import get_auth_group_name +from dojo.labels import get_labels from dojo.models import Dojo_Group, Dojo_Group_Member, Global_Role, Product_Group, Product_Type_Group from dojo.utils import ( add_breadcrumb, @@ -51,6 +52,9 @@ logger = logging.getLogger(__name__) +labels = get_labels() + + class ListGroups(View): def get_groups(self): return get_authorized_groups(Permissions.Group_View) @@ -514,6 +518,7 @@ def delete_group_member(request, mid): def add_product_group(request, gid): group = get_object_or_404(Dojo_Group, id=gid) group_form = Add_Product_Group_GroupForm(initial={"group": group.id}) + page_name = str(labels.ASSET_GROUPS_ADD_LABEL) if request.method == "POST": group_form = Add_Product_Group_GroupForm(request.POST, initial={"group": group.id}) @@ -529,12 +534,13 @@ def add_product_group(request, gid): product_group.save() messages.add_message(request, messages.SUCCESS, - "Product groups added successfully.", + labels.ASSET_GROUPS_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_group", args=(gid, ))) - add_breadcrumb(title="Add Product Group", top_level=False, request=request) + add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/new_product_group_group.html", { + "name": page_name, "group": group, "form": group_form, }) @@ -544,6 +550,7 @@ def add_product_group(request, gid): def add_product_type_group(request, gid): group = get_object_or_404(Dojo_Group, id=gid) group_form = Add_Product_Type_Group_GroupForm(initial={"group": group.id}) + page_name = str(labels.ORG_GROUPS_ADD_LABEL) if request.method == "POST": group_form = 
Add_Product_Type_Group_GroupForm(request.POST, initial={"group": group.id}) @@ -559,12 +566,13 @@ def add_product_type_group(request, gid): product_type_group.save() messages.add_message(request, messages.SUCCESS, - "Product type groups added successfully.", + labels.ORG_GROUPS_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_group", args=(gid, ))) - add_breadcrumb(title="Add Product Type Group", top_level=False, request=request) + add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/new_product_type_group_group.html", { + "name": page_name, "group": group, "form": group_form, }) diff --git a/dojo/importers/base_importer.py b/dojo/importers/base_importer.py index 19513566c91..f6d754ba929 100644 --- a/dojo/importers/base_importer.py +++ b/dojo/importers/base_importer.py @@ -1,6 +1,7 @@ import base64 import logging +from celery import chord, group from django.conf import settings from django.core.exceptions import ValidationError from django.core.files.base import ContentFile @@ -10,6 +11,7 @@ from django.utils.timezone import make_aware import dojo.finding.helper as finding_helper +from dojo import utils from dojo.importers.endpoint_manager import EndpointManager from dojo.importers.options import ImporterOptions from dojo.models import ( @@ -24,6 +26,7 @@ Endpoint, FileUpload, Finding, + System_Settings, Test, Test_Import, Test_Import_Finding_Action, @@ -31,6 +34,7 @@ Vulnerability_Id, ) from dojo.notifications.helper import create_notification +from dojo.tag_utils import bulk_add_tags_to_instances from dojo.tools.factory import get_parser from dojo.tools.parser_test import ParserTest from dojo.utils import max_safe @@ -333,6 +337,9 @@ def update_import_history( ) -> Test_Import: """Creates a record of the import or reimport operation that has occurred.""" # Quick fail check to determine if we even wanted this + if settings.TRACK_IMPORT_HISTORY is False: + return None + if untouched_findings is None: untouched_findings = [] if reactivated_findings is None: @@ -341,8 +348,6 @@ def update_import_history( closed_findings = [] if new_findings is None: new_findings = [] - if settings.TRACK_IMPORT_HISTORY is False: - return None # Log the current state of what has occurred in case there could be # deviation from what is displayed in the view logger.debug( @@ -374,42 +379,80 @@ def update_import_history( ) # Create a history record for each finding - for finding in closed_findings: - self.create_import_history_record_safe(Test_Import_Finding_Action( - test_import=test_import, - finding=finding, - action=IMPORT_CLOSED_FINDING, - )) - for finding in new_findings: - self.create_import_history_record_safe(Test_Import_Finding_Action( - test_import=test_import, - finding=finding, - action=IMPORT_CREATED_FINDING, - )) - for finding in reactivated_findings: - self.create_import_history_record_safe(Test_Import_Finding_Action( - test_import=test_import, - finding=finding, - action=IMPORT_REACTIVATED_FINDING, - )) - for finding in untouched_findings: - self.create_import_history_record_safe(Test_Import_Finding_Action( - test_import=test_import, - finding=finding, - action=IMPORT_UNTOUCHED_FINDING, - )) + finding_action_mappings = [ + (closed_findings, IMPORT_CLOSED_FINDING), + (new_findings, IMPORT_CREATED_FINDING), + (reactivated_findings, IMPORT_REACTIVATED_FINDING), + (untouched_findings, IMPORT_UNTOUCHED_FINDING), + ] + + # In longer running imports it can happen that the async_dupe_delete task removes a finding before 
the history record is created + # We filter out these findings here to avoid FK violations (IntegrityError) + all_findings = [] + for _list, _ in finding_action_mappings: + all_findings.extend(_list) + existing_findings = finding_helper.filter_findings_by_existence(all_findings) if all_findings else [] + existing_ids = {f.id for f in existing_findings} + + # Collect all import history records using the validated IDs + import_history_records = [] + for findings, action in finding_action_mappings: + import_history_records.extend( + Test_Import_Finding_Action( + test_import=test_import, + finding_id=finding.id, + action=action, + ) + for finding in findings + if finding.id in existing_ids + ) + + # Bulk create all at once and let Django handle batching internally. + # Still in even more rare cases a finding can be deleted once we arrive here. + # If any integrity error occurs, fall back to inserting all records individually. + # The bulk_create is atomic so all batches will succeed or all will fail/rollback + try: + # keep bulk failure contained so fallback can proceed in TestCase transaction + Test_Import_Finding_Action.objects.bulk_create( + import_history_records, + ignore_conflicts=True, + batch_size=100, + ) + except IntegrityError: + logger.warning("IntegrityError occurred while bulk creating Test_Import_Finding_Actions, falling back to individual inserts") + for record in import_history_records: + self.create_import_history_record_safe(record) # Add any tags to the findings imported if necessary if self.apply_tags_to_findings and self.tags: - for finding in test_import.findings_affected.all(): - for tag in self.tags: - self.add_tags_safe(finding, tag) + findings_qs = test_import.findings_affected.all() + try: + bulk_add_tags_to_instances( + tag_or_tags=self.tags, + instances=findings_qs, + tag_field_name="tags", + ) + except IntegrityError: + # Fallback to safe per-instance tagging if concurrent deletes occur + for finding in findings_qs: + for tag in self.tags: + self.add_tags_safe(finding, tag) + # Add any tags to any endpoints of the findings imported if necessary if self.apply_tags_to_endpoints and self.tags: - for finding in test_import.findings_affected.all(): - for endpoint in finding.endpoints.all(): - for tag in self.tags: - self.add_tags_safe(endpoint, tag) + # Collect all endpoints linked to the affected findings + endpoints_qs = Endpoint.objects.filter(finding__in=test_import.findings_affected.all()).distinct() + try: + bulk_add_tags_to_instances( + tag_or_tags=self.tags, + instances=endpoints_qs, + tag_field_name="tags", + ) + except IntegrityError: + for finding in test_import.findings_affected.all(): + for endpoint in finding.endpoints.all(): + for tag in self.tags: + self.add_tags_safe(endpoint, tag) return test_import @@ -418,10 +461,10 @@ def create_import_history_record_safe( test_import_finding_action, ): """Creates an import history record, while catching any IntegrityErrors that might happen because of the background job having deleted a finding""" - logger.debug(f"creating Test_Import_Finding_Action for finding: {test_import_finding_action.finding.id} action: {test_import_finding_action.action}") + logger.debug(f"creating Test_Import_Finding_Action for finding_id: {test_import_finding_action.finding_id} action: {test_import_finding_action.action}") try: test_import_finding_action.save() - except IntegrityError as e: + except (IntegrityError, ValueError) as e: # This try catch makes us look we don't know what we're doing, but in 
https://github.com/DefectDojo/django-DefectDojo/issues/6217 we decided that for now this is the best solution logger.warning("Error creating Test_Import_Finding_Action: %s", e) logger.debug("Error creating Test_Import_Finding_Action, finding marked as duplicate and deleted ?") @@ -532,6 +575,47 @@ def update_test_type_from_internal_test(self, internal_test: ParserTest) -> None self.test.test_type.dynamic_tool = dynamic_tool self.test.test_type.save() + def maybe_launch_post_processing_chord( + self, + post_processing_task_signatures, + current_batch_number: int, + max_batch_size: int, + * + is_final_batch: bool, + ) -> tuple[list, int, bool]: + """ + Helper to optionally launch a chord of post-processing tasks with a calculate-grade callback + when async is desired. Uses exponential batch sizing up to the configured max batch size. + + Returns a tuple of (post_processing_task_signatures, current_batch_number, launched) + where launched indicates whether a chord/group was dispatched and signatures were reset. + """ + launched = False + if not post_processing_task_signatures: + return post_processing_task_signatures, current_batch_number, launched + + current_batch_size = min(2 ** current_batch_number, max_batch_size) + batch_full = len(post_processing_task_signatures) >= current_batch_size + + if batch_full or is_final_batch: + product = self.test.engagement.product + system_settings = System_Settings.objects.get() + if system_settings.enable_product_grade: + calculate_grade_signature = utils.calculate_grade_signature(product) + chord(post_processing_task_signatures)(calculate_grade_signature) + else: + group(post_processing_task_signatures).apply_async() + + logger.debug( + f"Launched chord with {len(post_processing_task_signatures)} tasks (batch #{current_batch_number}, size: {len(post_processing_task_signatures)})", + ) + post_processing_task_signatures = [] + if not is_final_batch: + current_batch_number += 1 + launched = True + + return post_processing_task_signatures, current_batch_number, launched + def verify_tool_configuration_from_test(self): """ Verify that the Tool_Configuration supplied along with the @@ -682,14 +766,11 @@ def process_endpoints( logger.debug("endpoints_to_add: %s", endpoints_to_add) self.endpoint_manager.chunk_endpoints_and_disperse(finding, endpoints_to_add) - def process_vulnerability_ids( + def process_cve( self, finding: Finding, ) -> Finding: - """ - Parse the `unsaved_vulnerability_ids` field from findings after they are parsed - to create `Vulnerability_Id` objects with the finding associated correctly - """ + """Ensure cve is set from the unsaved_vulnerability_ids field, or vice versa.""" # Synchronize the cve field with the unsaved_vulnerability_ids # We do this to be as flexible as possible to handle the fields until # the cve field is not needed anymore and can be removed. 
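The hunk above splits the legacy CVE handling out of process_vulnerability_ids: process_cve only keeps the legacy cve field and the unsaved_vulnerability_ids list consistent with each other, and the default importer calls it before set_hash_code(True) so the vulnerability ids can feed the dedupe hash, while process_vulnerability_ids (continued in the next hunk) persists the Vulnerability_Id rows. The following is a minimal sketch of that synchronization idea only, under the assumption that either field may arrive empty from a parser; FindingStub and sync_cve_and_vulnerability_ids are illustrative names for this sketch, not DefectDojo APIs, and the exact precedence rules live in process_cve itself.

from dataclasses import dataclass

@dataclass
class FindingStub:
    # Simplified stand-in for dojo.models.Finding, used only for illustration
    cve: str | None = None
    unsaved_vulnerability_ids: list[str] | None = None

def sync_cve_and_vulnerability_ids(finding: FindingStub) -> FindingStub:
    # Keep the legacy cve field and the unsaved_vulnerability_ids list in sync
    if finding.unsaved_vulnerability_ids:
        # A list is present: make sure the legacy field reflects its first entry
        if not finding.cve:
            finding.cve = finding.unsaved_vulnerability_ids[0]
    elif finding.cve:
        # No list, but a cve value exists: build the list from it
        finding.unsaved_vulnerability_ids = [finding.cve]
    return finding

# Example: a parser that only filled the legacy cve field
f = sync_cve_and_vulnerability_ids(FindingStub(cve="CVE-2024-0001"))
assert f.unsaved_vulnerability_ids == ["CVE-2024-0001"]

After the sync both representations agree, which is what makes it safe to compute the hash code and only afterwards create the Vulnerability_Id records in process_vulnerability_ids.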
@@ -703,6 +784,16 @@ def process_vulnerability_ids( # If there is no list, make one with the value of the cve field finding.unsaved_vulnerability_ids = [finding.cve] + return finding + + def process_vulnerability_ids( + self, + finding: Finding, + ) -> Finding: + """ + Parse the `unsaved_vulnerability_ids` field from findings after they are parsed + to create `Vulnerability_Id` objects with the finding associated correctly + """ if finding.unsaved_vulnerability_ids: # Remove old vulnerability ids - keeping this call only because of flake8 Vulnerability_Id.objects.filter(finding=finding).delete() @@ -736,6 +827,7 @@ def mitigate_finding( note_message: str, *, finding_groups_enabled: bool, + product_grading_option: bool = True, ) -> None: """ Mitigates a finding, all endpoint statuses, leaves a note on the finding @@ -757,9 +849,9 @@ def mitigate_finding( # to avoid pushing a finding group multiple times, we push those outside of the loop if finding_groups_enabled and finding.finding_group: # don't try to dedupe findings that we are closing - finding.save(dedupe_option=False) + finding.save(dedupe_option=False, product_grading_option=product_grading_option) else: - finding.save(dedupe_option=False, push_to_jira=self.push_to_jira) + finding.save(dedupe_option=False, push_to_jira=self.push_to_jira, product_grading_option=product_grading_option) def notify_scan_added( self, diff --git a/dojo/importers/default_importer.py b/dojo/importers/default_importer.py index 14456954fe8..d127ed33f6a 100644 --- a/dojo/importers/default_importer.py +++ b/dojo/importers/default_importer.py @@ -5,8 +5,9 @@ from django.db.models.query_utils import Q from django.urls import reverse -import dojo.finding.helper as finding_helper import dojo.jira_link.helper as jira_helper +from dojo.decorators import we_want_async +from dojo.finding import helper as finding_helper from dojo.importers.base_importer import BaseImporter, Parser from dojo.importers.options import ImporterOptions from dojo.models import ( @@ -16,6 +17,7 @@ Test_Import, ) from dojo.notifications.helper import create_notification +from dojo.utils import perform_product_grading from dojo.validators import clean_tags logger = logging.getLogger(__name__) @@ -155,6 +157,11 @@ def process_findings( parsed_findings: list[Finding], **kwargs: dict, ) -> list[Finding]: + # Progressive batching for chord execution + post_processing_task_signatures = [] + current_batch_number = 1 + max_batch_size = 1024 + """ Saves findings in memory that were parsed from the scan report into the database. 
This process involves first saving associated objects such as endpoints, files, @@ -166,13 +173,17 @@ def process_findings( logger.debug("starting import of %i parsed findings.", len(parsed_findings) if parsed_findings else 0) group_names_to_findings_dict = {} - for non_clean_unsaved_finding in parsed_findings: - # make sure the severity is something is digestible - unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding) - # Filter on minimum severity if applicable - if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]: - # finding's severity is below the configured threshold : ignoring the finding + # Pre-sanitize and filter by minimum severity + cleaned_findings = [] + for raw_finding in parsed_findings or []: + sanitized = self.sanitize_severity(raw_finding) + if Finding.SEVERITIES[sanitized.severity] > Finding.SEVERITIES[self.minimum_severity]: + logger.debug("skipping finding due to minimum severity filter (finding=%s severity=%s min=%s)", sanitized.title, sanitized.severity, self.minimum_severity) continue + cleaned_findings.append(sanitized) + + for idx, unsaved_finding in enumerate(cleaned_findings): + is_final_finding = idx == len(cleaned_findings) - 1 # Some parsers provide "mitigated" field but do not set timezone (because they are probably not available in the report) # Finding.mitigated is DateTimeField and it requires timezone @@ -183,7 +194,7 @@ def process_findings( unsaved_finding.reporter = self.user unsaved_finding.last_reviewed_by = self.user unsaved_finding.last_reviewed = self.now - logger.debug("process_parsed_findings: active from report: %s, verified from report: %s", unsaved_finding.active, unsaved_finding.verified) + logger.debug("process_parsed_finding: unique_id_from_tool: %s, hash_code: %s, active from report: %s, verified from report: %s", unsaved_finding.unique_id_from_tool, unsaved_finding.hash_code, unsaved_finding.active, unsaved_finding.verified) # indicates an override. Otherwise, do not change the value of unsaved_finding.active if self.active is not None: unsaved_finding.active = self.active @@ -198,9 +209,13 @@ def process_findings( # Force parsers to use unsaved_tags (stored in below after saving) unsaved_finding.tags = None - # postprocessing will be done on next save. + finding = self.process_cve(unsaved_finding) + # Calculate hash_code before saving based on unsaved_endpoints and unsaved_vulnerability_ids + finding.set_hash_code(True) + + # postprocessing will be done after processing related fields like endpoints, vulnerability ids, etc. 
unsaved_finding.save_no_options() - finding = unsaved_finding + # Determine how the finding should be grouped self.process_finding_groups( finding, @@ -218,11 +233,32 @@ def process_findings( finding = self.process_vulnerability_ids(finding) # Categorize this finding as a new one new_findings.append(finding) - # to avoid pushing a finding group multiple times, we push those outside of the loop - if self.findings_groups_enabled and self.group_by: - finding.save() + # all data is already saved on the finding, we only need to trigger post processing + + # We create a signature for the post processing task so we can decide to apply it async or sync + push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by) + post_processing_task_signature = finding_helper.post_process_finding_save_signature( + finding, + dedupe_option=True, + rules_option=True, + product_grading_option=False, + issue_updater_option=True, + push_to_jira=push_to_jira, + ) + + post_processing_task_signatures.append(post_processing_task_signature) + + # Check if we should launch a chord (batch full or end of findings) + if we_want_async(async_user=self.user) and post_processing_task_signatures: + post_processing_task_signatures, current_batch_number, _ = self.maybe_launch_post_processing_chord( + post_processing_task_signatures, + current_batch_number, + max_batch_size, + is_final_finding, + ) else: - finding.save(push_to_jira=self.push_to_jira) + # Execute task immediately for synchronous processing + post_processing_task_signature() for (group_name, findings) in group_names_to_findings_dict.items(): finding_helper.add_findings_to_auto_group( @@ -238,6 +274,11 @@ def process_findings( else: jira_helper.push_to_jira(findings[0]) + # Note: All chord batching is now handled within the loop above + + # Always perform an initial grading, even though it might get overwritten later. + perform_product_grading(self.test.engagement.product) + sync = kwargs.get("sync", True) if not sync: return [serialize("json", [finding]) for finding in new_findings] @@ -315,12 +356,17 @@ def close_old_findings( "as it is not present anymore in recent scans." 
), finding_groups_enabled=self.findings_groups_enabled, + product_grading_option=False, ) # push finding groups to jira since we only only want to push whole groups if self.findings_groups_enabled and self.push_to_jira: for finding_group in {finding.finding_group for finding in old_findings if finding.finding_group is not None}: jira_helper.push_to_jira(finding_group) + # Calculate grade once after all findings have been closed + if old_findings: + perform_product_grading(self.test.engagement.product) + return old_findings def parse_findings_static_test_type( diff --git a/dojo/importers/default_reimporter.py b/dojo/importers/default_reimporter.py index 4fd9065af90..7adb2c65c48 100644 --- a/dojo/importers/default_reimporter.py +++ b/dojo/importers/default_reimporter.py @@ -6,6 +6,7 @@ import dojo.finding.helper as finding_helper import dojo.jira_link.helper as jira_helper +from dojo.decorators import we_want_async from dojo.importers.base_importer import BaseImporter, Parser from dojo.importers.options import ImporterOptions from dojo.models import ( @@ -15,6 +16,7 @@ Test, Test_Import, ) +from dojo.utils import perform_product_grading from dojo.validators import clean_tags logger = logging.getLogger(__name__) @@ -176,18 +178,31 @@ def process_findings( self.reactivated_items = [] self.unchanged_items = [] self.group_names_to_findings_dict = {} + # Progressive batching for chord execution + post_processing_task_signatures = [] + current_batch_number = 1 + max_batch_size = 1024 logger.debug(f"starting reimport of {len(parsed_findings) if parsed_findings else 0} items.") logger.debug("STEP 1: looping over findings from the reimported report and trying to match them to existing findings") deduplicationLogger.debug(f"Algorithm used for matching new findings to existing findings: {self.deduplication_algorithm}") - for non_clean_unsaved_finding in parsed_findings: - # make sure the severity is something is digestible - unsaved_finding = self.sanitize_severity(non_clean_unsaved_finding) - # Filter on minimum severity if applicable - if Finding.SEVERITIES[unsaved_finding.severity] > Finding.SEVERITIES[self.minimum_severity]: - # finding's severity is below the configured threshold : ignoring the finding + # Pre-sanitize and filter by minimum severity to avoid loop control pitfalls + cleaned_findings = [] + for raw_finding in parsed_findings or []: + sanitized = self.sanitize_severity(raw_finding) + if Finding.SEVERITIES[sanitized.severity] > Finding.SEVERITIES[self.minimum_severity]: + logger.debug( + "skipping finding due to minimum severity filter (finding=%s severity=%s min=%s)", + getattr(sanitized, "title", ""), + sanitized.severity, + self.minimum_severity, + ) continue + cleaned_findings.append(sanitized) + + for idx, unsaved_finding in enumerate(cleaned_findings): + is_final = idx == len(cleaned_findings) - 1 # Some parsers provide "mitigated" field but do not set timezone (because they are probably not available in the report) # Finding.mitigated is DateTimeField and it requires timezone if unsaved_finding.mitigated and not unsaved_finding.mitigated.tzinfo: @@ -236,12 +251,31 @@ def process_findings( finding, unsaved_finding, ) - # finding = new finding or existing finding still in the upload report - # to avoid pushing a finding group multiple times, we push those outside of the loop - if self.findings_groups_enabled and self.group_by: - finding.save() - else: - finding.save(push_to_jira=self.push_to_jira) + # all data is already saved on the finding, we only need to trigger post 
processing + + # Execute post-processing task immediately if async, otherwise execute synchronously + push_to_jira = self.push_to_jira and (not self.findings_groups_enabled or not self.group_by) + + post_processing_task_signature = finding_helper.post_process_finding_save_signature( + finding, + dedupe_option=True, + rules_option=True, + product_grading_option=False, + issue_updater_option=True, + push_to_jira=push_to_jira, + ) + post_processing_task_signatures.append(post_processing_task_signature) + + # Check if we should launch a chord (batch full or end of findings) + if we_want_async(async_user=self.user) and post_processing_task_signatures: + post_processing_task_signatures, current_batch_number, _ = self.maybe_launch_post_processing_chord( + post_processing_task_signatures, + current_batch_number, + max_batch_size, + is_final, + ) + else: + post_processing_task_signature() self.to_mitigate = (set(self.original_items) - set(self.reactivated_items) - set(self.unchanged_items)) # due to #3958 we can have duplicates inside the same report @@ -253,6 +287,12 @@ def process_findings( self.untouched = set(self.unchanged_items) - set(self.to_mitigate) - set(self.new_items) - set(self.reactivated_items) # Process groups self.process_groups_for_all_findings(**kwargs) + + # Note: All chord batching is now handled within the loop above + + # Synchronous tasks were already executed during processing, just calculate grade + perform_product_grading(self.test.engagement.product) + # Process the results and return them back return self.process_results(**kwargs) @@ -287,6 +327,7 @@ def close_old_findings( finding, f"Mitigated by {self.test.test_type} re-upload.", finding_groups_enabled=self.findings_groups_enabled, + product_grading_option=False, ) mitigated_findings.append(finding) # push finding groups to jira since we only only want to push whole groups @@ -294,6 +335,10 @@ def close_old_findings( for finding_group in {finding.finding_group for finding in findings if finding.finding_group is not None}: jira_helper.push_to_jira(finding_group) + # Calculate grade once after all findings have been closed + if mitigated_findings: + perform_product_grading(self.test.engagement.product) + return mitigated_findings def parse_findings_static_test_type( @@ -336,11 +381,14 @@ def match_new_finding_to_existing_finding( hash_code=unsaved_finding.hash_code, ).exclude(hash_code=None).order_by("id") if self.deduplication_algorithm == "unique_id_from_tool": + deduplicationLogger.debug(f"unique_id_from_tool: {unsaved_finding.unique_id_from_tool}") return Finding.objects.filter( test=self.test, unique_id_from_tool=unsaved_finding.unique_id_from_tool, ).exclude(unique_id_from_tool=None).order_by("id") if self.deduplication_algorithm == "unique_id_from_tool_or_hash_code": + deduplicationLogger.debug(f"unique_id_from_tool: {unsaved_finding.unique_id_from_tool}") + deduplicationLogger.debug(f"hash_code: {unsaved_finding.hash_code}") query = Finding.objects.filter( Q(test=self.test), (Q(hash_code__isnull=False) & Q(hash_code=unsaved_finding.hash_code)) @@ -500,10 +548,13 @@ def process_matched_mitigated_finding( if existing_finding.get_sla_configuration().restart_sla_on_reactivation: # restart the sla start date to the current date, finding.save() will set new sla_expiration_date existing_finding.sla_start_date = self.now + existing_finding = self.process_cve(existing_finding) + if existing_finding.get_sla_configuration().restart_sla_on_reactivation: + # restart the sla start date to the current date, finding.save() will 
set new sla_expiration_date + existing_finding.sla_start_date = self.now + # don't dedupe before endpoints are added, postprocessing will be done on next save (in calling method) + existing_finding.save_no_options() - existing_finding.save(dedupe_option=False) - # don't dedupe before endpoints are added - existing_finding.save(dedupe_option=False) note = Notes(entry=f"Re-activated by {self.scan_type} re-upload.", author=self.user) note.save() endpoint_statuses = existing_finding.status_finding.exclude( @@ -551,6 +602,9 @@ def process_matched_active_finding( existing_finding.active = False if self.verified is not None: existing_finding.verified = self.verified + existing_finding = self.process_cve(existing_finding) + existing_finding.save_no_options() + elif unsaved_finding.risk_accepted or unsaved_finding.false_p or unsaved_finding.out_of_scope: logger.debug("Reimported mitigated item matches a finding that is currently open, closing.") logger.debug( @@ -563,6 +617,8 @@ def process_matched_active_finding( existing_finding.active = False if self.verified is not None: existing_finding.verified = self.verified + existing_finding = self.process_cve(existing_finding) + existing_finding.save_no_options() else: # if finding is the same but list of affected was changed, finding is marked as unchanged. This is a known issue self.unchanged_items.append(existing_finding) @@ -597,6 +653,8 @@ def process_finding_that_was_not_matched( # scan_date was provided, override value from parser if self.scan_date_override: unsaved_finding.date = self.scan_date.date() + unsaved_finding = self.process_cve(unsaved_finding) + # Hash code is already calculated earlier as it's the primary matching criteria for reimport # Save it. Don't dedupe before endpoints are added. unsaved_finding.save_no_options() finding = unsaved_finding @@ -640,7 +698,7 @@ def finding_post_processing( # Process vulnerability IDs if finding_from_report.unsaved_vulnerability_ids: finding.unsaved_vulnerability_ids = finding_from_report.unsaved_vulnerability_ids - + # legacy cve field has already been processed/set earlier return self.process_vulnerability_ids(finding) def process_groups_for_all_findings( diff --git a/dojo/importers/endpoint_manager.py b/dojo/importers/endpoint_manager.py index 7f408c909dc..f733d5c9e5a 100644 --- a/dojo/importers/endpoint_manager.py +++ b/dojo/importers/endpoint_manager.py @@ -84,7 +84,7 @@ def reactivate_endpoint_status( for endpoint_status in endpoint_status_list: # Only reactivate endpoints that are actually mitigated if endpoint_status.mitigated: - logger.debug("Re-import: reactivating endpoint %s that is present in this scan", str(endpoint_status.endpoint)) + logger.debug("Re-import: reactivating endpoint %s that is present in this scan", endpoint_status.endpoint) endpoint_status.mitigated_by = None endpoint_status.mitigated_time = None endpoint_status.mitigated = False @@ -111,7 +111,7 @@ def clean_unsaved_endpoints( try: endpoint.clean() except ValidationError as e: - logger.warning(f"DefectDojo is storing broken endpoint because cleaning wasn't successful: {e}") + logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: %s", e) def chunk_endpoints_and_reactivate( self, diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index a28ca353301..9dbbd6deeee 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1,3 +1,4 @@ +import importlib import io import json import logging @@ -206,7 +207,7 @@ def can_be_pushed_to_jira(obj, form=None): return 
False, f"Finding below the minimum JIRA severity threshold ({System_Settings.objects.get().jira_minimum_severity}).", "error_below_minimum_threshold" elif isinstance(obj, Finding_Group): finding_group_status = _safely_get_obj_status_for_jira(obj) - logger.error(f"Finding group status: {finding_group_status}") + logger.error("Finding group status: %s", finding_group_status) if "Empty" in finding_group_status: return False, f"{to_str_typed(obj)} cannot be pushed to jira as it contains no findings above minimum treshold.", "error_empty" @@ -444,7 +445,6 @@ def connect_to_jira(jira_server, jira_username, jira_password): def get_jira_connect_method(): if hasattr(settings, "JIRA_CONNECT_METHOD"): try: - import importlib mn, _, fn = settings.JIRA_CONNECT_METHOD.rpartition(".") m = importlib.import_module(mn) return getattr(m, fn) @@ -1655,7 +1655,7 @@ def process_jira_project_form(request, instance=None, target=None, product=None, # jform = JIRAProjectForm(request.POST, instance=instance if instance else JIRA_Project(), product=product) jform = JIRAProjectForm(request.POST, instance=instance, target=target, product=product, engagement=engagement) # logging has_changed because it sometimes doesn't do what we expect - logger.debug("jform has changed: %s", str(jform.has_changed())) + logger.debug("jform has changed: %s", jform.has_changed()) if jform.has_changed(): # if no data was changed, no need to do anything! logger.debug("jform changed_data: %s", jform.changed_data) @@ -1778,7 +1778,7 @@ def escape_for_jira(text): def process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jira_issue, finding_group: Finding_Group = None) -> bool: """Processes the resolution field in the JIRA issue and updated the finding in Defect Dojo accordingly""" - import dojo.risk_acceptance.helper as ra_helper + import dojo.risk_acceptance.helper as ra_helper # noqa: PLC0415 import error status_changed = False resolved = resolution_id is not None jira_instance = get_jira_instance(finding) diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py index 24161edf7d6..75949aec7bf 100644 --- a/dojo/jira_link/views.py +++ b/dojo/jira_link/views.py @@ -387,7 +387,7 @@ def post(self, request): create_notification( event="jira_config_added", title=f"New addition of JIRA: {jform.cleaned_data.get('configuration_name')}", - description=f"JIRA \"{jform.cleaned_data.get('configuration_name')}\" was added by {request.user}", + description=f'JIRA "{jform.cleaned_data.get('configuration_name')}" was added by {request.user}', url=request.build_absolute_uri(reverse("jira"))) return HttpResponseRedirect(reverse("jira")) @@ -432,7 +432,7 @@ def post(self, request): create_notification( event="jira_config_added", title=f"New addition of JIRA: {jform.cleaned_data.get('configuration_name')}", - description=f"JIRA \"{jform.cleaned_data.get('configuration_name')}\" was added by {request.user}", + description=f'JIRA "{jform.cleaned_data.get('configuration_name')}" was added by {request.user}', url=request.build_absolute_uri(reverse("jira"))) return HttpResponseRedirect(reverse("jira")) @@ -486,7 +486,7 @@ def post(self, request, jid=None): create_notification( event="jira_config_edited", title=f"Edit of JIRA: {jform.cleaned_data.get('configuration_name')}", - description=f"JIRA \"{jform.cleaned_data.get('configuration_name')}\" was edited by {request.user}", + description=f'JIRA "{jform.cleaned_data.get('configuration_name')}" was edited by {request.user}', 
url=request.build_absolute_uri(reverse("jira"))) return HttpResponseRedirect(reverse("jira")) diff --git a/dojo/labels.py b/dojo/labels.py new file mode 100644 index 00000000000..ce5ea520c5d --- /dev/null +++ b/dojo/labels.py @@ -0,0 +1,85 @@ +""" +This module provides centralized access to application text copy. For the time being, this centralization is necessary +as some elements (forms.py, templates) require access to labels from across different model packages. + +Each model package that needs to support text copy can provide its own 'labels.py' that can be registered here. That +module should provide a set of stable dictionary keys that can be used to reference text copy within the app, as well as +a dictionary that maps these keys to the text copy. + +In this file, the sets of keys and the text copy dictionaries for all such model packages should be imported and added +to the corresponding structures: LabelsProxy should extend the set of keys, and the 'labels' variable should have the +text copy dictionary added to it. The LabelsProxy serves to provide easy autocomplete/linter compatibility with the +full list of text copy keys that exist over the program, until things are more modularized on a per-model basis. + +For templates, a `label` context processor has been added, so developers can just use labels.ATTRIBUTE_NAME. + +In views/Python code, developers should first import get_labels() and set it to a variable, e.g., labels = get_labels(). +Then they can simply use labels.ATTRIBUTE_NAME. + +For the stable keys, some conventions used: + Each copy attribute name starts with a noun representing the overarching model/object type the label is for. + Attribute suffixes are as follows: + _LABEL -> short label, used for UI/API fields + _MESSAGE -> a longer message displayed as a toast or displayed on the page + _HELP -> helptext (for help_text kwargs/popover content) +""" +import logging + +from dojo.asset.labels import AssetLabelsKeys +from dojo.asset.labels import labels as asset_labels +from dojo.organization.labels import OrganizationLabelsKeys +from dojo.organization.labels import labels as organization_labels +from dojo.system_settings.labels import SystemSettingsLabelsKeys +from dojo.system_settings.labels import labels as system_settings_labels + +logger = logging.getLogger(__name__) + + +class LabelsProxy( + AssetLabelsKeys, + OrganizationLabelsKeys, + SystemSettingsLabelsKeys, +): + + """ + Proxy class for text copy. The purpose of this is to allow easy access to the copy from within templates, and to + allow for IDE code completion. This inherits from the various copy key classes so IDEs can statically determine what + attributes ("labels") are available. After initialization, all attributes defined on this class are set to the value + of the appropriate text. + """ + + def _get_label_entries(self): + """Returns a dict of all "label" entries from this class.""" + cl = self.__class__ + return { + name: getattr(cl, name) for name in dir(cl) if not name.startswith("_")} + + def __init__(self, label_set: dict[str, str]): + """ + The initializer takes a dict set of labels and sets the corresponding attribute defined in this class to the + value specified in the dict (e.g., self.ASSET_GROUPS_DELETE_SUCCESS_MESSAGE is set to + labels[K.ASSET_GROUPS_DELETE_SUCCESS_MESSAGE]). + + As a side benefit, this will explode if any label defined on this class is not present in the given dict: a + runtime check that a labels dict must be complete. 
+ """ + for _l, _v in self._get_label_entries().items(): + try: + setattr(self, _l, label_set[_v]) + except KeyError: + error_message = f"Supplied copy dictionary does not provide entry for {_l}" + logger.error(error_message) + raise ValueError(error_message) + + +# The full set of text copy, mapping the stable key entries to their respective text copy values +labels: dict[str, str] = asset_labels | organization_labels | system_settings_labels + + +# The labels proxy object +labels_proxy = LabelsProxy(labels) + + +def get_labels() -> LabelsProxy: + """Method for getting a LabelsProxy initialized with the correct set of labels.""" + return labels_proxy diff --git a/dojo/management/commands/flush_auditlog.py b/dojo/management/commands/flush_auditlog.py new file mode 100644 index 00000000000..6e3594363c7 --- /dev/null +++ b/dojo/management/commands/flush_auditlog.py @@ -0,0 +1,25 @@ +from django.core.management.base import BaseCommand + +from dojo.auditlog import run_flush_auditlog + + +class Command(BaseCommand): + help = "Flush old audit log entries based on retention and batching settings" + + def add_arguments(self, parser): + parser.add_argument("--retention-months", type=int, default=None, help="Override retention period in months") + parser.add_argument("--batch-size", type=int, default=None, help="Override batch size") + parser.add_argument("--max-batches", type=int, default=None, help="Override max batches per run") + parser.add_argument("--dry-run", action="store_true", help="Only show how many entries would be deleted") + + def handle(self, *args, **options): + deleted_total, batches_done, reached_limit = run_flush_auditlog( + retention_period=options.get("retention_months"), + batch_size=options.get("batch_size"), + max_batches=options.get("max_batches"), + dry_run=options.get("dry_run", False), + ) + verb = "Would delete" if options.get("dry_run") else "Deleted" + style = self.style.WARNING if options.get("dry_run") else self.style.SUCCESS + suffix = " (reached max batches)" if reached_limit else "" + self.stdout.write(style(f"{verb} {deleted_total} audit log entries in {batches_done} batches{suffix}.")) diff --git a/dojo/management/commands/import_all_unittest_scans.py b/dojo/management/commands/import_all_unittest_scans.py index 04a4c84f156..cc4dd266250 100644 --- a/dojo/management/commands/import_all_unittest_scans.py +++ b/dojo/management/commands/import_all_unittest_scans.py @@ -120,7 +120,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1, return self.import_scan(payload, expected_http_status_code) def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engagement=10, engagements_per_product=50, products_per_product_type=15, *, include_very_big_scans=False, **kwargs): - logger.info(f"product_name_prefix: {product_name_prefix}, tests_per_engagement: {tests_per_engagement}, engagements_per_product: {engagements_per_product}, products_per_product_type: {products_per_product_type}") + logger.info("product_name_prefix: %s, tests_per_engagement: %s, engagements_per_product: %s, products_per_product_type: %s", product_name_prefix, tests_per_engagement, engagements_per_product, products_per_product_type) product_type_prefix = "Sample scans " + datetime.now().strftime("%Y-%m-%d %H:%M:%S") product_type_index = 1 @@ -159,7 +159,7 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme for attribute_name in dir(module): attribute = getattr(module, attribute_name) if isclass(attribute) and 
attribute_name.lower() == module_name.replace("_", "") + "parser": - logger.debug(f"Loading {module_name} parser") + logger.debug("Loading %s parser", module_name) scan_dir = Path("unittests") / "scans" / module_name for scan_file in scan_dir.glob("*.json"): if include_very_big_scans or scan_file.name != "very_many_vulns.json": # jfrog_xray file is huge and takes too long to import @@ -183,12 +183,12 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme error_messages[module_name + "/" + scan_file.name] = result.get("message", str(e)) except: - logger.exception(f"failed to load {module_name}") + logger.exception("failed to load %s", module_name) raise - logger.error(f"Error count: {error_count}") + logger.error("Error count: %s", error_count) for scan, message in error_messages.items(): - logger.error(f"Error importing scan {scan}: {message}") + logger.error("Error importing scan %s: %s", scan, message) def handle(self, *args, **options): logger.info("EXPERIMENTAL: This command may be changed/deprecated/removed without prior notice.") diff --git a/dojo/management/commands/import_github_languages.py b/dojo/management/commands/import_github_languages.py index 9e1c45ffb32..89ddf80f7b8 100644 --- a/dojo/management/commands/import_github_languages.py +++ b/dojo/management/commands/import_github_languages.py @@ -43,7 +43,7 @@ def handle(self, *args, **options): try: language_type, created = Language_Type.objects.get_or_create(language=name) except Language_Type.MultipleObjectsReturned: - logger.warning(f"Language_Type {name} exists multiple times") + logger.warning("Language_Type %s exists multiple times", name) continue if created: @@ -52,4 +52,4 @@ def handle(self, *args, **options): language_type.color = element.get("color", 0) language_type.save() - logger.info(f"Finished importing languages from GitHub, added {new_language_types} Language_Types") + logger.info("Finished importing languages from GitHub, added %s Language_Types", new_language_types) diff --git a/dojo/management/commands/import_unittest_scan.py b/dojo/management/commands/import_unittest_scan.py new file mode 100644 index 00000000000..59341c719b9 --- /dev/null +++ b/dojo/management/commands/import_unittest_scan.py @@ -0,0 +1,243 @@ +import json +import logging +import time +from importlib import import_module +from importlib.util import find_spec +from inspect import isclass +from pathlib import Path + +from django.core.management.base import BaseCommand, CommandError +from django.urls import reverse +from rest_framework.authtoken.models import Token +from rest_framework.test import APIClient + +from unittests.test_dashboard import User + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + + help = ( + "Import a specific unittest scan by filename. " + "Automatically deduces scan type from path and creates product/engagement using auto_create_context." 
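A possible way to drive the new command, shown via call_command so the options map one-to-one onto the argparse destinations defined just below; it assumes a DefectDojo environment with the unittest scan files present and an 'admin' user holding an API token (both are requirements of the code itself, not extra guarantees):

```python
# Roughly equivalent to:
#   python manage.py import_unittest_scan zap/zap_sample.json --tags perf --verified
from django.core.management import call_command

call_command(
    "import_unittest_scan",
    "zap/zap_sample.json",            # scan file, relative to unittests/scans/
    product_name="command import",    # defaults repeated here for clarity
    engagement_name="command import",
    minimum_severity="Low",
    verified=True,
    tags=["perf"],
)
```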
+ ) + + def add_arguments(self, parser): + parser.add_argument( + "scan_file", + type=str, + help="Path to scan file relative to unittests/scans/ (e.g., 'zap/zap_sample.json')", + ) + parser.add_argument( + "--product-name", + type=str, + default="command import", + help="Product name to import into (default: 'command import')", + ) + parser.add_argument( + "--engagement-name", + type=str, + default="command import", + help="Engagement name to import into (default: 'command import')", + ) + parser.add_argument( + "--product-type-name", + type=str, + default="command import", + help="Product type name to use (default: 'command import')", + ) + parser.add_argument( + "--minimum-severity", + type=str, + default="Low", + choices=["Critical", "High", "Medium", "Low", "Info"], + help="Minimum severity to import (default: Low)", + ) + parser.add_argument( + "--active", + action="store_true", + default=True, + help="Mark findings as active (default: True)", + ) + parser.add_argument( + "--verified", + action="store_true", + default=False, + help="Mark findings as verified (default: False)", + ) + parser.add_argument( + "--tags", + action="append", + default=[], + help=( + "Tag(s) to apply to the imported Test (repeat --tags to add multiple). " + "Example: --tags perf --tags jfrog" + ), + ) + + def get_test_admin(self): + return User.objects.get(username="admin") + + def import_scan(self, payload, expected_http_status_code=201): + testuser = self.get_test_admin() + token = Token.objects.get(user=testuser) + client = APIClient() + client.credentials(HTTP_AUTHORIZATION="Token " + token.key) + + response = client.post(reverse("importscan-list"), payload) + if expected_http_status_code != response.status_code: + msg = f"Expected HTTP status code {expected_http_status_code}, got {response.status_code}: {response.content[:1000]}" + raise CommandError(msg) + return json.loads(response.content) + + def deduce_scan_type_from_path(self, scan_file_path): + """ + Deduce the scan type from the file path by finding the corresponding parser. 
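As a worked trace of the deduction implemented below for scan_file = "zap/zap_sample.json" (it assumes dojo.tools is importable; the concrete parser class and scan-type string depend on the zap parser and are not asserted here):

```python
from importlib import import_module
from inspect import isclass
from pathlib import Path

scan_file = "zap/zap_sample.json"
module_name = Path(scan_file).parts[0]                      # -> "zap"
module = import_module(f"dojo.tools.{module_name}.parser")  # -> dojo.tools.zap.parser
expected = module_name.replace("_", "") + "parser"          # -> "zapparser"
parser_class = next(
    getattr(module, name) for name in dir(module)
    if isclass(getattr(module, name)) and name.lower() == expected
)
print(parser_class().get_scan_types()[0])                   # value used as the import's scan_type
```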
+ + Args: + scan_file_path: Path like 'zap/zap_sample.json' or 'stackhawk/stackhawk_sample.json' + + Returns: + tuple: (scan_type, parser_class) or raises CommandError if not found + + """ + # Extract the directory name (parser module name) + path_parts = Path(scan_file_path).parts + if len(path_parts) < 2: + msg = f"Scan file path must include directory: {scan_file_path}" + raise CommandError(msg) + + module_name = path_parts[0] + + # Try to find and load the parser module + try: + if not find_spec(f"dojo.tools.{module_name}.parser"): + msg = f"No parser module found for '{module_name}'" + raise CommandError(msg) + + module = import_module(f"dojo.tools.{module_name}.parser") + + # Find the parser class + parser_class = None + expected_class_name = module_name.replace("_", "") + "parser" + + for attribute_name in dir(module): + attribute = getattr(module, attribute_name) + if isclass(attribute) and attribute_name.lower() == expected_class_name: + parser_class = attribute + break + + if not parser_class: + msg = f"No parser class found in module '{module_name}'" + raise CommandError(msg) + + # Get the scan type from the parser + parser_instance = parser_class() + scan_types = parser_instance.get_scan_types() + + if not scan_types: + msg = f"Parser '{module_name}' has no scan types" + raise CommandError(msg) + + return scan_types[0], parser_class + + except ImportError as e: + msg = f"Failed to import parser module '{module_name}': {e}" + raise CommandError(msg) + + def import_unittest_scan(self, scan_file, product_name, engagement_name, product_type_name, + minimum_severity, active, verified, tags): + """ + Import a specific unittest scan file. + + Args: + scan_file: Path to scan file relative to unittests/scans/ + product_name: Name of product to create/use + engagement_name: Name of engagement to create/use + product_type_name: Name of product type to create/use + minimum_severity: Minimum severity level + active: Whether findings should be active + verified: Whether findings should be verified + + """ + # Validate scan file exists + scan_path = Path("unittests/scans") / scan_file + if not scan_path.exists(): + msg = f"Scan file not found: {scan_path}" + raise CommandError(msg) + + # Deduce scan type from path + scan_type, _parser_class = self.deduce_scan_type_from_path(scan_file) + + logger.info(f"Importing scan '{scan_file}' using scan type '{scan_type}'") + logger.info(f"Target: Product '{product_name}' -> Engagement '{engagement_name}'") + + # Import the scan using auto_create_context + with scan_path.open(encoding="utf-8") as testfile: + payload = { + "minimum_severity": minimum_severity, + "scan_type": scan_type, + "file": testfile, + "version": "1.0.1", + "active": active, + "verified": verified, + "apply_tags_to_findings": True, + "apply_tags_to_endpoints": True, + "auto_create_context": True, + "product_type_name": product_type_name, + "product_name": product_name, + "engagement_name": engagement_name, + "close_old_findings": False, + } + + if tags: + payload["tags"] = tags + + result = self.import_scan(payload) + + logger.info(f"Successfully imported scan. 
Test ID: {result.get('test_id')}") + logger.info(f"Import summary: {result.get('scan_save_message', 'No summary available')}") + + return result + + def handle(self, *args, **options): + scan_file = options["scan_file"] + product_name = options["product_name"] + engagement_name = options["engagement_name"] + product_type_name = options["product_type_name"] + minimum_severity = options["minimum_severity"] + active = options["active"] + verified = options["verified"] + tags = options["tags"] + + start_time = time.time() + + try: + self.import_unittest_scan( + scan_file=scan_file, + product_name=product_name, + engagement_name=engagement_name, + product_type_name=product_type_name, + minimum_severity=minimum_severity, + active=active, + verified=verified, + tags=tags, + ) + + end_time = time.time() + duration = end_time - start_time + + self.stdout.write( + self.style.SUCCESS( + f"Successfully imported '{scan_file}' into product '{product_name}' " + f"(took {duration:.2f} seconds)", + ), + ) + + except Exception as e: + end_time = time.time() + duration = end_time - start_time + logger.exception(f"Failed to import scan '{scan_file}' after {duration:.2f} seconds") + msg = f"Import failed after {duration:.2f} seconds: {e}" + raise CommandError(msg) diff --git a/dojo/management/commands/pghistory_backfill.py b/dojo/management/commands/pghistory_backfill.py new file mode 100644 index 00000000000..52367e32c1c --- /dev/null +++ b/dojo/management/commands/pghistory_backfill.py @@ -0,0 +1,265 @@ +""" +Management command to backfill existing data into django-pghistory. + +This command creates initial snapshots for all existing records in tracked models. +""" +import logging + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand +from django.utils import timezone + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Backfill existing data into django-pghistory" + + def add_arguments(self, parser): + parser.add_argument( + "--model", + type=str, + help='Specific model to backfill (e.g., "Finding", "Product")', + ) + parser.add_argument( + "--batch-size", + type=int, + default=1000, + help="Number of records to process in each batch (default: 1000)", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be done without actually creating events", + ) + + def get_excluded_fields(self, model_name): + """Get the list of excluded fields for a specific model from pghistory configuration.""" + # Define excluded fields for each model (matching auditlog.py) + excluded_fields_map = { + "Dojo_User": ["password"], + "Product": ["updated"], # This is the key change + "Cred_User": ["password"], + "Notification_Webhooks": ["header_name", "header_value"], + } + return excluded_fields_map.get(model_name, []) + + def handle(self, *args, **options): + if not settings.ENABLE_AUDITLOG or settings.AUDITLOG_TYPE != "django-pghistory": + self.stdout.write( + self.style.WARNING( + "pghistory is not enabled. 
Set DD_ENABLE_AUDITLOG=True and " + "DD_AUDITLOG_TYPE=django-pghistory", + ), + ) + return + + # Models that are tracked by pghistory + tracked_models = [ + "Dojo_User", "Endpoint", "Engagement", "Finding", "Finding_Group", + "Product_Type", "Product", "Test", "Risk_Acceptance", + "Finding_Template", "Cred_User", "Notification_Webhooks", + ] + + specific_model = options.get("model") + if specific_model: + if specific_model not in tracked_models: + self.stdout.write( + self.style.ERROR( + f'Model "{specific_model}" is not tracked by pghistory. ' + f'Available models: {", ".join(tracked_models)}', + ), + ) + return + tracked_models = [specific_model] + + batch_size = options["batch_size"] + dry_run = options["dry_run"] + + if dry_run: + self.stdout.write( + self.style.WARNING("DRY RUN MODE - No events will be created"), + ) + + total_processed = 0 + self.stdout.write(f"Starting backfill for {len(tracked_models)} model(s)...") + + for model_name in tracked_models: + self.stdout.write(f"\nProcessing {model_name}...") + + try: + # Get the Django model + Model = apps.get_model("dojo", model_name) + + # Get total count + total_count = Model.objects.count() + if total_count == 0: + self.stdout.write(f" No records found for {model_name}") + continue + + self.stdout.write(f" Found {total_count:,} records") + + # Get the corresponding Event model for bulk operations + event_table_name = f"{model_name}Event" + try: + EventModel = apps.get_model("dojo", event_table_name) + except LookupError: + self.stdout.write( + self.style.ERROR( + f" Event model {event_table_name} not found. " + f"Is {model_name} tracked by pghistory?", + ), + ) + continue + + # Get IDs of records that already have initial_import events + existing_initial_import_ids = set( + EventModel.objects.filter(pgh_label="initial_import").values_list("pgh_obj_id", flat=True), + ) + + # Filter to only get records that don't have initial_import events + records_needing_backfill = Model.objects.exclude(id__in=existing_initial_import_ids) + backfill_count = records_needing_backfill.count() + existing_count = len(existing_initial_import_ids) + + # Log the breakdown + self.stdout.write(f" Records with initial_import events: {existing_count:,}") + self.stdout.write(f" Records needing initial_import events: {backfill_count:,}") + + if backfill_count == 0: + self.stdout.write( + self.style.SUCCESS(f" ✓ All {total_count:,} records already have initial_import events"), + ) + processed = total_count + continue + + if dry_run: + self.stdout.write(f" Would process {backfill_count:,} records in batches of {batch_size:,}...") + else: + self.stdout.write(f" Processing {backfill_count:,} records in batches of {batch_size:,}...") + + # Process records one by one and bulk insert every batch_size records + processed = 0 + event_records = [] + failed_records = [] + + for instance in records_needing_backfill.iterator(): + try: + # Create event record with all model fields + event_data = {} + + # Get excluded fields for this model from pghistory configuration + excluded_fields = self.get_excluded_fields(model_name) + + # Copy all fields from the instance to event_data, except excluded ones + for field in instance._meta.fields: + field_name = field.name + if field_name not in excluded_fields: + field_value = getattr(instance, field_name) + event_data[field_name] = field_value + + # Explicitly preserve created timestamp from the original instance + # Only if not excluded and exists + if hasattr(instance, "created") and instance.created and "created" not in 
excluded_fields: + event_data["created"] = instance.created + # Note: We don't preserve 'updated' for Product since it's excluded + + # Add pghistory-specific fields + event_data.update({ + "pgh_label": "initial_import", + "pgh_obj": instance, # ForeignKey to the original object + "pgh_context": None, # No context for backfilled events + }) + + # Set pgh_created_at to current time (this is for the event creation time) + # The created/updated fields above contain the original instance timestamps + event_data["pgh_created_at"] = timezone.now() + + event_records.append(EventModel(**event_data)) + + except Exception as e: + failed_records.append(instance.id) + logger.error( + f"Failed to prepare event for {model_name} ID {instance.id}: {e}", + ) + + # Bulk create when we hit batch_size records + if len(event_records) >= batch_size: + if not dry_run and event_records: + try: + attempted = len(event_records) + created_objects = EventModel.objects.bulk_create(event_records, batch_size=batch_size) + actually_created = len(created_objects) if created_objects else 0 + processed += actually_created + + if actually_created != attempted: + logger.warning( + f"bulk_create for {model_name}: attempted {attempted}, " + f"actually created {actually_created} ({attempted - actually_created} skipped)", + ) + except Exception as e: + logger.error(f"Failed to bulk create events for {model_name}: {e}") + raise + elif dry_run: + processed += len(event_records) + + event_records = [] # Reset for next batch + + # Progress update + progress = (processed / backfill_count) * 100 + self.stdout.write(f" Processed {processed:,}/{backfill_count:,} records needing backfill ({progress:.1f}%)") + + # Handle remaining records + if event_records: + if not dry_run: + try: + attempted = len(event_records) + created_objects = EventModel.objects.bulk_create(event_records, batch_size=batch_size) + actually_created = len(created_objects) if created_objects else 0 + processed += actually_created + + if actually_created != attempted: + logger.warning( + f"bulk_create final batch for {model_name}: attempted {attempted}, " + f"actually created {actually_created} ({attempted - actually_created} skipped)", + ) + except Exception as e: + logger.error(f"Failed to bulk create final batch for {model_name}: {e}") + raise + else: + processed += len(event_records) + + # Final progress update + if backfill_count > 0: + progress = (processed / backfill_count) * 100 + self.stdout.write(f" Processed {processed:,}/{backfill_count:,} records needing backfill ({progress:.1f}%)") + + total_processed += processed + + # Show completion summary + if failed_records: + self.stdout.write( + self.style.WARNING( + f" ⚠ Completed {model_name}: {processed:,} records processed, " + f"{len(failed_records)} records failed", + ), + ) + else: + self.stdout.write( + self.style.SUCCESS( + f" ✓ Completed {model_name}: {processed:,} records", + ), + ) + + except Exception as e: + self.stdout.write( + self.style.ERROR(f" ✗ Failed to process {model_name}: {e}"), + ) + logger.error(f"Error processing {model_name}: {e}") + + self.stdout.write( + self.style.SUCCESS( + f"\nBACKFILL COMPLETE: Processed {total_processed:,} records", + ), + ) diff --git a/dojo/management/commands/pghistory_clear.py b/dojo/management/commands/pghistory_clear.py new file mode 100644 index 00000000000..a2593ac25ca --- /dev/null +++ b/dojo/management/commands/pghistory_clear.py @@ -0,0 +1,206 @@ +""" +Management command to clear all pghistory Event tables. 
+ +This command removes all historical event data from django-pghistory tables. +Use with caution as this operation is irreversible. It's meant to be used only during development/testing. +""" +import logging + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db import connection, transaction + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Clear all pghistory Event tables" + + def add_arguments(self, parser): + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be cleared without actually clearing", + ) + parser.add_argument( + "--force", + action="store_true", + help="Skip confirmation prompt (use with caution)", + ) + parser.add_argument( + "--drop", + action="store_true", + help="Drop tables entirely instead of truncating (EXTREMELY DESTRUCTIVE)", + ) + + def handle(self, *args, **options): + if not settings.ENABLE_AUDITLOG or settings.AUDITLOG_TYPE != "django-pghistory": + self.stdout.write( + self.style.WARNING( + "pghistory is not enabled. Set DD_ENABLE_AUDITLOG=True and " + "DD_AUDITLOG_TYPE=django-pghistory", + ), + ) + return + + # All pghistory Event tables based on tracked models + event_tables = [ + "Cred_UserEvent", + "Dojo_UserEvent", + "EndpointEvent", + "EngagementEvent", + "Finding_GroupEvent", + "Finding_TemplateEvent", + "FindingEvent", + "Notification_WebhooksEvent", + "Product_TypeEvent", + "ProductEvent", + "Risk_AcceptanceEvent", + "TestEvent", + ] + + dry_run = options["dry_run"] + force = options["force"] + drop_tables = options["drop"] + + if dry_run: + self.stdout.write( + self.style.WARNING("DRY RUN MODE - No data will be cleared"), + ) + + total_records = 0 + table_counts = {} + + # First, count all records + self.stdout.write("Analyzing pghistory Event tables...") + for table_name in event_tables: + try: + EventModel = apps.get_model("dojo", table_name) + count = EventModel.objects.count() + table_counts[table_name] = count + total_records += count + + if count > 0: + self.stdout.write(f" {table_name}: {count:,} records") + else: + self.stdout.write(f" {table_name}: empty") + + except LookupError: + self.stdout.write( + self.style.WARNING(f" {table_name}: table not found (skipping)"), + ) + continue + except Exception as e: + self.stdout.write( + self.style.ERROR(f" {table_name}: error counting records - {e}"), + ) + continue + + if total_records == 0: + self.stdout.write( + self.style.SUCCESS("No pghistory records found. Nothing to clear."), + ) + return + + self.stdout.write(f"\nTotal records to clear: {total_records:,}") + + if dry_run: + operation = "drop" if drop_tables else "clear" + self.stdout.write( + self.style.SUCCESS( + f"\nDRY RUN COMPLETE: Would {operation} {total_records:,} records " + f"from {len([t for t in table_counts.values() if t > 0])} tables", + ), + ) + return + + # Confirmation prompt + if not force: + if drop_tables: + self.stdout.write( + self.style.ERROR( + f"\n🚨 EXTREMELY DESTRUCTIVE WARNING: This will DROP {len([t for t in table_counts.values() if t > 0])} " + f"pghistory Event tables entirely, deleting {total_records:,} records and the table structure! " + "You will need to recreate tables and run migrations to restore them!", + ), + ) + else: + self.stdout.write( + self.style.WARNING( + f"\n⚠️ WARNING: This will permanently delete {total_records:,} " + "pghistory records. 
This operation cannot be undone!", + ), + ) + + operation_type = "DROP TABLES" if drop_tables else "truncate tables" + confirm = input(f"Are you sure you want to {operation_type}? Type 'yes' to continue: ") + if confirm.lower() != "yes": + self.stdout.write(self.style.ERROR("Operation cancelled.")) + return + + # Clear the tables using TRUNCATE or DROP + operation_verb = "Dropping" if drop_tables else "Truncating" + self.stdout.write(f"\n{operation_verb} pghistory Event tables...") + cleared_records = 0 + cleared_tables = 0 + + for table_name in event_tables: + if table_counts.get(table_name, 0) == 0: + continue # Skip empty tables + + try: + EventModel = apps.get_model("dojo", table_name) + + # Use raw SQL TRUNCATE or DROP for better performance on large tables + with transaction.atomic(): + count = table_counts.get(table_name, 0) + if count > 0: + # Get the actual database table name + db_table = EventModel._meta.db_table + + with connection.cursor() as cursor: + if drop_tables: + # DROP TABLE - completely removes the table structure + cursor.execute(f'DROP TABLE IF EXISTS "{db_table}" CASCADE') + operation_past = "Dropped" + else: + # TRUNCATE TABLE - removes all data but keeps table structure + cursor.execute(f'TRUNCATE TABLE "{db_table}" RESTART IDENTITY CASCADE') + operation_past = "Truncated" + + cleared_records += count + cleared_tables += 1 + self.stdout.write( + self.style.SUCCESS(f" ✓ {operation_past} {table_name}: {count:,} records"), + ) + + except LookupError: + # Already handled in counting phase + continue + except Exception as e: + operation_verb_lower = "drop" if drop_tables else "truncate" + self.stdout.write( + self.style.ERROR(f" ✗ Failed to {operation_verb_lower} {table_name}: {e}"), + ) + logger.error(f"Error {operation_verb_lower}ing {table_name}: {e}") + + # Final success message + if drop_tables: + self.stdout.write( + self.style.SUCCESS( + f"\n🎉 DROP COMPLETE: Dropped {cleared_tables} tables with {cleared_records:,} records", + ), + ) + self.stdout.write( + self.style.WARNING( + "⚠️ Remember to run migrations to recreate the dropped tables!", + ), + ) + else: + self.stdout.write( + self.style.SUCCESS( + f"\n🎉 CLEARING COMPLETE: Cleared {cleared_records:,} records " + f"from {cleared_tables} tables", + ), + ) diff --git a/dojo/metrics/urls.py b/dojo/metrics/urls.py index a121403cc1d..f0643322f1c 100644 --- a/dojo/metrics/urls.py +++ b/dojo/metrics/urls.py @@ -1,27 +1,101 @@ +from django.conf import settings from django.urls import re_path from dojo.metrics import views +from dojo.utils import redirect_view -urlpatterns = [ - # metrics - re_path(r"^metrics$", views.metrics, {"mtype": "All"}, - name="metrics"), - re_path(r"^critical_product_metrics$", views.critical_product_metrics, {"mtype": "All"}, - name="critical_product_metrics"), - re_path(r"^metrics/all$", views.metrics, {"mtype": "All"}, - name="metrics_all"), - re_path(r"^metrics/product/type$", views.metrics, {"mtype": "All"}, - name="metrics_product_type"), - re_path(r"^metrics/simple$", views.simple_metrics, - name="simple_metrics"), - re_path(r"^metrics/product/type/(?P\d+)$", - views.metrics, name="product_type_metrics"), - re_path(r"^metrics/product/type/counts$", - views.product_type_counts, name="product_type_counts"), - re_path(r"^metrics/product/tag/counts$", - views.product_tag_counts, name="product_tag_counts"), - re_path(r"^metrics/engineer$", views.engineer_metrics, - name="engineer_metrics"), - re_path(r"^metrics/engineer/(?P\d+)$", views.view_engineer, - name="view_engineer"), -] +# TODO: 
remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + urlpatterns = [ + # metrics + re_path( + r"^metrics$", + views.metrics, + {"mtype": "All"}, + name="metrics", + ), + re_path( + r"^critical_asset_metrics$", + views.critical_product_metrics, + {"mtype": "All"}, + name="critical_product_metrics", + ), + re_path( + r"^metrics/all$", + views.metrics, + {"mtype": "All"}, + name="metrics_all", + ), + re_path( + r"^metrics/organization$", + views.metrics, + {"mtype": "All"}, + name="metrics_product_type", + ), + re_path( + r"^metrics/simple$", + views.simple_metrics, + name="simple_metrics", + ), + re_path( + r"^metrics/organization/(?P\d+)$", + views.metrics, + name="product_type_metrics", + ), + re_path( + r"^metrics/organization/counts$", + views.product_type_counts, + name="product_type_counts", + ), + re_path( + r"^metrics/asset/tag/counts$", + views.product_tag_counts, + name="product_tag_counts", + ), + re_path( + r"^metrics/engineer$", + views.engineer_metrics, + name="engineer_metrics", + ), + re_path( + r"^metrics/engineer/(?P\d+)$", + views.view_engineer, + name="view_engineer", + ), + # TODO: Backwards compatibility; remove after v3 migration is complete + re_path(r"^critical_product_metrics$", redirect_view("critical_product_metrics")), + re_path(r"^metrics/product/type$", redirect_view("metrics_product_type")), + re_path(r"^metrics/product/type/(?P\d+)$", redirect_view("product_type_metrics")), + re_path(r"^metrics/product/type/counts$", redirect_view("product_type_counts")), + re_path(r"^metrics/product/tag/counts$", redirect_view("product_tag_counts")), + ] +else: + urlpatterns = [ + # metrics + re_path(r"^metrics$", views.metrics, {"mtype": "All"}, + name="metrics"), + re_path(r"^critical_product_metrics$", views.critical_product_metrics, {"mtype": "All"}, + name="critical_product_metrics"), + re_path(r"^metrics/all$", views.metrics, {"mtype": "All"}, + name="metrics_all"), + re_path(r"^metrics/product/type$", views.metrics, {"mtype": "All"}, + name="metrics_product_type"), + re_path(r"^metrics/simple$", views.simple_metrics, + name="simple_metrics"), + re_path(r"^metrics/product/type/(?P\d+)$", + views.metrics, name="product_type_metrics"), + re_path(r"^metrics/product/type/counts$", + views.product_type_counts, name="product_type_counts"), + re_path(r"^metrics/product/tag/counts$", + views.product_tag_counts, name="product_tag_counts"), + re_path(r"^metrics/engineer$", views.engineer_metrics, + name="engineer_metrics"), + re_path(r"^metrics/engineer/(?P\d+)$", views.view_engineer, + name="view_engineer"), + # Forward compatibility + re_path(r"^critical_asset_metrics$", redirect_view("critical_product_metrics")), + re_path(r"^metrics/organization$", redirect_view("metrics_product_type")), + re_path(r"^metrics/organization/(?P\d+)$", redirect_view("product_type_metrics")), + re_path(r"^metrics/organization/counts$", redirect_view("product_type_counts")), + re_path(r"^metrics/asset/tag/counts$", redirect_view("product_tag_counts")), + ] diff --git a/dojo/metrics/utils.py b/dojo/metrics/utils.py index 4b98d7ec4ee..913d62c2361 100644 --- a/dojo/metrics/utils.py +++ b/dojo/metrics/utils.py @@ -343,7 +343,7 @@ def get_date_range( return start_date, end_date -def severity_count( +def severity_count[MetricsQuerySet: (QuerySet[Finding], QuerySet[Endpoint_Status])]( queryset: MetricsQuerySet, method: str, expression: str, @@ -409,7 +409,7 @@ def js_epoch( return int(d.timestamp()) * 1000 -def get_charting_data( +def 
get_charting_data[MetricsQuerySet: (QuerySet[Finding], QuerySet[Endpoint_Status])]( qs: MetricsQuerySet, start_date: date, period: MetricsPeriod, @@ -472,7 +472,7 @@ def period_deltas(start_date, end_date): return weeks_between, months_between -def aggregate_counts_by_period( +def aggregate_counts_by_period[MetricsQuerySet: (QuerySet[Finding], QuerySet[Endpoint_Status])]( qs: MetricsQuerySet, period: MetricsPeriod, metrics_type: MetricsType, @@ -582,7 +582,7 @@ def get_closed_in_period_details( ) -def findings_queryset( +def findings_queryset[MetricsQuerySet: (QuerySet[Finding], QuerySet[Endpoint_Status])]( qs: MetricsQuerySet, ) -> QuerySet[Finding]: """ diff --git a/dojo/metrics/views.py b/dojo/metrics/views.py index 24c68381806..42e045eeb98 100644 --- a/dojo/metrics/views.py +++ b/dojo/metrics/views.py @@ -22,6 +22,7 @@ from dojo.authorization.roles_permissions import Permissions from dojo.filters import UserFilter from dojo.forms import ProductTagCountsForm, ProductTypeCountsForm, SimpleMetricsForm +from dojo.labels import get_labels from dojo.metrics.utils import ( endpoint_queries, finding_queries, @@ -49,6 +50,9 @@ logger = logging.getLogger(__name__) +labels = get_labels() + + """ Greg, Jay status: in production @@ -58,7 +62,7 @@ def critical_product_metrics(request, mtype): template = "dojo/metrics.html" - page_name = _("Critical Product Metrics") + page_name = str(labels.ASSET_METRICS_CRITICAL_LABEL) critical_products = get_authorized_product_types(Permissions.Product_Type_View) critical_products = critical_products.filter(critical_product=True) add_breadcrumb(title=page_name, top_level=not len(request.GET), request=request) @@ -94,10 +98,10 @@ def metrics(request, mtype): filters = {} if view == "Finding": - page_name = _("Product Type Metrics by Findings") + page_name = str(labels.ORG_METRICS_BY_FINDINGS_LABEL) filters = finding_queries(prod_type, request) elif view == "Endpoint": - page_name = _("Product Type Metrics by Affected Endpoints") + page_name = str(labels.ORG_METRICS_BY_ENDPOINTS_LABEL) filters = endpoint_queries(prod_type, request) all_findings = findings_queryset(queryset_check(filters["all"])) @@ -425,7 +429,7 @@ def product_type_counts(request): for o in overall_in_pt: aip[o["numerical_severity"]] = o["numerical_severity__count"] else: - messages.add_message(request, messages.ERROR, _("Please choose month and year and the Product Type."), + messages.add_message(request, messages.ERROR, labels.ORG_METRICS_TYPE_COUNTS_ERROR_MESSAGE, extra_tags="alert-danger") add_breadcrumb(title=_("Bi-Weekly Metrics"), top_level=True, request=request) @@ -630,8 +634,7 @@ def product_tag_counts(request): for o in overall_in_pt: aip[o["numerical_severity"]] = o["numerical_severity__count"] else: - messages.add_message(request, messages.ERROR, _("Please choose month and year and the Product Tag."), - extra_tags="alert-danger") + messages.add_message(request, messages.ERROR, labels.ASSET_METRICS_TAG_COUNTS_ERROR_MESSAGE, extra_tags="alert-danger") add_breadcrumb(title=_("Bi-Weekly Metrics"), top_level=True, request=request) diff --git a/dojo/middleware.py b/dojo/middleware.py index c83e076b86c..aa954373c1c 100644 --- a/dojo/middleware.py +++ b/dojo/middleware.py @@ -5,6 +5,7 @@ from threading import local from urllib.parse import quote +import pghistory.middleware from auditlog.context import set_actor from auditlog.middleware import AuditlogMiddleware as _AuditlogMiddleware from django.conf import settings @@ -12,7 +13,10 @@ from django.http import HttpResponseRedirect from 
django.urls import reverse from django.utils.functional import SimpleLazyObject +from watson.middleware import SearchContextMiddleware +from watson.search import search_context_manager +from dojo.models import Dojo_User from dojo.product_announcements import LongRunningRequestProductAnnouncement logger = logging.getLogger(__name__) @@ -59,13 +63,12 @@ def __call__(self, request): return HttpResponseRedirect(fullURL) if request.user.is_authenticated: - logger.debug("Authenticated user: %s", str(request.user)) + logger.debug("Authenticated user: %s", request.user) with suppress(ModuleNotFoundError): # to avoid unittests to fail uwsgi = __import__("uwsgi", globals(), locals(), ["set_logvar"], 0) # this populates dd_user log var, so can appear in the uwsgi logs uwsgi.set_logvar("dd_user", str(request.user)) path = request.path_info.lstrip("/") - from dojo.models import Dojo_User if Dojo_User.force_password_reset(request.user) and path != "change_password": return HttpResponseRedirect(reverse("change_password")) @@ -77,8 +80,7 @@ class DojoSytemSettingsMiddleware: def __init__(self, get_response): self.get_response = get_response - # avoid circular imports - from dojo.models import System_Settings + from dojo.models import System_Settings # noqa: PLC0415 circular import models.signals.post_save.connect(self.cleanup, sender=System_Settings) def __call__(self, request): @@ -107,7 +109,7 @@ def cleanup(cls, *args, **kwargs): # noqa: ARG003 def load(cls): # cleanup any existing settings first to ensure fresh state cls.cleanup() - from dojo.models import System_Settings + from dojo.models import System_Settings # noqa: PLC0415 circular import system_settings = System_Settings.objects.get(no_cache=True) cls._thread_local.system_settings = system_settings return system_settings @@ -120,7 +122,7 @@ def get_from_db(self, *args, **kwargs): try: from_db = super().get(*args, **kwargs) except: - from dojo.models import System_Settings + from dojo.models import System_Settings # noqa: PLC0415 circular import # this mimics the existing code that was in filters.py and utils.py. # cases I have seen triggering this is for example manage.py collectstatic inside a docker build where mysql is not available # logger.debug('unable to get system_settings from database, constructing (new) default instance. Exception was:', exc_info=True) @@ -192,6 +194,26 @@ def __call__(self, request): return self.get_response(request) +class PgHistoryMiddleware(pghistory.middleware.HistoryMiddleware): + + """ + Custom pghistory middleware for DefectDojo that extends the built-in HistoryMiddleware + to add remote_addr context following the pattern from: + https://django-pghistory.readthedocs.io/en/3.8.1/context/#middleware + """ + + def get_context(self, request): + context = super().get_context(request) + + # Add remote address with proxy support + remote_addr = request.META.get("HTTP_X_FORWARDED_FOR") + # Get the first IP if there are multiple (proxy chain), or fall back to REMOTE_ADDR + remote_addr = remote_addr.split(",")[0].strip() if remote_addr else request.META.get("REMOTE_ADDR") + + context["remote_addr"] = remote_addr + return context + + class LongRunningRequestAlertMiddleware: def __init__(self, get_response): self.get_response = get_response @@ -211,3 +233,76 @@ def __call__(self, request): LongRunningRequestProductAnnouncement(request=request, duration=duration) return response + + +class AsyncSearchContextMiddleware(SearchContextMiddleware): + + """ + Ensures Watson index updates are triggered asynchronously. 
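The remote_addr handling in PgHistoryMiddleware above prefers the left-most entry of X-Forwarded-For, which is conventionally the original client when proxies append their upstream addresses; a standalone sketch of that extraction (the META dict is fabricated for illustration):

```python
def extract_remote_addr(meta: dict) -> str | None:
    """First hop from X-Forwarded-For when present, otherwise REMOTE_ADDR."""
    forwarded = meta.get("HTTP_X_FORWARDED_FOR")
    if forwarded:
        return forwarded.split(",")[0].strip()
    return meta.get("REMOTE_ADDR")


meta = {"HTTP_X_FORWARDED_FOR": "203.0.113.7, 10.0.0.2", "REMOTE_ADDR": "10.0.0.2"}
assert extract_remote_addr(meta) == "203.0.113.7"
assert extract_remote_addr({"REMOTE_ADDR": "10.0.0.2"}) == "10.0.0.2"
```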
+ Inherits from watson's SearchContextMiddleware to minimize the amount of code we need to maintain. + """ + + def _close_search_context(self, request): + """Override watson's close behavior to trigger async updates when above threshold.""" + if search_context_manager.is_active(): + from django.conf import settings # noqa: PLC0415 circular import + + # Extract tasks and check if we should trigger async update + captured_tasks = self._extract_tasks_for_async() + + # Get total number of instances across all model types + total_instances = sum(len(pk_list) for pk_list in captured_tasks.values()) + threshold = getattr(settings, "WATSON_ASYNC_INDEX_UPDATE_THRESHOLD", 100) + + # If threshold is below 0, async updating is disabled + if threshold < 0: + logger.debug(f"AsyncSearchContextMiddleware: Async updating disabled (threshold={threshold}), using synchronous update") + elif total_instances > threshold: + logger.debug(f"AsyncSearchContextMiddleware: {total_instances} instances > {threshold} threshold, triggering async update") + self._trigger_async_index_update(captured_tasks) + # Invalidate to prevent synchronous index update by super()._close_search_context() + search_context_manager.invalidate() + else: + logger.debug(f"AsyncSearchContextMiddleware: {total_instances} instances <= {threshold} threshold, using synchronous update") + # Let watson handle synchronous update for small numbers + + super()._close_search_context(request) + + def _extract_tasks_for_async(self): + """Extract tasks from the search context and group by model type for async processing.""" + current_tasks, _is_invalid = search_context_manager._stack[-1] + + # Group by model type for efficient batch processing + model_groups = {} + for _engine, obj in current_tasks: + model_key = f"{obj._meta.app_label}.{obj._meta.model_name}" + if model_key not in model_groups: + model_groups[model_key] = [] + model_groups[model_key].append(obj.pk) + + # Log what we extracted per model type + for model_key, pk_list in model_groups.items(): + logger.debug(f"AsyncSearchContextMiddleware: Extracted {len(pk_list)} {model_key} instances for async indexing") + + return model_groups + + def _trigger_async_index_update(self, model_groups): + """Trigger async tasks to update search indexes, chunking large lists into batches of settings.WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE.""" + if not model_groups: + return + + # Import here to avoid circular import + from django.conf import settings # noqa: PLC0415 circular import + + from dojo.tasks import update_watson_search_index_for_model # noqa: PLC0415 circular import + + # Create tasks per model type, chunking large lists into configurable batches + for model_name, pk_list in model_groups.items(): + # Chunk into batches using configurable batch size (compatible with Python 3.11) + batch_size = getattr(settings, "WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE", 1000) + batches = [pk_list[i:i + batch_size] for i in range(0, len(pk_list), batch_size)] + + # Create tasks for each batch and log each one + for i, batch in enumerate(batches, 1): + logger.debug(f"AsyncSearchContextMiddleware: Triggering batch {i}/{len(batches)} for {model_name}: {len(batch)} instances") + update_watson_search_index_for_model(model_name, batch) diff --git a/dojo/models.py b/dojo/models.py index 9d3a238d9ca..741f630fb92 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -8,12 +8,13 @@ from datetime import datetime, timedelta from decimal import Decimal from pathlib import Path +from urllib.parse import urlparse from uuid import uuid4 import 
dateutil import hyperlink import tagulous.admin -from auditlog.registry import auditlog +from dateutil.parser import parse as datetutilsparse from dateutil.relativedelta import relativedelta from django import forms from django.conf import settings @@ -41,6 +42,7 @@ from polymorphic.models import PolymorphicModel from tagulous.models import TagField from tagulous.models.managers import FakeTagRelatedManager +from titlecase import titlecase from dojo.validators import cvss3_validator, cvss4_validator @@ -670,7 +672,7 @@ class System_Settings(models.Model): "This is a performance enhancement to avoid fetching objects unnecessarily.", )) - from dojo.middleware import System_Settings_Manager + from dojo.middleware import System_Settings_Manager # noqa: PLC0415 circular import objects = System_Settings_Manager() def clean(self): @@ -870,7 +872,6 @@ def __str__(self): return self.name def get_absolute_url(self): - from django.urls import reverse return reverse("product_type", args=[str(self.id)]) def get_breadcrumbs(self): @@ -1092,7 +1093,7 @@ def save(self, *args, **kwargs): product.async_updating = True super(Product, product).save() # launch the async task to update all finding sla expiration dates - from dojo.sla_config.helpers import update_sla_expiration_dates_sla_config_async + from dojo.sla_config.helpers import update_sla_expiration_dates_sla_config_async # noqa: I001, PLC0415 circular import update_sla_expiration_dates_sla_config_async(self, products, tuple(severities)) def clean(self): @@ -1254,11 +1255,10 @@ def save(self, *args, **kwargs): sla_config.async_updating = True super(SLA_Configuration, sla_config).save() # launch the async task to update all finding sla expiration dates - from dojo.sla_config.helpers import update_sla_expiration_dates_product_async + from dojo.sla_config.helpers import update_sla_expiration_dates_product_async # noqa: I001, PLC0415 circular import update_sla_expiration_dates_product_async(self, sla_config) def get_absolute_url(self): - from django.urls import reverse return reverse("view_product", args=[str(self.id)]) @cached_property @@ -1309,7 +1309,7 @@ def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: return {} - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import findings = Finding.objects.filter(test__engagement__product=self, mitigated__isnull=True, false_p=False, @@ -1347,7 +1347,7 @@ def open_findings_list(self): @property def has_jira_configured(self): - import dojo.jira_link.helper as jira_helper + import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_configured(self) def violates_sla(self): @@ -1578,7 +1578,6 @@ def __str__(self): "%b %d, %Y")) def get_absolute_url(self): - from django.urls import reverse return reverse("view_engagement", args=[str(self.id)]) def copy(self): @@ -1624,7 +1623,7 @@ def get_breadcrumbs(self): # only used by bulk risk acceptance api @property def unaccepted_open_findings(self): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import findings = Finding.objects.filter(risk_accepted=False, active=True, duplicate=False, test__engagement=self) if get_system_setting("enforce_verified_status", True) or get_system_setting("enforce_verified_status_metrics", True): @@ -1637,7 +1636,7 @@ def accept_risks(self, accepted_risks): @property def has_jira_issue(self): - import dojo.jira_link.helper as 
jira_helper + import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_issue(self) @property @@ -1646,13 +1645,14 @@ def is_ci_cd(self): def delete(self, *args, **kwargs): logger.debug("%d engagement delete", self.id) - from dojo.finding import helper - helper.prepare_duplicates_for_delete(engagement=self) + from dojo.finding import helper as finding_helper # noqa: PLC0415 circular import + finding_helper.prepare_duplicates_for_delete(engagement=self) super().delete(*args, **kwargs) with suppress(Engagement.DoesNotExist, Product.DoesNotExist): # Suppressing a potential issue created from async delete removing # related objects in a separate task - calculate_grade(self.product) + from dojo.utils import perform_product_grading # noqa: PLC0415 circular import + perform_product_grading(self.product) def inherit_tags(self, potentially_existing_tags): # get a copy of the tags to be inherited @@ -1820,23 +1820,22 @@ def __str__(self): return url def get_absolute_url(self): - from django.urls import reverse return reverse("view_endpoint", args=[str(self.id)]) def clean(self): errors = [] null_char_list = ["0x00", "\x00"] db_type = connection.vendor - if self.protocol or self.protocol == "": + if self.protocol is not None: if not re.match(r"^[A-Za-z][A-Za-z0-9\.\-\+]+$", self.protocol): # https://tools.ietf.org/html/rfc3986#section-3.1 errors.append(ValidationError(f'Protocol "{self.protocol}" has invalid format')) - if self.protocol == "": + if not self.protocol: self.protocol = None - if self.userinfo or self.userinfo == "": + if self.userinfo is not None: if not re.match(r"^[A-Za-z0-9\.\-_~%\!\$&\'\(\)\*\+,;=:]+$", self.userinfo): # https://tools.ietf.org/html/rfc3986#section-3.2.1 errors.append(ValidationError(f'Userinfo "{self.userinfo}" has invalid format')) - if self.userinfo == "": + if not self.userinfo: self.userinfo = None if self.host: @@ -1848,7 +1847,7 @@ def clean(self): else: errors.append(ValidationError("Host must not be empty")) - if self.port or self.port == 0: + if self.port is not None: try: int_port = int(self.port) if not (0 <= int_port < 65536): @@ -1857,7 +1856,7 @@ def clean(self): except ValueError: errors.append(ValidationError(f'Port "{self.port}" has invalid format - it is not a number')) - if self.path or self.path == "": + if self.path is not None: while len(self.path) > 0 and self.path[0] == "/": # Endpoint store "root-less" path self.path = self.path[1:] if any(null_char in self.path for null_char in null_char_list): @@ -1866,11 +1865,11 @@ def clean(self): action_string = "Postgres does not accept NULL character. Attempting to replace with %00..." for remove_str in null_char_list: self.path = self.path.replace(remove_str, "%00") - logger.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}') - if self.path == "": + logger.error('Path "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string) + if not self.path: self.path = None - if self.query or self.query == "": + if self.query is not None: if len(self.query) > 0 and self.query[0] == "?": self.query = self.query[1:] if any(null_char in self.query for null_char in null_char_list): @@ -1879,11 +1878,11 @@ def clean(self): action_string = "Postgres does not accept NULL character. Attempting to replace with %00..." 
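The Endpoint.clean() rewrite above swaps truthy-or-empty tests such as `if self.protocol or self.protocol == "":` for explicit `is not None` checks, with the empty string still normalized to None at the end of each block; a standalone sketch showing the two spellings select the same values for the string fields:

```python
for value in ("https", "", None):
    old_runs = bool(value or value == "")   # original spelling
    new_runs = value is not None            # spelling introduced in this change
    assert old_runs == new_runs             # same inputs enter the validation block
# Afterwards, "" is still collapsed to None via the `if not self.<field>:` normalization.
```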
for remove_str in null_char_list: self.query = self.query.replace(remove_str, "%00") - logger.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}') - if self.query == "": + logger.error('Query "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string) + if not self.query: self.query = None - if self.fragment or self.fragment == "": + if self.fragment is not None: if len(self.fragment) > 0 and self.fragment[0] == "#": self.fragment = self.fragment[1:] if any(null_char in self.fragment for null_char in null_char_list): @@ -1892,8 +1891,8 @@ def clean(self): action_string = "Postgres does not accept NULL character. Attempting to replace with %00..." for remove_str in null_char_list: self.fragment = self.fragment.replace(remove_str, "%00") - logger.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}') - if self.fragment == "": + logger.error('Fragment "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string) + if not self.fragment: self.fragment = None if errors: @@ -2038,7 +2037,6 @@ def from_uri(uri): try: url = hyperlink.parse(url=uri) except UnicodeDecodeError: - from urllib.parse import urlparse url = hyperlink.parse(url="//" + urlparse(uri).netloc) except hyperlink.URLParseError as e: msg = f"Invalid URL format: {e}" @@ -2052,13 +2050,13 @@ def from_uri(uri): query_parts.append(f"{k}={v}") query_string = "&".join(query_parts) - protocol = url.scheme if url.scheme != "" else None + protocol = url.scheme or None userinfo = ":".join(url.userinfo) if url.userinfo not in {(), ("",)} else None - host = url.host if url.host != "" else None + host = url.host or None port = url.port path = "/".join(url.path)[:500] if url.path not in {None, (), ("",)} else None - query = query_string[:1000] if query_string is not None and query_string != "" else None - fragment = url.fragment[:500] if url.fragment is not None and url.fragment != "" else None + query = query_string[:1000] if query_string is not None and query_string else None + fragment = url.fragment[:500] if url.fragment is not None and url.fragment else None return Endpoint( protocol=protocol, @@ -2151,7 +2149,6 @@ def __str__(self): return str(self.test_type) def get_absolute_url(self): - from django.urls import reverse return reverse("view_test", args=[str(self.id)]) def test_type_name(self) -> str: @@ -2191,7 +2188,7 @@ def copy(self, engagement=None): # only used by bulk risk acceptance api @property def unaccepted_open_findings(self): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import findings = Finding.objects.filter(risk_accepted=False, active=True, duplicate=False, test=self) if get_system_setting("enforce_verified_status", True) or get_system_setting("enforce_verified_status_metrics", True): findings = findings.filter(verified=True) @@ -2254,13 +2251,15 @@ def hash_code_allows_null_cwe(self): deduplicationLogger.debug(f"HASHCODE_ALLOWS_NULL_CWE is: {hashCodeAllowsNullCwe}") return hashCodeAllowsNullCwe - def delete(self, *args, **kwargs): + def delete(self, *args, product_grading_option=True, **kwargs): logger.debug("%d test delete", self.id) super().delete(*args, **kwargs) - with suppress(Test.DoesNotExist, Engagement.DoesNotExist, Product.DoesNotExist): - # Suppressing a potential issue 
created from async delete removing - # related objects in a separate task - calculate_grade(self.engagement.product) + if product_grading_option: + with suppress(Test.DoesNotExist, Engagement.DoesNotExist, Product.DoesNotExist): + # Suppressing a potential issue created from async delete removing + # related objects in a separate task + from dojo.utils import perform_product_grading # noqa: PLC0415 circular import + perform_product_grading(self.engagement.product) @property def statistics(self): @@ -2740,16 +2739,15 @@ def __str__(self): def save(self, dedupe_option=True, rules_option=True, product_grading_option=True, # noqa: FBT002 issue_updater_option=True, push_to_jira=False, user=None, *args, **kwargs): # noqa: FBT002 - this is bit hard to fix nice have this universally fixed logger.debug("Start saving finding of id " + str(self.id) + " dedupe_option:" + str(dedupe_option) + " (self.pk is %s)", "None" if self.pk is None else "not None") - from dojo.finding import helper as finding_helper + from dojo.finding import helper as finding_helper # noqa: PLC0415 circular import # if not isinstance(self.date, (datetime, date)): # raise ValidationError(_("The 'date' field must be a valid date or datetime object.")) if not user: - from dojo.utils import get_current_user + from dojo.utils import get_current_user # noqa: PLC0415 circular import user = get_current_user() # Title Casing - from titlecase import titlecase self.title = titlecase(self.title[:511]) # Set the date of the finding if nothing is supplied if self.date is None: @@ -2788,9 +2786,9 @@ def save(self, dedupe_option=True, rules_option=True, product_grading_option=Tru if self.pk is None: # We enter here during the first call from serializers.py - from dojo.utils import apply_cwe_to_template - self = apply_cwe_to_template(self) - + from dojo.utils import apply_cwe_to_template # noqa: PLC0415 circular import + # No need to use the returned variable since `self` Is updated in memory + apply_cwe_to_template(self) if (self.file_path is not None) and (len(self.unsaved_endpoints) == 0): self.static_finding = True self.dynamic_finding = False @@ -2826,7 +2824,6 @@ def save(self, dedupe_option=True, rules_option=True, product_grading_option=Tru logger.debug("no options selected that require finding post processing") def get_absolute_url(self): - from django.urls import reverse return reverse("view_finding", args=[str(self.id)]) def copy(self, test=None): @@ -2861,20 +2858,22 @@ def copy(self, test=None): return copy - def delete(self, *args, **kwargs): + def delete(self, *args, product_grading_option=True, **kwargs): logger.debug("%d finding delete", self.id) - from dojo.finding import helper - helper.finding_delete(self) + from dojo.finding import helper as finding_helper # noqa: PLC0415 circular import + finding_helper.finding_delete(self) super().delete(*args, **kwargs) - with suppress(Finding.DoesNotExist, Test.DoesNotExist, Engagement.DoesNotExist, Product.DoesNotExist): - # Suppressing a potential issue created from async delete removing - # related objects in a separate task - calculate_grade(self.test.engagement.product) + if product_grading_option: + with suppress(Finding.DoesNotExist, Test.DoesNotExist, Engagement.DoesNotExist, Product.DoesNotExist): + # Suppressing a potential issue created from async delete removing + # related objects in a separate task + from dojo.utils import perform_product_grading # noqa: PLC0415 circular import + perform_product_grading(self.test.engagement.product) # only used by bulk risk acceptance api 
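The new keyword-only product_grading_option flag on Test.delete() and Finding.delete() lets a caller skip the per-object grade recalculation and regrade once afterwards; a sketch of that call pattern (the loop and function name are illustrative and assume all findings share one product, while perform_product_grading is the helper this diff imports from dojo.utils):

```python
from dojo.models import Finding
from dojo.utils import perform_product_grading


def purge_findings(findings: list[Finding]) -> None:
    """Hypothetical bulk cleanup that avoids N regrade passes."""
    product = None
    for finding in findings:
        product = finding.test.engagement.product
        finding.delete(product_grading_option=False)
    if product is not None:
        perform_product_grading(product)  # single regrade at the end
```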
@classmethod def unaccepted_open_findings(cls): - from dojo.utils import get_system_setting + from dojo.utils import get_system_setting # noqa: PLC0415 circular import results = cls.objects.filter(active=True, duplicate=False, risk_accepted=False) if get_system_setting("enforce_verified_status", True) or get_system_setting("enforce_verified_status_metrics", True): results = results.filter(verified=True) @@ -2946,53 +2945,56 @@ def compute_hash_code_legacy(self): # Get vulnerability_ids to use for hash_code computation def get_vulnerability_ids(self): - vulnerability_id_str = "" - if self.id is None: - if self.unsaved_vulnerability_ids: + + def _get_unsaved_vulnerability_ids(finding) -> str: + if finding.unsaved_vulnerability_ids: deduplicationLogger.debug("get_vulnerability_ids before the finding was saved") # convert list of unsaved vulnerability_ids to the list of their canonical representation - vulnerability_id_str_list = [str(vulnerability_id) for vulnerability_id in self.unsaved_vulnerability_ids] + vulnerability_id_str_list = [str(vulnerability_id) for vulnerability_id in finding.unsaved_vulnerability_ids] # deduplicate (usually done upon saving finding) and sort endpoints - vulnerability_id_str = "".join(sorted(dict.fromkeys(vulnerability_id_str_list))) - else: - deduplicationLogger.debug("finding has no unsaved vulnerability references") - else: - vulnerability_ids = Vulnerability_Id.objects.filter(finding=self) - deduplicationLogger.debug("get_vulnerability_ids after the finding was saved. Vulnerability references count: " + str(vulnerability_ids.count())) - # convert list of vulnerability_ids to the list of their canonical representation - vulnerability_id_str_list = [str(vulnerability_id) for vulnerability_id in vulnerability_ids.all()] - # sort vulnerability_ids strings - vulnerability_id_str = "".join(sorted(vulnerability_id_str_list)) - return vulnerability_id_str + return "".join(sorted(dict.fromkeys(vulnerability_id_str_list))) + deduplicationLogger.debug("finding has no unsaved vulnerability references") + return "" + + def _get_saved_vulnerability_ids(finding) -> str: + if finding.id is not None: + vulnerability_ids = Vulnerability_Id.objects.filter(finding=finding) + deduplicationLogger.debug("get_vulnerability_ids after the finding was saved. 
Vulnerability references count: " + str(vulnerability_ids.count())) + # convert list of vulnerability_ids to the list of their canonical representation + vulnerability_id_str_list = [str(vulnerability_id) for vulnerability_id in vulnerability_ids.all()] + # sort vulnerability_ids strings + return "".join(sorted(vulnerability_id_str_list)) + return "" + + return _get_saved_vulnerability_ids(self) or _get_unsaved_vulnerability_ids(self) # Get endpoints to use for hash_code computation # (This sometimes reports "None") def get_endpoints(self): - endpoint_str = "" - if (self.id is None): - if len(self.unsaved_endpoints) > 0: + + def _get_unsaved_endpoints(finding) -> str: + if len(finding.unsaved_endpoints) > 0: deduplicationLogger.debug("get_endpoints before the finding was saved") # convert list of unsaved endpoints to the list of their canonical representation - endpoint_str_list = [str(endpoint) for endpoint in self.unsaved_endpoints] + endpoint_str_list = [str(endpoint) for endpoint in finding.unsaved_endpoints] # deduplicate (usually done upon saving finding) and sort endpoints - endpoint_str = "".join( - sorted( - dict.fromkeys(endpoint_str_list))) - else: - # we can get here when the parser defines static_finding=True but leaves dynamic_finding defaulted - # In this case, before saving the finding, both static_finding and dynamic_finding are True - # After saving dynamic_finding may be set to False probably during the saving process (observed on Bandit scan before forcing dynamic_finding=False at parser level) - deduplicationLogger.debug("trying to get endpoints on a finding before it was saved but no endpoints found (static parser wrongly identified as dynamic?") - else: - deduplicationLogger.debug("get_endpoints: after the finding was saved. Endpoints count: " + str(self.endpoints.count())) - # convert list of endpoints to the list of their canonical representation - endpoint_str_list = [str(endpoint) for endpoint in self.endpoints.all()] - # sort endpoints strings - endpoint_str = "".join( - sorted( - endpoint_str_list, - )) - return endpoint_str + return "".join(dict.fromkeys(endpoint_str_list)) + # we can get here when the parser defines static_finding=True but leaves dynamic_finding defaulted + # In this case, before saving the finding, both static_finding and dynamic_finding are True + # After saving dynamic_finding may be set to False probably during the saving process (observed on Bandit scan before forcing dynamic_finding=False at parser level) + deduplicationLogger.debug("trying to get endpoints on a finding before it was saved but no endpoints found (static parser wrongly identified as dynamic?") + return "" + + def _get_saved_endpoints(finding) -> str: + if finding.id is not None: + deduplicationLogger.debug("get_endpoints: after the finding was saved. 
Endpoints count: " + str(finding.endpoints.count())) + # convert list of endpoints to the list of their canonical representation + endpoint_str_list = [str(endpoint) for endpoint in finding.endpoints.all()] + # sort endpoints strings + return "".join(sorted(endpoint_str_list)) + return "" + + return _get_saved_endpoints(self) or _get_unsaved_endpoints(self) # Compute the hash_code from the fields to hash def hash_fields(self, fields_to_hash): @@ -3084,9 +3086,8 @@ def status(self): return ", ".join([str(s) for s in status]) def _age(self, start_date): - from dateutil.parser import parse if start_date and isinstance(start_date, str): - start_date = parse(start_date).date() + start_date = datetutilsparse(start_date).date() if isinstance(start_date, datetime): start_date = start_date.date() @@ -3183,7 +3184,7 @@ def github_conf_new(self): @property def has_jira_issue(self): - import dojo.jira_link.helper as jira_helper + import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_issue(self) @cached_property @@ -3196,12 +3197,12 @@ def has_jira_group_issue(self): if not self.has_finding_group: return False - import dojo.jira_link.helper as jira_helper + import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_issue(self.finding_group) @property def has_jira_configured(self): - import dojo.jira_link.helper as jira_helper + import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_configured(self) @cached_property @@ -3281,7 +3282,7 @@ def latest_note(self): return "" def get_sast_source_file_path_with_link(self): - from dojo.utils import create_bleached_link + from dojo.utils import create_bleached_link # noqa: PLC0415 circular import if self.sast_source_file_path is None: return None if self.test.engagement.source_code_management_uri is None: @@ -3292,7 +3293,7 @@ def get_sast_source_file_path_with_link(self): return create_bleached_link(link, self.sast_source_file_path) def get_file_path_with_link(self): - from dojo.utils import create_bleached_link + from dojo.utils import create_bleached_link # noqa: PLC0415 circular import if self.file_path is None: return None if self.test.engagement.source_code_management_uri is None: @@ -3406,9 +3407,7 @@ def get_file_path_with_raw_link(self): return link def get_references_with_links(self): - import re - - from dojo.utils import create_bleached_link + from dojo.utils import create_bleached_link # noqa: PLC0415 circular import if self.references is None: return None matches = re.findall(r"([\(|\[]?(https?):((//)|(\\\\))+([\w\d:#@%/;$~_?\+-=\\\.&](#!)?)*[\)|\]]?)", self.references) @@ -3452,7 +3451,7 @@ def violates_sla(self): return (self.sla_expiration_date and self.sla_expiration_date < timezone.now().date()) def set_hash_code(self, dedupe_option): - from dojo.utils import get_custom_method + from dojo.utils import get_custom_method # noqa: PLC0415 circular import if hash_method := get_custom_method("FINDING_HASH_METHOD"): hash_method(self, dedupe_option) # Finding.save is called once from serializers.py with dedupe_option=False because the finding is not ready yet, for example the endpoints are not built @@ -3481,7 +3480,6 @@ def __str__(self): return self.vulnerability_id def get_absolute_url(self): - from django.urls import reverse return reverse("view_finding", args=[str(self.finding.id)]) @@ -3524,7 +3522,7 @@ def __str__(self): @property def has_jira_issue(self): - import dojo.jira_link.helper as jira_helper + 
import dojo.jira_link.helper as jira_helper # noqa: PLC0415 circular import return jira_helper.has_jira_issue(self) @cached_property @@ -3585,7 +3583,6 @@ def get_sla_start_date(self): return min(find.get_sla_start_date() for find in self.findings.all()) def get_absolute_url(self): - from django.urls import reverse return reverse("view_test", args=[str(self.test.id)]) class Meta: @@ -3624,7 +3621,6 @@ def __str__(self): return self.title def get_absolute_url(self): - from django.urls import reverse return reverse("edit_template", args=[str(self.id)]) def get_breadcrumbs(self): @@ -4567,7 +4563,7 @@ class TextQuestion(Question): def get_form(self): """Returns the form for this model""" - from .forms import TextQuestionForm + from .forms import TextQuestionForm # noqa: PLC0415 return TextQuestionForm @@ -4600,7 +4596,7 @@ class ChoiceQuestion(Question): def get_form(self): """Returns the form for this model""" - from .forms import ChoiceQuestionForm + from .forms import ChoiceQuestionForm # noqa: PLC0415 return ChoiceQuestionForm @@ -4700,25 +4696,12 @@ def __str__(self): return "No Response" -if settings.ENABLE_AUDITLOG: - # Register for automatic logging to database - logger.info("enabling audit logging") - auditlog.register(Dojo_User, exclude_fields=["password"]) - auditlog.register(Endpoint) - auditlog.register(Engagement) - auditlog.register(Finding, m2m_fields={"reviewers"}) - auditlog.register(Finding_Group) - auditlog.register(Product_Type) - auditlog.register(Product) - auditlog.register(Test) - auditlog.register(Risk_Acceptance) - auditlog.register(Finding_Template) - auditlog.register(Cred_User, exclude_fields=["password"]) - auditlog.register(Notification_Webhooks, exclude_fields=["header_name", "header_value"]) +# Audit logging registration is now handled in auditlog.py and configured in apps.py +# This allows for conditional registration of either django-auditlog or django-pghistory +# The audit system is configured in DojoAppConfig.ready() to ensure all models are loaded from dojo.utils import ( # noqa: E402 # there is issue due to a circular import - calculate_grade, parse_cvss_data, to_str_typed, ) diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py index 9c16d4b2cee..f59060331d1 100644 --- a/dojo/notifications/helper.py +++ b/dojo/notifications/helper.py @@ -19,6 +19,7 @@ from dojo.authorization.roles_permissions import Permissions from dojo.celery import app from dojo.decorators import dojo_async_task, we_want_async +from dojo.labels import get_labels from dojo.models import ( Alerts, Dojo_User, @@ -41,6 +42,9 @@ logger = logging.getLogger(__name__) +labels = get_labels() + + def create_notification( event: str | None = None, title: str | None = None, @@ -120,9 +124,9 @@ def _get_system_settings(self) -> System_Settings: def _create_description(self, event: str, kwargs: dict) -> str: if kwargs.get("description") is None: if event == "product_added": - kwargs["description"] = _("Product %s has been created successfully.") % kwargs["title"] + kwargs["description"] = labels.ASSET_NOTIFICATION_WITH_NAME_CREATED_MESSAGE % {"name": kwargs["title"]} elif event == "product_type_added": - kwargs["description"] = _("Product Type %s has been created successfully.") % kwargs["title"] + kwargs["description"] = labels.ORG_NOTIFICATION_WITH_NAME_CREATED_MESSAGE % {"name": kwargs["title"]} else: kwargs["description"] = _("Event %s has occurred.") % str(event) @@ -234,7 +238,7 @@ def send_slack_notification( elif self.system_settings.slack_channel is not None: channel 
= self.system_settings.slack_channel logger.info( - f"Sending system notification to system channel {channel}.", + "Sending system notification to system channel %s.", channel, ) self._post_slack_message(event, user, channel, **kwargs) else: @@ -272,11 +276,11 @@ def _get_slack_user_id(self, user_email: str) -> str: if user_email == user["user"]["profile"]["email"]: if "id" in user["user"]: user_id = user["user"]["id"] - logger.debug(f"Slack user ID is {user_id}") + logger.debug("Slack user ID is %s", user_id) slack_user_is_found = True else: logger.warning( - f"A user with email {user_email} could not be found in this Slack workspace.", + "A user with email %s could not be found in this Slack workspace.", user_email, ) if not slack_user_is_found: @@ -335,9 +339,10 @@ def send_msteams_notification( "msteams", kwargs, ), + headers={"Content-Type": "application/json"}, timeout=settings.REQUESTS_TIMEOUT, ) - if res.status_code != 200: + if not (200 <= res.status_code < 300): logger.error("Error when sending message to Microsoft Teams") logger.error(res.status_code) logger.error(res.text) @@ -496,7 +501,7 @@ def _get_webhook_endpoints( if not endpoints.exists(): if user: logger.info( - f"URLs for Webhooks not configured for user '{user}': skipping user notification", + "URLs for Webhooks not configured for user '%s': skipping user notification", user, ) else: logger.info( @@ -695,8 +700,7 @@ def _process_objects(self, **kwargs: dict) -> None: self.product = finding.test.engagement.product logger.debug("Defined product of finding %s", self.product) elif (obj := kwargs.get("obj")) is not None: - from dojo.utils import get_product - + from dojo.utils import get_product # noqa: PLC0415 circular import self.product = get_product(obj) logger.debug("Defined product of obj %s", self.product) diff --git a/dojo/object/views.py b/dojo/object/views.py index ad649885bf1..96616c556a8 100644 --- a/dojo/object/views.py +++ b/dojo/object/views.py @@ -9,14 +9,18 @@ from dojo.authorization.authorization_decorators import user_is_authorized from dojo.authorization.roles_permissions import Permissions from dojo.forms import DeleteObjectsSettingsForm, ObjectSettingsForm +from dojo.labels import get_labels from dojo.models import Objects_Product, Product from dojo.utils import Product_Tab logger = logging.getLogger(__name__) +labels = get_labels() + @user_is_authorized(Product, Permissions.Product_Tracking_Files_Add, "pid") def new_object(request, pid): + page_name = labels.ASSET_TRACKED_FILES_ADD_LABEL prod = get_object_or_404(Product, id=pid) if request.method == "POST": tform = ObjectSettingsForm(request.POST) @@ -27,15 +31,17 @@ def new_object(request, pid): messages.add_message(request, messages.SUCCESS, - "Added Tracked File to a Product", + labels.ASSET_TRACKED_FILES_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) return None tform = ObjectSettingsForm() - product_tab = Product_Tab(prod, title="Add Tracked Files to a Product", tab="settings") + product_tab = Product_Tab(prod, title=str(page_name), tab="settings") return render(request, "dojo/new_object.html", - {"tform": tform, + { + "name": page_name, + "tform": tform, "product_tab": product_tab, "pid": prod.id}) @@ -45,7 +51,7 @@ def view_objects(request, pid): product = get_object_or_404(Product, id=pid) object_queryset = Objects_Product.objects.filter(product=pid).order_by("path", "folder", "artifact") - product_tab = Product_Tab(product, title="Tracked Product Files, Paths and Artifacts", 
tab="settings") + product_tab = Product_Tab(product, title="Tracked Files, Paths and Artifacts", tab="settings") return render(request, "dojo/view_objects.html", { @@ -60,7 +66,8 @@ def edit_object(request, pid, ttid): object_prod = Objects_Product.objects.get(pk=ttid) product = get_object_or_404(Product, id=pid) if object_prod.product != product: - msg = f"Product {pid} does not fit to product of Object {object_prod.product.id}" + msg = labels.ASSET_TRACKED_FILES_ID_MISMATCH_ERROR_MESSAGE % {"asset_id": pid, + "object_asset_id": object_prod.product.id} raise BadRequest(msg) if request.method == "POST": @@ -70,7 +77,7 @@ def edit_object(request, pid, ttid): messages.add_message(request, messages.SUCCESS, - "Tool Product Configuration Successfully Updated.", + "Tracked File Successfully Updated.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) else: @@ -90,7 +97,8 @@ def delete_object(request, pid, ttid): object_prod = Objects_Product.objects.get(pk=ttid) product = get_object_or_404(Product, id=pid) if object_prod.product != product: - msg = f"Product {pid} does not fit to product of Object {object_prod.product.id}" + msg = labels.ASSET_TRACKED_FILES_ID_MISMATCH_ERROR_MESSAGE % {"asset_id": pid, + "object_asset_id": object_prod.product.id} raise BadRequest(msg) if request.method == "POST": @@ -98,12 +106,12 @@ def delete_object(request, pid, ttid): object_prod.delete() messages.add_message(request, messages.SUCCESS, - "Tracked Product Files Deleted.", + "Tracked Files Deleted.", extra_tags="alert-success") return HttpResponseRedirect(reverse("view_objects", args=(pid,))) tform = DeleteObjectsSettingsForm(instance=object_prod) - product_tab = Product_Tab(product, title="Delete Product Tool Configuration", tab="settings") + product_tab = Product_Tab(product, title="Delete Tracked File", tab="settings") return render(request, "dojo/delete_object.html", { diff --git a/dojo/organization/__init__.py b/dojo/organization/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/organization/api/__init__.py b/dojo/organization/api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/dojo/organization/api/filters.py b/dojo/organization/api/filters.py new file mode 100644 index 00000000000..14e282e2ec9 --- /dev/null +++ b/dojo/organization/api/filters.py @@ -0,0 +1,36 @@ +from django_filters import BooleanFilter, NumberFilter +from django_filters.rest_framework import FilterSet + +from dojo.labels import get_labels +from dojo.models import ( + Product_Type, + Product_Type_Group, + Product_Type_Member, +) + +labels = get_labels() + + +class OrganizationFilterSet(FilterSet): + critical_asset = BooleanFilter(field_name="critical_product") + key_asset = BooleanFilter(field_name="key_product") + + class Meta: + model = Product_Type + fields = ("id", "name", "created", "updated") + + +class OrganizationMemberFilterSet(FilterSet): + organization_id = NumberFilter(field_name="product_type_id") + + class Meta: + model = Product_Type_Member + fields = ("id", "user_id") + + +class OrganizationGroupFilterSet(FilterSet): + asset_type_id = NumberFilter(field_name="product_type_id") + + class Meta: + model = Product_Type_Group + fields = ("id", "group_id") diff --git a/dojo/organization/api/serializers.py b/dojo/organization/api/serializers.py new file mode 100644 index 00000000000..d624c72524d --- /dev/null +++ b/dojo/organization/api/serializers.py @@ -0,0 +1,123 @@ +from rest_framework import serializers +from 
rest_framework.exceptions import PermissionDenied, ValidationError + +from dojo.authorization.authorization import user_has_permission +from dojo.authorization.roles_permissions import Permissions +from dojo.models import ( + Product_Type, + Product_Type_Group, + Product_Type_Member, +) +from dojo.product_type.queries import get_authorized_product_types + + +class RelatedOrganizationField(serializers.PrimaryKeyRelatedField): + def get_queryset(self): + return get_authorized_product_types(Permissions.Product_Type_View) + + +class OrganizationMemberSerializer(serializers.ModelSerializer): + organization = RelatedOrganizationField(source="product_type") + + class Meta: + model = Product_Type_Member + exclude = ("product_type",) + + def validate(self, data): + if ( + self.instance is not None + and data.get("organization") != self.instance.product_type + and not user_has_permission( + self.context["request"].user, + data.get("organization"), + Permissions.Product_Type_Manage_Members, + ) + ): + msg = "You are not permitted to add a member to this Organization" + raise PermissionDenied(msg) + + if ( + self.instance is None + or data.get("organization") != self.instance.product_type + or data.get("user") != self.instance.user + ): + members = Product_Type_Member.objects.filter( + product_type=data.get("organization"), user=data.get("user"), + ) + if members.count() > 0: + msg = "Organization Member already exists" + raise ValidationError(msg) + + if self.instance is not None and not data.get("role").is_owner: + owners = ( + Product_Type_Member.objects.filter( + product_type=data.get("organization"), role__is_owner=True, + ) + .exclude(id=self.instance.id) + .count() + ) + if owners < 1: + msg = "There must be at least one owner" + raise ValidationError(msg) + + if data.get("role").is_owner and not user_has_permission( + self.context["request"].user, + data.get("organization"), + Permissions.Product_Type_Member_Add_Owner, + ): + msg = "You are not permitted to add a member as Owner to this Organization" + raise PermissionDenied(msg) + + return data + + +class OrganizationGroupSerializer(serializers.ModelSerializer): + organization = RelatedOrganizationField(source="product_type") + + class Meta: + model = Product_Type_Group + exclude = ("product_type",) + + def validate(self, data): + if ( + self.instance is not None + and data.get("organization") != self.instance.product_type + and not user_has_permission( + self.context["request"].user, + data.get("organization"), + Permissions.Product_Type_Group_Add, + ) + ): + msg = "You are not permitted to add a group to this Organization" + raise PermissionDenied(msg) + + if ( + self.instance is None + or data.get("organization") != self.instance.product_type + or data.get("group") != self.instance.group + ): + members = Product_Type_Group.objects.filter( + product_type=data.get("organization"), group=data.get("group"), + ) + if members.count() > 0: + msg = "Organization Group already exists" + raise ValidationError(msg) + + if data.get("role").is_owner and not user_has_permission( + self.context["request"].user, + data.get("organization"), + Permissions.Product_Type_Group_Add_Owner, + ): + msg = "You are not permitted to add a group as Owner to this Organization" + raise PermissionDenied(msg) + + return data + + +class OrganizationSerializer(serializers.ModelSerializer): + critical_asset = serializers.BooleanField(source="critical_product") + key_asset = serializers.BooleanField(source="key_product") + + class Meta: + model = Product_Type + exclude = 
("critical_product", "key_product") diff --git a/dojo/organization/api/urls.py b/dojo/organization/api/urls.py new file mode 100644 index 00000000000..a0bec88cb2d --- /dev/null +++ b/dojo/organization/api/urls.py @@ -0,0 +1,12 @@ +from dojo.organization.api.views import ( + OrganizationGroupViewSet, + OrganizationMemberViewSet, + OrganizationViewSet, +) + + +def add_organization_urls(router): + router.register(r"organizations", OrganizationViewSet, basename="organization") + router.register(r"organization_members", OrganizationMemberViewSet, basename="organization_member") + router.register(r"organization_groups", OrganizationGroupViewSet, basename="organization_group") + return router diff --git a/dojo/organization/api/views.py b/dojo/organization/api/views.py new file mode 100644 index 00000000000..dc9f3fc0cc2 --- /dev/null +++ b/dojo/organization/api/views.py @@ -0,0 +1,183 @@ +from django_filters.rest_framework import DjangoFilterBackend +from drf_spectacular.utils import extend_schema, extend_schema_view +from rest_framework import status +from rest_framework.decorators import action +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response + +from dojo.api_v2 import permissions +from dojo.api_v2.serializers import ReportGenerateOptionSerializer, ReportGenerateSerializer +from dojo.api_v2.views import PrefetchDojoModelViewSet, report_generate, schema_with_prefetch +from dojo.authorization.roles_permissions import Permissions +from dojo.models import ( + Product_Type, + Product_Type_Group, + Product_Type_Member, + Role, +) +from dojo.organization.api import serializers +from dojo.organization.api.filters import ( + OrganizationGroupFilterSet, + OrganizationMemberFilterSet, +) +from dojo.product_type.queries import ( + get_authorized_product_type_groups, + get_authorized_product_type_members, + get_authorized_product_types, +) +from dojo.utils import async_delete, get_setting + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class OrganizationViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.OrganizationSerializer + queryset = Product_Type.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_fields = [ + "id", + "name", + "critical_product", + "key_product", + "created", + "updated", + ] + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductTypePermission, + ) + + def get_queryset(self): + return get_authorized_product_types( + Permissions.Product_Type_View, + ).distinct() + + # Overwrite perfom_create of CreateModelMixin to add current user as owner + def perform_create(self, serializer): + serializer.save() + product_type_data = serializer.data + product_type_data.pop("authorization_groups") + product_type_data.pop("members") + member = Product_Type_Member() + member.user = self.request.user + member.product_type = Product_Type(**product_type_data) + member.role = Role.objects.get(is_owner=True) + member.save() + + def destroy(self, request, *args, **kwargs): + instance = self.get_object() + if get_setting("ASYNC_OBJECT_DELETE"): + async_del = async_delete() + async_del.delete(instance) + else: + instance.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + @extend_schema( + request=ReportGenerateOptionSerializer, + responses={status.HTTP_200_OK: ReportGenerateSerializer}, + ) + @action( + detail=True, methods=["post"], permission_classes=[IsAuthenticated], + ) + def generate_report(self, request, pk=None): + product_type = 
self.get_object() + + options = {} + # prepare post data + report_options = ReportGenerateOptionSerializer( + data=request.data, + ) + if report_options.is_valid(): + options["include_finding_notes"] = report_options.validated_data[ + "include_finding_notes" + ] + options["include_finding_images"] = report_options.validated_data[ + "include_finding_images" + ] + options[ + "include_executive_summary" + ] = report_options.validated_data["include_executive_summary"] + options[ + "include_table_of_contents" + ] = report_options.validated_data["include_table_of_contents"] + else: + return Response( + report_options.errors, status=status.HTTP_400_BAD_REQUEST, + ) + + data = report_generate(request, product_type, options) + report = ReportGenerateSerializer(data) + return Response(report.data) + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class OrganizationMemberViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.OrganizationMemberSerializer + queryset = Product_Type_Member.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = OrganizationMemberFilterSet + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductTypeMemberPermission, + ) + + def get_queryset(self): + return get_authorized_product_type_members( + Permissions.Product_Type_View, + ).distinct() + + def destroy(self, request, *args, **kwargs): + instance = self.get_object() + if instance.role.is_owner: + owners = Product_Type_Member.objects.filter( + product_type=instance.product_type, role__is_owner=True, + ).count() + if owners <= 1: + return Response( + "There must be at least one owner", + status=status.HTTP_400_BAD_REQUEST, + ) + self.perform_destroy(instance) + return Response(status=status.HTTP_204_NO_CONTENT) + + @extend_schema( + exclude=True, + ) + def partial_update(self, request, pk=None): + # Object authorization won't work if not all data is provided + response = {"message": "Patch function is not offered in this path."} + return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED) + + +# Authorization: object-based +@extend_schema_view(**schema_with_prefetch()) +class OrganizationGroupViewSet( + PrefetchDojoModelViewSet, +): + serializer_class = serializers.OrganizationGroupSerializer + queryset = Product_Type_Group.objects.none() + filter_backends = (DjangoFilterBackend,) + filterset_class = OrganizationGroupFilterSet + permission_classes = ( + IsAuthenticated, + permissions.UserHasProductTypeGroupPermission, + ) + + def get_queryset(self): + return get_authorized_product_type_groups( + Permissions.Product_Type_Group_View, + ).distinct() + + @extend_schema( + exclude=True, + ) + def partial_update(self, request, pk=None): + # Object authorization won't work if not all data is provided + response = {"message": "Patch function is not offered in this path."} + return Response(response, status=status.HTTP_405_METHOD_NOT_ALLOWED) diff --git a/dojo/organization/labels.py b/dojo/organization/labels.py new file mode 100644 index 00000000000..911695f5866 --- /dev/null +++ b/dojo/organization/labels.py @@ -0,0 +1,196 @@ +from django.conf import settings +from django.utils.translation import gettext_lazy as _ + + +class OrganizationLabelsKeys: + + """Directory of text copy used by the Organization model.""" + + ORG_LABEL = "org.label" + ORG_PLURAL_LABEL = "org.plural_label" + ORG_ALL_LABEL = "org.all_label" + ORG_WITH_NAME_LABEL = "org.with_name_label" + ORG_NONE_FOUND_MESSAGE = "org.none_found_label" + ORG_REPORT_LABEL = 
"org.report_label" + ORG_REPORT_TITLE = "org.report_title" + ORG_REPORT_WITH_NAME_TITLE = "org.report_with_name_title" + ORG_METRICS_LABEL = "org.metrics.label" + ORG_METRICS_COUNTS_LABEL = "org.metrics.counts_label" + ORG_METRICS_BY_FINDINGS_LABEL = "org.metrics_by_findings_label" + ORG_METRICS_BY_ENDPOINTS_LABEL = "org.metrics_by_endpoints_label" + ORG_METRICS_TYPE_COUNTS_ERROR_MESSAGE = "org.metrics_type_counts_error_message" + ORG_OPTIONS_LABEL = "org.options_label" + ORG_NOTIFICATION_WITH_NAME_CREATED_MESSAGE = "org.notification_with_name_created_message" + ORG_CRITICAL_PRODUCT_LABEL = "org.critical_product_label" + ORG_KEY_PRODUCT_LABEL = "org.key_product_label" + ORG_FILTERS_LABEL = "org.filters.label" + ORG_FILTERS_LABEL_HELP = "org.filters.label_help" + ORG_FILTERS_NAME_LABEL = "org.filters.name_label" + ORG_FILTERS_NAME_HELP = "org.filters.name_help" + ORG_FILTERS_NAME_EXACT_LABEL = "org.filters.name_exact_label" + ORG_FILTERS_NAME_CONTAINS_LABEL = "org.filters.name_contains_label" + ORG_FILTERS_NAME_CONTAINS_HELP = "org.filters.name_contains_help" + ORG_FILTERS_TAGS_LABEL = "org.filters.tags_label" + ORG_USERS_LABEL = "org.users.label" + ORG_USERS_NO_ACCESS_MESSAGE = "org.users.no_access_message" + ORG_USERS_ADD_ORGANIZATIONS_LABEL = "org.users.add_organizations_label" + ORG_USERS_DELETE_LABEL = "org.users.delete_label" + ORG_USERS_DELETE_SUCCESS_MESSAGE = "org.users.delete_success_message" + ORG_USERS_ADD_LABEL = "org.users.add_label" + ORG_USERS_ADD_SUCCESS_MESSAGE = "org.users.add_success_message" + ORG_USERS_UPDATE_LABEL = "org.users.update_label" + ORG_USERS_UPDATE_SUCCESS_MESSAGE = "org.users.update_success_message" + ORG_USERS_MINIMUM_NUMBER_WITH_NAME_MESSAGE = "org.users.minimum_number_with_name_message" + ORG_GROUPS_LABEL = "org.groups.label" + ORG_GROUPS_NO_ACCESS_MESSAGE = "org.groups.no_access_message" + ORG_GROUPS_ADD_ORGANIZATIONS_LABEL = "org.groups.add_organizations_label" + ORG_GROUPS_NUM_ORGANIZATIONS_LABEL = "org.groups.num_organizations_label" + ORG_GROUPS_ADD_LABEL = "org.groups.add_label" + ORG_GROUPS_ADD_SUCCESS_MESSAGE = "org.groups.add_success_message" + ORG_GROUPS_UPDATE_LABEL = "org.groups.update_label" + ORG_GROUPS_UPDATE_SUCCESS_MESSAGE = "org.groups.update_success_message" + ORG_GROUPS_DELETE_LABEL = "org.groups.delete_label" + ORG_GROUPS_DELETE_SUCCESS_MESSAGE = "org.groups.delete_success_message" + ORG_CREATE_LABEL = "org.create.label" + ORG_CREATE_SUCCESS_MESSAGE = "org.create.success_message" + ORG_READ_LABEL = "org.read.label" + ORG_READ_LIST_LABEL = "org.read.list_label" + ORG_UPDATE_LABEL = "org.update.label" + ORG_UPDATE_WITH_NAME_LABEL = "org.update.with_name_label" + ORG_UPDATE_SUCCESS_MESSAGE = "org.update.success_message" + ORG_DELETE_LABEL = "org.delete.label" + ORG_DELETE_WITH_NAME_LABEL = "org.delete.with_name_label" + ORG_DELETE_CONFIRM_MESSAGE = "org.delete.confirm_message" + ORG_DELETE_SUCCESS_MESSAGE = "org.delete.success_message" + ORG_DELETE_SUCCESS_ASYNC_MESSAGE = "org.delete.success_async_message" + ORG_DELETE_WITH_NAME_SUCCESS_MESSAGE = "org.delete.with_name_success_message" + ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE = "org.delete.with_name_with_user_success_message" + + +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + labels = { + OrganizationLabelsKeys.ORG_LABEL: _("Organization"), + OrganizationLabelsKeys.ORG_PLURAL_LABEL: _("Organizations"), + OrganizationLabelsKeys.ORG_ALL_LABEL: _("All Organizations"), + OrganizationLabelsKeys.ORG_WITH_NAME_LABEL: 
_("Organization '%(name)s'"), + OrganizationLabelsKeys.ORG_NONE_FOUND_MESSAGE: _("No Organizations found"), + OrganizationLabelsKeys.ORG_REPORT_LABEL: _("Organization Report"), + OrganizationLabelsKeys.ORG_REPORT_TITLE: _("Organization Report"), + OrganizationLabelsKeys.ORG_REPORT_WITH_NAME_TITLE: _("Organization Report: %(name)s"), + OrganizationLabelsKeys.ORG_METRICS_LABEL: _("Organization Metrics"), + OrganizationLabelsKeys.ORG_METRICS_COUNTS_LABEL: _("Organization Counts"), + OrganizationLabelsKeys.ORG_METRICS_BY_FINDINGS_LABEL: _("Organization Metrics by Findings"), + OrganizationLabelsKeys.ORG_METRICS_BY_ENDPOINTS_LABEL: _("Organization Metrics by Affected Endpoints"), + OrganizationLabelsKeys.ORG_METRICS_TYPE_COUNTS_ERROR_MESSAGE: _("Please choose month and year and the Organization."), + OrganizationLabelsKeys.ORG_OPTIONS_LABEL: _("Organization Options"), + OrganizationLabelsKeys.ORG_NOTIFICATION_WITH_NAME_CREATED_MESSAGE: _("Organization %(name)s has been created successfully."), + OrganizationLabelsKeys.ORG_CRITICAL_PRODUCT_LABEL: _("Critical Asset"), + OrganizationLabelsKeys.ORG_KEY_PRODUCT_LABEL: _("Key Asset"), + OrganizationLabelsKeys.ORG_FILTERS_LABEL: _("Organization"), + OrganizationLabelsKeys.ORG_FILTERS_LABEL_HELP: _("Search for Organization names that are an exact match"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_LABEL: _("Organization Name"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_HELP: _("Search for Organization names that are an exact match"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_EXACT_LABEL: _("Exact Organization Name"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_CONTAINS_LABEL: _("Organization Name Contains"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_CONTAINS_HELP: _("Search for Organization names that contain a given pattern"), + OrganizationLabelsKeys.ORG_FILTERS_TAGS_LABEL: _("Tags (Organization)"), + OrganizationLabelsKeys.ORG_USERS_LABEL: _("Organizations this User can access"), + OrganizationLabelsKeys.ORG_USERS_NO_ACCESS_MESSAGE: _("This User is not assigned to any Organizations."), + OrganizationLabelsKeys.ORG_USERS_ADD_ORGANIZATIONS_LABEL: _("Add Organizations"), + OrganizationLabelsKeys.ORG_USERS_DELETE_LABEL: _("Delete Organization Member"), + OrganizationLabelsKeys.ORG_USERS_DELETE_SUCCESS_MESSAGE: _("Organization member deleted successfully."), + OrganizationLabelsKeys.ORG_USERS_ADD_LABEL: _("Add Organization Member"), + OrganizationLabelsKeys.ORG_USERS_ADD_SUCCESS_MESSAGE: _("Organization members added successfully."), + OrganizationLabelsKeys.ORG_USERS_UPDATE_LABEL: _("Edit Organization Member"), + OrganizationLabelsKeys.ORG_USERS_UPDATE_SUCCESS_MESSAGE: _("Organization member updated successfully."), + OrganizationLabelsKeys.ORG_USERS_MINIMUM_NUMBER_WITH_NAME_MESSAGE: _("There must be at least one owner for Organization %(name)s."), + OrganizationLabelsKeys.ORG_GROUPS_LABEL: _("Organizations this Group can access"), + OrganizationLabelsKeys.ORG_GROUPS_NO_ACCESS_MESSAGE: _("This Group cannot access any Organizations."), + OrganizationLabelsKeys.ORG_GROUPS_ADD_ORGANIZATIONS_LABEL: _("Add Organizations"), + OrganizationLabelsKeys.ORG_GROUPS_NUM_ORGANIZATIONS_LABEL: _("Number of Organizations"), + OrganizationLabelsKeys.ORG_GROUPS_ADD_LABEL: _("Add Organization Group"), + OrganizationLabelsKeys.ORG_GROUPS_ADD_SUCCESS_MESSAGE: _("Organization groups added successfully."), + OrganizationLabelsKeys.ORG_GROUPS_UPDATE_LABEL: _("Edit Organization Group"), + OrganizationLabelsKeys.ORG_GROUPS_UPDATE_SUCCESS_MESSAGE: _("Organization group updated 
successfully."), + OrganizationLabelsKeys.ORG_GROUPS_DELETE_LABEL: _("Delete Organization Group"), + OrganizationLabelsKeys.ORG_GROUPS_DELETE_SUCCESS_MESSAGE: _("Organization group deleted successfully."), + OrganizationLabelsKeys.ORG_CREATE_LABEL: _("Add Organization"), + OrganizationLabelsKeys.ORG_CREATE_SUCCESS_MESSAGE: _("Organization added successfully."), + OrganizationLabelsKeys.ORG_READ_LABEL: _("View Organization"), + OrganizationLabelsKeys.ORG_READ_LIST_LABEL: _("List Organizations"), + OrganizationLabelsKeys.ORG_UPDATE_LABEL: _("Edit Organization"), + OrganizationLabelsKeys.ORG_UPDATE_WITH_NAME_LABEL: _("Edit Organization %(name)s"), + OrganizationLabelsKeys.ORG_UPDATE_SUCCESS_MESSAGE: _("Organization updated successfully."), + OrganizationLabelsKeys.ORG_DELETE_LABEL: _("Delete Organization"), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_LABEL: _("Delete Organization %(name)s"), + OrganizationLabelsKeys.ORG_DELETE_CONFIRM_MESSAGE: _( + "Deleting this Organization will remove any related objects associated with it. These relationships are listed below:"), + OrganizationLabelsKeys.ORG_DELETE_SUCCESS_MESSAGE: _("Organization and relationships removed."), + OrganizationLabelsKeys.ORG_DELETE_SUCCESS_ASYNC_MESSAGE: _("Organization and relationships will be removed in the background."), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_SUCCESS_MESSAGE: _('The Organization "%(name)s" was deleted'), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE: _('The Organization "%(name)s" was deleted by %(user)s'), + } +else: + labels = { + OrganizationLabelsKeys.ORG_LABEL: _("Product Type"), + OrganizationLabelsKeys.ORG_PLURAL_LABEL: _("Product Types"), + OrganizationLabelsKeys.ORG_ALL_LABEL: _("All Product Types"), + OrganizationLabelsKeys.ORG_WITH_NAME_LABEL: _("Product Type '%(name)s'"), + OrganizationLabelsKeys.ORG_NONE_FOUND_MESSAGE: _("No Product Types found"), + OrganizationLabelsKeys.ORG_REPORT_LABEL: _("Product Type Report"), + OrganizationLabelsKeys.ORG_REPORT_TITLE: _("Product Type Report"), + OrganizationLabelsKeys.ORG_REPORT_WITH_NAME_TITLE: _("Product Type Report: %(name)s"), + OrganizationLabelsKeys.ORG_METRICS_LABEL: _("Product Type Metrics"), + OrganizationLabelsKeys.ORG_METRICS_COUNTS_LABEL: _("Product Type Counts"), + OrganizationLabelsKeys.ORG_METRICS_BY_FINDINGS_LABEL: _("Product Type Metrics by Findings"), + OrganizationLabelsKeys.ORG_METRICS_BY_ENDPOINTS_LABEL: _("Product Type Metrics by Affected Endpoints"), + OrganizationLabelsKeys.ORG_METRICS_TYPE_COUNTS_ERROR_MESSAGE: _("Please choose month and year and the Product Type."), + OrganizationLabelsKeys.ORG_OPTIONS_LABEL: _("Product Type Options"), + OrganizationLabelsKeys.ORG_NOTIFICATION_WITH_NAME_CREATED_MESSAGE: _("Product Type %(name)s has been created successfully."), + OrganizationLabelsKeys.ORG_CRITICAL_PRODUCT_LABEL: _("Critical Product"), + OrganizationLabelsKeys.ORG_KEY_PRODUCT_LABEL: _("Key Product"), + OrganizationLabelsKeys.ORG_FILTERS_LABEL: _("Product Type"), + OrganizationLabelsKeys.ORG_FILTERS_LABEL_HELP: _("Search for Product Type names that are an exact match"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_LABEL: _("Product Type Name"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_HELP: _("Search for Product Type names that are an exact match"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_EXACT_LABEL: _("Exact Product Type Name"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_CONTAINS_LABEL: _("Product Type Name Contains"), + OrganizationLabelsKeys.ORG_FILTERS_NAME_CONTAINS_HELP: _("Search for 
Product Type names that contain a given pattern"), + OrganizationLabelsKeys.ORG_FILTERS_TAGS_LABEL: _("Tags (Product Type)"), + OrganizationLabelsKeys.ORG_USERS_LABEL: _("Product Types this User can access"), + OrganizationLabelsKeys.ORG_USERS_NO_ACCESS_MESSAGE: _("This User is not assigned to any Product Types."), + OrganizationLabelsKeys.ORG_USERS_ADD_ORGANIZATIONS_LABEL: _("Add Product Types"), + OrganizationLabelsKeys.ORG_USERS_DELETE_LABEL: _("Delete Product Type Member"), + OrganizationLabelsKeys.ORG_USERS_DELETE_SUCCESS_MESSAGE: _("Product Type member deleted successfully."), + OrganizationLabelsKeys.ORG_USERS_ADD_LABEL: _("Add Product Type Member"), + OrganizationLabelsKeys.ORG_USERS_ADD_SUCCESS_MESSAGE: _("Product Type members added successfully."), + OrganizationLabelsKeys.ORG_USERS_UPDATE_LABEL: _("Edit Product Type Member"), + OrganizationLabelsKeys.ORG_USERS_UPDATE_SUCCESS_MESSAGE: _("Product Type member updated successfully."), + OrganizationLabelsKeys.ORG_USERS_MINIMUM_NUMBER_WITH_NAME_MESSAGE: _("There must be at least one owner for Product Type %(name)s."), + OrganizationLabelsKeys.ORG_GROUPS_LABEL: _("Product Types this Group can access"), + OrganizationLabelsKeys.ORG_GROUPS_NO_ACCESS_MESSAGE: _("This Group cannot access any Product Types."), + OrganizationLabelsKeys.ORG_GROUPS_ADD_ORGANIZATIONS_LABEL: _("Add Product Types"), + OrganizationLabelsKeys.ORG_GROUPS_NUM_ORGANIZATIONS_LABEL: _("Number of Product Types"), + OrganizationLabelsKeys.ORG_GROUPS_ADD_LABEL: _("Add Product Type Group"), + OrganizationLabelsKeys.ORG_GROUPS_ADD_SUCCESS_MESSAGE: _("Product Type groups added successfully."), + OrganizationLabelsKeys.ORG_GROUPS_UPDATE_LABEL: _("Edit Product Type Group"), + OrganizationLabelsKeys.ORG_GROUPS_UPDATE_SUCCESS_MESSAGE: _("Product Type group updated successfully."), + OrganizationLabelsKeys.ORG_GROUPS_DELETE_LABEL: _("Delete Product Type Group"), + OrganizationLabelsKeys.ORG_GROUPS_DELETE_SUCCESS_MESSAGE: _("Product Type group deleted successfully."), + OrganizationLabelsKeys.ORG_CREATE_LABEL: _("Add Product Type"), + OrganizationLabelsKeys.ORG_CREATE_SUCCESS_MESSAGE: _("Product Type added successfully."), + OrganizationLabelsKeys.ORG_READ_LABEL: _("View Product Type"), + OrganizationLabelsKeys.ORG_READ_LIST_LABEL: _("List Product Types"), + OrganizationLabelsKeys.ORG_UPDATE_LABEL: _("Edit Product Type"), + OrganizationLabelsKeys.ORG_UPDATE_WITH_NAME_LABEL: _("Edit Product Type %(name)s"), + OrganizationLabelsKeys.ORG_UPDATE_SUCCESS_MESSAGE: _("Product Type updated successfully."), + OrganizationLabelsKeys.ORG_DELETE_LABEL: _("Delete Product Type"), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_LABEL: _("Delete Product Type %(name)s"), + OrganizationLabelsKeys.ORG_DELETE_CONFIRM_MESSAGE: _( + "Deleting this Product Type will remove any related objects associated with it. 
These relationships are listed below:"), + OrganizationLabelsKeys.ORG_DELETE_SUCCESS_MESSAGE: _("Product Type and relationships removed."), + OrganizationLabelsKeys.ORG_DELETE_SUCCESS_ASYNC_MESSAGE: _("Product Type and relationships will be removed in the background."), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_SUCCESS_MESSAGE: _('The product type "%(name)s" was deleted'), + OrganizationLabelsKeys.ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE: _('The product type "%(name)s" was deleted by %(user)s'), + } diff --git a/dojo/organization/urls.py b/dojo/organization/urls.py new file mode 100644 index 00000000000..ceba6767d96 --- /dev/null +++ b/dojo/organization/urls.py @@ -0,0 +1,125 @@ +from django.conf import settings +from django.urls import re_path + +from dojo.product import views as product_views +from dojo.product_type import views +from dojo.utils import redirect_view + +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + urlpatterns = [ + re_path( + r"^organization$", + views.product_type, + name="product_type", + ), + re_path( + r"^organization/(?P\d+)$", + views.view_product_type, + name="view_product_type", + ), + re_path( + r"^organization/(?P\d+)/edit$", + views.edit_product_type, + name="edit_product_type", + ), + re_path( + r"^organization/(?P\d+)/delete$", + views.delete_product_type, + name="delete_product_type", + ), + re_path( + r"^organization/add$", + views.add_product_type, + name="add_product_type", + ), + re_path( + r"^organization/(?P\d+)/add_asset", + product_views.new_product, + name="add_product_to_product_type", + ), + re_path( + r"^organization/(?P\d+)/add_member$", + views.add_product_type_member, + name="add_product_type_member", + ), + re_path( + r"^organization/member/(?P\d+)/edit$", + views.edit_product_type_member, + name="edit_product_type_member", + ), + re_path( + r"^organization/member/(?P\d+)/delete$", + views.delete_product_type_member, + name="delete_product_type_member", + ), + re_path( + r"^organization/(?P\d+)/add_group$", + views.add_product_type_group, + name="add_product_type_group", + ), + re_path( + r"^organization/group/(?P\d+)/edit$", + views.edit_product_type_group, + name="edit_product_type_group", + ), + re_path( + r"^organization/group/(?P\d+)/delete$", + views.delete_product_type_group, + name="delete_product_type_group", + ), + # TODO: Backwards compatibility; remove after v3 migration is complete + re_path(r"^product/type$", redirect_view("product_type")), + re_path(r"^product/type/(?P\d+)$", redirect_view("view_product_type")), + re_path(r"^product/type/(?P\d+)/edit$", redirect_view("edit_product_type")), + re_path(r"^product/type/(?P\d+)/delete$", redirect_view("delete_product_type")), + re_path(r"^product/type/add$", redirect_view("add_product_type")), + re_path(r"^product/type/(?P\d+)/add_product", redirect_view("add_product_to_product_type")), + re_path(r"^product/type/(?P\d+)/add_member$", redirect_view("add_product_type_member")), + re_path(r"^product/type/member/(?P\d+)/edit$", redirect_view("edit_product_type_member")), + re_path(r"^product/type/member/(?P\d+)/delete$", redirect_view("delete_product_type_member")), + re_path(r"^product/type/(?P\d+)/add_group$", redirect_view("add_product_type_group")), + re_path(r"^product/type/group/(?P\d+)/edit$", redirect_view("edit_product_type_group")), + re_path(r"^product/type/group/(?P\d+)/delete$", redirect_view("delete_product_type_group")), + ] +else: + urlpatterns = [ + # product type + 
re_path(r"^product/type$", views.product_type, name="product_type"), + re_path(r"^product/type/(?P\d+)$", + views.view_product_type, name="view_product_type"), + re_path(r"^product/type/(?P\d+)/edit$", + views.edit_product_type, name="edit_product_type"), + re_path(r"^product/type/(?P\d+)/delete$", + views.delete_product_type, name="delete_product_type"), + re_path(r"^product/type/add$", views.add_product_type, + name="add_product_type"), + re_path(r"^product/type/(?P\d+)/add_product", + product_views.new_product, + name="add_product_to_product_type"), + re_path(r"^product/type/(?P\d+)/add_member$", views.add_product_type_member, + name="add_product_type_member"), + re_path(r"^product/type/member/(?P\d+)/edit$", views.edit_product_type_member, + name="edit_product_type_member"), + re_path(r"^product/type/member/(?P\d+)/delete$", views.delete_product_type_member, + name="delete_product_type_member"), + re_path(r"^product/type/(?P\d+)/add_group$", views.add_product_type_group, + name="add_product_type_group"), + re_path(r"^product/type/group/(?P\d+)/edit$", views.edit_product_type_group, + name="edit_product_type_group"), + re_path(r"^product/type/group/(?P\d+)/delete$", views.delete_product_type_group, + name="delete_product_type_group"), + # Forward compatibility + re_path(r"^organization$", redirect_view("product_type")), + re_path(r"^organization/(?P\d+)$", redirect_view("view_product_type")), + re_path(r"^organization/(?P\d+)/edit$", redirect_view("edit_product_type")), + re_path(r"^organization/(?P\d+)/delete$", redirect_view("delete_product_type")), + re_path(r"^organization/add$", redirect_view("add_product_type")), + re_path(r"^organization/(?P\d+)/add_product", redirect_view("add_product_to_product_type")), + re_path(r"^organization/(?P\d+)/add_member$", redirect_view("add_product_type_member")), + re_path(r"^organization/member/(?P\d+)/edit$", redirect_view("edit_product_type_member")), + re_path(r"^organization/member/(?P\d+)/delete$", redirect_view("delete_product_type_member")), + re_path(r"^organization/(?P\d+)/add_group$", redirect_view("add_product_type_group")), + re_path(r"^organization/group/(?P\d+)/edit$", redirect_view("edit_product_type_group")), + re_path(r"^organization/group/(?P\d+)/delete$", redirect_view("delete_product_type_group")), + ] diff --git a/dojo/pghistory_models.py b/dojo/pghistory_models.py new file mode 100644 index 00000000000..936bd939c60 --- /dev/null +++ b/dojo/pghistory_models.py @@ -0,0 +1,31 @@ +""" +Custom pghistory models for DefectDojo. + +This module contains custom proxy models for pghistory Events +to expose context fields as structured fields. + +Note: Performance indexes for the pghistory_context table are managed +via Django migration 0244_pghistory_indices.py rather than +through model Meta classes, since the context table is managed by +the pghistory library itself. +""" +import pghistory.models +from django.db import models + + +class DojoEvents(pghistory.models.Events): + + """ + Custom Events proxy model that exposes context fields as structured fields. + + This allows querying and displaying context data like user, url, and remote_addr + as regular model fields instead of accessing nested JSON data. 
+ """ + + user = pghistory.ProxyField("pgh_context__user", models.IntegerField(null=True)) + url = pghistory.ProxyField("pgh_context__url", models.TextField(null=True)) + remote_addr = pghistory.ProxyField("pgh_context__remote_addr", models.CharField(max_length=45, null=True)) + + class Meta: + proxy = True + app_label = "dojo" diff --git a/dojo/pipeline.py b/dojo/pipeline.py index cde01e0b0d9..888cce0ba06 100644 --- a/dojo/pipeline.py +++ b/dojo/pipeline.py @@ -71,7 +71,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): soc = user.social_auth.order_by("-created").first() token = soc.extra_data["access_token"] group_names = [] - if "groups" not in kwargs["response"] or kwargs["response"]["groups"] == "": + if "groups" not in kwargs["response"] or not kwargs["response"]["groups"]: logger.warning("No groups in response. Stopping to update groups of user based on azureAD") return group_IDs = kwargs["response"]["groups"] @@ -93,13 +93,13 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs): logger.debug("detected " + group_from_response + " as group name and will not call microsoft graph") group_name = group_from_response - if settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER == "" or re.search(settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER, group_name): + if not settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER or re.search(settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER, group_name): group_names.append(group_name) else: logger.debug("Skipping group " + group_name + " due to AZUREAD_TENANT_OAUTH2_GROUPS_FILTER " + settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER) continue except Exception as e: - logger.error(f"Could not call microsoft graph API or save groups to member: {e}") + logger.error("Could not call microsoft graph API or save groups to member: %s", e) if len(group_names) > 0: assign_user_to_groups(user, group_names, Dojo_Group.AZURE) if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS: @@ -114,18 +114,18 @@ def assign_user_to_groups(user, group_names, social_provider): for group_name in group_names: group, created_group = Dojo_Group.objects.get_or_create(name=group_name, social_provider=social_provider) if created_group: - logger.debug("Group %s for social provider %s was created", str(group), social_provider) + logger.debug("Group %s for social provider %s was created", group, social_provider) _group_member, is_member_created = Dojo_Group_Member.objects.get_or_create(group=group, user=user, defaults={ "role": Role.objects.get(id=Roles.Maintainer)}) if is_member_created: - logger.debug("User %s become member of group %s (social provider: %s)", user, str(group), social_provider) + logger.debug("User %s become member of group %s (social provider: %s)", user, group, social_provider) def cleanup_old_groups_for_user(user, group_names): for group_member in Dojo_Group_Member.objects.select_related("group").filter(user=user): group = group_member.group if str(group) not in group_names: - logger.debug("Deleting membership of user %s from %s group %s", user, group.social_provider, str(group)) + logger.debug("Deleting membership of user %s from %s group %s", user, group.social_provider, group) group_member.delete() diff --git a/dojo/product/helpers.py b/dojo/product/helpers.py index ff7ada2dbd9..aeadec0246d 100644 --- a/dojo/product/helpers.py +++ b/dojo/product/helpers.py @@ -18,16 +18,16 @@ def propagate_tags_on_product(product_id, *args, **kwargs): def propagate_tags_on_product_sync(product): # enagagements - logger.debug(f"Propagating tags from 
{product} to all engagements") + logger.debug("Propagating tags from %s to all engagements", product) propagate_tags_on_object_list(Engagement.objects.filter(product=product)) # tests - logger.debug(f"Propagating tags from {product} to all tests") + logger.debug("Propagating tags from %s to all tests", product) propagate_tags_on_object_list(Test.objects.filter(engagement__product=product)) # findings - logger.debug(f"Propagating tags from {product} to all findings") + logger.debug("Propagating tags from %s to all findings", product) propagate_tags_on_object_list(Finding.objects.filter(test__engagement__product=product)) # endpoints - logger.debug(f"Propagating tags from {product} to all endpoints") + logger.debug("Propagating tags from %s to all endpoints", product) propagate_tags_on_object_list(Endpoint.objects.filter(product=product)) diff --git a/dojo/product/signals.py b/dojo/product/signals.py index 61678e26a28..0ed9a62747c 100644 --- a/dojo/product/signals.py +++ b/dojo/product/signals.py @@ -2,14 +2,20 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save from django.dispatch import receiver from django.urls import reverse from django.utils.translation import gettext as _ +from dojo.labels import get_labels from dojo.models import Product from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents +from dojo.utils import get_current_user + +labels = get_labels() @receiver(post_save, sender=Product) @@ -27,15 +33,43 @@ def product_post_save(sender, instance, created, **kwargs): def product_post_delete(sender, instance, **kwargs): # Catch instances in async delete where a single object is deleted more than once with contextlib.suppress(sender.DoesNotExist): - description = _('The product "%(name)s" was deleted') % {"name": instance.name} + description = labels.ASSET_DELETE_WITH_NAME_SUCCESS_MESSAGE % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="product"), - object_id=instance.id, - ).order_by("-id").first(): - description = _('The product "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + # First try to find deletion author in pghistory events + # Look for delete events for this specific product instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Product", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="product"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + if not user: + current_user = get_current_user() + user = current_user + + # Update description with user if found + if user: + description = labels.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": user} + 
create_notification(event="product_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/product/urls.py b/dojo/product/urls.py deleted file mode 100644 index 8e3568e5905..00000000000 --- a/dojo/product/urls.py +++ /dev/null @@ -1,76 +0,0 @@ -from django.urls import re_path - -from dojo.engagement import views as dojo_engagement_views -from dojo.product import views - -urlpatterns = [ - # product - re_path(r"^product$", views.product, name="product"), - re_path(r"^product/(?P\d+)$", views.view_product, - name="view_product"), - re_path(r"^product/(?P\d+)/components$", views.view_product_components, - name="view_product_components"), - re_path(r"^product/(?P\d+)/engagements$", views.view_engagements, - name="view_engagements"), - re_path( - r"^product/(?P\d+)/import_scan_results$", - dojo_engagement_views.ImportScanResultsView.as_view(), - name="import_scan_results_prod"), - re_path(r"^product/(?P\d+)/metrics$", views.view_product_metrics, - name="view_product_metrics"), - re_path(r"^product/(?P\d+)/async_burndown_metrics$", views.async_burndown_metrics, - name="async_burndown_metrics"), - re_path(r"^product/(?P\d+)/edit$", views.edit_product, - name="edit_product"), - re_path(r"^product/(?P\d+)/delete$", views.delete_product, - name="delete_product"), - re_path(r"^product/add", views.new_product, name="new_product"), - re_path(r"^product/(?P\d+)/new_engagement$", views.new_eng_for_app, - name="new_eng_for_prod"), - re_path(r"^product/(?P\d+)/new_technology$", views.new_tech_for_prod, - name="new_tech_for_prod"), - re_path(r"^technology/(?P\d+)/edit$", views.edit_technology, - name="edit_technology"), - re_path(r"^technology/(?P\d+)/delete$", views.delete_technology, - name="delete_technology"), - re_path(r"^product/(?P\d+)/new_engagement/cicd$", views.new_eng_for_app_cicd, - name="new_eng_for_prod_cicd"), - re_path(r"^product/(?P\d+)/add_meta_data$", views.add_meta_data, - name="add_meta_data"), - re_path(r"^product/(?P\d+)/edit_notifications$", views.edit_notifications, - name="edit_notifications"), - re_path(r"^product/(?P\d+)/edit_meta_data$", views.edit_meta_data, - name="edit_meta_data"), - re_path( - r"^product/(?P\d+)/ad_hoc_finding$", - views.AdHocFindingView.as_view(), - name="ad_hoc_finding"), - re_path(r"^product/(?P\d+)/engagement_presets$", views.engagement_presets, - name="engagement_presets"), - re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/edit$", views.edit_engagement_presets, - name="edit_engagement_presets"), - re_path(r"^product/(?P\d+)/engagement_presets/add$", views.add_engagement_presets, - name="add_engagement_presets"), - re_path(r"^product/(?P\d+)/engagement_presets/(?P\d+)/delete$", views.delete_engagement_presets, - name="delete_engagement_presets"), - re_path(r"^product/(?P\d+)/add_member$", views.add_product_member, - name="add_product_member"), - re_path(r"^product/member/(?P\d+)/edit$", views.edit_product_member, - name="edit_product_member"), - re_path(r"^product/member/(?P\d+)/delete$", views.delete_product_member, - name="delete_product_member"), - re_path(r"^product/(?P\d+)/add_api_scan_configuration$", views.add_api_scan_configuration, - name="add_api_scan_configuration"), - re_path(r"^product/(?P\d+)/view_api_scan_configurations$", views.view_api_scan_configurations, - name="view_api_scan_configurations"), - re_path(r"^product/(?P\d+)/edit_api_scan_configuration/(?P\d+)$", 
views.edit_api_scan_configuration, - name="edit_api_scan_configuration"), - re_path(r"^product/(?P\d+)/delete_api_scan_configuration/(?P\d+)$", views.delete_api_scan_configuration, - name="delete_api_scan_configuration"), - re_path(r"^product/(?P\d+)/add_group$", views.add_product_group, - name="add_product_group"), - re_path(r"^product/group/(?P\d+)/edit$", views.edit_product_group, - name="edit_product_group"), - re_path(r"^product/group/(?P\d+)/delete$", views.delete_product_group, - name="delete_product_group"), -] diff --git a/dojo/product/views.py b/dojo/product/views.py index d8b16a13167..6884877398a 100644 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -70,6 +70,7 @@ ProductNotificationsForm, SLA_Configuration, ) +from dojo.labels import get_labels from dojo.models import ( App_Analysis, Benchmark_Product_Summary, @@ -133,6 +134,8 @@ logger = logging.getLogger(__name__) +labels = get_labels() + def product(request): prods = get_authorized_products(Permissions.Product_View) @@ -159,7 +162,7 @@ def product(request): # Get benchmark types for the template benchmark_types = Benchmark_Type.objects.filter(enabled=True).order_by("name") - add_breadcrumb(title=_("Product List"), top_level=not len(request.GET), request=request) + add_breadcrumb(title=str(labels.ASSET_READ_LIST_LABEL), top_level=not len(request.GET), request=request) return render(request, "dojo/product.html", { "prod_list": prod_list, @@ -305,7 +308,7 @@ def view_product(request, pid): total = critical + high + medium + low + info - product_tab = Product_Tab(prod, title=_("Product"), tab="overview") + product_tab = Product_Tab(prod, title=str(labels.ASSET_LABEL), tab="overview") return render(request, "dojo/view_product_details.html", { "prod": prod, "product_tab": product_tab, @@ -338,7 +341,7 @@ def view_product(request, pid): @user_is_authorized(Product, Permissions.Component_View, "pid") def view_product_components(request, pid): prod = get_object_or_404(Product, id=pid) - product_tab = Product_Tab(prod, title=_("Product"), tab="components") + product_tab = Product_Tab(prod, title=str(labels.ASSET_LABEL), tab="components") separator = ", " # Get components ordered by component_name and concat component versions to the same row @@ -718,7 +721,7 @@ def view_product_metrics(request, pid): open_vulnerabilities = [["CWE-" + str(f.get("cwe")), f.get("count")] for f in open_vulnerabilities] all_vulnerabilities = [["CWE-" + str(f.get("cwe")), f.get("count")] for f in all_vulnerabilities] - product_tab = Product_Tab(prod, title=_("Product"), tab="metrics") + product_tab = Product_Tab(prod, title=str(labels.ASSET_LABEL), tab="metrics") return render(request, "dojo/product_metrics.html", { "prod": prod, @@ -892,12 +895,6 @@ def prefetch_for_view_engagements(engagements, recent_test_day_count): return engagements -# Authorization is within the import_scan_results method -def import_scan_results_prod(request, pid=None): - from dojo.engagement.views import import_scan_results - return import_scan_results(request, pid=pid) - - def new_product(request, ptid=None): if get_authorized_product_types(Permissions.Product_Type_Add_Product).count() == 0: raise PermissionDenied @@ -926,7 +923,7 @@ def new_product(request, ptid=None): product = form.save() messages.add_message(request, messages.SUCCESS, - _("Product added successfully."), + labels.ASSET_CREATE_SUCCESS_MESSAGE, extra_tags="alert-success") success, jira_project_form = jira_helper.process_jira_project_form(request, product=product) error = not success @@ -973,7 +970,7 
@@ def new_product(request, ptid=None): gform = GITHUB_Product_Form() if get_system_setting("enable_github") else None - add_breadcrumb(title=_("New Product"), top_level=False, request=request) + add_breadcrumb(title=str(labels.ASSET_CREATE_LABEL), top_level=False, request=request) return render(request, "dojo/new_product.html", {"form": form, "jform": jira_project_form, @@ -1003,13 +1000,13 @@ def edit_product(request, pid): if form.is_valid(): initial_sla_config = Product.objects.get(pk=form.instance.id).sla_configuration form.save() - msg = "Product updated successfully." + msg = labels.ASSET_UPDATE_SUCCESS_MESSAGE # check if the SLA config was changed, append additional context to message if initial_sla_config != form.instance.sla_configuration: - msg += " All SLA expiration dates for findings within this product will be recalculated asynchronously for the newly assigned SLA configuration." + msg += " " + labels.ASSET_UPDATE_SLA_CHANGED_MESSAGE messages.add_message(request, messages.SUCCESS, - _(msg), + msg, extra_tags="alert-success") success, jform = jira_helper.process_jira_project_form(request, instance=jira_project, product=product) @@ -1046,7 +1043,7 @@ def edit_product(request, pid): else: gform = None - product_tab = Product_Tab(product, title=_("Edit Product"), tab="settings") + product_tab = Product_Tab(product, title=str(labels.ASSET_UPDATE_LABEL), tab="settings") return render(request, "dojo/edit_product.html", {"form": form, @@ -1070,9 +1067,9 @@ def delete_product(request, pid): if get_setting("ASYNC_OBJECT_DELETE"): async_del = async_delete() async_del.delete(product) - message = _("Product and relationships will be removed in the background.") + message = labels.ASSET_DELETE_SUCCESS_ASYNC_MESSAGE else: - message = _("Product and relationships removed.") + message = labels.ASSET_DELETE_SUCCESS_MESSAGE product.delete() messages.add_message(request, messages.SUCCESS, @@ -1092,11 +1089,12 @@ def delete_product(request, pid): collector.collect([product]) rels = collector.nested() - product_tab = Product_Tab(product, title=_("Product"), tab="settings") + product_tab = Product_Tab(product, title=str(labels.ASSET_LABEL), tab="settings") logger.debug("delete_product: GET RENDER") return render(request, "dojo/delete_product.html", { + "label_delete_with_name": labels.ASSET_DELETE_WITH_NAME_LABEL % {"name": product}, "product": product, "form": form, "product_tab": product_tab, @@ -1705,6 +1703,7 @@ def edit_notifications(request, pid): def add_product_member(request, pid): product = get_object_or_404(Product, pk=pid) memberform = Add_Product_MemberForm(initial={"product": product.id}) + page_name = str(labels.ASSET_USERS_MEMBER_ADD_LABEL) if request.method == "POST": memberform = Add_Product_MemberForm(request.POST, initial={"product": product.id}) if memberform.is_valid(): @@ -1726,11 +1725,12 @@ def add_product_member(request, pid): product_member.save() messages.add_message(request, messages.SUCCESS, - _("Product members added successfully."), + labels.ASSET_USERS_MEMBER_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_product", args=(pid,))) - product_tab = Product_Tab(product, title=_("Add Product Member"), tab="settings") + product_tab = Product_Tab(product, title=page_name, tab="settings") return render(request, "dojo/new_product_member.html", { + "name": page_name, "product": product, "form": memberform, "product_tab": product_tab, @@ -1741,6 +1741,7 @@ def add_product_member(request, pid): def edit_product_member(request, 
memberid): member = get_object_or_404(Product_Member, pk=memberid) memberform = Edit_Product_MemberForm(instance=member) + page_name = str(labels.ASSET_USERS_MEMBER_UPDATE_LABEL) if request.method == "POST": memberform = Edit_Product_MemberForm(request.POST, instance=member) if memberform.is_valid(): @@ -1754,13 +1755,14 @@ def edit_product_member(request, memberid): memberform.save() messages.add_message(request, messages.SUCCESS, - _("Product member updated successfully."), + labels.ASSET_USERS_MEMBER_UPDATE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id,))) return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) - product_tab = Product_Tab(member.product, title=_("Edit Product Member"), tab="settings") + product_tab = Product_Tab(member.product, title=page_name, tab="settings") return render(request, "dojo/edit_product_member.html", { + "name": page_name, "memberid": memberid, "form": memberform, "product_tab": product_tab, @@ -1771,6 +1773,7 @@ def edit_product_member(request, memberid): def delete_product_member(request, memberid): member = get_object_or_404(Product_Member, pk=memberid) memberform = Delete_Product_MemberForm(instance=member) + page_name = str(labels.ASSET_USERS_MEMBER_DELETE_LABEL) if request.method == "POST": memberform = Delete_Product_MemberForm(request.POST, instance=member) member = memberform.instance @@ -1778,15 +1781,16 @@ def delete_product_member(request, memberid): member.delete() messages.add_message(request, messages.SUCCESS, - _("Product member deleted successfully."), + labels.ASSET_USERS_MEMBER_DELETE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id,))) if user == request.user: return HttpResponseRedirect(reverse("product")) return HttpResponseRedirect(reverse("view_product", args=(member.product.id,))) - product_tab = Product_Tab(member.product, title=_("Delete Product Member"), tab="settings") + product_tab = Product_Tab(member.product, title=page_name, tab="settings") return render(request, "dojo/delete_product_member.html", { + "name": page_name, "memberid": memberid, "form": memberform, "product_tab": product_tab, @@ -1929,6 +1933,7 @@ def edit_product_group(request, groupid): logger.error(groupid) group = get_object_or_404(Product_Group, pk=groupid) groupform = Edit_Product_Group_Form(instance=group) + page_name = str(labels.ASSET_GROUPS_UPDATE_LABEL) if request.method == "POST": groupform = Edit_Product_Group_Form(request.POST, instance=group) @@ -1943,14 +1948,15 @@ def edit_product_group(request, groupid): groupform.save() messages.add_message(request, messages.SUCCESS, - _("Product group updated successfully."), + labels.ASSET_GROUPS_UPDATE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) - product_tab = Product_Tab(group.product, title=_("Edit Product Group"), tab="settings") + product_tab = Product_Tab(group.product, title=page_name, tab="settings") return render(request, "dojo/edit_product_group.html", { + "name": page_name, "groupid": groupid, "form": groupform, "product_tab": product_tab, @@ -1961,6 +1967,7 @@ def edit_product_group(request, groupid): def delete_product_group(request, groupid): group = 
get_object_or_404(Product_Group, pk=groupid) groupform = Delete_Product_GroupForm(instance=group) + page_name = str(labels.ASSET_GROUPS_DELETE_LABEL) if request.method == "POST": groupform = Delete_Product_GroupForm(request.POST, instance=group) @@ -1968,7 +1975,7 @@ def delete_product_group(request, groupid): group.delete() messages.add_message(request, messages.SUCCESS, - _("Product group deleted successfully."), + labels.ASSET_GROUPS_DELETE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) @@ -1976,8 +1983,9 @@ def delete_product_group(request, groupid): # page return HttpResponseRedirect(reverse("view_product", args=(group.product.id,))) - product_tab = Product_Tab(group.product, title=_("Delete Product Group"), tab="settings") + product_tab = Product_Tab(group.product, title=page_name, tab="settings") return render(request, "dojo/delete_product_group.html", { + "name": page_name, "groupid": groupid, "form": groupform, "product_tab": product_tab, @@ -1988,6 +1996,7 @@ def delete_product_group(request, groupid): def add_product_group(request, pid): product = get_object_or_404(Product, pk=pid) group_form = Add_Product_GroupForm(initial={"product": product.id}) + page_name = str(labels.ASSET_GROUPS_ADD_LABEL) if request.method == "POST": group_form = Add_Product_GroupForm(request.POST, initial={"product": product.id}) @@ -2010,11 +2019,12 @@ def add_product_group(request, pid): product_group.save() messages.add_message(request, messages.SUCCESS, - _("Product groups added successfully."), + labels.ASSET_GROUPS_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_product", args=(pid,))) - product_tab = Product_Tab(product, title=_("Edit Product Group"), tab="settings") + product_tab = Product_Tab(product, title=page_name, tab="settings") return render(request, "dojo/new_product_group.html", { + "name": page_name, "product": product, "form": group_form, "product_tab": product_tab, diff --git a/dojo/product_type/signals.py b/dojo/product_type/signals.py index b376de46845..523e7dcedc4 100644 --- a/dojo/product_type/signals.py +++ b/dojo/product_type/signals.py @@ -1,15 +1,21 @@ import contextlib from auditlog.models import LogEntry +from crum import get_current_user from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save from django.dispatch import receiver from django.urls import reverse from django.utils.translation import gettext as _ +from dojo.labels import get_labels from dojo.models import Product_Type from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents + +labels = get_labels() @receiver(post_save, sender=Product_Type) @@ -27,15 +33,45 @@ def product_type_post_save(sender, instance, created, **kwargs): def product_type_post_delete(sender, instance, **kwargs): # Catch instances in async delete where a single object is deleted more than once with contextlib.suppress(sender.DoesNotExist): - description = _('The product type "%(name)s" was deleted') % {"name": instance.name} + description = labels.ORG_DELETE_WITH_NAME_SUCCESS_MESSAGE % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="product_type"), - 
object_id=instance.id, - ).order_by("-id").first(): - description = _('The product type "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + # First try to find deletion author in pghistory events + # Look for delete events for this specific product_type instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Product_Type", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="product_type"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Since adding pghistory as auditlog option, this signal here runs before the django-auditlog signal + # Fallback to the current user of the request (Which might be not available for ASYNC_OBJECT_DELETE scenario's) + if not user: + current_user = get_current_user() + user = current_user + + # Update description with user if found + if user: + description = labels.ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": user} + create_notification(event="product_type_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/product_type/urls.py b/dojo/product_type/urls.py deleted file mode 100644 index dd64a5e4c06..00000000000 --- a/dojo/product_type/urls.py +++ /dev/null @@ -1,32 +0,0 @@ -from django.urls import re_path - -from dojo.product import views as product_views -from dojo.product_type import views - -urlpatterns = [ - # product type - re_path(r"^product/type$", views.product_type, name="product_type"), - re_path(r"^product/type/(?P\d+)$", - views.view_product_type, name="view_product_type"), - re_path(r"^product/type/(?P\d+)/edit$", - views.edit_product_type, name="edit_product_type"), - re_path(r"^product/type/(?P\d+)/delete$", - views.delete_product_type, name="delete_product_type"), - re_path(r"^product/type/add$", views.add_product_type, - name="add_product_type"), - re_path(r"^product/type/(?P\d+)/add_product", - product_views.new_product, - name="add_product_to_product_type"), - re_path(r"^product/type/(?P\d+)/add_member$", views.add_product_type_member, - name="add_product_type_member"), - re_path(r"^product/type/member/(?P\d+)/edit$", views.edit_product_type_member, - name="edit_product_type_member"), - re_path(r"^product/type/member/(?P\d+)/delete$", views.delete_product_type_member, - name="delete_product_type_member"), - re_path(r"^product/type/(?P\d+)/add_group$", views.add_product_type_group, - name="add_product_type_group"), - re_path(r"^product/type/group/(?P\d+)/edit$", views.edit_product_type_group, - name="edit_product_type_group"), - re_path(r"^product/type/group/(?P\d+)/delete$", views.delete_product_type_group, - name="delete_product_type_group"), -] diff --git a/dojo/product_type/views.py b/dojo/product_type/views.py index b15894f2500..28553db8cfc 100644 --- a/dojo/product_type/views.py +++ b/dojo/product_type/views.py @@ -26,6 +26,7 @@ 
Edit_Product_Type_MemberForm, Product_TypeForm, ) +from dojo.labels import get_labels from dojo.models import Finding, Product, Product_Type, Product_Type_Group, Product_Type_Member, Role from dojo.product.queries import get_authorized_products from dojo.product_type.queries import ( @@ -53,6 +54,8 @@ Product Type views """ +labels = get_labels() + def product_type(request): prod_types = get_authorized_product_types(Permissions.Product_Type_View) @@ -63,7 +66,7 @@ def product_type(request): pts.object_list = prefetch_for_product_type(pts.object_list) - page_name = _("Product Type List") + page_name = str(labels.ORG_READ_LIST_LABEL) add_breadcrumb(title=page_name, top_level=True, request=request) return render(request, "dojo/product_type.html", { @@ -100,7 +103,7 @@ def prefetch_for_product_type(prod_types): @user_has_global_permission(Permissions.Product_Type_Add) def add_product_type(request): - page_name = _("Add Product Type") + page_name = str(labels.ORG_CREATE_LABEL) form = Product_TypeForm() if request.method == "POST": form = Product_TypeForm(request.POST) @@ -113,7 +116,7 @@ def add_product_type(request): member.save() messages.add_message(request, messages.SUCCESS, - _("Product type added successfully."), + str(labels.ORG_CREATE_SUCCESS_MESSAGE), extra_tags="alert-success") return HttpResponseRedirect(reverse("product_type")) add_breadcrumb(title=page_name, top_level=False, request=request) @@ -126,7 +129,7 @@ def add_product_type(request): @user_is_authorized(Product_Type, Permissions.Product_Type_View, "ptid") def view_product_type(request, ptid): - page_name = _("View Product Type") + page_name = str(labels.ORG_READ_LABEL) pt = get_object_or_404(Product_Type, pk=ptid) members = get_authorized_members_for_product_type(pt, Permissions.Product_Type_View) global_members = get_authorized_global_members_for_product_type(pt, Permissions.Product_Type_View) @@ -163,9 +166,9 @@ def delete_product_type(request, ptid): if get_setting("ASYNC_OBJECT_DELETE"): async_del = async_delete() async_del.delete(product_type) - message = "Product Type and relationships will be removed in the background." + message = labels.ORG_DELETE_SUCCESS_ASYNC_MESSAGE else: - message = "Product Type and relationships removed." 
+ message = labels.ORG_DELETE_SUCCESS_MESSAGE product_type.delete() messages.add_message(request, messages.SUCCESS, @@ -180,17 +183,17 @@ def delete_product_type(request, ptid): collector.collect([product_type]) rels = collector.nested() - add_breadcrumb(title=_("Delete Product Type"), top_level=False, request=request) - return render(request, "dojo/delete_product_type.html", - {"product_type": product_type, - "form": form, - "rels": rels, - }) + add_breadcrumb(title=str(labels.ORG_DELETE_LABEL), top_level=False, request=request) + return render(request, "dojo/delete_product_type.html", { + "label_delete_with_name": labels.ORG_DELETE_WITH_NAME_LABEL % {"name": product_type}, + "form": form, + "rels": rels, + }) @user_is_authorized(Product_Type, Permissions.Product_Type_Edit, "ptid") def edit_product_type(request, ptid): - page_name = "Edit Product Type" + page_name = str(labels.ORG_UPDATE_LABEL) pt = get_object_or_404(Product_Type, pk=ptid) members = get_authorized_members_for_product_type(pt, Permissions.Product_Type_Manage_Members) pt_form = Product_TypeForm(instance=pt) @@ -201,7 +204,7 @@ def edit_product_type(request, ptid): messages.add_message( request, messages.SUCCESS, - _("Product type updated successfully."), + labels.ORG_UPDATE_SUCCESS_MESSAGE, extra_tags="alert-success", ) return HttpResponseRedirect(reverse("product_type")) @@ -209,6 +212,7 @@ def edit_product_type(request, ptid): add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/edit_product_type.html", { "name": page_name, + "label_edit_with_name": labels.ORG_UPDATE_WITH_NAME_LABEL % {"name": pt.name}, "pt_form": pt_form, "pt": pt, "members": members}) @@ -216,6 +220,7 @@ def edit_product_type(request, ptid): @user_is_authorized(Product_Type, Permissions.Product_Type_Manage_Members, "ptid") def add_product_type_member(request, ptid): + page_name = str(labels.ORG_USERS_ADD_LABEL) pt = get_object_or_404(Product_Type, pk=ptid) memberform = Add_Product_Type_MemberForm(initial={"product_type": pt.id}) if request.method == "POST": @@ -238,11 +243,12 @@ def add_product_type_member(request, ptid): product_type_member.save() messages.add_message(request, messages.SUCCESS, - _("Product type members added successfully."), + labels.ORG_USERS_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_product_type", args=(ptid, ))) - add_breadcrumb(title=_("Add Product Type Member"), top_level=False, request=request) + add_breadcrumb(title=page_name, top_level=False, request=request) return render(request, "dojo/new_product_type_member.html", { + "name": page_name, "pt": pt, "form": memberform, }) @@ -250,7 +256,7 @@ def add_product_type_member(request, ptid): @user_is_authorized(Product_Type_Member, Permissions.Product_Type_Manage_Members, "memberid") def edit_product_type_member(request, memberid): - page_name = _("Edit Product Type Member") + page_name = str(labels.ORG_USERS_UPDATE_LABEL) member = get_object_or_404(Product_Type_Member, pk=memberid) memberform = Edit_Product_Type_MemberForm(instance=member) if request.method == "POST": @@ -260,7 +266,8 @@ def edit_product_type_member(request, memberid): owners = Product_Type_Member.objects.filter(product_type=member.product_type, role__is_owner=True).exclude(id=member.id).count() if owners < 1: messages.add_message(request, messages.SUCCESS, - _("There must be at least one owner for Product Type %(product_type_name)s.") % {"product_type_name": member.product_type.name}, + 
labels.ORG_USERS_MINIMUM_NUMBER_WITH_NAME_MESSAGE + % {"name": member.product_type.name}, extra_tags="alert-warning") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) @@ -274,7 +281,7 @@ def edit_product_type_member(request, memberid): memberform.save() messages.add_message(request, messages.SUCCESS, - _("Product type member updated successfully."), + labels.ORG_USERS_UPDATE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) @@ -289,7 +296,7 @@ def edit_product_type_member(request, memberid): @user_is_authorized(Product_Type_Member, Permissions.Product_Type_Member_Delete, "memberid") def delete_product_type_member(request, memberid): - page_name = "Delete Product Type Member" + page_name = str(labels.ORG_USERS_DELETE_LABEL) member = get_object_or_404(Product_Type_Member, pk=memberid) memberform = Delete_Product_Type_MemberForm(instance=member) if request.method == "POST": @@ -308,7 +315,7 @@ def delete_product_type_member(request, memberid): member.delete() messages.add_message(request, messages.SUCCESS, - _("Product type member deleted successfully."), + labels.ORG_USERS_DELETE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View User"): return HttpResponseRedirect(reverse("view_user", args=(member.user.id, ))) @@ -325,7 +332,7 @@ def delete_product_type_member(request, memberid): @user_is_authorized(Product_Type, Permissions.Product_Type_Group_Add, "ptid") def add_product_type_group(request, ptid): - page_name = "Add Product Type Group" + page_name = str(labels.ORG_GROUPS_ADD_LABEL) pt = get_object_or_404(Product_Type, pk=ptid) group_form = Add_Product_Type_GroupForm(initial={"product_type": pt.id}) @@ -349,7 +356,7 @@ def add_product_type_group(request, ptid): product_type_group.save() messages.add_message(request, messages.SUCCESS, - _("Product type groups added successfully."), + labels.ORG_GROUPS_ADD_SUCCESS_MESSAGE, extra_tags="alert-success") return HttpResponseRedirect(reverse("view_product_type", args=(ptid,))) @@ -363,7 +370,7 @@ def add_product_type_group(request, ptid): @user_is_authorized(Product_Type_Group, Permissions.Product_Type_Group_Edit, "groupid") def edit_product_type_group(request, groupid): - page_name = "Edit Product Type Group" + page_name = str(labels.ORG_GROUPS_UPDATE_LABEL) group = get_object_or_404(Product_Type_Group, pk=groupid) groupform = Edit_Product_Type_Group_Form(instance=group) @@ -379,7 +386,7 @@ def edit_product_type_group(request, groupid): groupform.save() messages.add_message(request, messages.SUCCESS, - _("Product type group updated successfully."), + labels.ORG_GROUPS_UPDATE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id,))) @@ -395,7 +402,7 @@ def edit_product_type_group(request, groupid): @user_is_authorized(Product_Type_Group, Permissions.Product_Type_Group_Delete, "groupid") def delete_product_type_group(request, groupid): - page_name = "Delete Product Type Group" + page_name = str(labels.ORG_GROUPS_DELETE_LABEL) group = get_object_or_404(Product_Type_Group, pk=groupid) groupform = Delete_Product_Type_GroupForm(instance=group) @@ -405,7 +412,7 @@ def delete_product_type_group(request, groupid): group.delete() messages.add_message(request, messages.SUCCESS, - _("Product type group deleted successfully."), + 
labels.ORG_GROUPS_DELETE_SUCCESS_MESSAGE, extra_tags="alert-success") if is_title_in_breadcrumbs("View Group"): return HttpResponseRedirect(reverse("view_group", args=(group.group.id, ))) diff --git a/dojo/reports/urls.py b/dojo/reports/urls.py index e6f8cd166cc..a12858c840d 100644 --- a/dojo/reports/urls.py +++ b/dojo/reports/urls.py @@ -1,39 +1,136 @@ +from django.conf import settings from django.urls import re_path from dojo.reports import views +from dojo.utils import redirect_view -urlpatterns = [ - # reports - re_path(r"^product/type/(?P\d+)/report$", - views.product_type_report, name="product_type_report"), - re_path(r"^product/(?P\d+)/report$", - views.product_report, name="product_report"), - re_path(r"^product/(?P\d+)/endpoint/report$", - views.product_endpoint_report, name="product_endpoint_report"), - re_path(r"^engagement/(?P\d+)/report$", views.engagement_report, - name="engagement_report"), - re_path(r"^test/(?P\d+)/report$", views.test_report, - name="test_report"), - re_path(r"^endpoint/(?P\d+)/report$", views.endpoint_report, - name="endpoint_report"), - re_path(r"^endpoint/host/(?P\d+)/report$", views.endpoint_host_report, - name="endpoint_host_report"), - re_path(r"^product/report$", - views.product_findings_report, name="product_findings_report"), - re_path(r"^reports/cover$", - views.report_cover_page, name="report_cover_page"), - re_path(r"^reports/builder$", - views.ReportBuilder.as_view(), name="report_builder"), - re_path(r"^reports/findings$", - views.report_findings, name="report_findings"), - re_path(r"^reports/endpoints$", - views.report_endpoints, name="report_endpoints"), - re_path(r"^reports/custom$", - views.CustomReport.as_view(), name="custom_report"), - re_path(r"^reports/quick$", - views.QuickReportView.as_view(), name="quick_report"), - re_path(r"^reports/csv_export$", - views.CSVExportView.as_view(), name="csv_export"), - re_path(r"^reports/excel_export$", - views.ExcelExportView.as_view(), name="excel_export"), -] +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + urlpatterns = [ + re_path( + r"^organization/(?P\d+)/report$", + views.product_type_report, + name="product_type_report", + ), + re_path( + r"^asset/(?P\d+)/report$", + views.product_report, + name="product_report", + ), + re_path( + r"^asset/(?P\d+)/endpoint/report$", + views.product_endpoint_report, + name="product_endpoint_report", + ), + re_path( + r"^engagement/(?P\d+)/report$", + views.engagement_report, + name="engagement_report", + ), + re_path( + r"^test/(?P\d+)/report$", + views.test_report, + name="test_report", + ), + re_path( + r"^endpoint/(?P\d+)/report$", + views.endpoint_report, + name="endpoint_report", + ), + re_path( + r"^endpoint/host/(?P\d+)/report$", + views.endpoint_host_report, + name="endpoint_host_report", + ), + re_path( + r"^asset/report$", + views.product_findings_report, + name="product_findings_report", + ), + re_path( + r"^reports/cover$", + views.report_cover_page, + name="report_cover_page", + ), + re_path( + r"^reports/builder$", + views.ReportBuilder.as_view(), + name="report_builder", + ), + re_path( + r"^reports/findings$", + views.report_findings, + name="report_findings", + ), + re_path( + r"^reports/endpoints$", + views.report_endpoints, + name="report_endpoints", + ), + re_path( + r"^reports/custom$", + views.CustomReport.as_view(), + name="custom_report", + ), + re_path( + r"^reports/quick$", + views.QuickReportView.as_view(), + name="quick_report", + ), + re_path( + 
r"^reports/csv_export$", + views.CSVExportView.as_view(), + name="csv_export", + ), + re_path( + r"^reports/excel_export$", + views.ExcelExportView.as_view(), + name="excel_export", + ), + # TODO: Backwards compatibility; remove after v3 migration is complete + re_path(r"^product/type/(?P\d+)/report$", redirect_view("product_type_report")), + re_path(r"^product/(?P\d+)/report$", redirect_view("product_report")), + re_path(r"^product/(?P\d+)/endpoint/report$", redirect_view("product_endpoint_report")), + re_path(r"^product/report$", redirect_view("product_findings_report")), + ] +else: + urlpatterns = [ + # reports + re_path(r"^product/type/(?P\d+)/report$", + views.product_type_report, name="product_type_report"), + re_path(r"^product/(?P\d+)/report$", + views.product_report, name="product_report"), + re_path(r"^product/(?P\d+)/endpoint/report$", + views.product_endpoint_report, name="product_endpoint_report"), + re_path(r"^engagement/(?P\d+)/report$", views.engagement_report, + name="engagement_report"), + re_path(r"^test/(?P\d+)/report$", views.test_report, + name="test_report"), + re_path(r"^endpoint/(?P\d+)/report$", views.endpoint_report, + name="endpoint_report"), + re_path(r"^endpoint/host/(?P\d+)/report$", views.endpoint_host_report, + name="endpoint_host_report"), + re_path(r"^product/report$", + views.product_findings_report, name="product_findings_report"), + re_path(r"^reports/cover$", + views.report_cover_page, name="report_cover_page"), + re_path(r"^reports/builder$", + views.ReportBuilder.as_view(), name="report_builder"), + re_path(r"^reports/findings$", + views.report_findings, name="report_findings"), + re_path(r"^reports/endpoints$", + views.report_endpoints, name="report_endpoints"), + re_path(r"^reports/custom$", + views.CustomReport.as_view(), name="custom_report"), + re_path(r"^reports/quick$", + views.QuickReportView.as_view(), name="quick_report"), + re_path(r"^reports/csv_export$", + views.CSVExportView.as_view(), name="csv_export"), + re_path(r"^reports/excel_export$", + views.ExcelExportView.as_view(), name="excel_export"), + # Forward compatibility + re_path(r"^organization/(?P\d+)/report$", redirect_view("product_type_report")), + re_path(r"^asset/(?P\d+)/report$", redirect_view("product_report")), + re_path(r"^asset/(?P\d+)/endpoint/report$", redirect_view("product_endpoint_report")), + re_path(r"^asset/report$", redirect_view("product_findings_report")), + ] diff --git a/dojo/reports/views.py b/dojo/reports/views.py index d5853c375b2..ae6a99804eb 100644 --- a/dojo/reports/views.py +++ b/dojo/reports/views.py @@ -27,6 +27,7 @@ from dojo.finding.queries import get_authorized_findings from dojo.finding.views import BaseListFindings from dojo.forms import ReportOptionsForm +from dojo.labels import get_labels from dojo.models import Dojo_User, Endpoint, Engagement, Finding, Product, Product_Type, Test from dojo.reports.widgets import ( CoverPage, @@ -51,6 +52,10 @@ logger = logging.getLogger(__name__) + +labels = get_labels() + + EXCEL_CHAR_LIMIT = 32767 @@ -192,6 +197,7 @@ def report_findings(request): "title_words": title_words, "component_words": component_words, "title": "finding-list", + "asset_label": labels.ASSET_LABEL, }) @@ -383,8 +389,8 @@ def generate_report(request, obj, *, host_view=False): if type(obj).__name__ == "Product_Type": product_type = obj template = "dojo/product_type_pdf_report.html" - report_name = "Product Type Report: " + str(product_type) - report_title = "Product Type Report" + report_name = labels.ORG_REPORT_WITH_NAME_TITLE % 
{"name": str(product_type)} + report_title = labels.ORG_REPORT_LABEL findings = report_finding_filter_class(request.GET, prod_type=product_type, queryset=prefetch_related_findings_for_report(Finding.objects.filter( test__engagement__product__prod_type=product_type))) products = Product.objects.filter(prod_type=product_type, @@ -433,8 +439,8 @@ def generate_report(request, obj, *, host_view=False): elif type(obj).__name__ == "Product": product = obj template = "dojo/product_pdf_report.html" - report_name = "Product Report: " + str(product) - report_title = "Product Report" + report_name = labels.ASSET_REPORT_WITH_NAME_TITLE % {"name": str(product)} + report_title = labels.ASSET_REPORT_LABEL findings = report_finding_filter_class(request.GET, product=product, queryset=prefetch_related_findings_for_report(Finding.objects.filter( test__engagement__product=product))) ids = set(finding.id for finding in findings.qs) # noqa: C401 @@ -605,7 +611,7 @@ def generate_report(request, obj, *, host_view=False): product_tab = Product_Tab(test.engagement.product, title="Test Report", tab="engagements") product_tab.setEngagement(test.engagement) elif product: - product_tab = Product_Tab(product, title="Product Report", tab="findings") + product_tab = Product_Tab(product, title=str(labels.ASSET_REPORT_LABEL), tab="findings") elif endpoints: if host_view: product_tab = Product_Tab(endpoint.product, title="Endpoint Host Report", tab="endpoints") diff --git a/dojo/reports/widgets.py b/dojo/reports/widgets.py index 0dc5df4e1bd..47e0c6afe19 100644 --- a/dojo/reports/widgets.py +++ b/dojo/reports/widgets.py @@ -18,6 +18,7 @@ ReportFindingFilterWithoutObjectLookups, ) from dojo.forms import CustomReportOptionsForm +from dojo.labels import get_labels from dojo.models import Endpoint, Finding from dojo.utils import get_page_items, get_system_setting, get_words_for_field @@ -26,6 +27,8 @@ to be included. Each widget will provide a set of options, reporesented by form elements, to be included. 
""" +labels = get_labels() + class CustomReportJsonForm(forms.Form): json = forms.CharField() @@ -299,6 +302,7 @@ def get_option_form(self): "request": self.request, "title": self.title, "extra_help": self.extra_help, + "asset_label": labels.ASSET_LABEL, }) return mark_safe(html) diff --git a/dojo/search/views.py b/dojo/search/views.py index d075331bbc5..cf8bcb27061 100644 --- a/dojo/search/views.py +++ b/dojo/search/views.py @@ -15,7 +15,7 @@ from dojo.engagement.queries import get_authorized_engagements from dojo.filters import FindingFilter, FindingFilterWithoutObjectLookups from dojo.finding.queries import get_authorized_findings, get_authorized_vulnerability_ids, prefetch_for_findings -from dojo.forms import SimpleSearchForm +from dojo.forms import FindingBulkUpdateForm, SimpleSearchForm from dojo.models import Engagement, Finding, Finding_Template, Languages, Product, Test from dojo.product.queries import get_authorized_app_analysis, get_authorized_products from dojo.test.queries import get_authorized_tests @@ -390,7 +390,9 @@ def simple_search(request): "form": form, "activetab": activetab, "show_product_column": True, - "generic": generic}) + "generic": generic, + "bulk_edit_form": FindingBulkUpdateForm(request.GET), + }) if cookie: response.set_cookie("highlight", value=keywords_query, @@ -422,9 +424,9 @@ def parse_search_query(clean_query): else: keywords.append(vulnerability_id_fix(query_part)) - logger.debug(f"query: {clean_query}") - logger.debug(f"operators: {operators}") - logger.debug(f"keywords: {keywords}") + logger.debug("query: %s", clean_query) + logger.debug("operators: %s", operators) + logger.debug("keywords: %s", keywords) return operators, keywords diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index 3b66121d62f..baf28ec2b38 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -15,6 +15,7 @@ from pathlib import Path import environ +import pghistory from celery.schedules import crontab from netaddr import IPNetwork, IPSet @@ -30,6 +31,10 @@ DD_SITE_URL=(str, "http://localhost:8080"), DD_DEBUG=(bool, False), DD_DJANGO_DEBUG_TOOLBAR_ENABLED=(bool, False), + # django-auditlog imports django-jsonfield-backport raises a warning that can be ignored, + # see https://github.com/laymonage/django-jsonfield-backport + # debug_toolbar.E001 is raised when running tests in dev mode via run-unittests.sh + DD_SILENCED_SYSTEM_CHECKS=(list, ["debug_toolbar.E001", "django_jsonfield_backport.W001"]), DD_TEMPLATE_DEBUG=(bool, False), DD_LOG_LEVEL=(str, ""), DD_DJANGO_METRICS_ENABLED=(bool, False), @@ -86,10 +91,12 @@ DD_CELERY_TASK_SERIALIZER=(str, "pickle"), DD_CELERY_PASS_MODEL_BY_ID=(str, True), DD_CELERY_LOG_LEVEL=(str, "INFO"), + DD_TAG_BULK_ADD_BATCH_SIZE=(int, 1000), + # Minimum number of model updated instances before search index updates as performaed asynchronously. Set to -1 to disable async updates. 
+ DD_WATSON_ASYNC_INDEX_UPDATE_THRESHOLD=(int, 100), + DD_WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE=(int, 1000), DD_FOOTER_VERSION=(str, ""), # models should be passed to celery by ID, default is False (for now) - DD_FORCE_LOWERCASE_TAGS=(bool, True), - DD_MAX_TAG_LENGTH=(int, 25), DD_DATABASE_ENGINE=(str, "django.db.backends.postgresql"), DD_DATABASE_HOST=(str, "postgres"), DD_DATABASE_NAME=(str, "defectdojo"), @@ -165,6 +172,7 @@ DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_API_URL=(str, ""), DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_KEY=(str, ""), DD_SOCIAL_AUTH_GITHUB_ENTERPRISE_SECRET=(str, ""), + DD_SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL=(bool, True), DD_SAML2_ENABLED=(bool, False), # Allows to override default SAML authentication backend. Check https://djangosaml2.readthedocs.io/contents/setup.html#custom-user-attributes-processing DD_SAML2_AUTHENTICATION_BACKENDS=(str, "djangosaml2.backends.Saml2Backend"), @@ -258,6 +266,10 @@ DD_TRACK_IMPORT_HISTORY=(bool, True), # Delete Auditlogs older than x month; -1 to keep all logs DD_AUDITLOG_FLUSH_RETENTION_PERIOD=(int, -1), + # Batch size for flushing audit logs per task run + DD_AUDITLOG_FLUSH_BATCH_SIZE=(int, 1000), + # Maximum number of batches to process per task run + DD_AUDITLOG_FLUSH_MAX_BATCHES=(int, 100), # Allow grouping of findings in the same test, for example to group findings per dependency # DD_FEATURE_FINDING_GROUPS feature is moved to system_settings, will be removed from settings file DD_FEATURE_FINDING_GROUPS=(bool, True), @@ -311,6 +323,8 @@ # If you run big import you may want to disable this because the way django-auditlog currently works, there's # a big performance hit. Especially during (re-)imports. DD_ENABLE_AUDITLOG=(bool, True), + # Audit logging system: "django-auditlog" (default) or "django-pghistory" + DD_AUDITLOG_TYPE=(str, "django-auditlog"), # Specifies whether the "first seen" date of a given report should be used over the "last seen" date DD_USE_FIRST_SEEN=(bool, False), # When set to True, use the older version of the qualys parser that is a more heavy handed in setting severity @@ -323,6 +337,8 @@ # For HTTP requests, how long connection is open before timeout # This settings apply only on requests performed by "requests" lib used in Dojo code (if some included lib is using "requests" as well, this does not apply there) DD_REQUESTS_TIMEOUT=(int, 30), + # Dictates if v3 org/asset relabeling (+url routing) will be enabled + DD_ENABLE_V3_ORGANIZATION_ASSET_RELABEL=(bool, False), ) @@ -395,6 +411,9 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param MAX_ALERTS_PER_USER = env("DD_MAX_ALERTS_PER_USER") TAG_PREFETCHING = env("DD_TAG_PREFETCHING") +# Tag bulk add batch size (used by dojo.tag_utils.bulk_add_tag_to_instances) +TAG_BULK_ADD_BATCH_SIZE = env("DD_TAG_BULK_ADD_BATCH_SIZE") + # ------------------------------------------------------------------------------ # DATABASE @@ -559,7 +578,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param SOCIAL_AUTH_STRATEGY = "social_django.strategy.DjangoStrategy" SOCIAL_AUTH_STORAGE = "social_django.models.DjangoStorage" SOCIAL_AUTH_ADMIN_USER_SEARCH_FIELDS = ["username", "first_name", "last_name", "email"] -SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = True +SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL = env("DD_SOCIAL_AUTH_USERNAME_IS_FULL_EMAIL") GOOGLE_OAUTH_ENABLED = env("DD_SOCIAL_AUTH_GOOGLE_OAUTH2_ENABLED") SOCIAL_AUTH_GOOGLE_OAUTH2_KEY = env("DD_SOCIAL_AUTH_GOOGLE_OAUTH2_KEY") @@ -739,6 +758,7 @@ def generate_url(scheme, double_slashes, user, 
password, host, port, path, param # Override default Django behavior for incorrect URLs APPEND_SLASH = env("DD_APPEND_SLASH") + # Whether to use a secure cookie for the CSRF cookie. CSRF_COOKIE_SECURE = env("DD_CSRF_COOKIE_SECURE") CSRF_COOKIE_SAMESITE = env("DD_CSRF_COOKIE_SAMESITE") @@ -768,6 +788,8 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param # DEFECTDOJO SPECIFIC # ------------------------------------------------------------------------------ +ENABLE_V3_ORGANIZATION_ASSET_RELABEL = env("DD_ENABLE_V3_ORGANIZATION_ASSET_RELABEL") + # Credential Key CREDENTIAL_AES_256_KEY = env("DD_CREDENTIAL_AES_256_KEY") DB_KEY = env("DD_CREDENTIAL_AES_256_KEY") @@ -778,11 +800,6 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param # Used to configure a custom version in the footer of the base.html template. FOOTER_VERSION = env("DD_FOOTER_VERSION") -# Django-tagging settings -FORCE_LOWERCASE_TAGS = env("DD_FORCE_LOWERCASE_TAGS") -MAX_TAG_LENGTH = env("DD_MAX_TAG_LENGTH") - - # ------------------------------------------------------------------------------ # ADMIN # ------------------------------------------------------------------------------ @@ -865,6 +882,7 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "dojo.context_processors.bind_alert_count", "dojo.context_processors.bind_announcement", "dojo.context_processors.session_expiry_notification", + "dojo.context_processors.labels", ], }, }, @@ -884,10 +902,8 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "polymorphic", # provides admin templates "django.contrib.admin", "django.contrib.humanize", - "auditlog", "dojo", "watson", - "tagging", # not used, but still needed for migration 0065_django_tagulous.py (v1.10.0) "imagekit", "multiselectfield", "rest_framework", @@ -900,6 +916,9 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "tagulous", "fontawesomefree", "django_filters", + "auditlog", + "pgtrigger", + "pghistory", ) # ------------------------------------------------------------------------------ @@ -918,9 +937,9 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "dojo.middleware.LoginRequiredMiddleware", "dojo.middleware.AdditionalHeaderMiddleware", "social_django.middleware.SocialAuthExceptionMiddleware", - "watson.middleware.SearchContextMiddleware", - "dojo.middleware.AuditlogMiddleware", "crum.CurrentRequestUserMiddleware", + "dojo.middleware.AuditlogMiddleware", + "dojo.middleware.AsyncSearchContextMiddleware", "dojo.request_cache.middleware.RequestCacheMiddleware", "dojo.middleware.LongRunningRequestAlertMiddleware", ] @@ -1161,6 +1180,10 @@ def saml2_attrib_map_format(din): CELERY_IMPORTS = ("dojo.tools.tool_issue_updater", ) +# Watson async index update settings +WATSON_ASYNC_INDEX_UPDATE_THRESHOLD = env("DD_WATSON_ASYNC_INDEX_UPDATE_THRESHOLD") +WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE = env("DD_WATSON_ASYNC_INDEX_UPDATE_BATCH_SIZE") + # Celery beat scheduled tasks CELERY_BEAT_SCHEDULE = { "add-alerts": { @@ -1303,6 +1326,7 @@ def saml2_attrib_map_format(din): "Scout Suite Scan": ["file_path", "vuln_id_from_tool"], # for now we use file_path as there is no attribute for "service" "Meterian Scan": ["cwe", "component_name", "component_version", "description", "severity"], "Github Vulnerability Scan": ["title", "severity", "component_name", "vulnerability_ids", "file_path"], + "Github Secrets Detection Report": ["title", "file_path", 
"line"], "Solar Appscreener Scan": ["title", "file_path", "line", "severity"], "pip-audit Scan": ["vuln_id_from_tool", "component_name", "component_version"], "Rubocop Scan": ["vuln_id_from_tool", "file_path", "line"], @@ -1356,6 +1380,8 @@ def saml2_attrib_map_format(din): "Qualys Hacker Guardian Scan": ["title", "severity", "description"], "Cyberwatch scan (Galeax)": ["title", "description", "severity"], "Cycognito Scan": ["title", "severity"], + "OpenVAS Parser v2": ["title", "severity", "vuln_id_from_tool", "endpoints"], + "Snyk Issue API Scan": ["vuln_id_from_tool", "file_path"], } # Override the hardcoded settings here via the env var @@ -1372,7 +1398,7 @@ def saml2_attrib_map_format(din): logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER") HASHCODE_FIELDS_PER_SCANNER[key] = value if key not in HASHCODE_FIELDS_PER_SCANNER: - logger.info(f"Adding {key} with value {value} from env var DD_HASHCODE_FIELDS_PER_SCANNER") + logger.info("Adding %s with value %s from env var DD_HASHCODE_FIELDS_PER_SCANNER", key, value) HASHCODE_FIELDS_PER_SCANNER[key] = value @@ -1427,6 +1453,7 @@ def saml2_attrib_map_format(din): "HCL AppScan on Cloud SAST XML": True, "AWS Inspector2 Scan": True, "Cyberwatch scan (Galeax)": True, + "OpenVAS Parser v2": True, } # List of fields that are known to be usable in hash_code computation) @@ -1545,6 +1572,7 @@ def saml2_attrib_map_format(din): "AWS Security Hub Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, "Meterian Scan": DEDUPE_ALGO_HASH_CODE, "Github Vulnerability Scan": DEDUPE_ALGO_HASH_CODE, + "Github Secrets Detection Report": DEDUPE_ALGO_HASH_CODE, "Cloudsploit Scan": DEDUPE_ALGO_HASH_CODE, "SARIF": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE, "Azure Security Center Recommendations Scan": DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, @@ -1613,6 +1641,8 @@ def saml2_attrib_map_format(din): "Red Hat Satellite": DEDUPE_ALGO_HASH_CODE, "Qualys Hacker Guardian Scan": DEDUPE_ALGO_HASH_CODE, "Cyberwatch scan (Galeax)": DEDUPE_ALGO_HASH_CODE, + "OpenVAS Parser v2": DEDUPE_ALGO_HASH_CODE, + "Snyk Issue API Scan": DEDUPE_ALGO_HASH_CODE, } # Override the hardcoded settings here via the env var @@ -1626,7 +1656,7 @@ def saml2_attrib_map_format(din): logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER") DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value if key not in DEDUPLICATION_ALGORITHM_PER_PARSER: - logger.info(f"Adding {key} with value {value} from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER") + logger.info("Adding %s with value %s from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER", key, value) DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value DUPE_DELETE_MAX_PER_RUN = env("DD_DUPE_DELETE_MAX_PER_RUN") @@ -1648,7 +1678,7 @@ def saml2_attrib_map_format(din): ("Security", "Security"), ) -if env("DD_JIRA_EXTRA_ISSUE_TYPES") != "": +if env("DD_JIRA_EXTRA_ISSUE_TYPES"): for extra_type in env("DD_JIRA_EXTRA_ISSUE_TYPES").split(","): JIRA_ISSUE_TYPE_CHOICES_CONFIG += ((extra_type, extra_type),) @@ -1812,9 +1842,7 @@ def saml2_attrib_map_format(din): # for very large objects DELETE_PREVIEW = env("DD_DELETE_PREVIEW") -# django-auditlog imports django-jsonfield-backport raises a warning that can be ignored, -# see https://github.com/laymonage/django-jsonfield-backport -SILENCED_SYSTEM_CHECKS = ["django_jsonfield_backport.W001"] +SILENCED_SYSTEM_CHECKS = env("DD_SILENCED_SYSTEM_CHECKS") 
VULNERABILITY_URLS = { "ALAS": "https://alas.aws.amazon.com/AL2/&&.html", # e.g. https://alas.aws.amazon.com/alas2.html @@ -1825,6 +1853,7 @@ def saml2_attrib_map_format(din): "ALSA-": "https://osv.dev/vulnerability/", # e.g. https://osv.dev/vulnerability/ALSA-2024:0827 "ASA-": "https://security.archlinux.org/", # e.g. https://security.archlinux.org/ASA-202003-8 "AVD": "https://avd.aquasec.com/misconfig/", # e.g. https://avd.aquasec.com/misconfig/avd-ksv-01010 + "AWS-": "https://aws.amazon.com/security/security-bulletins/", # e.g. https://aws.amazon.com/security/security-bulletins/AWS-2025-001 "BAM-": "https://jira.atlassian.com/browse/", # e.g. https://jira.atlassian.com/browse/BAM-25498 "BSERV-": "https://jira.atlassian.com/browse/", # e.g. https://jira.atlassian.com/browse/BSERV-19020 "C-": "https://hub.armosec.io/docs/", # e.g. https://hub.armosec.io/docs/c-0085 @@ -1904,8 +1933,11 @@ def saml2_attrib_map_format(din): # ------------------------------------------------------------------------------ AUDITLOG_FLUSH_RETENTION_PERIOD = env("DD_AUDITLOG_FLUSH_RETENTION_PERIOD") ENABLE_AUDITLOG = env("DD_ENABLE_AUDITLOG") +AUDITLOG_TYPE = env("DD_AUDITLOG_TYPE") AUDITLOG_TWO_STEP_MIGRATION = False AUDITLOG_USE_TEXT_CHANGES_IF_JSON_IS_NOT_PRESENT = False +AUDITLOG_FLUSH_BATCH_SIZE = env("DD_AUDITLOG_FLUSH_BATCH_SIZE") +AUDITLOG_FLUSH_MAX_BATCHES = env("DD_AUDITLOG_FLUSH_MAX_BATCHES") USE_FIRST_SEEN = env("DD_USE_FIRST_SEEN") USE_QUALYS_LEGACY_SEVERITY_PARSING = env("DD_QUALYS_LEGACY_SEVERITY_PARSING") @@ -1990,3 +2022,28 @@ def show_toolbar(request): "debug_toolbar.panels.profiling.ProfilingPanel", # 'cachalot.panels.CachalotPanel', ] + +######################################################################################################### +# Auditlog configuration # +######################################################################################################### + +if ENABLE_AUDITLOG: + middleware_list = list(MIDDLEWARE) + crum_index = middleware_list.index("crum.CurrentRequestUserMiddleware") + + if AUDITLOG_TYPE == "django-auditlog": + # Insert AuditlogMiddleware before CurrentRequestUserMiddleware + middleware_list.insert(crum_index, "dojo.middleware.AuditlogMiddleware") + elif AUDITLOG_TYPE == "django-pghistory": + # Insert pghistory HistoryMiddleware before CurrentRequestUserMiddleware + middleware_list.insert(crum_index, "dojo.middleware.PgHistoryMiddleware") + + MIDDLEWARE = middleware_list + +PGHISTORY_FOREIGN_KEY_FIELD = pghistory.ForeignKey(db_index=False) +PGHISTORY_CONTEXT_FIELD = pghistory.ContextForeignKey(db_index=True) +PGHISTORY_OBJ_FIELD = pghistory.ObjForeignKey(db_index=True) + +######################################################################################################### +# End of Auditlog configuration # +######################################################################################################### diff --git a/dojo/sla_config/helpers.py b/dojo/sla_config/helpers.py index a7422869536..57633d0c2ec 100644 --- a/dojo/sla_config/helpers.py +++ b/dojo/sla_config/helpers.py @@ -21,7 +21,7 @@ def update_sla_expiration_dates_product_async(product, sla_config, *args, **kwar def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities=None): - logger.info(f"Updating finding SLA expiration dates within the {sla_config} SLA configuration") + logger.info("Updating finding SLA expiration dates within the %s SLA configuration", sla_config) # update each finding that is within the SLA configuration that was saved findings = 
Finding.objects.filter(test__engagement__product__sla_configuration_id=sla_config.id) if products: @@ -49,4 +49,4 @@ def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities # reset the async updating flag to false for this sla config sla_config.async_updating = False super(SLA_Configuration, sla_config).save() - logger.info(f"DONE Updating finding SLA expiration dates within the {sla_config} SLA configuration") + logger.info("DONE Updating finding SLA expiration dates within the %s SLA configuration", sla_config) diff --git a/dojo/system_settings/labels.py b/dojo/system_settings/labels.py new file mode 100644 index 00000000000..bf23a667102 --- /dev/null +++ b/dojo/system_settings/labels.py @@ -0,0 +1,41 @@ +from django.conf import settings +from django.utils.translation import gettext_lazy as _ + + +class SystemSettingsLabelsKeys: + + """Directory of text copy used by the System_Settings model.""" + + SETTINGS_TRACKED_FILES_ENABLE_LABEL = "settings.tracked_files.enable_label" + SETTINGS_TRACKED_FILES_ENABLE_HELP = "settings.tracked_files.enable_help" + SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_LABEL = "settings.asset_grading.enforce_verified_label" + SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_HELP = "settings.asset_grading.enforce_verified_help" + SETTINGS_ASSET_GRADING_ENABLE_LABEL = "settings.asset_grading.enable_label" + SETTINGS_ASSET_GRADING_ENABLE_HELP = "settings.asset_grading.enable_help" + SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_LABEL = "settings.asset_tag_inheritance.enable_label" + SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_HELP = "settings.asset_tag_inheritance.enable_help" + + +# TODO: remove the else: branch once v3 migration is complete +if settings.ENABLE_V3_ORGANIZATION_ASSET_RELABEL: + labels = { + SystemSettingsLabelsKeys.SETTINGS_TRACKED_FILES_ENABLE_LABEL: _("Enable Tracked Asset Files"), + SystemSettingsLabelsKeys.SETTINGS_TRACKED_FILES_ENABLE_HELP: _("With this setting turned off, tracked Asset files will be disabled in the user interface."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_LABEL: _("Enforce Verified Status - Asset Grading"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_HELP: _("When enabled, findings must have a verified status to be considered as part of an Asset's grading."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENABLE_LABEL: _("Enable Asset Grading"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENABLE_HELP: _("Displays a grade letter next to an Asset to show the overall health."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_LABEL: _("Enable Asset Tag Inheritance"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_HELP: _("Enables Asset tag inheritance globally for all Assets. 
Any tags added on an Asset will automatically be added to all Engagements, Tests, and Findings."), + } +else: + labels = { + SystemSettingsLabelsKeys.SETTINGS_TRACKED_FILES_ENABLE_LABEL: _("Enable Product Tracking Files"), + SystemSettingsLabelsKeys.SETTINGS_TRACKED_FILES_ENABLE_HELP: _("With this setting turned off, the product tracking files will be disabled in the user interface."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_LABEL: _("Enforce Verified Status - Product Grading"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENFORCE_VERIFIED_HELP: _("When enabled, findings must have a verified status to be considered as part of a product's grading."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENABLE_LABEL: _("Enable Product Grading"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_GRADING_ENABLE_HELP: _("Displays a grade letter next to a product to show the overall health."), + SystemSettingsLabelsKeys.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_LABEL: _("Enable Product Tag Inheritance"), + SystemSettingsLabelsKeys.SETTINGS_ASSET_TAG_INHERITANCE_ENABLE_HELP: _("Enables product tag inheritance globally for all products. Any tags added on a product will automatically be added to all Engagements, Tests, and Findings"), + } diff --git a/dojo/tag_utils.py b/dojo/tag_utils.py new file mode 100644 index 00000000000..054dc2b0a08 --- /dev/null +++ b/dojo/tag_utils.py @@ -0,0 +1,164 @@ +from __future__ import annotations + +from collections.abc import Iterable + +from django.conf import settings +from django.db import models, transaction +from tagulous.utils import parse_tags + +from dojo.models import Product # local import to avoid circulars at import time + + +def bulk_add_tags_to_instances(tag_or_tags, instances, tag_field_name: str = "tags", batch_size: int | None = None) -> int: + """ + Efficiently add tag(s) to many model instances. + + - tags can be a single string, an iterable of strings or tag objects, or a Tagulous edit string + - Works with QuerySet or list of instances + - Does not (yet) enforce TagField max_count + - Will clear the prefetch cache for the tag_field_name field to avoid stale results + + Returns the number of new relationships created across all provided tags. + """ + # Resolve batch size from settings if not provided + if batch_size is None: + batch_size = getattr(settings, "TAG_BULK_ADD_BATCH_SIZE", 1000) + + # Convert QuerySet to list if needed + if hasattr(instances, "model"): + instances = list(instances) + + if not instances: + return 0 + + # Get model class and resolve TagField from first instance + model_class = instances[0].__class__ + + # Explicitly reject Product instances for now. Bulk tagging Products should + # trigger tag inheritance propagation to child objects, which is normally + # handled by m2m signals that this utility bypasses. To avoid partial + # updates or surprising side effects, we disallow Products here. Use the + # standard `.tags.add(...)` API or a dedicated propagation-aware helper. 
+ if model_class is Product: + msg = "bulk_add_tags_to_instances: Product instances are not supported; use Product.tags.add() or a propagation-aware helper" + raise ValueError(msg) + + try: + tag_field = model_class._meta.get_field(tag_field_name) + except Exception: + msg = f"Model {model_class.__name__} does not have field '{tag_field_name}'" + raise ValueError(msg) + + if not hasattr(tag_field, "tag_options"): + msg = f"Field '{tag_field_name}' is not a TagField" + raise ValueError(msg) + + tag_model = tag_field.related_model + through_model = tag_field.remote_field.through + + # Normalize tags into a list of tag names + tag_names = [] + try: + if isinstance(tag_or_tags, str): + space_delimiter = getattr(tag_field, "tag_options", None).space_delimiter if hasattr(tag_field, "tag_options") else False + tag_names = parse_tags(tag_or_tags, space_delimiter=space_delimiter) + elif isinstance(tag_or_tags, Iterable): + tag_names = [getattr(t, "name", str(t)) for t in tag_or_tags] + else: + tag_names = [str(tag_or_tags)] + except Exception: + tag_names = [str(tag_or_tags)] + + total_created = 0 + + # Resolve through model field names once + through_fields = {f.name: f for f in through_model._meta.fields} + source_field_name = None + target_field_name = None + for field_name, field in through_fields.items(): + if hasattr(field, "remote_field") and field.remote_field: + if field.remote_field.model == model_class: + source_field_name = field_name + elif field.remote_field.model == tag_model: + target_field_name = field_name + + for single_tag_name in tag_names: + if not single_tag_name: + continue + + # Query 1: ensure the tag exists once per tag + if tag_field.tag_options.case_sensitive: + tag, _ = tag_model.objects.get_or_create( + name=single_tag_name, + defaults={"name": single_tag_name, "protected": False}, + ) + else: + tag, _ = tag_model.objects.get_or_create( + name__iexact=single_tag_name, + defaults={"name": single_tag_name, "protected": False}, + ) + + # Process in batches to manage memory + for i in range(0, len(instances), batch_size): + batch_instances = instances[i:i + batch_size] + + with transaction.atomic(): + # Query 2: Find existing relationships in this batch + batch_ids = [instance.pk for instance in batch_instances] + existing_ids = set( + through_model.objects.filter( + **{target_field_name: tag.pk}, + ).filter( + **{f"{source_field_name}__in": batch_ids}, + ).values_list(source_field_name, flat=True), + ) + + # Find new instances that don't have this tag yet + new_instances = [instance for instance in batch_instances if instance.pk not in existing_ids] + + if new_instances: + # Query 3: Bulk create new relationships + relationships = [] + for instance in new_instances: + relationship_data = { + source_field_name: instance, + target_field_name: tag, + } + relationships.append(through_model(**relationship_data)) + + # Use ignore_conflicts=True to handle race conditions + actually_created = through_model.objects.bulk_create( + relationships, + ignore_conflicts=True, + ) + + # Count how many were actually created (Django 4.0+) + batch_created = ( + len(actually_created) + if hasattr(actually_created, "__len__") + else len(new_instances) + ) + + total_created += batch_created + + # Query 4: Update tag count + tag_model.objects.filter(pk=tag.pk).update( + count=models.F("count") + batch_created, + ) + + # Invalidate Django's prefetch cache for the tag relation on + # the affected instances so subsequent access reloads from DB. 
+ # This avoids stale results when callers reuse the same + # in-memory objects after the bulk operation. + # It will result in a refresh from DB if the caller calls instance.tags + # In theory we could update the django-tagulous private cache of tags + # but that would create a bit of a tight link with tagulous internals. + for instance in new_instances: + prefetch_cache = getattr(instance, "_prefetched_objects_cache", None) + if prefetch_cache is not None: + prefetch_cache.pop(tag_field_name, None) + + return total_created + + +__all__ = ["bulk_add_tags_to_instances"] diff --git a/dojo/tags_signals.py b/dojo/tags_signals.py index cb7e3bee640..0cade958265 100644 --- a/dojo/tags_signals.py +++ b/dojo/tags_signals.py @@ -28,6 +28,7 @@ def product_tags_post_add_remove(sender, instance, action, **kwargs): @receiver(signals.m2m_changed, sender=Test.tags.through) @receiver(signals.m2m_changed, sender=Finding.tags.through) def make_inherited_tags_sticky(sender, instance, action, **kwargs): + """Make sure inherited tags are added back in if they are removed.""" if action in {"post_add", "post_remove"}: if inherit_product_tags(instance): tag_list = [tag.name for tag in instance.tags.all()] @@ -40,6 +41,7 @@ def make_inherited_tags_sticky(sender, instance, action, **kwargs): @receiver(signals.post_save, sender=Test) @receiver(signals.post_save, sender=Finding) def inherit_tags_on_instance(sender, instance, created, **kwargs): + """Usually nothing to do when saving a model, except possibly for newly created instances.""" if inherit_product_tags(instance): tag_list = instance._tags_tagulous.get_tag_list() if propagate_inheritance(instance, tag_list=tag_list): diff --git a/dojo/tasks.py b/dojo/tasks.py index 90d4a928bf4..ad489a0c229 100644 --- a/dojo/tasks.py +++ b/dojo/tasks.py @@ -1,16 +1,19 @@ import logging -from datetime import date, timedelta +from datetime import timedelta -from auditlog.models import LogEntry from celery.utils.log import get_task_logger -from dateutil.relativedelta import relativedelta +from django.apps import apps from django.conf import settings from django.core.management import call_command from django.db.models import Count, Prefetch from django.urls import reverse from django.utils import timezone +from dojo.auditlog import run_flush_auditlog from dojo.celery import app +from dojo.decorators import dojo_async_task +from dojo.finding.helper import fix_loop_duplicates +from dojo.management.commands.jira_status_reconciliation import jira_status_reconciliation from dojo.models import Alerts, Announcement, Endpoint, Engagement, Finding, Product, System_Settings, User from dojo.notifications.helper import create_notification from dojo.utils import calculate_grade, sla_compute_and_notify @@ -91,22 +94,7 @@ def cleanup_alerts(*args, **kwargs): @app.task(bind=True) def flush_auditlog(*args, **kwargs): - retention_period = settings.AUDITLOG_FLUSH_RETENTION_PERIOD - - if retention_period < 0: - logger.info("Flushing auditlog is disabled") - return - - logger.info("Running Cleanup Task for Logentries with %d Months retention", retention_period) - retention_date = date.today() - relativedelta(months=retention_period) - subset = LogEntry.objects.filter(timestamp__date__lt=retention_date) - event_count = subset.count() - logger.debug("Initially received %d Logentries", event_count) - if event_count > 0: - subset._raw_delete(subset.db) - logger.debug("Total number of audit log entries deleted: %s", event_count) - else: - logger.debug("No outdated Logentries found") + run_flush_auditlog() @app.task(bind=True)
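Note on the flush_auditlog refactor above: the task body now only delegates to run_flush_auditlog() from dojo/auditlog.py, and the settings changes earlier in this patch add AUDITLOG_TYPE plus the DD_AUDITLOG_FLUSH_BATCH_SIZE and DD_AUDITLOG_FLUSH_MAX_BATCHES knobs. The snippet below is a purely illustrative sketch of how a batched flush driven by those settings could look; it is not the actual dojo/auditlog.py implementation, which is not part of this hunk.

    # Illustrative sketch only; the real run_flush_auditlog() in dojo/auditlog.py may differ.
    from datetime import date

    from auditlog.models import LogEntry
    from dateutil.relativedelta import relativedelta
    from django.conf import settings


    def flush_auditlog_in_batches() -> int:
        retention_months = settings.AUDITLOG_FLUSH_RETENTION_PERIOD
        if retention_months < 0:
            return 0  # flushing disabled, mirroring the old task behaviour
        cutoff = date.today() - relativedelta(months=retention_months)
        deleted_total = 0
        # Delete in bounded batches instead of one large _raw_delete()
        for _ in range(settings.AUDITLOG_FLUSH_MAX_BATCHES):
            batch_ids = list(
                LogEntry.objects.filter(timestamp__date__lt=cutoff)
                .values_list("id", flat=True)[: settings.AUDITLOG_FLUSH_BATCH_SIZE],
            )
            if not batch_ids:
                break
            deleted, _ = LogEntry.objects.filter(id__in=batch_ids).delete()
            deleted_total += deleted
        return deleted_total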
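Earlier in this patch, dojo/tag_utils.py introduces bulk_add_tags_to_instances(). A minimal usage sketch follows; the queryset filter and tag name are made up for illustration only.

    from dojo.models import Finding
    from dojo.tag_utils import bulk_add_tags_to_instances

    # Hypothetical selection of findings; any non-Product queryset or list works.
    findings = Finding.objects.filter(active=True, verified=False)
    created = bulk_add_tags_to_instances("needs-triage", findings, batch_size=500)
    print(f"created {created} new tag relationships")

    # Product instances are rejected on purpose so the m2m signals can still
    # propagate inherited tags; tag them through the normal API instead:
    # product.tags.add("needs-triage")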
@@ -127,7 +115,7 @@ def async_dupe_delete(*args, **kwargs): logger.info("delete excess duplicates (max_dupes per finding: %s, max deletes per run: %s)", dupe_max, total_duplicate_delete_count_max_per_run) deduplicationLogger.info("delete excess duplicates (max_dupes per finding: %s, max deletes per run: %s)", dupe_max, total_duplicate_delete_count_max_per_run) - # limit to 100 to prevent overlapping jobs + # limit to settings.DUPE_DELETE_MAX_PER_RUN to prevent overlapping jobs results = Finding.objects \ .filter(duplicate=True) \ .order_by() \ @@ -144,13 +132,17 @@ def async_dupe_delete(*args, **kwargs): queryset=Finding.objects.filter(duplicate=True).order_by("date"))) total_deleted_count = 0 + affected_products = set() for original in originals_with_too_many_duplicates: duplicate_list = original.original_finding.all() dupe_count = len(duplicate_list) - dupe_max for finding in duplicate_list: deduplicationLogger.debug(f"deleting finding {finding.id}:{finding.title} ({finding.hash_code}))") - finding.delete() + # Collect the product for batch grading later + affected_products.add(finding.test.engagement.product) + # Skip individual product grading during deletion + finding.delete(product_grading_option=False) total_deleted_count += 1 dupe_count -= 1 if dupe_count <= 0: @@ -163,6 +155,14 @@ def async_dupe_delete(*args, **kwargs): logger.info("total number of excess duplicates deleted: %s", total_deleted_count) + # Batch product grading for all affected products + if affected_products: + system_settings = System_Settings.objects.get() + if system_settings.enable_product_grade: + logger.info("performing batch product grading for %s products", len(affected_products)) + for product in affected_products: + calculate_grade(product) + @app.task(ignore_result=False) def celery_status(): @@ -182,13 +182,11 @@ def async_sla_compute_and_notify_task(*args, **kwargs): @app.task def jira_status_reconciliation_task(*args, **kwargs): - from dojo.management.commands.jira_status_reconciliation import jira_status_reconciliation return jira_status_reconciliation(*args, **kwargs) @app.task def fix_loop_duplicates_task(*args, **kwargs): - from dojo.finding.helper import fix_loop_duplicates return fix_loop_duplicates() @@ -222,3 +220,53 @@ def evaluate_pro_proposition(*args, **kwargs): @app.task def clear_sessions(*args, **kwargs): call_command("clearsessions") + + +@dojo_async_task +@app.task +def update_watson_search_index_for_model(model_name, pk_list, *args, **kwargs): + """ + Async task to update watson search indexes for a specific model type. + + Args: + model_name: Model identifier like 'dojo.finding' + pk_list: List of primary keys for instances of this model type. It is advised to chunk the list into batches of 1000 or fewer.
+ + """ + from watson.search import SearchContextManager, default_search_engine # noqa: PLC0415 circular import + + logger.debug(f"Starting async watson index update for {len(pk_list)} {model_name} instances") + + try: + # Create new SearchContextManager and start it + context_manager = SearchContextManager() + context_manager.start() + + # Get the default engine and model class + engine = default_search_engine + app_label, model_name = model_name.split(".") + model_class = apps.get_model(app_label, model_name) + + # Bulk load instances and add them to the context + instances = model_class.objects.filter(pk__in=pk_list) + instances_added = 0 + instances_skipped = 0 + + for instance in instances: + try: + # Add to watson context (this will trigger indexing on end()) + context_manager.add_to_context(engine, instance) + instances_added += 1 + except Exception as e: + logger.warning(f"Skipping {model_name}:{instance.pk} - {e}") + instances_skipped += 1 + continue + + # Let watson handle the bulk indexing + context_manager.end() + + logger.info(f"Completed async watson index update: {instances_added} updated, {instances_skipped} skipped") + + except Exception as e: + logger.error(f"Watson async index update failed for {model_name}: {e}") + raise diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 007612ce741..c562b598cd9 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -233,33 +233,33 @@
  • -
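For the new update_watson_search_index_for_model task added to dojo/tasks.py above, the docstring advises chunking the primary key list into batches of 1000 or fewer. A hedged caller sketch is below; the model and chunk size are examples only, and whether the call runs inline or is queued is decided by the dojo_async_task decorator.

    from dojo.models import Finding
    from dojo.tasks import update_watson_search_index_for_model

    CHUNK = 1000  # keep each task's pk list small, per the docstring advice
    pks = list(Finding.objects.values_list("pk", flat=True))
    for start in range(0, len(pks), CHUNK):
        update_watson_search_index_for_model("dojo.finding", pks[start:start + CHUNK])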