diff --git a/.azuredevops/pipelines/generate-owasp-zap-report.yaml b/.azuredevops/pipelines/generate-owasp-zap-report.yaml index a820f36a01..19ddf1ba90 100644 --- a/.azuredevops/pipelines/generate-owasp-zap-report.yaml +++ b/.azuredevops/pipelines/generate-owasp-zap-report.yaml @@ -17,9 +17,9 @@ schedules: always: true variables: - - group: global_variable_group + - group: DEV_core_backend - name: dev_rg - value: rg-cohort-manager-dev-uks + value: rg-cohman-dev-uks jobs: - job: owasp_zap_report @@ -35,7 +35,8 @@ jobs: scriptLocation: 'inlineScript' inlineScript: | echo "generating report" - az account set --subscription $(DEV_SUBSCRIPTION_ID) + + az account set --subscription $(TF_VAR_TARGET_SUBSCRIPTION_ID) function_list=$(az resource list --resource-group $(dev_rg) --resource-type Microsoft.Web/sites --query "[?kind=='functionapp,linux,container'].name" | sed 's/[][]//g; s/[",]//g' | sed '/^$/d') for function in $function_list; do @@ -55,5 +56,3 @@ jobs: for function in $function_list; do cat zap_report_$function done - - diff --git a/.azuredevops/pipelines/owasp-zap-report-sandbox-core.yaml b/.azuredevops/pipelines/owasp-zap-report-sandbox-core.yaml new file mode 100644 index 0000000000..5ecd404b15 --- /dev/null +++ b/.azuredevops/pipelines/owasp-zap-report-sandbox-core.yaml @@ -0,0 +1,28 @@ +--- +name: $(Build.SourceBranchName)-$(Date:yyyyMMdd)_$(Rev:r) +trigger: none +pr: none + +resources: + repositories: + - repository: dtos-devops-templates + type: github + name: NHSDigital/dtos-devops-templates + ref: 51ab3ef022f53cd62e8fb6d3eee84872a150e6a3 + endpoint: NHSDigital + +variables: + - name: hostPoolName + value: private-pool-dev-uks + - group: SBRK_core_backend + - name: ENVIRONMENT + value: sandbox + +stages: + - template: ../templates/owasp-zap-report-core-common.yaml + parameters: + hostPoolName: $(hostPoolName) + environment: $(ENVIRONMENT) + slackWebHook: $(SLACK_WEBHOOK_URL_WORKFLOWS) + apiUrl: 
'https://sbrk-uks-retrieve-cohort-distribution-data.azurewebsites.net/api/RetrieveCohortDistributionData' + webUrl: 'https://cohort-dev.non-live.screening.nhs.uk' diff --git a/.azuredevops/templates/owasp-zap-report-core-common.yaml b/.azuredevops/templates/owasp-zap-report-core-common.yaml new file mode 100644 index 0000000000..3d853b6665 --- /dev/null +++ b/.azuredevops/templates/owasp-zap-report-core-common.yaml @@ -0,0 +1,189 @@ +parameters: + - name: hostPoolName + type: string + - name: environment + type: string + - name: slackWebHook + type: string + default: '' + - name: apiKey + type: string + default: '' + - name: apiUrl + type: string + default: '' + - name: webUrl + type: string + default: '' + +stages: +- stage: zap_security_scan + displayName: "ZAP Security Scan" + jobs: + - job: discover_scan_publish + displayName: "Discover Azure Function Apps and Run ZAP" + pool: + name: ${{ parameters.hostPoolName }} + steps: + - script: | + chmod +x ./scripts/zap/run-zap-scan-api.sh + ./scripts/zap/run-zap-scan-api.sh ${{ parameters.apiUrl }} zap-reports ${{ parameters.apiKey }} + name: run_zap_scan_api + displayName: "Change permissions and run ZAP API Scan" + + - script: | + chmod +x ./scripts/zap/run-zap-scan-web.sh + ./scripts/zap/run-zap-scan-web.sh ${{ parameters.webUrl }} zap-reports + name: run_zap_scan_web + displayName: "Change permissions and run ZAP Web Scan" + + - script: | + python3 scripts/zap/generate-junit-reports.py --input zap-reports --output junit-reports --summary $(Pipeline.Workspace)/zap_summary.txt + name: generate_junit_reports + displayName: "Generate JUnit XML Reports from ZAP JSON" + + - task: PublishTestResults@2 + inputs: + testResultsFormat: 'JUnit' + testResultsFiles: 'junit-reports/*.xml' + failTaskOnFailedTests: false + name: publish_test_results + displayName: "Publish test results" + + - job: read_summary_job + displayName: "Read ZAP Summary" + dependsOn: + - discover_scan_publish + pool: + name: ${{ parameters.hostPoolName 
}} + steps: + - bash: | + SUMMARY_FILE="$(Pipeline.Workspace)/zap_summary.txt" + + echo "Looking for: $SUMMARY_FILE" + + if [[ -f "$SUMMARY_FILE" ]]; then + summary_content="$(cat "$SUMMARY_FILE")" + echo "Summary loaded:" + echo "$summary_content" + echo "##vso[task.setvariable variable=zap_summary;isOutput=true]$summary_content" + else + echo "summary.txt not found at $SUMMARY_FILE" + echo "##vso[task.setvariable variable=zap_summary;isOutput=true]No summary available." + fi + name: read_summary + displayName: "Read summary.txt and export as variable" + + - job: set_stage_status + displayName: Set Stage Status + dependsOn: + - read_summary_job + condition: always() + variables: + discover_scan_publish_result: $[ dependencies.discover_scan_publish.result ] + steps: + - bash: | + status="failed" + # Access the variables that were set at the job level + if [[ "$(discover_scan_publish_result)" == "Succeeded" || "$(discover_scan_publish_result)" == "Skipped" ]]; then + status="succeeded" + fi + echo "The final status of the ZAP Security Scan stage is: $status" + echo "##vso[task.setvariable variable=stage_status;isOutput=true]$status" + name: set_status + displayName: Set Stage Status + + +- stage: notify_stage + displayName: Send Slack Notification + dependsOn: + - zap_security_scan + condition: | + and( + always(), + ne('${{ parameters.slackWebHook }}', '') + ) + pool: + name: ${{ parameters.hostPoolName }} + variables: + zap_security_scan_status: $[ stageDependencies.zap_security_scan.set_stage_status.outputs['set_status.stage_status'] ] + zap_summary: $[ stageDependencies.zap_security_scan.read_summary_job.outputs['read_summary.zap_summary'] ] + jobs: + - job: set_status_job + displayName: Set Status Output + steps: + - bash: | + # Construct the JSON string + JSON_INPUT=$(cat <* + â€ĸ ** + + *📊 Note:* End-to-end tests may still be running. Please check the <#C08U1DTP6CQ|manager-tests> Slack channel for completed test results. 
+ slackFailureMessage: | + *❌ ZAP Scan _Failed_* + + *Environment:* _${{ parameters.environment }}_ + *Requested By:* $(Build.RequestedFor) + *Branch:* $(Build.SourceBranchName) + + *ZAP Summary:* + ```$(zap_summary)``` + + *Failed Stage(s):* $(failed_stages) + *Skipped Stage(s):* $(skipped_stages) + + • *<$(System.CollectionUri)$(System.TeamProject)/_build/results?buildId=$(Build.BuildId)|View ADO Pipeline Run $(Build.BuildId) here>* + steps: + - checkout: dtos-devops-templates + sparseCheckoutDirectories: scripts + path: templates + displayName: 'Checkout Templates Repo' + + - task: PythonScript@0 + displayName: 'Send Slack Success Notification' + condition: eq(variables.final_status, 'succeeded') + inputs: + scriptSource: 'filePath' + scriptPath: '$(Pipeline.Workspace)/templates/scripts/integrations/slack/SlackIntegrator.py' + arguments: > + --webhook "${{ parameters.slackWebHook }}" + --markdown "$(slackSuccessMessage)" + + - task: PythonScript@0 + displayName: 'Send Slack Failure Notification' + condition: eq(variables.final_status, 'failed') + inputs: + scriptSource: 'filePath' + scriptPath: '$(Pipeline.Workspace)/templates/scripts/integrations/slack/SlackIntegrator.py' + arguments: > + --webhook "${{ parameters.slackWebHook }}" + --markdown "$(slackFailureMessage)" diff --git a/scripts/zap/generate-junit-reports.py b/scripts/zap/generate-junit-reports.py new file mode 100644 index 0000000000..3d62604819 --- /dev/null +++ b/scripts/zap/generate-junit-reports.py @@ -0,0 +1,157 @@ +import json +import glob +import os +import logging +import argparse +import xml.etree.ElementTree as ET + + +logging.basicConfig( + level=logging.INFO, + format="[%(levelname)s] %(message)s" +) + + +def create_summary_file(summary_file: str, text: str): + """ + Create summary file with the provided text in the current folder. 
+ """ + try: + with open(summary_file, "w+", encoding="utf-8") as f: + f.write(text) + except Exception as e: + logging.error(f"Failed to write to summary file {summary_file}: {e}") + + +def generate_junit_reports(input_dir: str, output_dir: str, summary_file: str): + """ + Convert ZAP JSON scan reports to JUnit XML. + + Args: + input_dir (str): Directory containing ZAP JSON reports. + output_dir (str): Directory to write JUnit XML files. + summary_file (str): File to write the summary. + """ + + if not os.path.isdir(input_dir): + logging.error(f"Input directory not found: {input_dir}") + return + + os.makedirs(output_dir, exist_ok=True) + + json_files = glob.glob(os.path.join(input_dir, "*.json")) + if not json_files: + logging.warning(f"No JSON files found in {input_dir}") + return + + summary_text = "" + + for file in json_files: + try: + logging.info(f"Processing {file}") + with open(file, "r") as f: + data = json.load(f) + except Exception as e: + logging.error(f"Failed to read JSON file {file}: {e}") + continue + + testsuite = ET.Element("testsuite", name=f"ZAP Security Scan ({file})") + + total = 0 + failures = 0 + skipped = 0 + + try: + for site in data.get("site", []): + site_name = site.get("name", "ZAP") + + for alert in site.get("alerts", []): + total += 1 + + alert_name = alert.get("alert", "Unknown Alert") + severity = alert.get("riskdesc", "") + description = alert.get("desc", "") + + testcase = ET.SubElement( + testsuite, + "testcase", + classname=site_name, + name=f"{alert_name} ({severity})" + ) + + if "High" in severity: + failures += 1 + failure = ET.SubElement( + testcase, + "failure", + message=severity + ) + failure.text = description + + elif "Medium" in severity: + skipped += 1 + skip = ET.SubElement( + testcase, + "skipped", + message=severity + ) + skip.text = description + + testsuite.set("tests", str(total)) + testsuite.set("failures", str(failures)) + testsuite.set("skipped", str(skipped)) + except Exception as e: + 
logging.error(f"Error processing alerts in {file}: {e}") + continue + + xml_file = os.path.join( + output_dir, + os.path.basename(file).replace(".json", ".xml") + ) + + try: + tree = ET.ElementTree(testsuite) + with open(xml_file, "wb") as f: + tree.write(f, encoding="utf-8", xml_declaration=True) + + summary_text += f"{xml_file} (Total: {total}, High: {failures}, Medium: {skipped}) " + + except Exception as e: + logging.error(f"Failed to write XML file {xml_file}: {e}") + + create_summary_file(summary_file, summary_text) + + +def parse_args(): + parser = argparse.ArgumentParser( + description="Convert ZAP JSON reports to JUnit XML format." + ) + + parser.add_argument( + "--input", + required=True, + help="Directory containing ZAP JSON reports" + ) + + parser.add_argument( + "--output", + required=True, + help="Directory to write generated JUnit XML reports" + ) + + parser.add_argument( + "--summary", + required=True, + help="File to write the summary" + ) + + return parser.parse_args() + + +def main(): + args = parse_args() + generate_junit_reports(args.input, args.output, args.summary) + + +if __name__ == "__main__": + main() diff --git a/scripts/zap/run-zap-scan-api.sh b/scripts/zap/run-zap-scan-api.sh new file mode 100644 index 0000000000..5d4d6ac989 --- /dev/null +++ b/scripts/zap/run-zap-scan-api.sh @@ -0,0 +1,85 @@ +#!/bin/bash + +set -euo pipefail + +usage() { + echo "Usage: $0 <api_url> <report_dir> [api_key]" + echo + echo " <api_url> The API URL (without http protocol)" + echo " <report_dir> Directory to write ZAP reports into" + echo " [api_key] (Optional) API key for header injection" + exit 2 +} + +# Globals set by validate_args +API_URL="" +REPORT_DIR="" +API_KEY="" + +validate_args() { + # Require at least 2 arguments, max 3 + if [[ $# -lt 2 || $# -gt 3 ]]; then + usage + fi + + API_URL="$1" + REPORT_DIR="$2" + API_KEY="${3:-}" # Optional +} + +main() { + # Validate Docker is installed + if ! command -v docker >/dev/null 2>&1; then + echo "❌ Error: Docker is not installed or not in PATH." 
+ exit 1 + fi + + # Prepare reports directory + mkdir -p "$REPORT_DIR" || { + echo "❌ Error: failed to create directory '$REPORT_DIR'" + exit 1 + } + chmod 777 "$REPORT_DIR" || { + echo "❌ Error: failed to set permissions on '$REPORT_DIR'" + exit 1 + } + + echo "🔍 Scanning ${API_URL}" + + # Build ZAP replacer config if API_KEY is provided + ZAP_HEADER_CFG="" + + if [[ -n "${API_KEY}" ]]; then + echo "🔑 API key provided — adding OCP-Apim-Subscription-Key header" + ZAP_HEADER_CFG=" + -config replacer.full_list(0).matchtype=REQ_HEADER + -config replacer.full_list(0).matchstr=OCP-Apim-Subscription-Key + -config replacer.full_list(0).regex=false + -config replacer.full_list(0).replacement=${API_KEY} + -config replacer.full_list(0).enabled=true + " + else + echo "ℹ️ No API key provided — running without authentication header" + fi + + if ! docker run --rm \ + --user root \ + -v "$(pwd)/${REPORT_DIR}:/zap/wrk" \ + ghcr.io/zaproxy/zaproxy:stable \ + zap-api-scan.py \ + -t "${API_URL}" \ + -f openapi \ + -z "${ZAP_HEADER_CFG}" \ + -j \ + -J "api.json"; then + + echo "⚠️ Warning: ZAP scan failed for ${API_URL}, continuing..." + fi + + echo "✅ Completed. Reports saved to: ${REPORT_DIR}" +} + + # ---- Order matters: validate first, then main ---- +validate_args "$@" +main +exit 0 diff --git a/scripts/zap/run-zap-scan-web.sh b/scripts/zap/run-zap-scan-web.sh new file mode 100644 index 0000000000..19f5c75d6b --- /dev/null +++ b/scripts/zap/run-zap-scan-web.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +set -euo pipefail + +usage() { + echo "Usage: $0 <web_url> <report_dir>" + echo + echo " <web_url> The WEB URL (without http protocol)" + echo " <report_dir> Directory to write ZAP reports into" + exit 2 +} + +# Globals set by validate_args +WEB_URL="" +REPORT_DIR="" + +validate_args() { + # Require exactly two arguments + if [[ $# -ne 2 ]]; then + usage + fi + + WEB_URL="$1" + REPORT_DIR="$2" +} + +main() { + # Validate Docker is installed + if ! 
command -v docker >/dev/null 2>&1; then + echo "❌ Error: Docker is not installed or not in PATH." + exit 1 + fi + + # Prepare reports directory + mkdir -p "$REPORT_DIR" || { + echo "❌ Error: failed to create directory '$REPORT_DIR'" + exit 1 + } + chmod 777 "$REPORT_DIR" || { + echo "❌ Error: failed to set permissions on '$REPORT_DIR'" + exit 1 + } + + echo "🔍 Scanning ${WEB_URL}" + + if ! docker run --rm \ + --user root \ + -v "$(pwd)/${REPORT_DIR}:/zap/wrk" \ + ghcr.io/zaproxy/zaproxy:stable \ + zap-baseline.py \ + -t "${WEB_URL}" \ + -j \ + -J "web.json"; then + + echo "⚠️ Warning: ZAP scan failed for ${WEB_URL}, continuing..." + fi + + echo "✅ Completed. Reports saved to: ${REPORT_DIR}" +} + + # ---- Order matters: validate first, then main ---- +validate_args "$@" +main +exit 0