From 33adab4a89677752cbad00db2515a5d5bf89c820 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 08:49:42 -0600 Subject: [PATCH 01/17] feat: add redis role for ares workers, improve agent toolchain, and enhance log shipping **Added:** - New `redis` Ansible role to provision and configure Redis server for Ares worker message brokering, including systemd template for worker services and verification - Pre-commit hooks for ansible-lint, docsible docs generation, arch diagram auto-update, yamllint, markdownlint, and shell script linting/formatting - `.hooks/requirements.txt` with standardized Python tooling dependencies for testing and linting - Pre-commit configuration for Ansible, Markdown, YAML, and shell linting, plus documentation and architecture diagram automation - `goad_attack_box_configure.yml` playbook for post-deploy Alloy log agent configuration on golden images **Changed:** - All roles that depend on impacket now inject source-built impacket into the system Python on Kali if the apt version is <0.13.0, ensuring regsecrets module is available for NetExec - NetExec pipx venv: pipx inject now uses `--editable --force` and verification asserts regsecrets is importable, with clear failure output - Role and molecule verification for all agent roles now assert regsecrets module is present in relevant venvs (and system Python on Kali) - `base` role now installs AWS CLI v2 by default and exposes a flag to skip - `base` role's pip install for Ares Python dependencies now pins `rigging>=3.0` and omits the deprecated `dreadnode` meta-package - Added `acl` package to base system dependencies to support POSIX ACLs for log shipping and agent file permissions - Playbooks and Alloy role/templates now support `namespace` and `app` labels for better Grafana dashboard compatibility with EC2 agents - `privesc_tools` role now downloads prebuilt KrbRelayUp.exe from SharpCollection for stability, rather than cloning the repo - All docsible-generated role READMEs 
updated to document new and changed installation, verification, and configuration steps - `goad_attack_box.yml` now provisions redis, AWS agents, and Alloy by default, and sets ACLs on shell history files for log shipping **Removed:** - Redundant pipx path ensure logic from `base` role (now handled via profile.d) - Legacy ensurepath step after pipx reinstall in `recon_tools` (now unnecessary) --- .claude/agents/ares-operator.md | 9 + .claude/agents/python-ares-expert.md | 1 + .claude/agents/rust-ares-expert.md | 14 + .gemini/agents/ares-operator.md | 9 + .github/workflows/molecule.yaml | 420 ++++++++++++++++++ .hooks/ansible-lint-hook.sh | 13 + .hooks/docsible-hook.sh | 86 ++++ .hooks/gen-arch-diagram.py | 163 +++++++ .hooks/linters/ansible-lint.yaml | 14 + .hooks/linters/markdownlint.json | 20 + .hooks/linters/yamllint.yaml | 32 ++ .hooks/requirements.txt | 8 + .hooks/templates/docsible-template.md.j2 | 77 ++++ .pre-commit-config.yaml | 55 +++ .taskfiles/ec2/scripts/setup.sh | 48 +- .taskfiles/ec2/scripts/status.sh | 24 +- .taskfiles/remote/orchestrator-wrapper.sh | 18 +- ansible/README.md | 282 +++++++----- ansible/playbooks/ares/goad_attack_box.yml | 140 +++++- .../ares/goad_attack_box_configure.yml | 151 +++++++ ansible/playbooks/windows/target_setup.yml | 3 - ansible/requirements.yml | 4 +- ansible/roles/acl_tools/README.md | 4 + .../acl_tools/molecule/default/verify.yml | 40 ++ .../roles/acl_tools/tasks/impacket_source.yml | 21 + ansible/roles/acl_tools/tasks/linux.yml | 5 +- ansible/roles/alloy/README.md | 4 + ansible/roles/alloy/defaults/main.yml | 7 + ansible/roles/alloy/templates/config.alloy.j2 | 12 + ansible/roles/aws_cloudwatch_agent/README.md | 2 + ansible/roles/aws_ssm_agent/README.md | 16 +- ansible/roles/aws_ssm_agent/tasks/linux.yml | 57 ++- ansible/roles/base/README.md | 27 +- ansible/roles/base/defaults/main.yml | 7 +- .../roles/base/molecule/default/verify.yml | 9 +- ansible/roles/base/tasks/install_awscli.yml | 51 +++ 
ansible/roles/base/tasks/install_pipx.yml | 17 - ansible/roles/base/tasks/linux.yml | 13 +- ansible/roles/coercion_tools/README.md | 4 + .../molecule/default/verify.yml | 14 + .../coercion_tools/tasks/impacket_source.yml | 21 + ansible/roles/cracking_tools/README.md | 3 + .../molecule/source-build/verify.yml | 21 + .../roles/cracking_tools/tasks/hashcat.yml | 7 + .../roles/credential_access_tools/README.md | 4 + .../molecule/default/verify.yml | 16 + .../tasks/impacket_source.yml | 21 + ansible/roles/dc_audit_sacl/README.md | 3 + ansible/roles/dc_audit_sacl/tasks/main.yml | 12 +- ansible/roles/fluent_bit/README.md | 2 + .../roles/lateral_movement_tools/README.md | 4 + .../molecule/default/verify.yml | 16 + .../tasks/impacket_source.yml | 21 + ansible/roles/mythic/README.md | 2 + ansible/roles/privesc_tools/README.md | 14 +- ansible/roles/privesc_tools/defaults/main.yml | 6 +- .../privesc_tools/molecule/default/verify.yml | 25 +- .../privesc_tools/tasks/impacket_source.yml | 21 + ansible/roles/privesc_tools/tasks/linux.yml | 19 +- .../privesc_tools/tasks/pygpoabuse_pipx.yml | 15 +- ansible/roles/recon_tools/README.md | 7 +- .../recon_tools/molecule/default/verify.yml | 53 ++- .../recon_tools/tasks/impacket_source.yml | 21 + .../roles/recon_tools/tasks/netexec_pipx.yml | 33 +- ansible/roles/redis/README.md | 71 +++ ansible/roles/redis/defaults/main.yml | 15 + ansible/roles/redis/handlers/main.yml | 11 + ansible/roles/redis/meta/main.yml | 24 + ansible/roles/redis/tasks/linux.yml | 77 ++++ ansible/roles/redis/tasks/main.yml | 4 + .../redis/templates/ares-worker@.service.j2 | 19 + 71 files changed, 2245 insertions(+), 244 deletions(-) create mode 100644 .github/workflows/molecule.yaml create mode 100755 .hooks/ansible-lint-hook.sh create mode 100755 .hooks/docsible-hook.sh create mode 100755 .hooks/gen-arch-diagram.py create mode 100644 .hooks/linters/ansible-lint.yaml create mode 100644 .hooks/linters/markdownlint.json create mode 100644 .hooks/linters/yamllint.yaml 
create mode 100644 .hooks/requirements.txt create mode 100644 .hooks/templates/docsible-template.md.j2 create mode 100644 ansible/playbooks/ares/goad_attack_box_configure.yml create mode 100644 ansible/roles/base/tasks/install_awscli.yml create mode 100644 ansible/roles/redis/README.md create mode 100644 ansible/roles/redis/defaults/main.yml create mode 100644 ansible/roles/redis/handlers/main.yml create mode 100644 ansible/roles/redis/meta/main.yml create mode 100644 ansible/roles/redis/tasks/linux.yml create mode 100644 ansible/roles/redis/tasks/main.yml create mode 100644 ansible/roles/redis/templates/ares-worker@.service.j2 diff --git a/.claude/agents/ares-operator.md b/.claude/agents/ares-operator.md index 03dc7438..ed21353c 100644 --- a/.claude/agents/ares-operator.md +++ b/.claude/agents/ares-operator.md @@ -66,6 +66,7 @@ IMPORTANT: After code changes, ALWAYS deploy before testing. Use `task remote:ch ## Red Team Operations ### Start an operation + ```bash # via Taskfile (convenience wrappers) task red:multi TARGET=dreadgoad DOMAIN=sevenkingdoms.local @@ -80,6 +81,7 @@ task ec2:launch DOMAIN=sevenkingdoms.local TARGETS=192.168.58.10 ``` ### Monitor + ```bash # Direct CLI with transport (preferred) ares-cli --k8s ares-red ops status --latest @@ -95,6 +97,7 @@ task red:multi:tasks:list LATEST=true STATUS=failed ``` ### State injection (unblock stuck operations) + When natural progression stalls, inject state to skip past blockers: ```bash @@ -114,6 +117,7 @@ ares-cli --k8s ares-red ops inject-vulnerability op-xxx constrained_delegation 1 ``` ### Reports & Playbooks + ```bash ares-cli --k8s ares-red ops report --latest --regenerate ares-cli --k8s ares-red ops export-detection --latest # Export markdown/JSON detection playbook @@ -121,6 +125,7 @@ ares-cli --k8s ares-red ops offload-cost --latest # Sync token costs to ``` ### Maintenance + ```bash ares-cli --k8s ares-red ops backfill-domains op-xxx # Re-scan state to populate domain list ares-cli --k8s ares-red 
ops kill --all # Kill all running ops @@ -130,6 +135,7 @@ ares-cli --k8s ares-red ops cleanup --max-age-hours 24 # Delete old checkpoin ## Blue Team Operations ### Submit investigations + ```bash # From red team operation ares-cli --k8s ares-blue blue from-operation --latest @@ -142,6 +148,7 @@ ares-cli --k8s ares-blue blue watch --poll-interval 30 ``` ### Monitor & Reports + ```bash ares-cli --k8s ares-blue blue status --latest ares-cli --k8s ares-blue blue evidence --latest --json @@ -183,6 +190,7 @@ task config:set-model -- orchestrator gpt-5.2 ## Infrastructure & Debugging ### Health Checks + ```bash task ares:config:check # Check 1Password access and API keys task remote:status # K8s pod health @@ -191,6 +199,7 @@ task remote:logs ROLE=orchestrator # Read logs ``` ### Debugging Stuck Operations + 1. **Check Grafana** (`grafana.dev.plundr.ai`) for token usage and Loki errors. 2. **Check failed tasks**: `ares-cli --k8s ares-red ops tasks --latest --status failed`. 3. **Verify binary sync**: `task remote:check`. diff --git a/.claude/agents/python-ares-expert.md b/.claude/agents/python-ares-expert.md index 2875658a..4663ce3e 100644 --- a/.claude/agents/python-ares-expert.md +++ b/.claude/agents/python-ares-expert.md @@ -10,6 +10,7 @@ You are an expert on the **Python ares codebase** located at `/Users/l/dreadnode ## Project Overview Ares is an autonomous security operations multi-agent system with: + - **Red Team**: LLM-powered penetration testing with coordinator/worker architecture - **Blue Team**: SOC alert investigation and threat hunting diff --git a/.claude/agents/rust-ares-expert.md b/.claude/agents/rust-ares-expert.md index 302f4667..449dbe5d 100644 --- a/.claude/agents/rust-ares-expert.md +++ b/.claude/agents/rust-ares-expert.md @@ -10,6 +10,7 @@ You are an expert on the **Rust ares codebase** located at `/Users/l/dreadnode/a ## Project Overview Ares is an autonomous security operations multi-agent system ported from Python to Rust. 
It has: + - **Red Team**: LLM-powered penetration testing with coordinator/worker architecture - **Blue Team**: SOC alert investigation and threat hunting - **Correlation**: Red-blue activity matching and gap analysis @@ -32,12 +33,14 @@ Ares is an autonomous security operations multi-agent system ported from Python ### ares-core — Models, State, Config **models/** — Core data types: + - `core.rs`: Target, Host, User, Credential, Hash, Share - `task.rs`: AgentRole, TaskStatus, TaskInfo, TaskResult, VulnerabilityInfo, AgentInfo - `operation.rs`: OperationMeta - `blue.rs`: Evidence, TimelineEvent, BlueTaskInfo, PyramidLevel, InvestigationStage, TriageDecision **state/** — Redis state management: + - `reader.rs`: RedisStateReader — loads all state from Redis - `operations.rs`: State write operations - `blue_reader.rs` / `blue_writer.rs`: Blue team state @@ -47,11 +50,13 @@ Ares is an autonomous security operations multi-agent system ported from Python - `keys.rs`: Redis key pattern constants (ares:op:{id}:credentials, etc.) 
**config/** — YAML config: + - `mod.rs`: AresConfig (loads from ARES_CONFIG env or default paths) - `sections.rs`: Agent roles, timeouts, recovery, phase detection, vuln priorities - `defaults.rs`: Default values **parsing/** — Tool output parsers: + - `secretsdump.rs`, `kerberos.rs`, `ntlm.rs`, `delegation.rs`, `shares.rs`, `domain_sid.rs`, `hosts.rs` **reports/**: `redteam.rs`, `blueteam.rs`, `mitre.rs`, `dedup.rs` @@ -64,12 +69,14 @@ Ares is an autonomous security operations multi-agent system ported from Python ### ares-llm — LLM Integration **provider/** — Multi-provider abstraction: + - `mod.rs`: LlmProvider trait, ChatMessage, ToolCall, ToolDefinition, Role, StopReason, TokenUsage - `anthropic.rs`: Anthropic Messages API - `openai.rs`: OpenAI Chat Completions API - `ollama.rs`: Local Ollama **agent_loop.rs** — Multi-step agent execution: + - AgentLoopConfig: max_steps, max_tokens, temperature, retry, context management - ContextConfig: max_context_tokens (180k default), max_tool_output_chars (30k) - RetryConfig: exponential backoff with jitter @@ -78,11 +85,13 @@ Ares is an autonomous security operations multi-agent system ported from Python - `run_agent_loop()`: Main loop (prompt → LLM → tool_use → accumulate → repeat) **tool_registry/** — Tool definitions per role: + - `mod.rs`: tools_for_role(), is_callback_tool() - `recon.rs`, `credential_access/`, `lateral/`, `privesc/`, `cracker.rs`, `coercion.rs`, `acl.rs`, `blue.rs` - Each tool has JSON Schema for LLM tool_use **prompt/** — Task-specific prompt generation: + - `mod.rs`: StateSnapshot, generate_task_prompt() - Role-specific modules + Tera templates - `state_context.rs`: Format state for prompts @@ -90,6 +99,7 @@ Ares is an autonomous security operations multi-agent system ported from Python - `templates.rs`: Tera template loading **routing/** — Task payload enrichment: + - `domain.rs`: Domain normalization, NetBIOS→FQDN - `dc_discovery.rs`: Multi-tier DC discovery (DcTier) - `credentials.rs`: Find 
credentials for domain @@ -100,6 +110,7 @@ Ares is an autonomous security operations multi-agent system ported from Python **lib.rs**: `dispatch()` — routes tool name to implementation (100+ tools) **Tool modules**: + - `recon.rs`: nmap_scan, smb_sweep, ldap_search, bloodhound, dig_query, adidnsdump - `credential_access/`: kerberoast, secretsdump, lsassy, asrep_roast, spray, laps_dump, misc.rs, netexec_tools.rs - `cracker.rs`: hashcat, john @@ -124,6 +135,7 @@ Ares is an autonomous security operations multi-agent system ported from Python **config.rs**: OrchestratorConfig from env vars (ARES_OPERATION_ID, ARES_REDIS_URL, ARES_LLM_MODEL, etc.) **state/**: + - `shared.rs`: SharedState — Arc> - `inner.rs`: StateInner (credentials, hashes, hosts, users, shares, domains, vulns, dedup sets) - `persistence.rs`: Load/save from/to Redis @@ -131,11 +143,13 @@ Ares is an autonomous security operations multi-agent system ported from Python - `dedup.rs`: Deduplication **dispatcher/**: + - `mod.rs`: Dispatcher (queue + tracker + throttler + state) - `submission.rs`: throttled_submit() - `task_builders.rs`: request_recon(), request_crack(), etc. **automation/** — 16 background tasks: + - `crack.rs`, `credential_access.rs`, `credential_expansion.rs`, `secretsdump.rs` - `coercion.rs`, `delegation.rs`, `adcs.rs`, `privesc/acl.rs`, `s4u.rs` - `trust.rs`, `gmsa.rs`, `golden_ticket.rs`, `mssql.rs`, `bloodhound.rs`, `shares.rs` diff --git a/.gemini/agents/ares-operator.md b/.gemini/agents/ares-operator.md index 9a82d84c..87c0239c 100644 --- a/.gemini/agents/ares-operator.md +++ b/.gemini/agents/ares-operator.md @@ -73,6 +73,7 @@ IMPORTANT: After code changes, ALWAYS deploy before testing. 
Use `task remote:ch ## Red Team Operations ### Start an operation + ```bash # via Taskfile (convenience wrappers) task red:multi TARGET=dreadgoad DOMAIN=sevenkingdoms.local @@ -87,6 +88,7 @@ task ec2:launch DOMAIN=sevenkingdoms.local TARGETS=192.168.58.10 ``` ### Monitor + ```bash # Direct CLI with transport (preferred) ares-cli --k8s ares-red ops status --latest @@ -102,6 +104,7 @@ task red:multi:tasks:list LATEST=true STATUS=failed ``` ### State injection (unblock stuck operations) + When natural progression stalls, inject state to skip past blockers: ```bash @@ -121,6 +124,7 @@ ares-cli --k8s ares-red ops inject-vulnerability op-xxx constrained_delegation 1 ``` ### Reports & Playbooks + ```bash ares-cli --k8s ares-red ops report --latest --regenerate ares-cli --k8s ares-red ops export-detection --latest # Export markdown/JSON detection playbook @@ -128,6 +132,7 @@ ares-cli --k8s ares-red ops offload-cost --latest # Sync token costs to ``` ### Maintenance + ```bash ares-cli --k8s ares-red ops backfill-domains op-xxx # Re-scan state to populate domain list ares-cli --k8s ares-red ops kill --all # Kill all running ops @@ -137,6 +142,7 @@ ares-cli --k8s ares-red ops cleanup --max-age-hours 24 # Delete old checkpoin ## Blue Team Operations ### Submit investigations + ```bash # From red team operation ares-cli --k8s ares-blue blue from-operation --latest @@ -149,6 +155,7 @@ ares-cli --k8s ares-blue blue watch --poll-interval 30 ``` ### Monitor & Reports + ```bash ares-cli --k8s ares-blue blue status --latest ares-cli --k8s ares-blue blue evidence --latest --json @@ -190,6 +197,7 @@ task config:set-model -- orchestrator gpt-5.2 ## Infrastructure & Debugging ### Health Checks + ```bash task ares:config:check # Check 1Password access and API keys task remote:status # K8s pod health @@ -198,6 +206,7 @@ task remote:logs ROLE=orchestrator # Read logs ``` ### Debugging Stuck Operations + 1. **Check Grafana** (`grafana.dev.plundr.ai`) for token usage and Loki errors. 2. 
**Check failed tasks**: `ares-cli --k8s ares-red ops tasks --latest --status failed`. 3. **Verify binary sync**: `task remote:check`. diff --git a/.github/workflows/molecule.yaml b/.github/workflows/molecule.yaml new file mode 100644 index 00000000..e8bfc6ac --- /dev/null +++ b/.github/workflows/molecule.yaml @@ -0,0 +1,420 @@ +--- +name: Molecule Test +on: + merge_group: + pull_request: + branches: + - main + types: + - opened + - synchronize + - reopened + paths: + - 'ansible/**' + push: + branches: + - main + paths: + - 'ansible/**' + schedule: + # Runs every Sunday at 4 AM (see https://crontab.guru) + - cron: "0 4 * * 0" + workflow_dispatch: + inputs: + ROLE: + description: 'Role to test' + required: false + default: '' + type: string + SCENARIO: + description: 'Molecule scenario to run (default: default)' + required: false + default: 'default' + type: string + +concurrency: + # Only cancel in-progress runs for PRs, not for main branch or scheduled runs + cancel-in-progress: ${{ github.event_name == 'pull_request' && ! contains(github.event.pull_request.labels.*.name, 'renovate') }} + group: ${{ github.workflow }}-${{ github.event_name == 'pull_request' && ! 
contains(github.event.pull_request.labels.*.name, 'renovate') && github.event.pull_request.number || github.ref }} + +env: + ANSIBLE_FORCE_COLOR: "1" + COLLECTION_NAMESPACE: dreadnode + COLLECTION_NAME: nimbus_range + COLLECTION_PATH: ansible_collections/dreadnode/nimbus_range + REQUIREMENTS_FILE: .hooks/requirements.txt + PY_COLORS: "1" + PYTHON_VERSION: "3.13.7" + ROLE: ${{ github.event.inputs.ROLE }} + SCENARIO: ${{ github.event.inputs.SCENARIO || 'default' }} + ANSIBLE_COLLECTIONS_PATH: ~/.ansible/collections + +permissions: + actions: write + contents: write + issues: write + pull-requests: write + +jobs: + detect-changes: + runs-on: ubuntu-latest + outputs: + roles: ${{ steps.filter.outputs.roles }} + matrix: ${{ steps.generate-matrix.outputs.matrix }} + max_parallel: ${{ steps.generate-matrix.outputs.max_parallel }} + test_all: ${{ steps.check-event.outputs.test_all }} + steps: + - name: Set up git repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + + - name: Check event type + id: check-event + run: | + # Test all on: push to main, schedule, workflow_dispatch without specific inputs, merge_group + if [[ "${{ github.event_name }}" == "push" ]] || \ + [[ "${{ github.event_name }}" == "schedule" ]] || \ + [[ "${{ github.event_name }}" == "merge_group" ]] || \ + [[ "${{ github.event_name }}" == "workflow_dispatch" && -z "${{ env.ROLE }}" ]]; then + echo "test_all=true" >> "$GITHUB_OUTPUT" + else + echo "test_all=false" >> "$GITHUB_OUTPUT" + fi + + - name: Detect changed files + id: filter + if: steps.check-event.outputs.test_all == 'false' + run: | + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + BASE="${{ github.event.pull_request.base.sha }}" + HEAD="${{ github.event.pull_request.head.sha }}" + else + BASE="origin/main" + HEAD="HEAD" + fi + + # Get changed files under ansible/ + CHANGED_FILES=$(git diff --name-only "$BASE"..."$HEAD" -- ansible/) + echo "Changed files:" + echo 
"$CHANGED_FILES" + + # Extract changed roles (only those with molecule tests) + ROLES=$(echo "$CHANGED_FILES" | grep '^ansible/roles/' | cut -d'/' -f3 | sort -u | tr '\n' ' ') + echo "roles=$ROLES" >> "$GITHUB_OUTPUT" + echo "Changed roles: $ROLES" + + - name: Generate test matrix + id: generate-matrix + run: | + # Define roles with molecule tests + ROLES_WITH_MOLECULE=( + "acl_tools" + "base" + "coercion_tools" + "cracking_tools" + "credential_access_tools" + "lateral_movement_tools" + "mythic" + "privesc_tools" + "recon_tools" + ) + + # Define additional scenarios (role:scenario format) + # These only run on schedule/workflow_dispatch (too slow for PR CI) + declare -A ADDITIONAL_SCENARIOS=() + if [[ "${{ github.event_name }}" == "schedule" ]] || \ + [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + ADDITIONAL_SCENARIOS=( + ["cracking_tools"]="source-build" + ) + fi + + if [[ "${{ steps.check-event.outputs.test_all }}" == "true" ]]; then + # Test all roles that have molecule tests + MATRIX_JSON='[' + FIRST=true + + for role in "${ROLES_WITH_MOLECULE[@]}"; do + if [[ "$FIRST" == "true" ]]; then + FIRST=false + else + MATRIX_JSON+="," + fi + MATRIX_JSON+="{\"name\":\"Role Test - ${role}\",\"path\":\"roles/${role}\",\"scenario\":\"default\"}" + + if [[ -n "${ADDITIONAL_SCENARIOS[$role]}" ]]; then + for scenario in ${ADDITIONAL_SCENARIOS[$role]}; do + MATRIX_JSON+=",{\"name\":\"Role Test - ${role} (${scenario})\",\"path\":\"roles/${role}\",\"scenario\":\"${scenario}\"}" + done + fi + done + + MATRIX_JSON+=']' + else + # Test only changed roles that have molecule tests + ROLES="${{ steps.filter.outputs.roles }}" + MATRIX_JSON="[" + FIRST=true + + if [[ -n "$ROLES" ]]; then + for role in $ROLES; do + if [[ " ${ROLES_WITH_MOLECULE[*]} " == *" ${role} "* ]]; then + if [[ "$FIRST" == "true" ]]; then + FIRST=false + else + MATRIX_JSON+="," + fi + MATRIX_JSON+="{\"name\":\"Role Test - ${role}\",\"path\":\"roles/${role}\",\"scenario\":\"default\"}" + + if [[ -n 
"${ADDITIONAL_SCENARIOS[$role]}" ]]; then + for scenario in ${ADDITIONAL_SCENARIOS[$role]}; do + MATRIX_JSON+=",{\"name\":\"Role Test - ${role} (${scenario})\",\"path\":\"roles/${role}\",\"scenario\":\"${scenario}\"}" + done + fi + fi + done + fi + + MATRIX_JSON+=']' + fi + + echo "matrix=$MATRIX_JSON" >> "$GITHUB_OUTPUT" + echo "Matrix to test: $MATRIX_JSON" + + # Calculate dynamic max-parallel based on matrix size + MATRIX_COUNT=$(echo "$MATRIX_JSON" | jq '. | length') + + # Handle empty matrix + if [[ $MATRIX_COUNT -eq 0 ]]; then + echo "max_parallel=1" >> "$GITHUB_OUTPUT" + echo "No tests to run (empty matrix)" + exit 0 + fi + + MAX_PARALLEL=$(( (MATRIX_COUNT * 60) / 100 )) + [[ $MAX_PARALLEL -lt 4 ]] && MAX_PARALLEL=4 + [[ $MAX_PARALLEL -gt 12 ]] && MAX_PARALLEL=12 + + echo "max_parallel=$MAX_PARALLEL" >> "$GITHUB_OUTPUT" + echo "Calculated max-parallel: $MAX_PARALLEL (based on $MATRIX_COUNT tests)" + + validate-inputs: + runs-on: ubuntu-latest + steps: + - name: Set up git repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Validate inputs + run: | + ROLES_WITH_MOLECULE=( + "acl_tools" + "base" + "coercion_tools" + "cracking_tools" + "credential_access_tools" + "lateral_movement_tools" + "mythic" + "privesc_tools" + "recon_tools" + ) + + if [[ -n "${{ env.ROLE }}" ]]; then + if [[ ! -d "ansible/roles/${{ env.ROLE }}" ]]; then + echo "::error::Role '${{ env.ROLE }}' not found in ansible/roles/" + exit 1 + fi + if [[ ! 
" ${ROLES_WITH_MOLECULE[*]} " == *" ${{ env.ROLE }} "* ]]; then + echo "::error::Role '${{ env.ROLE }}' does not have molecule tests" + exit 1 + fi + fi + + role_test: + needs: validate-inputs + if: ${{ github.event.inputs.ROLE != '' }} + runs-on: ubuntu-latest + timeout-minutes: 60 + strategy: + fail-fast: false + max-parallel: 4 + matrix: + include: + - name: ${{ format('Role Test - {0}', github.event.inputs.ROLE) }} + path: ${{ format('roles/{0}', github.event.inputs.ROLE) }} + + steps: + - name: Delete huge unnecessary tools folder + shell: bash + run: | + echo "Initial disk space:" + df -h + rm -rf /opt/hostedtoolcache + echo "Disk space after cleanup:" + df -h + + - name: Checkout git repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: '${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}' + + - name: Cache Ansible collections + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 + with: + path: ~/.ansible/collections + key: ${{ runner.os }}-ansible-${{ hashFiles('**/requirements.yml') }} + + - name: Install dependencies + shell: bash + run: | + python3 -m pip install -r "${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}" + + - name: Install galaxy dependencies + working-directory: ${{ env.COLLECTION_PATH }}/ansible + shell: bash + run: | + ansible-galaxy collection install -r requirements.yml + ansible-galaxy install -r requirements.yml + + - name: Build and install collection locally + working-directory: ${{ env.COLLECTION_PATH }}/ansible + shell: bash + run: | + ansible-galaxy collection build --force + ansible-galaxy collection install dreadnode-nimbus_range-*.tar.gz -p ~/.ansible/collections --force --pre + + - name: Run molecule test + working-directory: ${{ 
env.COLLECTION_PATH }}/ansible/${{ matrix.path }} + shell: bash + env: + ANSIBLE_CONFIG: ${{ env.COLLECTION_PATH }}/ansible/ansible.cfg + ANSIBLE_ROLES_PATH: ${{ env.COLLECTION_PATH }}/ansible/roles + MOLECULE_NO_LOG: "false" + run: | + set -e + molecule --version + molecule list + + if ! MOLECULE_DEBUG=1 molecule test -s "${{ env.SCENARIO }}"; then + echo "Molecule test failed. Collecting debug information..." + + echo "Docker containers:" + docker ps -a + + echo "=== Docker Container Logs ===" + while read -r container; do + echo "=== Logs from container ${container} ===" + docker logs "${container}" 2>&1 + echo "=== End logs for container ${container} ===" + done < <(docker ps -q) + + echo "=== Molecule Logs ===" + while IFS= read -r -d '' log; do + echo "Contents of ${log}:" + cat "${log}" + echo "=== End of ${log} ===" + done < <(find . -name '*.log' -print0) + + exit 1 + fi + + full_test: + needs: [validate-inputs, detect-changes] + if: ${{ github.event.inputs.ROLE == '' && fromJson(needs.detect-changes.outputs.max_parallel) > 0 }} + runs-on: ubuntu-latest + timeout-minutes: 60 + strategy: + fail-fast: false + max-parallel: ${{ fromJson(needs.detect-changes.outputs.max_parallel) }} + matrix: + include: ${{ fromJson(needs.detect-changes.outputs.matrix) }} + + steps: + - name: Delete huge unnecessary tools folder + shell: bash + run: | + echo "Initial disk space:" + df -h + rm -rf /opt/hostedtoolcache + echo "Disk space after cleanup:" + df -h + + - name: Checkout git repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + path: ${{ env.COLLECTION_PATH }} + + - name: Set up Python + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 + with: + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: '${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}' + + - name: Cache Ansible collections + uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 + with: 
+ path: ~/.ansible/collections + key: ${{ runner.os }}-ansible-${{ hashFiles('**/requirements.yml') }} + + - name: Install dependencies + shell: bash + run: | + python3 -m pip install -r "${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}" + + - name: Install galaxy dependencies + working-directory: ${{ env.COLLECTION_PATH }}/ansible + shell: bash + run: | + ansible-galaxy collection install -r requirements.yml + ansible-galaxy install -r requirements.yml + + - name: Build and install collection locally + working-directory: ${{ env.COLLECTION_PATH }}/ansible + shell: bash + run: | + ansible-galaxy collection build --force + ansible-galaxy collection install dreadnode-nimbus_range-*.tar.gz -p ~/.ansible/collections --force --pre + + - name: Run molecule test + working-directory: ${{ env.COLLECTION_PATH }}/ansible/${{ matrix.path }} + shell: bash + env: + ANSIBLE_CONFIG: ${{ env.COLLECTION_PATH }}/ansible/ansible.cfg + run: | + set -e + molecule --version + molecule list + + SCENARIO="${{ matrix.scenario }}" + SCENARIO="${SCENARIO:-default}" + + if ! MOLECULE_DEBUG=1 molecule test -s "$SCENARIO"; then + echo "Molecule test failed. Collecting debug information..." + + echo "Docker containers:" + docker ps -a + + echo "=== Docker Container Logs ===" + while read -r container; do + echo "=== Logs from container ${container} ===" + docker logs "${container}" 2>&1 + echo "=== End logs for container ${container} ===" + done < <(docker ps -q) + + echo "=== Molecule Logs ===" + while IFS= read -r -d '' log; do + echo "Contents of ${log}:" + cat "${log}" + echo "=== End of ${log} ===" + done < <(find . 
-name '*.log' -print0) + + exit 1 + fi diff --git a/.hooks/ansible-lint-hook.sh b/.hooks/ansible-lint-hook.sh new file mode 100755 index 00000000..541e2751 --- /dev/null +++ b/.hooks/ansible-lint-hook.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -eo pipefail + +# Use offline mode locally to prevent git index corruption from concurrent +# galaxy collection installs during pre-commit hooks. In CI (GitHub Actions), +# run in online mode to allow collection installation. +if [ -z "${GITHUB_ACTIONS}" ]; then + # Local pre-commit: use offline mode + exec ansible-lint -v --force-color --offline -c .hooks/linters/ansible-lint.yaml "$@" +else + # CI: allow collection installation + exec ansible-lint -v --force-color -c .hooks/linters/ansible-lint.yaml "$@" +fi diff --git a/.hooks/docsible-hook.sh b/.hooks/docsible-hook.sh new file mode 100755 index 00000000..eceea1bc --- /dev/null +++ b/.hooks/docsible-hook.sh @@ -0,0 +1,86 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Colors +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +# Get repo root +REPO_ROOT=$(git rev-parse --show-toplevel) +cd "$REPO_ROOT/ansible" + +# Check if docsible is installed +if ! command -v docsible &>/dev/null; then + echo -e "${RED}Error: docsible is not installed${NC}" + echo "Please install docsible: pip3 install docsible==0.8.0" + exit 1 +fi + +# Check if template exists +TEMPLATE_PATH="$REPO_ROOT/.hooks/templates/docsible-template.md.j2" +if [ ! -f "$TEMPLATE_PATH" ]; then + echo -e "${RED}Error: Template file not found at $TEMPLATE_PATH${NC}" + echo "Please ensure the template file exists before running this hook." 
+ exit 1 +fi + +FILES_MODIFIED=0 + +# Process roles +for role_dir in roles/*/; do + [ -d "$role_dir" ] || continue + + role_name=$(basename "$role_dir") + readme="${role_dir}README.md" + + echo "Processing role: $role_name" + + # Save original for comparison + if [ -f "$readme" ]; then + cp "$readme" "$readme.bak" + fi + + # Generate documentation for role + if output=$(docsible --role "$role_dir" --no-docsible --no-backup --comments --md-role-template "$TEMPLATE_PATH" 2>&1); then + + # Check if changed + if [ -f "$readme.bak" ]; then + if ! cmp -s "$readme.bak" "$readme"; then + echo " Updated with custom template" + FILES_MODIFIED=1 + else + echo " No changes" + fi + rm -f "$readme.bak" + else + echo " Created with custom template" + FILES_MODIFIED=1 + fi + else + echo -e "${RED} Failed to generate docs for $role_name${NC}" + echo -e "${RED} Error: $output${NC}" + # Restore original if it existed + if [ -f "$readme.bak" ]; then + mv "$readme.bak" "$readme" + fi + # Clean up any backup files before failing + find . -name "README_backup_*.md" -type f -delete 2>/dev/null || true + find . -name ".docsible" -type f -delete 2>/dev/null || true + exit 1 + fi +done + +# Clean up any docsible backup files +find . -name "README_backup_*.md" -type f -delete 2>/dev/null || true +find . -name ".docsible" -type f -delete 2>/dev/null || true + +# Exit +if [ $FILES_MODIFIED -eq 1 ]; then + echo -e "${YELLOW}Documentation updated with custom template. Review and commit.${NC}" + exit 1 +else + echo -e "${GREEN}All documentation up to date${NC}" + exit 0 +fi diff --git a/.hooks/gen-arch-diagram.py b/.hooks/gen-arch-diagram.py new file mode 100755 index 00000000..d1ef4679 --- /dev/null +++ b/.hooks/gen-arch-diagram.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +""" +Ansible Collection Mermaid Diagram Generator Pre-commit Hook + +Generates a Mermaid diagram from an Ansible collection structure +and updates the ansible/README.md in the ares repo. 
+""" + +from pathlib import Path +import sys +import re +import subprocess + + +class AnsibleCollectionAnalyzer: + def __init__(self, collection_path: str): + self.collection_path = Path(collection_path) + self.structure = { + 'roles': [], + 'plugins': [], + 'playbooks': [] + } + + def analyze(self): + """Analyze the Ansible collection structure""" + # Analyze roles + roles_path = self.collection_path / 'roles' + if roles_path.exists(): + for role_dir in sorted(roles_path.iterdir(), key=lambda p: p.name): + if role_dir.is_dir() and not role_dir.name.startswith('.'): + self.structure['roles'].append({ + 'name': role_dir.name, + 'has_molecule': (role_dir / 'molecule').exists() + }) + + # Analyze plugins + plugins_path = self.collection_path / 'plugins' / 'modules' + if plugins_path.exists(): + for plugin_file in sorted(plugins_path.glob('*.py'), key=lambda p: p.name): + if not plugin_file.name.startswith('__'): + self.structure['plugins'].append(plugin_file.stem) + + # Analyze playbooks + playbooks_path = self.collection_path / 'playbooks' + if playbooks_path.exists(): + for item in sorted(playbooks_path.iterdir(), key=lambda p: p.name): + if item.is_dir() and not item.name.startswith('.'): + self.structure['playbooks'].append({ + 'name': item.name, + 'has_molecule': (item / 'molecule').exists() + }) + + return self.structure + + +def generate_mermaid(structure): + """Generate Mermaid diagram""" + lines = ["```mermaid", "graph TD"] + lines.append(" Collection[Ansible Collection]") + + # Add plugins + if structure['plugins']: + lines.append(" Collection --> Plugins[Plugins]") + for i, plugin in enumerate(structure['plugins']): + lines.append(f" Plugins --> P{i}[{plugin}]") + + # Add roles + if structure['roles']: + lines.append(" Collection --> Roles[Roles]") + for i, role in enumerate(structure['roles']): + role_label = role['name'] + if role['has_molecule']: + role_label += " *" + lines.append(f" Roles --> R{i}[{role_label}]") + + # Add playbooks + if 
structure['playbooks']: + lines.append(" Collection --> Playbooks[Playbooks]") + for i, playbook in enumerate(structure['playbooks']): + pb_label = playbook['name'] + if playbook['has_molecule']: + pb_label += " *" + lines.append(f" Playbooks --> PB{i}[{pb_label}]") + + lines.append("```") + return '\n'.join(lines) + + +def update_readme(mermaid_content, readme_path): + """Update README.md with the generated Mermaid diagram""" + if not readme_path.exists(): + print(f"README.md not found at {readme_path}") + return False + + readme_content = readme_path.read_text() + + # Define markers for the architecture section + start_marker = "## Architecture Diagram" + end_marker = "## Requirements" + + # Find the start and end positions + start_pos = readme_content.find(start_marker) + if start_pos == -1: + print("Could not find '## Architecture Diagram' section in README.md") + return False + + end_pos = readme_content.find(end_marker, start_pos) + if end_pos == -1: + next_section_pattern = re.compile(r'\n## (?!Architecture Diagram)') + match = next_section_pattern.search(readme_content, start_pos + len(start_marker)) + if match: + end_pos = match.start() + 1 + else: + print("Could not find the end of the Architecture Diagram section") + return False + + # Build the new architecture section + new_section = f"{start_marker}\n\n{mermaid_content}\n\n" + + # Replace the section + new_readme = readme_content[:start_pos] + new_section + readme_content[end_pos:] + + # Write back to README + readme_path.write_text(new_readme) + + return True + + +def main(): + """Main function for pre-commit hook""" + # Analyze collection from ansible/ subdirectory + analyzer = AnsibleCollectionAnalyzer('ansible') + structure = analyzer.analyze() + + # Generate Mermaid diagram + mermaid_content = generate_mermaid(structure) + + # Update ansible/README.md + readme_path = Path('ansible/README.md') + if update_readme(mermaid_content, readme_path): + print("Architecture diagram updated in 
ansible/README.md") + print(f"\nCollection summary:") + print(f" Roles: {len(structure['roles'])}") + print(f" Plugins: {len(structure['plugins'])}") + print(f" Playbooks: {len(structure['playbooks'])}") + + # Check if the README was actually modified (unstaged changes) + result = subprocess.run( + ['git', 'diff', '--name-only', 'ansible/README.md'], + capture_output=True, text=True + ) + if result.stdout.strip(): + print("ansible/README.md was updated. Please stage and re-commit.") + return 1 + + return 0 + else: + print("Failed to update ansible/README.md") + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.hooks/linters/ansible-lint.yaml b/.hooks/linters/ansible-lint.yaml new file mode 100644 index 00000000..b47a639e --- /dev/null +++ b/.hooks/linters/ansible-lint.yaml @@ -0,0 +1,14 @@ +--- +# exclude_paths included in this file are parsed relative to this file's location +# and not relative to the CWD of execution. CLI arguments passed to the --exclude +# option are parsed relative to the CWD of execution. 
+exclude_paths: + - .github/ + - ansible/roles/mythic/tests/ + +skip_list: + - yaml + # - command-instead-of-shell + - role-name + - git-latest + - package-latest diff --git a/.hooks/linters/markdownlint.json b/.hooks/linters/markdownlint.json new file mode 100644 index 00000000..814a560c --- /dev/null +++ b/.hooks/linters/markdownlint.json @@ -0,0 +1,20 @@ +{ + "default": true, + "MD004": false, + "MD005": false, + "MD007": false, + "MD012": true, + "MD013": false, + "MD024": false, + "MD029": false, + "MD033": false, + "MD034": false, + "MD040": false, + "MD041": false, + "MD055": false, + "MD056": false, + "MD057": false, + "MD060": false, + "line-length": false, + "no-multiple-blanks": false +} diff --git a/.hooks/linters/yamllint.yaml b/.hooks/linters/yamllint.yaml new file mode 100644 index 00000000..9e2267ae --- /dev/null +++ b/.hooks/linters/yamllint.yaml @@ -0,0 +1,32 @@ +--- +extends: default + +rules: + braces: + max-spaces-inside: 1 + level: error + brackets: + max-spaces-inside: 1 + level: error + colons: + max-spaces-after: -1 + level: error + commas: + max-spaces-after: -1 + level: error + comments: disable + comments-indentation: disable + document-start: disable + empty-lines: + max: 3 + level: error + hyphens: + level: error + indentation: disable + key-duplicates: enable + line-length: disable + new-line-at-end-of-file: disable + new-lines: + type: unix + trailing-spaces: disable + truthy: disable diff --git a/.hooks/requirements.txt b/.hooks/requirements.txt new file mode 100644 index 00000000..12964a67 --- /dev/null +++ b/.hooks/requirements.txt @@ -0,0 +1,8 @@ +ansible-core==2.20.4 +ansible-lint==26.4.0 +docker==7.1.0 +docsible==0.8.0 +molecule==26.4.0 +molecule-docker==2.1.0 +molecule-plugins[docker]==25.8.12 +pre-commit==4.5.1 diff --git a/.hooks/templates/docsible-template.md.j2 b/.hooks/templates/docsible-template.md.j2 new file mode 100644 index 00000000..620ce64b --- /dev/null +++ b/.hooks/templates/docsible-template.md.j2 @@ -0,0 +1,77 
@@ + +# {{ role.name }} +{%- if role.meta.galaxy_info.description %} + +## Description + +{{ role.meta.galaxy_info.description }} +{%- endif %} + +## Requirements + +- Ansible >= {{ role.meta.galaxy_info.min_ansible_version }} +{%- if role.meta.dependencies %} + +## Dependencies + +{% for dep in role.meta.dependencies %} +- {{ dep.role }} +{%- endfor %} +{%- endif %} + +## Role Variables +{%- for defaultfile in role.defaults %} + +### Default Variables ({{ defaultfile.file }}) + +| Variable | Type | Default | Description | +| -------- | ---- | ------- | ----------- | +{%- for key, details in defaultfile.data.items() %} +{%- if not key.endswith('.0') and not key.endswith('.1') and not key.endswith('.2') and not key.endswith('.3') and not key.endswith('.4') and not key.endswith('.5') and not key.endswith('.6') and '.' not in key or key.count('.') == 1 %} +| `{{ key }}` | {{ details.type }} | {{ details.value | replace('|', '|') | replace('[', '[') | replace(']', ']') | replace('\n', '\\n') }} | {{ details.description or 'No description' }} | +{%- endif %} +{%- endfor %} +{%- endfor %} +{%- if role.vars %} +{%- for varsfile in role.vars %} + +### Role Variables ({{ varsfile.file }}) + +| Variable | Type | Value | Description | +| -------- | ---- | ----- | ----------- | +{%- for key, details in varsfile.data.items() %} +| `{{ key }}` | {{ details.type }} | `{{ details.value }}` | {{ details.description or 'No description' }} | +{%- endfor %} +{%- endfor %} +{%- endif %} + +## Tasks +{%- for taskfile in role.tasks | sort(attribute='file') %} + +### {{ taskfile.file }} + +{% for task in taskfile.tasks %} +- **{{ task.name }}** ({{ task.module }}){% if task.when %} - Conditional{% endif %} +{%- endfor %} +{%- endfor %} + +## Example Playbook + +```yaml +- hosts: servers + roles: + - {{ role.name }} +``` + +## Author Information + +- **Author**: {{ role.meta.galaxy_info.author }} +- **Company**: {{ role.meta.galaxy_info.company }} +- **License**: {{ 
role.meta.galaxy_info.license }} + +## Platforms + +{% for platform in role.meta.galaxy_info.platforms %} +- {{ platform.name }}: {{ platform.versions | join(', ') }} +{%- endfor %} + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b1531b2a..51729d46 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,6 +14,12 @@ repos: - id: end-of-file-fixer - id: trailing-whitespace + - repo: https://github.com/adrienverge/yamllint.git + rev: v1.38.0 + hooks: + - id: yamllint + entry: yamllint --strict -c .hooks/linters/yamllint.yaml + - repo: https://github.com/rhysd/actionlint rev: v1.7.12 hooks: @@ -25,6 +31,33 @@ repos: - id: codespell entry: codespell -q 3 -f --skip=".git,.github,README.md,target,Cargo.lock" --ignore-words-list="astroid,braket,unstall,infinit,sems,te" + - repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: script-must-have-extension + name: Ensure shell scripts end with .sh + types: [shell] + - id: shellcheck + - id: shfmt + + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.48.0 + hooks: + - id: markdownlint + args: ['--fix', '--config', '.hooks/linters/markdownlint.json'] + + - repo: https://github.com/ansible/ansible-lint + rev: v26.4.0 + hooks: + - id: ansible-lint + # env -u GIT_INDEX_FILE prevents ansible-galaxy (called internally by + # ansible-lint) from inheriting the commit-time index file and + # corrupting it with stale blob references. 
+ entry: env -u GIT_INDEX_FILE ansible-lint -v --force-color -c .hooks/linters/ansible-lint.yaml + language: python + always_run: true + additional_dependencies: [".[community]"] + - repo: https://github.com/Yelp/detect-secrets rev: v1.5.0 hooks: @@ -62,3 +95,25 @@ repos: language: system files: '\.rs$' pass_filenames: false + + - id: prettier + name: Run prettier + entry: .hooks/prettier.sh + language: script + types: [json, yaml] + + - id: docsible + name: Generate Ansible documentation with docsible + entry: .hooks/docsible-hook.sh + language: script + always_run: true + pass_filenames: false + stages: [pre-commit] + + - id: update-architecture-diagram + name: Update Architecture Diagram + entry: python .hooks/gen-arch-diagram.py + language: python + pass_filenames: false + always_run: false + files: ^ansible/(roles/|plugins/|playbooks/).* diff --git a/.taskfiles/ec2/scripts/setup.sh b/.taskfiles/ec2/scripts/setup.sh index 94c191c1..7b2145fc 100755 --- a/.taskfiles/ec2/scripts/setup.sh +++ b/.taskfiles/ec2/scripts/setup.sh @@ -3,26 +3,26 @@ set -euo pipefail echo "=== Installing Redis ===" -if command -v redis-server > /dev/null 2>&1; then - redis-server --version +if command -v redis-server >/dev/null 2>&1; then + redis-server --version else - if command -v apt-get > /dev/null 2>&1; then - apt-get update -qq && apt-get install -y -qq redis-server - elif command -v yum > /dev/null 2>&1; then - yum install -y redis - elif command -v dnf > /dev/null 2>&1; then - dnf install -y redis - else - echo "ERROR: No supported package manager found" - exit 1 - fi + if command -v apt-get >/dev/null 2>&1; then + apt-get update -qq && apt-get install -y -qq redis-server + elif command -v yum >/dev/null 2>&1; then + yum install -y redis + elif command -v dnf >/dev/null 2>&1; then + dnf install -y redis + else + echo "ERROR: No supported package manager found" + exit 1 + fi fi echo "=== Creating directories ===" mkdir -p /var/log/ares /etc/ares echo "=== Creating systemd worker 
template unit ===" -cat > /etc/systemd/system/ares-worker@.service << 'UNIT_EOF' +cat >/etc/systemd/system/ares-worker@.service <<'UNIT_EOF' [Unit] Description=Ares Worker (%i) After=redis.service @@ -47,26 +47,26 @@ WantedBy=multi-user.target UNIT_EOF echo "=== Installing cracking tools ===" -if ! command -v hashcat > /dev/null 2>&1 || ! command -v john > /dev/null 2>&1; then - if command -v apt-get > /dev/null 2>&1; then - apt-get install -y -qq hashcat john - fi +if ! command -v hashcat >/dev/null 2>&1 || ! command -v john >/dev/null 2>&1; then + if command -v apt-get >/dev/null 2>&1; then + apt-get install -y -qq hashcat john + fi fi echo "=== Fixing pip/system impacket conflicts ===" # Kali's system impacket has patches (regsecrets) that pip versions lack. # Remove any pip-installed impacket that shadows the system package. if [ -d /usr/local/lib/python3.13/dist-packages/impacket ]; then - pip3 uninstall -y impacket --break-system-packages 2> /dev/null || true - rm -rf /usr/local/lib/python3.13/dist-packages/impacket \ - /usr/local/lib/python3.13/dist-packages/impacket-*.dist-info - echo "Removed pip impacket shadow — using system package" + pip3 uninstall -y impacket --break-system-packages 2>/dev/null || true + rm -rf /usr/local/lib/python3.13/dist-packages/impacket \ + /usr/local/lib/python3.13/dist-packages/impacket-*.dist-info + echo "Removed pip impacket shadow — using system package" fi echo "=== Enabling Redis ===" -systemctl enable redis-server 2> /dev/null || systemctl enable redis 2> /dev/null || true -systemctl start redis-server 2> /dev/null || systemctl start redis 2> /dev/null || true +systemctl enable redis-server 2>/dev/null || systemctl enable redis 2>/dev/null || true +systemctl start redis-server 2>/dev/null || systemctl start redis 2>/dev/null || true systemctl daemon-reload echo "=== Setup complete ===" -redis-cli ping 2> /dev/null || echo "Redis not responding" +redis-cli ping 2>/dev/null || echo "Redis not responding" diff --git 
a/.taskfiles/ec2/scripts/status.sh b/.taskfiles/ec2/scripts/status.sh index f3f174f8..e5e0f250 100755 --- a/.taskfiles/ec2/scripts/status.sh +++ b/.taskfiles/ec2/scripts/status.sh @@ -2,27 +2,27 @@ # Show ares process status on EC2 echo "=== Redis ===" -redis-cli ping 2> /dev/null && redis-cli info server 2> /dev/null | grep -E "redis_version|uptime_in_seconds|connected_clients" || echo "Redis not running" +redis-cli ping 2>/dev/null && redis-cli info server 2>/dev/null | grep -E "redis_version|uptime_in_seconds|connected_clients" || echo "Redis not running" echo "" echo "=== Workers ===" for role in recon credential_access cracker acl privesc lateral coercion; do - st=$(systemctl is-active ares-worker@${role} 2> /dev/null || echo dead) - pid="" - if [ "$st" = "active" ]; then - pid=$(systemctl show ares-worker@${role} --property=MainPID --value 2> /dev/null || echo "?") - fi - printf " %-20s %-8s %s\n" "$role" "$st" "${pid:+PID: $pid}" + st=$(systemctl is-active ares-worker@${role} 2>/dev/null || echo dead) + pid="" + if [ "$st" = "active" ]; then + pid=$(systemctl show ares-worker@${role} --property=MainPID --value 2>/dev/null || echo "?") + fi + printf " %-20s %-8s %s\n" "$role" "$st" "${pid:+PID: $pid}" done echo "" echo "=== Orchestrator ===" -ORCH_PID=$(pgrep -f ares-orchestrator 2> /dev/null || true) +ORCH_PID=$(pgrep -f ares-orchestrator 2>/dev/null || true) if [ -n "$ORCH_PID" ]; then - echo " Running (PID: $ORCH_PID)" - ps -p "$ORCH_PID" -o etime=,args= 2> /dev/null | head -1 + echo " Running (PID: $ORCH_PID)" + ps -p "$ORCH_PID" -o etime=,args= 2>/dev/null | head -1 else - echo " Not running" + echo " Not running" fi echo "" @@ -31,4 +31,4 @@ df -h / | tail -1 echo "" echo "=== Logs ===" -ls -lhS /var/log/ares/ 2> /dev/null || echo " No log directory" +ls -lhS /var/log/ares/ 2>/dev/null || echo " No log directory" diff --git a/.taskfiles/remote/orchestrator-wrapper.sh b/.taskfiles/remote/orchestrator-wrapper.sh index c794e0f5..4c4e19d6 100755 --- 
a/.taskfiles/remote/orchestrator-wrapper.sh +++ b/.taskfiles/remote/orchestrator-wrapper.sh @@ -1,13 +1,13 @@ #!/bin/sh echo "ares-orchestrator queue dispatcher starting" >&2 while true; do - OP_REQUEST=$(RUST_LOG=error ares-cli ops claim-next --timeout 30 2> /dev/null | tail -n 1 || true) - if [ -n "$OP_REQUEST" ]; then - OP_ID=$(printf '%s\n' "$OP_REQUEST" | sed -n 's/.*"operation_id"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p') - echo "Starting operation: ${OP_ID:-unknown}" >&2 - export ARES_OPERATION_ID="$OP_REQUEST" - ares-orchestrator - status=$? - echo "Operation ${OP_ID:-unknown} exited with status $status" >&2 - fi + OP_REQUEST=$(RUST_LOG=error ares-cli ops claim-next --timeout 30 2>/dev/null | tail -n 1 || true) + if [ -n "$OP_REQUEST" ]; then + OP_ID=$(printf '%s\n' "$OP_REQUEST" | sed -n 's/.*"operation_id"[[:space:]]*:[[:space:]]*"\([^"]*\)".*/\1/p') + echo "Starting operation: ${OP_ID:-unknown}" >&2 + export ARES_OPERATION_ID="$OP_REQUEST" + ares-orchestrator + status=$? + echo "Operation ${OP_ID:-unknown} exited with status $status" >&2 + fi done diff --git a/ansible/README.md b/ansible/README.md index b4b263fb..97417e68 100644 --- a/ansible/README.md +++ b/ansible/README.md @@ -1,127 +1,207 @@ -# Dreadnode Nimbus Range Ansible Collection - -Ansible collection for provisioning Ares security operations agents. Includes -roles for reconnaissance, credential access, privilege escalation, lateral -movement, coercion, hash cracking, and observability tooling. 
- -**Namespace:** `dreadnode.nimbus_range` -**Version:** 1.5.0 +# Ansible Collection: Dreadnode Nimbus Range + +[![Pre-Commit](https://github.com/dreadnode/ansible-collection-nimbus_range/actions/workflows/pre-commit.yaml/badge.svg)](https://github.com/dreadnode/ansible-collection-nimbus_range/actions/workflows/pre-commit.yaml) +[![Renovate](https://github.com/dreadnode/ansible-collection-nimbus_range/actions/workflows/renovate.yaml/badge.svg)](https://github.com/dreadnode/ansible-collection-nimbus_range/actions/workflows/renovate.yaml) + +This Ansible collection provides agent and logging setup functionality for +cloud-based ephemeral environments, focusing on operational telemetry +collection, session management, and centralized log forwarding. + +## Architecture Diagram + +```mermaid +graph TD + Collection[Ansible Collection] + Collection --> Plugins[Plugins] + Plugins --> P0[getent_passwd] + Plugins --> P1[merge_list_dicts_into_list] + Plugins --> P2[vnc_pw] + Collection --> Roles[Roles] + Roles --> R0[acl_tools *] + Roles --> R1[alloy] + Roles --> R2[aws_cloudwatch_agent] + Roles --> R3[aws_ssm_agent] + Roles --> R4[base *] + Roles --> R5[coercion_tools *] + Roles --> R6[cracking_tools *] + Roles --> R7[credential_access_tools *] + Roles --> R8[dc_audit_sacl] + Roles --> R9[fluent_bit] + Roles --> R10[lateral_movement_tools *] + Roles --> R11[mythic *] + Roles --> R12[privesc_tools *] + Roles --> R13[recon_tools *] + Roles --> R14[redis] + Collection --> Playbooks[Playbooks] + Playbooks --> PB0[ares] + Playbooks --> PB1[linux] + Playbooks --> PB2[windows] +``` ## Requirements -- Ansible >= 2.15.0 -- Python 3.13+ +- Ansible 2.18.4 or higher ## Installation -Install collection dependencies: +Install the latest version of the Nimbus Range collection: ```bash -cd ansible -ansible-galaxy collection install -r requirements.yml +ansible-galaxy collection install git+https://github.com/dreadnode/ansible-collection-nimbus_range.git,main ``` ## Roles -### Agent Roles - 
-| Role | Description | -| --- | --- | -| `base` | Python 3.13, uv, `/ares` workspace setup | -| `recon_tools` | nmap, netexec, bloodhound-python, certipy, impacket, rpcclient | -| `credential_access_tools` | sprayhound, lsassy, gMSADumper, kerberoasting, secretsdump | -| `cracking_tools` | hashcat, John the Ripper, rockyou.txt, SecLists (optional GPU/CUDA) | -| `acl_tools` | bloodyAD, pywhisker, dacledit | -| `privesc_tools` | certipy, krbrelayx, nopac, potato exploits, SharpGPOAbuse, WinPEAS, LinPEAS | -| `lateral_movement_tools` | evil-winrm, xfreerdp, lsassy, sshpass, pth-toolkit, impacket | -| `coercion_tools` | Responder, mitm6, Coercer, PetitPotam, ntlmrelayx | - -### Infrastructure Roles - -| Role | Description | -| --- | --- | -| `aws_ssm_agent` | AWS Systems Manager agent for remote management | -| `aws_cloudwatch_agent` | CloudWatch metrics and log collection | -| `fluent_bit` | Log forwarding to OpenSearch/Loki | -| `alloy` | Grafana Alloy observability agent | -| `mythic` | Mythic C2 framework deployment | -| `dc_audit_sacl` | Domain controller audit SACL configuration | - -## Playbooks - -### Agent Provisioning (`playbooks/ares/`) - -Each playbook provisions a specialized agent container or host: - -| Playbook | Purpose | -| --- | --- | -| `base.yml` | Base image with Python, uv, and core dependencies | -| `recon.yml` | Network reconnaissance and AD enumeration | -| `credential_access.yml` | Credential harvesting and Kerberos attacks | -| `cracker.yml` | Password cracking (CPU or GPU) | -| `acl_abuse.yml` | AD ACL/DACL exploitation | -| `privesc.yml` | Privilege escalation | -| `lateral_movement.yml` | Lateral movement and remote access | -| `coercion.yml` | NTLM relay and authentication coercion | -| `goad_attack_box.yml` | All-in-one attack workstation with all tools | - -### Infrastructure (`playbooks/linux/`, `playbooks/windows/`) - -| Playbook | Purpose | -| --- | --- | -| `linux/attacker_setup.yml` | Linux attacker box with SSM, CloudWatch, 
Fluent Bit | -| `linux/sliver.yml` | Sliver C2 server | -| `windows/target_setup.yml` | Windows target telemetry | +### AWS CloudWatch Agent Setup -## Usage +Installs and configures the **AWS CloudWatch Agent** for metrics and log +collection on Unix-like and Windows systems. -### Container Builds (via Warpgate) +- Role docs: [`roles/aws_cloudwatch_agent/README.md`](roles/aws_cloudwatch_agent/README.md) -Playbooks are invoked automatically by Warpgate templates during image builds: +- Collects system metrics such as CPU, disk, memory, and network. +- Enriches metrics with AWS EC2 metadata. +- Automatically installs, configures, and ensures the CloudWatch Agent is running. -```bash -export PROVISION_REPO_PATH=./ansible -warpgate build warpgate-templates/ares-recon-agent -``` +### AWS SSM Agent Setup -### Standalone Provisioning +Installs and configures the **AWS Systems Manager (SSM) Agent** for secure +remote management and automation. -Playbooks can provision existing hosts directly: +- Role docs: [`roles/aws_ssm_agent/README.md`](roles/aws_ssm_agent/README.md) -```bash -# Provision a recon agent on a remote host -ansible-playbook ansible/playbooks/ares/recon.yml \ - -i inventory.yml \ - -e target_hosts=recon-host - -# Provision inside a container -ansible-playbook ansible/playbooks/ares/recon.yml \ - -e container_build=true \ - -e target_hosts=localhost \ - -c local +- Installs SSM Agent on Linux and Windows systems. +- Configures services to automatically restart and provides monitoring scripts. +- Ensures the SSM Agent is enabled and healthy after deployment. + +### Fluent Bit Setup + +Installs and configures **Fluent Bit** for log collection, enrichment, and +forwarding to OpenSearch. + +- Role docs: [`roles/fluent_bit/README.md`](roles/fluent_bit/README.md) + +- Collects Linux system logs and Windows Event logs. +- Captures AWS SSM session activity logs. +- Enriches logs with environment metadata and deployment context. 
+- Forwards logs securely to an OpenSearch cluster. + +### Base Setup + +Installs the base dependencies and workspace layout required for **Ares AI agents**. + +- Role docs: [`roles/base/README.md`](roles/base/README.md) + +- Bootstraps Python toolchains, pip packages, and system utilities. +- Optionally installs uv, Rust, and pipx for downstream tooling. + +### ACL Tools Setup + +Installs and configures **Active Directory ACL exploitation tools** for Ares agents. + +- Role docs: [`roles/acl_tools/README.md`](roles/acl_tools/README.md) + +### Cracking Tools Setup + +Installs and configures **password cracking tools** and wordlists for Ares agents. + +- Role docs: [`roles/cracking_tools/README.md`](roles/cracking_tools/README.md) + +### Lateral Movement Tools Setup + +Installs and configures **lateral movement tooling** for Ares agents. + +- Role docs: [`roles/lateral_movement_tools/README.md`](roles/lateral_movement_tools/README.md) + +### Recon Tools Setup + +Installs and configures **reconnaissance tooling** for Ares agents. + +- Role docs: [`roles/recon_tools/README.md`](roles/recon_tools/README.md) + +### Credential Access Tools Setup + +Installs and configures **credential access tooling** for Ares agents. + +- Role docs: [`roles/credential_access_tools/README.md`](roles/credential_access_tools/README.md) + +### Coercion Tools Setup + +Installs and configures **coercion and relay attack tooling** for Ares agents. + +- Role docs: [`roles/coercion_tools/README.md`](roles/coercion_tools/README.md) + +### Privilege Escalation Tools Setup + +Installs and configures **privilege escalation tooling** for Ares agents. + +- Role docs: [`roles/privesc_tools/README.md`](roles/privesc_tools/README.md) + +### Grafana Alloy Setup + +Installs and configures **Grafana Alloy** on Windows hosts for log shipping. + +- Role docs: [`roles/alloy/README.md`](roles/alloy/README.md) + +### Mythic Setup + +Installs and configures the **Mythic C2 framework** and optional agent packages. 
+ +- Role docs: [`roles/mythic/README.md`](roles/mythic/README.md) + +## Usage + +### Linux Example + +```yaml +--- +- name: Provision Linux Attack Range Box + hosts: all + gather_facts: true + vars: + fluent_bit_env: dev + fluent_bit_deployment_name: default + fluent_bit_opensearch_custom_domain: example.com + fluent_bit_opensearch_username: admin + fluent_bit_opensearch_password: password + fluent_bit_version: "4.0.1" + + roles: + # Nimbus Range roles for Ansible system configuration and monitoring + - role: dreadnode.nimbus_range.aws_ssm_agent + - role: dreadnode.nimbus_range.aws_cloudwatch_agent + - role: dreadnode.nimbus_range.fluent_bit ``` -## Custom Modules +### Windows Example + +```yaml +--- +- name: Provision Windows Attack Range Target + hosts: all + vars: + fluent_bit_env: dev + fluent_bit_deployment_name: default + fluent_bit_opensearch_custom_domain: example.com + fluent_bit_opensearch_username: admin + fluent_bit_opensearch_password: password + fluent_bit_version: "4.0.1" + + roles: + # Nimbus Range roles for Ansible system configuration and monitoring + - role: dreadnode.nimbus_range.aws_ssm_agent + - role: dreadnode.nimbus_range.aws_cloudwatch_agent + - role: dreadnode.nimbus_range.fluent_bit +``` -| Module | Description | -| --- | --- | -| `vnc_pw` | VNC password management | -| `getent_passwd` | Cross-platform user enumeration | -| `merge_list_dicts_into_list` | Data transformation utility | +## Development -## Collection Dependencies +### Release Process -| Collection | Version | -| --- | --- | -| `amazon.aws` | 11.2.0 | -| `ansible.windows` | 3.5.0 | -| `community.windows` | 3.1.0 | -| `community.docker` | 5.0.6 | -| `community.general` | 12.4.0 | -| `grafana.grafana` | 6.0.6 | -| `cowdogmoo.workstation` | main (git) | -| `l50.arsenal` | main (git) | +For information on creating new releases of this collection, see our +[Release Process Documentation](docs/releases.md). 
-## License +## Support -MIT +- Repository: [dreadnode/ansible-collection-nimbus_range](https://github.com/dreadnode/ansible-collection-nimbus_range) +- Issue Tracker: [GitHub Issues](https://github.com/dreadnode/ansible-collection-nimbus_range/issues) diff --git a/ansible/playbooks/ares/goad_attack_box.yml b/ansible/playbooks/ares/goad_attack_box.yml index d6cf114d..1fc6d9be 100644 --- a/ansible/playbooks/ares/goad_attack_box.yml +++ b/ansible/playbooks/ares/goad_attack_box.yml @@ -30,13 +30,14 @@ # Environment configuration alloy_env: "{{ lookup('env', 'ENVIRONMENT') | default('goad', true) }}" alloy_deployment_name: "goad-attack-box" + alloy_server_id: "" + alloy_instance_id: "" + alloy_loki_endpoint: "https://loki.dev.plundr.ai/loki/api/v1/push" + alloy_version: "1.10.1" # Python version base_python_version: "3.13.7" - # Impacket from packages for stability - recon_tools_impacket_from_source: false - # Cracking tools configuration - full wordlists for GOAD cracking_tools_install_hashcat: true cracking_tools_install_john: true @@ -85,6 +86,10 @@ privesc_tools_install_powerupsql: true roles: + # AWS infrastructure agents + - role: dreadnode.nimbus_range.aws_ssm_agent + - role: dreadnode.nimbus_range.aws_cloudwatch_agent + # Base Ares requirements - role: dreadnode.nimbus_range.base vars: @@ -126,7 +131,136 @@ vars: privesc_tools_verify_install: true + # Redis server for Ares worker message broker + - role: dreadnode.nimbus_range.redis + vars: + redis_verify_install: true + + # Grafana Alloy for log shipping to Loki + - name: Install and configure Grafana Alloy + role: grafana.grafana.alloy + become: true + vars: + alloy_config: | + // Enable debug logging to help troubleshoot + logging { + level = "debug" + format = "logfmt" + } + + // Read system logs + loki.source.file "syslog" { + targets = [ + {__path__ = "/var/log/syslog", job = "syslog"}, + {__path__ = "/var/log/auth.log", job = "auth"}, + {__path__ = "/var/log/user-data.log", job = "user-data"}, + ] + 
forward_to = [loki.process.linux.receiver] + } + + // Read shell history for attack activity tracking + loki.source.file "shell_history" { + targets = [ + {__path__ = "/home/kali/.zsh_history", job = "zsh_history", user = "kali"}, + {__path__ = "/root/.zsh_history", job = "zsh_history", user = "root"}, + {__path__ = "/home/kali/.bash_history", job = "bash_history", user = "kali"}, + {__path__ = "/root/.bash_history", job = "bash_history", user = "root"}, + ] + forward_to = [loki.process.attack_tools.receiver] + } + + // Process attack tool logs separately for better filtering + loki.process "attack_tools" { + // Add static labels + stage.static_labels { + values = { + environment = "{{ alloy_env }}", + deployment = "{{ alloy_deployment_name }}", + server = "{{ alloy_server_id }}", + instance_id = "{{ ansible_ec2_instance_id | default(alloy_instance_id) }}", + host = constants.hostname, + os = "linux", + log_type = "attack_activity", + } + } + forward_to = [loki.write.remote.receiver] + } + + // Process Linux system logs + loki.process "linux" { + // Add static labels + stage.static_labels { + values = { + environment = "{{ alloy_env }}", + deployment = "{{ alloy_deployment_name }}", + server = "{{ alloy_server_id }}", + instance_id = "{{ ansible_ec2_instance_id | default(alloy_instance_id) }}", + host = constants.hostname, + os = "linux", + } + } + forward_to = [loki.write.remote.receiver] + } + + // Send logs to Loki + loki.write "remote" { + endpoint { + url = "{{ alloy_loki_endpoint }}" + } + } + post_tasks: + - name: Setup shell history file permissions for Alloy + block: + - name: Create shell history files if they don't exist + ansible.builtin.file: + path: "{{ item }}" + state: touch + owner: "{{ 'kali' if 'kali' in item else 'root' }}" + group: "{{ 'kali' if 'kali' in item else 'root' }}" + mode: '0644' + modification_time: preserve + access_time: preserve + loop: + - /home/kali/.zsh_history + - /home/kali/.bash_history + - /root/.zsh_history + - 
/root/.bash_history + become: true + failed_when: false + + - name: Set ACL on /home/kali directory for alloy user + ansible.posix.acl: + path: /home/kali + entity: alloy + etype: user + permissions: rx + state: present + become: true + failed_when: false + + - name: Set ACL on shell history files for alloy user + ansible.posix.acl: + path: "{{ item }}" + entity: alloy + etype: user + permissions: r + state: present + loop: + - /home/kali/.zsh_history + - /home/kali/.bash_history + - /root/.zsh_history + - /root/.bash_history + become: true + failed_when: false + + - name: Restart alloy service to pick up file permissions + ansible.builtin.systemd: + name: alloy + state: restarted + become: true + failed_when: false + - name: Display GOAD attack box provisioning summary ansible.builtin.debug: msg: diff --git a/ansible/playbooks/ares/goad_attack_box_configure.yml b/ansible/playbooks/ares/goad_attack_box_configure.yml new file mode 100644 index 00000000..a754f14e --- /dev/null +++ b/ansible/playbooks/ares/goad_attack_box_configure.yml @@ -0,0 +1,151 @@ +--- +# GOAD Attack Box Post-Deploy Configuration +# Lightweight playbook for instance-specific config on a pre-baked AMI. +# Tools are already installed in the golden image — this only configures +# services that need per-instance values (Alloy labels, hostname, ACLs). 
+ +- name: Configure GOAD Attack Box + hosts: all + gather_facts: true + become: true + + pre_tasks: + - name: Ensure acl package is installed (required for setfacl) + ansible.builtin.apt: + name: acl + state: present + become: true + + vars: + alloy_env: "{{ lookup('env', 'ENVIRONMENT') | default('goad', true) }}" + alloy_deployment_name: "goad-attack-box" + alloy_server_id: "" + alloy_instance_id: "" + alloy_loki_endpoint: "https://loki.dev.plundr.ai/loki/api/v1/push" + alloy_version: "1.10.1" + + roles: + # Grafana Alloy for log shipping to Loki + - name: Install and configure Grafana Alloy + role: grafana.grafana.alloy + become: true + vars: + alloy_config: | + // Enable debug logging to help troubleshoot + logging { + level = "debug" + format = "logfmt" + } + + // Read system logs + loki.source.file "syslog" { + targets = [ + {__path__ = "/var/log/syslog", job = "syslog"}, + {__path__ = "/var/log/auth.log", job = "auth"}, + {__path__ = "/var/log/user-data.log", job = "user-data"}, + ] + forward_to = [loki.process.linux.receiver] + } + + // Read shell history for attack activity tracking + loki.source.file "shell_history" { + targets = [ + {__path__ = "/home/kali/.zsh_history", job = "zsh_history", user = "kali"}, + {__path__ = "/root/.zsh_history", job = "zsh_history", user = "root"}, + {__path__ = "/home/kali/.bash_history", job = "bash_history", user = "kali"}, + {__path__ = "/root/.bash_history", job = "bash_history", user = "root"}, + ] + forward_to = [loki.process.attack_tools.receiver] + } + + // Process attack tool logs separately for better filtering + loki.process "attack_tools" { + // Add static labels + stage.static_labels { + values = { + environment = "{{ alloy_env }}", + deployment = "{{ alloy_deployment_name }}", + server = "{{ alloy_server_id }}", + instance_id = "{{ ansible_ec2_instance_id | default(alloy_instance_id) }}", + host = constants.hostname, + os = "linux", + log_type = "attack_activity", + } + } + forward_to = 
[loki.write.remote.receiver] + } + + // Process Linux system logs + loki.process "linux" { + // Add static labels + stage.static_labels { + values = { + environment = "{{ alloy_env }}", + deployment = "{{ alloy_deployment_name }}", + server = "{{ alloy_server_id }}", + instance_id = "{{ ansible_ec2_instance_id | default(alloy_instance_id) }}", + host = constants.hostname, + os = "linux", + } + } + forward_to = [loki.write.remote.receiver] + } + + // Send logs to Loki + loki.write "remote" { + endpoint { + url = "{{ alloy_loki_endpoint }}" + } + } + + post_tasks: + - name: Setup shell history file permissions for Alloy + block: + - name: Create shell history files if they don't exist + ansible.builtin.file: + path: "{{ item }}" + state: touch + owner: "{{ 'kali' if 'kali' in item else 'root' }}" + group: "{{ 'kali' if 'kali' in item else 'root' }}" + mode: '0644' + modification_time: preserve + access_time: preserve + loop: + - /home/kali/.zsh_history + - /home/kali/.bash_history + - /root/.zsh_history + - /root/.bash_history + become: true + failed_when: false + + - name: Set ACL on /home/kali directory for alloy user + ansible.posix.acl: + path: /home/kali + entity: alloy + etype: user + permissions: rx + state: present + become: true + failed_when: false + + - name: Set ACL on shell history files for alloy user + ansible.posix.acl: + path: "{{ item }}" + entity: alloy + etype: user + permissions: r + state: present + loop: + - /home/kali/.zsh_history + - /home/kali/.bash_history + - /root/.zsh_history + - /root/.bash_history + become: true + failed_when: false + + - name: Restart alloy service to pick up file permissions + ansible.builtin.systemd: + name: alloy + state: restarted + become: true + failed_when: false diff --git a/ansible/playbooks/windows/target_setup.yml b/ansible/playbooks/windows/target_setup.yml index 9f4f4d96..84c99399 100644 --- a/ansible/playbooks/windows/target_setup.yml +++ b/ansible/playbooks/windows/target_setup.yml @@ -18,6 +18,3 @@ # 
Install and configure Grafana Alloy for log shipping - role: dreadnode.nimbus_range.alloy - - # Configure SACL auditing on Domain Controllers for DCSync detection - - role: dreadnode.nimbus_range.dc_audit_sacl diff --git a/ansible/requirements.yml b/ansible/requirements.yml index 8d5c155f..1f959009 100644 --- a/ansible/requirements.yml +++ b/ansible/requirements.yml @@ -7,9 +7,9 @@ collections: - name: community.windows version: 3.1.0 - name: community.docker - version: 5.0.6 + version: 5.2.0 - name: community.general - version: 12.4.0 + version: 12.5.0 - name: grafana.grafana version: 6.0.6 - name: https://github.com/CowDogMoo/ansible-collection-workstation.git diff --git a/ansible/roles/acl_tools/README.md b/ansible/roles/acl_tools/README.md index d23045e4..9647c756 100644 --- a/ansible/roles/acl_tools/README.md +++ b/ansible/roles/acl_tools/README.md @@ -1,4 +1,5 @@ + # acl_tools ## Description @@ -70,6 +71,8 @@ Install and configure Active Directory ACL exploitation tools for Ares agents - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- **Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -123,3 +126,4 @@ Install and configure Active Directory ACL exploitation tools for Ares agents - Debian: all - Kali: all + diff --git a/ansible/roles/acl_tools/molecule/default/verify.yml b/ansible/roles/acl_tools/molecule/default/verify.yml index 
85b9b862..a8f9c876 100644 --- a/ansible/roles/acl_tools/molecule/default/verify.yml +++ b/ansible/roles/acl_tools/molecule/default/verify.yml @@ -111,6 +111,44 @@ - acl_tools_install_pywhisker | default(true) - acl_tools_pywhisker_check.rc == 0 + # Verify impacket regsecrets module is available in the impacket venv + # This ensures the source install is correct and __init__.py was created + - name: Verify impacket regsecrets module in venv + ansible.builtin.command: /opt/impacket/venv/bin/python -c "from impacket.examples import regsecrets; print('OK')" + register: acl_tools_regsecrets_check + changed_when: false + failed_when: false + when: acl_tools_install_dacledit | default(true) + + - name: Assert impacket regsecrets module is available + ansible.builtin.assert: + that: + - acl_tools_regsecrets_check.rc == 0 + fail_msg: "impacket.examples.regsecrets not found in impacket venv" + success_msg: "impacket regsecrets module available in venv" + when: acl_tools_install_dacledit | default(true) + + # Verify pywhisker --no-deps didn't break impacket (downgrade to 0.12.0) + - name: Check impacket version was not downgraded by pywhisker + ansible.builtin.command: /opt/impacket/venv/bin/python -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: acl_tools_impacket_version_check + changed_when: false + failed_when: false + when: acl_tools_install_dacledit | default(true) + + - name: Assert impacket version is 0.13.0+ + ansible.builtin.assert: + that: + - acl_tools_impacket_version_check.rc == 0 + - acl_tools_impacket_version_check.stdout is version('0.13.0', '>=') + fail_msg: >- + impacket version {{ acl_tools_impacket_version_check.stdout | default('unknown') }} + is too old (need >= 0.13.0). pywhisker may have downgraded it. 
+ success_msg: "impacket version {{ acl_tools_impacket_version_check.stdout }} (>= 0.13.0)" + when: + - acl_tools_install_dacledit | default(true) + - acl_tools_impacket_version_check.rc == 0 + - name: Verify rpcclient is installed ansible.builtin.command: which rpcclient environment: @@ -137,4 +175,6 @@ - "bloodyAD version: {{ acl_tools_bloodyad_version.stdout if acl_tools_bloodyad_version is defined and acl_tools_bloodyad_version.rc == 0 else 'N/A' }}" - "pywhisker: {{ acl_tools_pywhisker_check.stdout if acl_tools_pywhisker_check is defined and acl_tools_pywhisker_check.rc == 0 else 'Not installed' }}" - "pywhisker version: {{ acl_tools_pywhisker_version.stdout if acl_tools_pywhisker_version is defined and acl_tools_pywhisker_version.rc == 0 else 'N/A' }}" + - "impacket regsecrets: {{ 'OK' if acl_tools_regsecrets_check.rc | default(1) == 0 else 'NOT FOUND' }}" + - "impacket version: {{ acl_tools_impacket_version_check.stdout | default('N/A') }}" - "====================================================" diff --git a/ansible/roles/acl_tools/tasks/impacket_source.yml b/ansible/roles/acl_tools/tasks/impacket_source.yml index 840007ac..d2b9acfb 100644 --- a/ansible/roles/acl_tools/tasks/impacket_source.yml +++ b/ansible/roles/acl_tools/tasks/impacket_source.yml @@ -112,6 +112,27 @@ become: true when: not acl_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: acl_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it system-wide) + ansible.builtin.pip: + name: "{{ acl_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - 
ansible_facts['distribution'] == 'Kali' + - (acl_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or acl_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ acl_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/acl_tools/tasks/linux.yml b/ansible/roles/acl_tools/tasks/linux.yml index c1bac9f8..ebf5a158 100644 --- a/ansible/roles/acl_tools/tasks/linux.yml +++ b/ansible/roles/acl_tools/tasks/linux.yml @@ -129,8 +129,9 @@ ansible.builtin.pip: name: "{{ acl_tools_pywhisker_package }}" executable: pip3 - # Use --ignore-installed to handle system packages that can't be uninstalled - extra_args: "{{ ('--break-system-packages ' if (base_pip_break_required | default(false)) else '') ~ '--ignore-installed' }}" + # --no-deps prevents pywhisker from downgrading impacket to 0.12.0 (its stale pin) + # impacket 0.13.0 is managed separately via impacket_source.yml + extra_args: "{{ ('--break-system-packages ' if (base_pip_break_required | default(false)) else '') ~ '--no-deps --ignore-installed' }}" become: true when: - ansible_facts['os_family'] == 'Debian' diff --git a/ansible/roles/alloy/README.md b/ansible/roles/alloy/README.md index 72238b83..475b4f7c 100644 --- a/ansible/roles/alloy/README.md +++ b/ansible/roles/alloy/README.md @@ -1,4 +1,5 @@ + # alloy ## Description @@ -20,6 +21,8 @@ Install and configure Grafana Alloy for Windows hosts | `alloy_deployment_name` | str | | No description | | `alloy_instance_id` | str | | No description | | `alloy_loki_endpoint` | str | https://loki.dev.plundr.ai/loki/api/v1/push | No description | +| `alloy_namespace` | str | | No description | +| `alloy_app` | str | | No description | | `alloy_windows_installer_url` | str | https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-installer-windows-amd64.exe.zip | No description | 
| `alloy_windows_temp_dir` | str | C:\Windows\Temp | No description | | `alloy_windows_install_dir` | str | C:\Program Files\GrafanaLabs\Alloy | No description | @@ -74,3 +77,4 @@ Install and configure Grafana Alloy for Windows hosts - Windows: all + diff --git a/ansible/roles/alloy/defaults/main.yml b/ansible/roles/alloy/defaults/main.yml index c9252568..4bc4cf43 100644 --- a/ansible/roles/alloy/defaults/main.yml +++ b/ansible/roles/alloy/defaults/main.yml @@ -8,6 +8,13 @@ alloy_deployment_name: "" alloy_instance_id: "" alloy_loki_endpoint: "https://loki.dev.plundr.ai/loki/api/v1/push" +# Optional labels for dashboard compatibility with Kubernetes workloads. +# Set these when running app workloads (e.g. ares agents) on EC2 so that +# Grafana dashboards using {namespace=..., app=...} selectors pick up +# the logs automatically. +alloy_namespace: "" +alloy_app: "" + # Windows-specific configuration alloy_windows_installer_url: "https://github.com/grafana/alloy/releases/download/v{{ alloy_version }}/alloy-installer-windows-amd64.exe.zip" alloy_windows_temp_dir: "C:\\Windows\\Temp" diff --git a/ansible/roles/alloy/templates/config.alloy.j2 b/ansible/roles/alloy/templates/config.alloy.j2 index ee1dc29d..40e7eb4c 100644 --- a/ansible/roles/alloy/templates/config.alloy.j2 +++ b/ansible/roles/alloy/templates/config.alloy.j2 @@ -108,6 +108,12 @@ loki.process "windows" { instance_id = "{{ alloy_instance_id }}", host = constants.hostname, os = "windows", +{% if alloy_namespace %} + namespace = "{{ alloy_namespace }}", +{% endif %} +{% if alloy_app %} + app = "{{ alloy_app }}", +{% endif %} } } forward_to = [loki.write.remote.receiver] @@ -135,6 +141,12 @@ loki.process "linux" { instance_id = "{{ alloy_instance_id }}", host = constants.hostname, os = "linux", +{% if alloy_namespace %} + namespace = "{{ alloy_namespace }}", +{% endif %} +{% if alloy_app %} + app = "{{ alloy_app }}", +{% endif %} } } forward_to = [loki.write.remote.receiver] diff --git 
a/ansible/roles/aws_cloudwatch_agent/README.md b/ansible/roles/aws_cloudwatch_agent/README.md index f611315a..9286cbd4 100644 --- a/ansible/roles/aws_cloudwatch_agent/README.md +++ b/ansible/roles/aws_cloudwatch_agent/README.md @@ -1,4 +1,5 @@ + # aws_cloudwatch_agent ## Description @@ -87,3 +88,4 @@ Install and configure AWS CloudWatch Agent - Debian: all - Windows: all + diff --git a/ansible/roles/aws_ssm_agent/README.md b/ansible/roles/aws_ssm_agent/README.md index 4bc3c76f..c57fb0ee 100644 --- a/ansible/roles/aws_ssm_agent/README.md +++ b/ansible/roles/aws_ssm_agent/README.md @@ -1,4 +1,5 @@ + # aws_ssm_agent ## Description @@ -34,14 +35,18 @@ Install and configure AWS SSM Agent ### linux.yml +- **Check if SSM agent is installed via snap** (ansible.builtin.command) - **Set DEBIAN_FRONTEND to noninteractive** (ansible.builtin.lineinfile) - Conditional - **Install packages** (ansible.builtin.package) - Conditional -- **Create temporary directory for SSM installation** (ansible.builtin.file) -- **Download SSM agent** (ansible.builtin.get_url) +- **Check if SSM agent is already installed via dpkg** (ansible.builtin.command) - Conditional +- **Create temporary directory for SSM installation** (ansible.builtin.file) - Conditional +- **Download SSM agent** (ansible.builtin.get_url) - Conditional - **Install SSM agent (Debian/Ubuntu)** (ansible.builtin.apt) - Conditional -- **Reload systemd** (ansible.builtin.systemd) -- **Enable and start SSM agent** (ansible.builtin.systemd) -- **Clean up temporary files** (ansible.builtin.file) +- **Reload systemd** (ansible.builtin.systemd) - Conditional +- **Enable and start SSM agent** (ansible.builtin.systemd) - Conditional +- **Refresh snap SSM agent** (ansible.builtin.command) - Conditional +- **Ensure snap SSM agent service is running** (ansible.builtin.command) - Conditional +- **Clean up temporary files** (ansible.builtin.file) - Conditional ### main.yml @@ -78,3 +83,4 @@ Install and configure AWS SSM Agent - Debian: all - 
Windows: all + diff --git a/ansible/roles/aws_ssm_agent/tasks/linux.yml b/ansible/roles/aws_ssm_agent/tasks/linux.yml index f4921f31..f10af29d 100644 --- a/ansible/roles/aws_ssm_agent/tasks/linux.yml +++ b/ansible/roles/aws_ssm_agent/tasks/linux.yml @@ -1,4 +1,12 @@ --- +- name: Check if SSM agent is installed via snap + ansible.builtin.command: + cmd: snap list amazon-ssm-agent + register: aws_ssm_agent_snap_check + changed_when: false + failed_when: false + become: true + - name: Set DEBIAN_FRONTEND to noninteractive ansible.builtin.lineinfile: path: /etc/environment @@ -6,7 +14,9 @@ create: true mode: '0644' become: true - when: ansible_facts['os_family'] == 'Debian' + when: + - ansible_facts['os_family'] == 'Debian' + - aws_ssm_agent_snap_check.rc != 0 - name: Install packages become: true @@ -14,15 +24,29 @@ name: "{{ aws_ssm_agent_install_packages }}" state: present update_cache: true - when: ansible_os_family in ['Debian'] + when: + - ansible_os_family in ['Debian'] + - aws_ssm_agent_snap_check.rc != 0 tags: packages +- name: Check if SSM agent is already installed via dpkg + ansible.builtin.command: + cmd: dpkg -s amazon-ssm-agent + register: aws_ssm_agent_dpkg_check + changed_when: false + failed_when: false + become: true + when: aws_ssm_agent_snap_check.rc != 0 + - name: Create temporary directory for SSM installation ansible.builtin.file: path: "{{ aws_ssm_agent_temp_dir }}" state: directory mode: '0755' become: true + when: + - aws_ssm_agent_snap_check.rc != 0 + - aws_ssm_agent_dpkg_check.rc != 0 - name: Download SSM agent ansible.builtin.get_url: @@ -30,18 +54,25 @@ dest: "{{ aws_ssm_agent_temp_dir }}/amazon-ssm-agent.deb" mode: '0644' become: true + when: + - aws_ssm_agent_snap_check.rc != 0 + - aws_ssm_agent_dpkg_check.rc != 0 - name: Install SSM agent (Debian/Ubuntu) ansible.builtin.apt: deb: "{{ aws_ssm_agent_temp_dir }}/amazon-ssm-agent.deb" state: present become: true - when: ansible_facts['os_family'] == 'Debian' + when: + - 
ansible_facts['os_family'] == 'Debian' + - aws_ssm_agent_snap_check.rc != 0 + - aws_ssm_agent_dpkg_check.rc != 0 - name: Reload systemd ansible.builtin.systemd: daemon_reload: true become: true + when: aws_ssm_agent_snap_check.rc != 0 - name: Enable and start SSM agent ansible.builtin.systemd: @@ -49,9 +80,29 @@ enabled: true state: started become: true + when: aws_ssm_agent_snap_check.rc != 0 + +- name: Refresh snap SSM agent + ansible.builtin.command: + cmd: snap refresh amazon-ssm-agent + changed_when: false + failed_when: false + become: true + when: aws_ssm_agent_snap_check.rc == 0 + +- name: Ensure snap SSM agent service is running + ansible.builtin.command: + cmd: snap start amazon-ssm-agent + changed_when: false + failed_when: false + become: true + when: aws_ssm_agent_snap_check.rc == 0 - name: Clean up temporary files ansible.builtin.file: path: "{{ aws_ssm_agent_temp_dir }}" state: absent become: true + when: + - aws_ssm_agent_snap_check.rc != 0 + - aws_ssm_agent_dpkg_check.rc != 0 diff --git a/ansible/roles/base/README.md b/ansible/roles/base/README.md index d9f62183..b5c3fb79 100644 --- a/ansible/roles/base/README.md +++ b/ansible/roles/base/README.md @@ -1,4 +1,5 @@ + # base ## Description @@ -25,15 +26,15 @@ Base requirements for Ares AI agents | `base_install_uv` | bool | True | No description | | `base_uv_version` | str | latest | No description | | `base_uv_install_script` | str | https://astral.sh/uv/install.sh | No description | +| `base_install_awscli` | bool | True | No description | | `base_install_rust` | bool | True | No description | | `base_rust_install_script` | str | https://sh.rustup.rs | No description | | `base_install_pipx` | bool | True | No description | | `base_pip_packages` | list | [] | No description | | `base_pip_packages.0` | str | python-dotenv | No description | -| `base_pip_packages.1` | str | dreadnode | No description | -| `base_pip_packages.2` | str | rigging | No description | -| `base_pip_packages.3` | str | pydantic 
| No description | -| `base_pip_packages.4` | str | asyncio | No description | +| `base_pip_packages.1` | str | rigging>=3.0 | No description | +| `base_pip_packages.2` | str | pydantic | No description | +| `base_pip_packages.3` | str | asyncio | No description | | `base_pip_externally_managed` | bool | False | No description | | `base_pip_break_required` | bool | False | No description | | `base_system_packages` | list | [] | No description | @@ -56,6 +57,7 @@ Base requirements for Ares AI agents | `base_system_packages.16` | str | jq | No description | | `base_system_packages.17` | str | htop | No description | | `base_system_packages.18` | str | tmux | No description | +| `base_system_packages.19` | str | acl | No description | | `base_system_packages_ubuntu` | list | [] | No description | | `base_system_packages_ubuntu.0` | str | dnsutils | No description | | `base_system_packages_kali` | list | [] | No description | @@ -70,12 +72,23 @@ Base requirements for Ares AI agents ## Tasks +### install_awscli.yml + + +- **Check if AWS CLI is already installed** (ansible.builtin.command) +- **Install AWS CLI v2** (block) - Conditional +- **Download AWS CLI v2 installer** (ansible.builtin.get_url) +- **Install unzip** (ansible.builtin.apt) +- **Unzip AWS CLI installer** (ansible.builtin.unarchive) +- **Run AWS CLI installer** (ansible.builtin.command) +- **Clean up AWS CLI installer** (ansible.builtin.file) +- **Verify AWS CLI installation** (ansible.builtin.command) - Conditional +- **Display AWS CLI version** (ansible.builtin.debug) - Conditional + ### install_pipx.yml - **Install pipx via apt (Debian/Ubuntu)** (ansible.builtin.apt) - Conditional -- **Check if pipx path is already in bashrc** (ansible.builtin.command) -- **Ensure pipx path is configured** (ansible.builtin.command) - Conditional - **Add pipx bin to system PATH via profile.d** (ansible.builtin.copy) - **Verify pipx installation** (ansible.builtin.command) - **Display pipx version** 
(ansible.builtin.debug) @@ -112,6 +125,7 @@ Base requirements for Ares AI agents - **Install Ubuntu-specific system packages** (ansible.builtin.apt) - Conditional - **Install Kali-specific system packages** (ansible.builtin.apt) - Conditional - **Remove kali-motd to suppress MOTD spam** (ansible.builtin.file) - Conditional +- **Install AWS CLI v2** (ansible.builtin.include_tasks) - Conditional - **Install uv package manager** (ansible.builtin.include_tasks) - Conditional - **Install Rust toolchain** (ansible.builtin.include_tasks) - Conditional - **Install pipx** (ansible.builtin.include_tasks) - Conditional @@ -151,3 +165,4 @@ Base requirements for Ares AI agents - Debian: all - Kali: all + diff --git a/ansible/roles/base/defaults/main.yml b/ansible/roles/base/defaults/main.yml index d1f408ab..d457ef38 100644 --- a/ansible/roles/base/defaults/main.yml +++ b/ansible/roles/base/defaults/main.yml @@ -15,6 +15,9 @@ base_install_uv: true base_uv_version: "latest" base_uv_install_script: "https://astral.sh/uv/install.sh" +# AWS CLI v2 +base_install_awscli: true + # Rust toolchain (required for building NetExec and other tools) # See: https://www.netexec.wiki/getting-started/installation/installation-on-unix base_install_rust: true @@ -27,8 +30,7 @@ base_install_pipx: true # Ares Python dependencies (installed via pip) base_pip_packages: - python-dotenv - - dreadnode - - rigging + - "rigging>=3.0" - pydantic - asyncio @@ -57,6 +59,7 @@ base_system_packages: - jq # JSON processor - htop # interactive process viewer - tmux # terminal multiplexer + - acl # setfacl/getfacl for POSIX ACLs (required by ansible.posix.acl) # Ubuntu-specific system packages (dnsutils is the traditional name) base_system_packages_ubuntu: diff --git a/ansible/roles/base/molecule/default/verify.yml b/ansible/roles/base/molecule/default/verify.yml index a0b6b456..c7dec0c7 100644 --- a/ansible/roles/base/molecule/default/verify.yml +++ b/ansible/roles/base/molecule/default/verify.yml @@ -185,7 +185,7 
@@ - base_dreadnode_check.rc == 0 - base_dreadnode_check.stdout is defined - base_dreadnode_check.stdout | length > 0 - fail_msg: "dreadnode package is not importable" + fail_msg: "dreadnode package is not importable: {{ base_dreadnode_check.stderr | default('no stderr') }}" success_msg: "dreadnode package version: {{ base_dreadnode_check.stdout }}" - name: Verify rigging package is importable @@ -194,13 +194,18 @@ changed_when: false failed_when: false + - name: Debug rigging import failure + ansible.builtin.debug: + msg: "rigging import stderr: {{ base_rigging_check.stderr }}" + when: base_rigging_check.rc != 0 + - name: Assert rigging package is available ansible.builtin.assert: that: - base_rigging_check.rc == 0 - base_rigging_check.stdout is defined - base_rigging_check.stdout | length > 0 - fail_msg: "rigging package is not importable" + fail_msg: "rigging package is not importable: {{ base_rigging_check.stderr | default('no stderr') }}" success_msg: "rigging package version: {{ base_rigging_check.stdout }}" - name: Check workspace directory exists diff --git a/ansible/roles/base/tasks/install_awscli.yml b/ansible/roles/base/tasks/install_awscli.yml new file mode 100644 index 00000000..b761f48a --- /dev/null +++ b/ansible/roles/base/tasks/install_awscli.yml @@ -0,0 +1,51 @@ +--- +- name: Check if AWS CLI is already installed + ansible.builtin.command: aws --version + register: base_awscli_check + changed_when: false + failed_when: false + +- name: Install AWS CLI v2 + when: base_awscli_check.rc != 0 + block: + - name: Download AWS CLI v2 installer + ansible.builtin.get_url: + url: "https://awscli.amazonaws.com/awscli-exe-linux-{{ ansible_architecture }}.zip" + dest: /tmp/awscliv2.zip + mode: '0644' + + - name: Install unzip + ansible.builtin.apt: + name: unzip + state: present + become: true + + - name: Unzip AWS CLI installer + ansible.builtin.unarchive: + src: /tmp/awscliv2.zip + dest: /tmp + remote_src: true + + - name: Run AWS CLI installer + 
ansible.builtin.command: /tmp/aws/install + become: true + changed_when: true + + - name: Clean up AWS CLI installer + ansible.builtin.file: + path: "{{ item }}" + state: absent + loop: + - /tmp/awscliv2.zip + - /tmp/aws + +- name: Verify AWS CLI installation + ansible.builtin.command: aws --version + register: base_awscli_version + changed_when: false + when: base_verify_install | default(false) + +- name: Display AWS CLI version + ansible.builtin.debug: + msg: "AWS CLI: {{ base_awscli_version.stdout }}" + when: base_verify_install | default(false) diff --git a/ansible/roles/base/tasks/install_pipx.yml b/ansible/roles/base/tasks/install_pipx.yml index 1dc91be3..5f22daed 100644 --- a/ansible/roles/base/tasks/install_pipx.yml +++ b/ansible/roles/base/tasks/install_pipx.yml @@ -11,23 +11,6 @@ become: true when: ansible_facts['os_family'] == 'Debian' -- name: Check if pipx path is already in bashrc - ansible.builtin.command: grep -q '.local/bin' /root/.bashrc - register: base_pipx_path_check - changed_when: false - failed_when: false - become: true - -- name: Ensure pipx path is configured - ansible.builtin.command: pipx ensurepath - register: base_pipx_ensurepath - changed_when: base_pipx_path_check.rc != 0 - failed_when: false - become: true - environment: - HOME: /root - when: base_pipx_path_check.rc != 0 - - name: Add pipx bin to system PATH via profile.d ansible.builtin.copy: content: | diff --git a/ansible/roles/base/tasks/linux.yml b/ansible/roles/base/tasks/linux.yml index c6734ecb..b7813be8 100644 --- a/ansible/roles/base/tasks/linux.yml +++ b/ansible/roles/base/tasks/linux.yml @@ -11,7 +11,6 @@ - name: Update apt cache ansible.builtin.apt: update_cache: true - cache_valid_time: 3600 become: true when: ansible_facts['os_family'] == 'Debian' @@ -66,6 +65,10 @@ - ansible_facts['os_family'] == 'Debian' - ansible_facts['distribution'] == 'Kali' +- name: Install AWS CLI v2 + ansible.builtin.include_tasks: install_awscli.yml + when: base_install_awscli + - name: 
Install uv package manager ansible.builtin.include_tasks: install_uv.yml when: base_install_uv @@ -133,15 +136,13 @@ - name: Install Ares Python dependencies ansible.builtin.pip: name: "{{ base_pip_packages }}" + state: present executable: "{{ base_pip_executable }}" extra_args: >- {{ '--break-system-packages' if base_pip_break_required else '' }} - {{ '--ignore-installed' if ansible_facts['distribution'] == 'Kali' else '' }} + {{ '--ignore-installed' if (ansible_facts['distribution'] == 'Kali' or base_pip_break_required) else '' }} become: true - register: base_pip_install - changed_when: - - base_pip_install.changed - - ansible_facts['distribution'] != 'Kali' + changed_when: false - name: Create Ares workspace directory ansible.builtin.file: diff --git a/ansible/roles/coercion_tools/README.md b/ansible/roles/coercion_tools/README.md index 1df97fda..47e86217 100644 --- a/ansible/roles/coercion_tools/README.md +++ b/ansible/roles/coercion_tools/README.md @@ -1,4 +1,5 @@ + # coercion_tools ## Description @@ -91,6 +92,8 @@ Install and configure network poisoning and relay attack tools for Ares agents - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- **Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -162,3 +165,4 @@ Install and configure network poisoning and relay attack tools for Ares agents - Debian: all - Kali: all + diff --git 
a/ansible/roles/coercion_tools/molecule/default/verify.yml b/ansible/roles/coercion_tools/molecule/default/verify.yml index 4f736e26..a8a0847b 100644 --- a/ansible/roles/coercion_tools/molecule/default/verify.yml +++ b/ansible/roles/coercion_tools/molecule/default/verify.yml @@ -220,6 +220,20 @@ success_msg: "krbrelayx wrapper is available at /usr/local/bin/krbrelayx" when: coercion_tools_install_krbrelayx | default(true) + # Verify impacket regsecrets module is available (required for NetExec SMB) + - name: Verify impacket regsecrets module in source venv + ansible.builtin.command: /opt/impacket/venv/bin/python -c "from impacket.examples import regsecrets; print('OK')" + register: coercion_tools_regsecrets_check + changed_when: false + failed_when: false + + - name: Assert impacket regsecrets module is available + ansible.builtin.assert: + that: + - coercion_tools_regsecrets_check.rc == 0 + fail_msg: "impacket.examples.regsecrets not found in impacket venv" + success_msg: "impacket regsecrets module available in venv" + - name: Check impacket is installed in krbrelayx venv ansible.builtin.command: "{{ coercion_tools_krbrelayx_install_dir }}/venv/bin/pip show impacket" register: coercion_tools_krbrelayx_impacket diff --git a/ansible/roles/coercion_tools/tasks/impacket_source.yml b/ansible/roles/coercion_tools/tasks/impacket_source.yml index 8cb256b1..8cb72789 100644 --- a/ansible/roles/coercion_tools/tasks/impacket_source.yml +++ b/ansible/roles/coercion_tools/tasks/impacket_source.yml @@ -112,6 +112,27 @@ become: true when: not coercion_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: coercion_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it 
system-wide) + ansible.builtin.pip: + name: "{{ coercion_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - ansible_facts['distribution'] == 'Kali' + - (coercion_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or coercion_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ coercion_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/cracking_tools/README.md b/ansible/roles/cracking_tools/README.md index e9f9c9be..6c12b795 100644 --- a/ansible/roles/cracking_tools/README.md +++ b/ansible/roles/cracking_tools/README.md @@ -1,4 +1,5 @@ + # cracking_tools ## Description @@ -74,6 +75,7 @@ Install and configure password cracking tools for Ares agents - **Create hashcat share directory** (ansible.builtin.file) - **Symlink hashcat OpenCL kernels** (ansible.builtin.file) - **Symlink hashcat modules** (ansible.builtin.file) +- **Symlink hashcat hcstat2 (Markov chains statistics)** (ansible.builtin.file) ### john.yml @@ -141,3 +143,4 @@ Install and configure password cracking tools for Ares agents - Debian: all - Kali: all + diff --git a/ansible/roles/cracking_tools/molecule/source-build/verify.yml b/ansible/roles/cracking_tools/molecule/source-build/verify.yml index cc23a4e5..d8595589 100644 --- a/ansible/roles/cracking_tools/molecule/source-build/verify.yml +++ b/ansible/roles/cracking_tools/molecule/source-build/verify.yml @@ -61,6 +61,26 @@ fail_msg: "hashcat source directory not found at /opt/hashcat" success_msg: "hashcat source directory exists at /opt/hashcat" + - name: Verify hashcat share directory symlinks + ansible.builtin.stat: + path: "{{ item }}" + register: cracking_tools_share_symlinks + loop: + - /usr/local/share/hashcat/OpenCL + - 
/usr/local/share/hashcat/modules + - /usr/local/share/hashcat/hashcat.hcstat2 + + - name: Assert hashcat share directory symlinks exist + ansible.builtin.assert: + that: + - item.stat.exists + - item.stat.islnk + fail_msg: "Missing symlink: {{ item.item }}" + success_msg: "Symlink exists: {{ item.item }}" + loop: "{{ cracking_tools_share_symlinks.results }}" + loop_control: + label: "{{ item.item }}" + - name: Display verification summary ansible.builtin.debug: msg: @@ -69,4 +89,5 @@ - "hashcat: {{ cracking_tools_hashcat_version.stdout }}" - "Binary: /usr/local/bin/hashcat" - "Source: /opt/hashcat" + - "Symlinks: OpenCL, modules, hashcat.hcstat2" - "===================================================" diff --git a/ansible/roles/cracking_tools/tasks/hashcat.yml b/ansible/roles/cracking_tools/tasks/hashcat.yml index 6cba12ff..6b267512 100644 --- a/ansible/roles/cracking_tools/tasks/hashcat.yml +++ b/ansible/roles/cracking_tools/tasks/hashcat.yml @@ -109,3 +109,10 @@ dest: /usr/local/share/hashcat/modules state: link become: true + + - name: Symlink hashcat hcstat2 (Markov chains statistics) + ansible.builtin.file: + src: /opt/hashcat/hashcat.hcstat2 + dest: /usr/local/share/hashcat/hashcat.hcstat2 + state: link + become: true diff --git a/ansible/roles/credential_access_tools/README.md b/ansible/roles/credential_access_tools/README.md index 3ea2d896..ac6377c0 100644 --- a/ansible/roles/credential_access_tools/README.md +++ b/ansible/roles/credential_access_tools/README.md @@ -1,4 +1,5 @@ + # credential_access_tools ## Description @@ -88,6 +89,8 @@ Install and configure credential access tooling for Ares agents - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- 
**Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -146,3 +149,4 @@ Install and configure credential access tooling for Ares agents - Debian: all - Kali: all + diff --git a/ansible/roles/credential_access_tools/molecule/default/verify.yml b/ansible/roles/credential_access_tools/molecule/default/verify.yml index beeb9310..f710c682 100644 --- a/ansible/roles/credential_access_tools/molecule/default/verify.yml +++ b/ansible/roles/credential_access_tools/molecule/default/verify.yml @@ -37,6 +37,22 @@ success_msg: "impacket-secretsdump is installed" when: credential_access_tools_install_impacket | default(true) + # Verify impacket regsecrets module is available (required for NetExec SMB) + - name: Verify impacket regsecrets module in venv + ansible.builtin.command: /opt/impacket/venv/bin/python -c "from impacket.examples import regsecrets; print('OK')" + register: credential_access_tools_regsecrets_check + changed_when: false + failed_when: false + when: credential_access_tools_install_impacket | default(true) + + - name: Assert impacket regsecrets module is available + ansible.builtin.assert: + that: + - credential_access_tools_regsecrets_check.rc == 0 + fail_msg: "impacket.examples.regsecrets not found in impacket venv" + success_msg: "impacket regsecrets module available in venv" + when: credential_access_tools_install_impacket | default(true) + - name: Check lsassy is installed ansible.builtin.command: which lsassy register: credential_access_tools_lsassy_check diff --git a/ansible/roles/credential_access_tools/tasks/impacket_source.yml b/ansible/roles/credential_access_tools/tasks/impacket_source.yml index 130f3a86..b05775b7 100644 --- 
a/ansible/roles/credential_access_tools/tasks/impacket_source.yml +++ b/ansible/roles/credential_access_tools/tasks/impacket_source.yml @@ -112,6 +112,27 @@ become: true when: not credential_access_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: credential_access_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it system-wide) + ansible.builtin.pip: + name: "{{ credential_access_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - ansible_facts['distribution'] == 'Kali' + - (credential_access_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or credential_access_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ credential_access_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/dc_audit_sacl/README.md b/ansible/roles/dc_audit_sacl/README.md index 2d3f3822..d4bf9068 100644 --- a/ansible/roles/dc_audit_sacl/README.md +++ b/ansible/roles/dc_audit_sacl/README.md @@ -1,4 +1,5 @@ + # dc_audit_sacl ## Description @@ -32,6 +33,7 @@ Configure SACL auditing on Domain Controllers for attack detection - **Check if host is a Domain Controller** (ansible.windows.win_feature_info) +- **Set DC detection fact** (ansible.builtin.set_fact) - **Skip if not a Domain Controller** (ansible.builtin.debug) - Conditional - **Configure SACL auditing on Domain Controller** (block) - Conditional - **Configure auditpol for Directory Service Access** 
(ansible.windows.win_shell) - Conditional @@ -59,3 +61,4 @@ Configure SACL auditing on Domain Controllers for attack detection - Windows: 2019, 2022 + diff --git a/ansible/roles/dc_audit_sacl/tasks/main.yml b/ansible/roles/dc_audit_sacl/tasks/main.yml index fa5ba05a..b9f1cf1d 100644 --- a/ansible/roles/dc_audit_sacl/tasks/main.yml +++ b/ansible/roles/dc_audit_sacl/tasks/main.yml @@ -7,13 +7,19 @@ name: AD-Domain-Services register: dc_audit_sacl_adds_feature +# win_feature_info.exists means the feature name was found in Windows' catalog (always true on Server) +# We need to check if the feature is actually INSTALLED +- name: Set DC detection fact + ansible.builtin.set_fact: + dc_audit_sacl_is_dc: "{{ dc_audit_sacl_adds_feature.features[0].installed | default(false) }}" + - name: Skip if not a Domain Controller ansible.builtin.debug: - msg: "Skipping dc_audit_sacl role - host is not a Domain Controller" - when: not dc_audit_sacl_adds_feature.exists + msg: "Skipping dc_audit_sacl role - host is not a Domain Controller (AD-Domain-Services not installed)" + when: not dc_audit_sacl_is_dc - name: Configure SACL auditing on Domain Controller - when: dc_audit_sacl_adds_feature.exists + when: dc_audit_sacl_is_dc block: - name: Configure auditpol for Directory Service Access ansible.windows.win_shell: | diff --git a/ansible/roles/fluent_bit/README.md b/ansible/roles/fluent_bit/README.md index 90a2c67d..b10a5a26 100644 --- a/ansible/roles/fluent_bit/README.md +++ b/ansible/roles/fluent_bit/README.md @@ -1,4 +1,5 @@ + # fluent_bit ## Description @@ -146,3 +147,4 @@ Install and configure Fluent Bit for log management - Debian: all - Windows: all + diff --git a/ansible/roles/lateral_movement_tools/README.md b/ansible/roles/lateral_movement_tools/README.md index 9e0a2dbf..846b137b 100644 --- a/ansible/roles/lateral_movement_tools/README.md +++ b/ansible/roles/lateral_movement_tools/README.md @@ -1,4 +1,5 @@ + # lateral_movement_tools ## Description @@ -86,6 +87,8 @@ Install and 
configure lateral movement and credential extraction tools for Ares - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- **Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -143,3 +146,4 @@ Install and configure lateral movement and credential extraction tools for Ares - Debian: all - Kali: all + diff --git a/ansible/roles/lateral_movement_tools/molecule/default/verify.yml b/ansible/roles/lateral_movement_tools/molecule/default/verify.yml index 0a9c4f42..2e5f25e4 100644 --- a/ansible/roles/lateral_movement_tools/molecule/default/verify.yml +++ b/ansible/roles/lateral_movement_tools/molecule/default/verify.yml @@ -126,6 +126,22 @@ - lateral_movement_tools_install_pth_toolkit | default(true) - ansible_facts['distribution'] == 'Kali' + # Verify impacket regsecrets module is available (required for NetExec SMB) + - name: Verify impacket regsecrets module in source venv + ansible.builtin.command: /opt/impacket/venv/bin/python -c "from impacket.examples import regsecrets; print('OK')" + register: lateral_movement_tools_regsecrets_check + changed_when: false + failed_when: false + when: lateral_movement_tools_impacket_from_source | default(false) + + - name: Assert impacket regsecrets module is available + ansible.builtin.assert: + that: + - lateral_movement_tools_regsecrets_check.rc == 0 + fail_msg: "impacket.examples.regsecrets not found in 
impacket venv" + success_msg: "impacket regsecrets module available in venv" + when: lateral_movement_tools_impacket_from_source | default(false) + - name: Display verification summary ansible.builtin.debug: msg: | diff --git a/ansible/roles/lateral_movement_tools/tasks/impacket_source.yml b/ansible/roles/lateral_movement_tools/tasks/impacket_source.yml index 9ec8a592..c1fe688a 100644 --- a/ansible/roles/lateral_movement_tools/tasks/impacket_source.yml +++ b/ansible/roles/lateral_movement_tools/tasks/impacket_source.yml @@ -112,6 +112,27 @@ become: true when: not lateral_movement_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: lateral_movement_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it system-wide) + ansible.builtin.pip: + name: "{{ lateral_movement_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - ansible_facts['distribution'] == 'Kali' + - (lateral_movement_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or lateral_movement_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ lateral_movement_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/mythic/README.md b/ansible/roles/mythic/README.md index aba87bc8..3cb311c1 100644 --- a/ansible/roles/mythic/README.md +++ b/ansible/roles/mythic/README.md @@ -1,4 +1,5 @@ + # mythic ## Description @@ -157,3 +158,4 @@ Install and configure Mythic C2 framework - Ubuntu: all - Debian: all + diff 
--git a/ansible/roles/privesc_tools/README.md b/ansible/roles/privesc_tools/README.md index e4efa1df..fc45692b 100644 --- a/ansible/roles/privesc_tools/README.md +++ b/ansible/roles/privesc_tools/README.md @@ -1,4 +1,5 @@ + # privesc_tools ## Description @@ -42,9 +43,8 @@ Install and configure privilege escalation tools for Ares agents | `privesc_tools_godpotato_url` | str | https://github.com/BeichenDream/GodPotato/releases/download/V1.20/GodPotato-NET4.exe | No description | | `privesc_tools_godpotato_install_dir` | str | /opt/privesc/GodPotato | No description | | `privesc_tools_install_krbrelayup` | bool | True | No description | -| `privesc_tools_krbrelayup_repo` | str | https://github.com/Dec0ne/KrbRelayUp.git | No description | +| `privesc_tools_krbrelayup_url` | str | https://github.com/Flangvik/SharpCollection/raw/master/NetFramework_4.7_Any/KrbRelayUp.exe | No description | | `privesc_tools_krbrelayup_install_dir` | str | /opt/privesc/KrbRelayUp | No description | -| `privesc_tools_krbrelayup_version` | str | main | No description | | `privesc_tools_install_sharpgpoabuse` | bool | True | No description | | `privesc_tools_sharpgpoabuse_repo` | str | https://github.com/byronkg/SharpGPOAbuse.git | No description | | `privesc_tools_sharpgpoabuse_install_dir` | str | /opt/privesc/SharpGPOAbuse | No description | @@ -105,6 +105,7 @@ Install and configure privilege escalation tools for Ares agents | `privesc_tools_binaries` | dict | {} | No description | | `privesc_tools_binaries.printspoofer` | str | /opt/privesc/PrintSpoofer/PrintSpoofer64.exe | No description | | `privesc_tools_binaries.godpotato` | str | /opt/privesc/GodPotato/GodPotato-NET4.exe | No description | +| `privesc_tools_binaries.krbrelayup` | str | /opt/privesc/KrbRelayUp/KrbRelayUp.exe | No description | | `privesc_tools_binaries.winpeas` | str | /opt/privesc/WinPEAS/winPEASany_ofs.exe | No description | | `privesc_tools_binaries.linpeas` | str | /opt/privesc/LinPEAS/linpeas.sh | No description 
| | `privesc_tools_binaries.powerup` | str | /opt/privesc/PowerUp/PowerUp.ps1 | No description | @@ -148,6 +149,8 @@ Install and configure privilege escalation tools for Ares agents - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- **Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -170,7 +173,8 @@ Install and configure privilege escalation tools for Ares agents - **Create GodPotato directory** (ansible.builtin.file) - Conditional - **Download GodPotato** (ansible.builtin.get_url) - Conditional - **Clone SweetPotato from GitHub** (ansible.builtin.git) - Conditional -- **Clone KrbRelayUp from GitHub** (ansible.builtin.git) - Conditional +- **Create KrbRelayUp directory** (ansible.builtin.file) - Conditional +- **Download KrbRelayUp** (ansible.builtin.get_url) - Conditional - **Clone SharpGPOAbuse from GitHub** (ansible.builtin.git) - Conditional - **Create Seatbelt directory** (ansible.builtin.file) - Conditional - **Download Seatbelt** (ansible.builtin.get_url) - Conditional @@ -220,8 +224,7 @@ Install and configure privilege escalation tools for Ares agents - **Check if pygpoabuse is already installed via pipx** (ansible.builtin.command) - **Install pygpoabuse via pipx** (ansible.builtin.command) - Conditional -- **Create symlink for pygpoabuse.py in /usr/local/bin** (ansible.builtin.file) -- **Create pygpoabuse symlink (without .py 
extension)** (ansible.builtin.file) +- **Create pygpoabuse symlink in /usr/local/bin** (ansible.builtin.file) ### zerologon.yml @@ -253,3 +256,4 @@ Install and configure privilege escalation tools for Ares agents - Debian: all - Kali: all + diff --git a/ansible/roles/privesc_tools/defaults/main.yml b/ansible/roles/privesc_tools/defaults/main.yml index daf58562..5395afa0 100644 --- a/ansible/roles/privesc_tools/defaults/main.yml +++ b/ansible/roles/privesc_tools/defaults/main.yml @@ -35,11 +35,10 @@ privesc_tools_godpotato_url: "https://github.com/BeichenDream/GodPotato/releases privesc_tools_godpotato_install_dir: "/opt/privesc/GodPotato" # KrbRelayUp configuration (Kerberos relay local privesc) -# Note: Repository has no release tags, using main branch +# Note: Dec0ne/KrbRelayUp has no precompiled releases; using Flangvik/SharpCollection nightly build privesc_tools_install_krbrelayup: true -privesc_tools_krbrelayup_repo: "https://github.com/Dec0ne/KrbRelayUp.git" +privesc_tools_krbrelayup_url: "https://github.com/Flangvik/SharpCollection/raw/master/NetFramework_4.7_Any/KrbRelayUp.exe" privesc_tools_krbrelayup_install_dir: "/opt/privesc/KrbRelayUp" -privesc_tools_krbrelayup_version: "main" # SharpGPOAbuse configuration (GPO privilege escalation) # Note: FSecureLABS doesn't provide precompiled releases, using byronkg's fork @@ -150,6 +149,7 @@ privesc_tools_update_cache: true privesc_tools_binaries: printspoofer: "/opt/privesc/PrintSpoofer/PrintSpoofer64.exe" godpotato: "/opt/privesc/GodPotato/GodPotato-NET4.exe" + krbrelayup: "/opt/privesc/KrbRelayUp/KrbRelayUp.exe" winpeas: "/opt/privesc/WinPEAS/winPEASany_ofs.exe" linpeas: "/opt/privesc/LinPEAS/linpeas.sh" powerup: "/opt/privesc/PowerUp/PowerUp.ps1" diff --git a/ansible/roles/privesc_tools/molecule/default/verify.yml b/ansible/roles/privesc_tools/molecule/default/verify.yml index 6adc65ab..7db4391b 100644 --- a/ansible/roles/privesc_tools/molecule/default/verify.yml +++ 
b/ansible/roles/privesc_tools/molecule/default/verify.yml @@ -168,19 +168,18 @@ when: privesc_tools_install_printnightmare | default(true) # KrbRelayUp verification - - name: Check KrbRelayUp is cloned + - name: Check KrbRelayUp binary exists ansible.builtin.stat: - path: "{{ privesc_tools_krbrelayup_install_dir }}" + path: "{{ privesc_tools_krbrelayup_install_dir }}/KrbRelayUp.exe" register: privesc_tools_krbrelayup_stat when: privesc_tools_install_krbrelayup | default(true) - - name: Assert KrbRelayUp directory exists + - name: Assert KrbRelayUp binary exists ansible.builtin.assert: that: - privesc_tools_krbrelayup_stat.stat.exists - - privesc_tools_krbrelayup_stat.stat.isdir - fail_msg: "KrbRelayUp was not cloned to {{ privesc_tools_krbrelayup_install_dir }}" - success_msg: "KrbRelayUp is available at {{ privesc_tools_krbrelayup_install_dir }}" + fail_msg: "KrbRelayUp binary was not downloaded to {{ privesc_tools_krbrelayup_install_dir }}/KrbRelayUp.exe" + success_msg: "KrbRelayUp is available at {{ privesc_tools_krbrelayup_install_dir }}/KrbRelayUp.exe" when: privesc_tools_install_krbrelayup | default(true) # SharpGPOAbuse verification @@ -259,6 +258,20 @@ success_msg: "printerbug wrapper is available at /usr/local/bin/printerbug" when: privesc_tools_install_krbrelayx | default(true) + # Verify impacket regsecrets module is available (required for NetExec SMB) + - name: Verify impacket regsecrets module in source venv + ansible.builtin.command: /opt/impacket/venv/bin/python -c "from impacket.examples import regsecrets; print('OK')" + register: privesc_tools_regsecrets_check + changed_when: false + failed_when: false + + - name: Assert impacket regsecrets module is available + ansible.builtin.assert: + that: + - privesc_tools_regsecrets_check.rc == 0 + fail_msg: "impacket.examples.regsecrets not found in impacket venv" + success_msg: "impacket regsecrets module available in venv" + - name: Check impacket is installed in krbrelayx venv ansible.builtin.command: 
"{{ privesc_tools_krbrelayx_install_dir }}/venv/bin/pip show impacket" register: privesc_tools_krbrelayx_impacket diff --git a/ansible/roles/privesc_tools/tasks/impacket_source.yml b/ansible/roles/privesc_tools/tasks/impacket_source.yml index f28ed024..d552aeeb 100644 --- a/ansible/roles/privesc_tools/tasks/impacket_source.yml +++ b/ansible/roles/privesc_tools/tasks/impacket_source.yml @@ -112,6 +112,27 @@ become: true when: not privesc_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: privesc_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it system-wide) + ansible.builtin.pip: + name: "{{ privesc_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - ansible_facts['distribution'] == 'Kali' + - (privesc_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or privesc_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ privesc_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/privesc_tools/tasks/linux.yml b/ansible/roles/privesc_tools/tasks/linux.yml index 5972fbe3..cf77549e 100644 --- a/ansible/roles/privesc_tools/tasks/linux.yml +++ b/ansible/roles/privesc_tools/tasks/linux.yml @@ -122,12 +122,19 @@ become: true when: privesc_tools_install_sweetpotato -- name: Clone KrbRelayUp from GitHub - ansible.builtin.git: - repo: "{{ privesc_tools_krbrelayup_repo }}" - dest: "{{ privesc_tools_krbrelayup_install_dir }}" - version: "{{ 
privesc_tools_krbrelayup_version }}" - force: true +- name: Create KrbRelayUp directory + ansible.builtin.file: + path: "{{ privesc_tools_krbrelayup_install_dir }}" + state: directory + mode: '0755' + become: true + when: privesc_tools_install_krbrelayup + +- name: Download KrbRelayUp + ansible.builtin.get_url: + url: "{{ privesc_tools_krbrelayup_url }}" + dest: "{{ privesc_tools_krbrelayup_install_dir }}/KrbRelayUp.exe" + mode: '0755' become: true when: privesc_tools_install_krbrelayup diff --git a/ansible/roles/privesc_tools/tasks/pygpoabuse_pipx.yml b/ansible/roles/privesc_tools/tasks/pygpoabuse_pipx.yml index 9c104764..24b909eb 100644 --- a/ansible/roles/privesc_tools/tasks/pygpoabuse_pipx.yml +++ b/ansible/roles/privesc_tools/tasks/pygpoabuse_pipx.yml @@ -1,7 +1,6 @@ --- # Install pygpoabuse via pipx for dependency isolation # pygpoabuse is a Python implementation of GPO abuse for privilege escalation -# Note: pyGPOAbuse installs as pygpoabuse.py (uses scripts= not entry_points) - name: Check if pygpoabuse is already installed via pipx ansible.builtin.command: pipx list @@ -23,19 +22,9 @@ PATH: "{{ base_rust_bin_path }}:{{ base_pipx_bin_path }}:{{ ansible_facts['env']['PATH'] }}" when: "'pygpoabuse' not in privesc_tools_pygpoabuse_pipx_list.stdout | default('')" -# pyGPOAbuse uses scripts=['pygpoabuse.py'] so binary is pygpoabuse.py -- name: Create symlink for pygpoabuse.py in /usr/local/bin +- name: Create pygpoabuse symlink in /usr/local/bin ansible.builtin.file: - src: "{{ base_pipx_bin_path }}/pygpoabuse.py" - dest: /usr/local/bin/pygpoabuse.py - state: link - force: true - become: true - -# Also create pygpoabuse symlink for convenience -- name: Create pygpoabuse symlink (without .py extension) - ansible.builtin.file: - src: /usr/local/bin/pygpoabuse.py + src: "{{ base_pipx_bin_path }}/pygpoabuse" dest: /usr/local/bin/pygpoabuse state: link force: true diff --git a/ansible/roles/recon_tools/README.md b/ansible/roles/recon_tools/README.md index 
d7e4f12a..81d94e95 100644 --- a/ansible/roles/recon_tools/README.md +++ b/ansible/roles/recon_tools/README.md @@ -1,4 +1,5 @@ + # recon_tools ## Description @@ -98,6 +99,8 @@ Install and configure network reconnaissance tools for Ares agents - **Make impacket example scripts executable** (ansible.builtin.shell) - **Check if \_\_init\_\_.py exists in impacket/examples** (ansible.builtin.stat) - **Create \_\_init\_\_.py in impacket/examples to make it a proper Python package** (ansible.builtin.copy) - Conditional +- **Check system impacket version (Kali)** (ansible.builtin.command) - Conditional +- **Install source impacket into system Python (Kali apt netexec needs it system-wide)** (ansible.builtin.pip) - Conditional - **Create symlinks for impacket scripts (impacket-* style for Kali compatibility)** (ansible.builtin.shell) - **Verify impacket regsecrets module is available** (ansible.builtin.command) - **Report impacket installation status** (ansible.builtin.debug) @@ -156,7 +159,6 @@ Install and configure network reconnaissance tools for Ares agents - **Warn if Rust is not available** (ansible.builtin.debug) - Conditional - **Check if pipx is available** (ansible.builtin.command) - **Reinstall pipx if not available (may have been broken by package removal)** (ansible.builtin.apt) - Conditional -- **Run pipx ensurepath after reinstall** (ansible.builtin.command) - Conditional - **Verify pipx is now available** (ansible.builtin.command) - Conditional - **Fail if pipx is still not available** (ansible.builtin.fail) - Conditional - **Check if NetExec is already installed via pipx** (ansible.builtin.command) @@ -164,6 +166,8 @@ Install and configure network reconnaissance tools for Ares agents - **Upgrade NetExec if already installed** (ansible.builtin.command) - Conditional - **Report NetExec installation result** (ansible.builtin.debug) - **Inject source impacket into NetExec pipx venv** (ansible.builtin.command) - Conditional +- **Verify regsecrets is importable 
from NetExec pipx venv** (ansible.builtin.command) - Conditional +- **Fail if regsecrets not available in NetExec venv** (ansible.builtin.fail) - Conditional - **Find nxc binary location** (ansible.builtin.shell) - **Create symlink for nxc in /usr/local/bin** (ansible.builtin.file) - Conditional - **Find netexec binary location** (ansible.builtin.shell) @@ -192,3 +196,4 @@ Install and configure network reconnaissance tools for Ares agents - Debian: all - Kali: all + diff --git a/ansible/roles/recon_tools/molecule/default/verify.yml b/ansible/roles/recon_tools/molecule/default/verify.yml index 8cb0b319..97bac01a 100644 --- a/ansible/roles/recon_tools/molecule/default/verify.yml +++ b/ansible/roles/recon_tools/molecule/default/verify.yml @@ -108,6 +108,52 @@ Set recon_tools_impacket_from_source: true success_msg: "impacket regsecrets module is available (required for NetExec SMB)" + # Verify regsecrets is importable from the NetExec pipx venv (non-Kali) + # This catches the case where pipx inject fails to replace PyPI impacket + - name: Verify regsecrets is importable from NetExec pipx venv + ansible.builtin.command: >- + /root/.local/pipx/venvs/netexec/bin/python -c + "from impacket.examples import regsecrets; print('OK')" + register: recon_tools_pipx_regsecrets_check + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] != 'Kali' + - recon_tools_netexec_pipx_python is defined + - recon_tools_netexec_pipx_python.stat.exists | default(false) + + - name: Assert regsecrets is importable from NetExec pipx venv + ansible.builtin.assert: + that: + - recon_tools_pipx_regsecrets_check.rc == 0 + fail_msg: | + impacket.examples.regsecrets not importable from NetExec pipx venv. + The pipx inject likely failed to replace PyPI impacket with the source version. 
+ success_msg: "regsecrets importable from NetExec pipx venv" + when: + - ansible_facts['distribution'] != 'Kali' + - recon_tools_netexec_pipx_python is defined + - recon_tools_netexec_pipx_python.stat.exists | default(false) + + # Verify regsecrets is importable from system Python (Kali) + # Kali apt netexec uses system Python, so impacket 0.13.0 must be installed system-wide + - name: Verify regsecrets is importable from system Python (Kali) + ansible.builtin.command: python3 -c "from impacket.examples import regsecrets; print('OK')" + register: recon_tools_system_regsecrets_check + changed_when: false + failed_when: false + when: ansible_facts['distribution'] == 'Kali' + + - name: Assert regsecrets is importable from system Python (Kali) + ansible.builtin.assert: + that: + - recon_tools_system_regsecrets_check.rc == 0 + fail_msg: | + impacket.examples.regsecrets not importable from system Python on Kali. + The system-wide impacket install may have failed or been downgraded. + success_msg: "regsecrets importable from system Python (Kali apt netexec will work)" + when: ansible_facts['distribution'] == 'Kali' + - name: "Check impacket secretsdump is available (Kali: impacket-secretsdump, Ubuntu: secretsdump.py)" ansible.builtin.shell: which impacket-secretsdump || which secretsdump.py environment: @@ -344,8 +390,13 @@ - "" - "--- Impacket Tools ---" - >- - regsecrets module: {{ 'OK' if recon_tools_regsecrets_check.rc == 0 + regsecrets module (venv): {{ 'OK' if recon_tools_regsecrets_check.rc == 0 else 'MISSING!' 
}} + - >- + regsecrets module (runtime): {{ + 'OK (pipx venv)' if (recon_tools_pipx_regsecrets_check.rc | default(1) == 0) + else ('OK (system python)' if (recon_tools_system_regsecrets_check.rc | default(1) == 0) + else 'NOT VERIFIED') }} - "impacket-secretsdump: {{ recon_tools_secretsdump_check.stdout }}" - >- impacket-GetUserSPNs: {{ diff --git a/ansible/roles/recon_tools/tasks/impacket_source.yml b/ansible/roles/recon_tools/tasks/impacket_source.yml index 8bbcbf01..e3a92a40 100644 --- a/ansible/roles/recon_tools/tasks/impacket_source.yml +++ b/ansible/roles/recon_tools/tasks/impacket_source.yml @@ -113,6 +113,27 @@ become: true when: not recon_tools_impacket_init_check.stat.exists +- name: Check system impacket version (Kali) + ansible.builtin.command: python3 -c "import importlib.metadata; print(importlib.metadata.version('impacket'))" + register: recon_tools_system_impacket_version + changed_when: false + failed_when: false + when: + - ansible_facts['distribution'] == 'Kali' + +- name: Install source impacket into system Python (Kali apt netexec needs it system-wide) + ansible.builtin.pip: + name: "{{ recon_tools_impacket_install_dir }}" + executable: pip3 + editable: true + state: forcereinstall + extra_args: "--break-system-packages --ignore-installed" + become: true + when: + - ansible_facts['distribution'] == 'Kali' + - (recon_tools_system_impacket_version.stdout | default('0.0.0', true)) is version('0.13.0', '<') + or recon_tools_impacket_clone.changed | default(false) + - name: Create symlinks for impacket scripts (impacket-* style for Kali compatibility) ansible.builtin.shell: | for script in {{ recon_tools_impacket_install_dir }}/examples/*.py; do diff --git a/ansible/roles/recon_tools/tasks/netexec_pipx.yml b/ansible/roles/recon_tools/tasks/netexec_pipx.yml index bc1ce462..a6668905 100644 --- a/ansible/roles/recon_tools/tasks/netexec_pipx.yml +++ b/ansible/roles/recon_tools/tasks/netexec_pipx.yml @@ -38,14 +38,6 @@ - recon_tools_pipx_check.rc != 0 - 
ansible_facts['os_family'] == 'Debian' -- name: Run pipx ensurepath after reinstall - ansible.builtin.command: pipx ensurepath - become: true - changed_when: false - when: - - recon_tools_pipx_check.rc != 0 - - ansible_facts['os_family'] == 'Debian' - - name: Verify pipx is now available ansible.builtin.command: pipx --version register: recon_tools_pipx_recheck @@ -109,7 +101,7 @@ - name: Inject source impacket into NetExec pipx venv ansible.builtin.command: >- - pipx inject netexec {{ recon_tools_impacket_install_dir }} --editable + pipx inject netexec {{ recon_tools_impacket_install_dir }} --editable --force register: recon_tools_netexec_impacket_inject changed_when: false failed_when: false @@ -121,6 +113,29 @@ - recon_tools_impacket_from_source | default(false) - recon_tools_netexec_use_pipx | default(true) +- name: Verify regsecrets is importable from NetExec pipx venv + ansible.builtin.command: >- + /root/.local/pipx/venvs/netexec/bin/python -c + "from impacket.examples import regsecrets; print('regsecrets OK')" + register: recon_tools_netexec_regsecrets_check + changed_when: false + failed_when: false + become: true + when: + - recon_tools_impacket_from_source | default(false) + - recon_tools_netexec_use_pipx | default(true) + +- name: Fail if regsecrets not available in NetExec venv + ansible.builtin.fail: + msg: | + CRITICAL: impacket.examples.regsecrets not importable from NetExec pipx venv. + The pipx inject may have failed. 
Check: pipx inject netexec /opt/impacket --editable --force + Inject output: {{ recon_tools_netexec_impacket_inject.stderr | default('none') }} + when: + - recon_tools_impacket_from_source | default(false) + - recon_tools_netexec_use_pipx | default(true) + - recon_tools_netexec_regsecrets_check.rc | default(1) != 0 + - name: Find nxc binary location ansible.builtin.shell: | set -o pipefail diff --git a/ansible/roles/redis/README.md b/ansible/roles/redis/README.md new file mode 100644 index 00000000..4cc70d51 --- /dev/null +++ b/ansible/roles/redis/README.md @@ -0,0 +1,71 @@ + + +# redis + +## Description + +Redis server for Ares worker message broker + +## Requirements + +- Ansible >= 2.18.4 + +## Role Variables + +### Default Variables (main.yml) + +| Variable | Type | Default | Description | +| -------- | ---- | ------- | ----------- | +| `redis_bind_address` | str | 127.0.0.1 | No description | +| `redis_port` | int | 6379 | No description | +| `redis_maxmemory` | str | 256mb | No description | +| `redis_maxmemory_policy` | str | allkeys-lru | No description | +| `redis_install_ares_worker_unit` | bool | True | No description | +| `redis_ares_worker_binary` | str | /usr/local/bin/ares-worker | No description | +| `redis_ares_log_dir` | str | /var/log/ares | No description | +| `redis_ares_config_dir` | str | /etc/ares | No description | +| `redis_verify_install` | bool | False | No description | + +## Tasks + +### linux.yml + + +- **Install Redis server** (ansible.builtin.apt) +- **Configure Redis bind address** (ansible.builtin.lineinfile) +- **Configure Redis port** (ansible.builtin.lineinfile) +- **Configure Redis maxmemory** (ansible.builtin.lineinfile) +- **Configure Redis maxmemory-policy** (ansible.builtin.lineinfile) +- **Enable and start Redis** (ansible.builtin.systemd) +- **Create Ares directories** (ansible.builtin.file) +- **Install Ares worker systemd template unit** (ansible.builtin.template) - Conditional +- **Verify Redis is responding** 
(ansible.builtin.command) - Conditional +- **Display Redis verification** (ansible.builtin.debug) - Conditional + +### main.yml + + +- **Include Linux tasks** (ansible.builtin.include_tasks) - Conditional + +## Example Playbook + +```yaml +- hosts: servers + roles: + - redis +``` + +## Author Information + +- **Author**: Dreadnode +- **Company**: dreadnode +- **License**: MIT + +## Platforms + + +- Ubuntu: all +- Debian: all +- Kali: all + + diff --git a/ansible/roles/redis/defaults/main.yml b/ansible/roles/redis/defaults/main.yml new file mode 100644 index 00000000..fe7f6a00 --- /dev/null +++ b/ansible/roles/redis/defaults/main.yml @@ -0,0 +1,15 @@ +--- +# Redis configuration +redis_bind_address: "127.0.0.1" +redis_port: 6379 +redis_maxmemory: "256mb" +redis_maxmemory_policy: "allkeys-lru" + +# Ares worker configuration +redis_install_ares_worker_unit: true +redis_ares_worker_binary: "/usr/local/bin/ares-worker" +redis_ares_log_dir: "/var/log/ares" +redis_ares_config_dir: "/etc/ares" + +# Verification +redis_verify_install: false diff --git a/ansible/roles/redis/handlers/main.yml b/ansible/roles/redis/handlers/main.yml new file mode 100644 index 00000000..c8830446 --- /dev/null +++ b/ansible/roles/redis/handlers/main.yml @@ -0,0 +1,11 @@ +--- +- name: Restart redis + ansible.builtin.systemd: + name: redis-server + state: restarted + become: true + +- name: Reload systemd + ansible.builtin.systemd: + daemon_reload: true + become: true diff --git a/ansible/roles/redis/meta/main.yml b/ansible/roles/redis/meta/main.yml new file mode 100644 index 00000000..7334bbcb --- /dev/null +++ b/ansible/roles/redis/meta/main.yml @@ -0,0 +1,24 @@ +--- +galaxy_info: + author: Dreadnode + namespace: dreadnode + description: Redis server for Ares worker message broker + company: dreadnode + license: MIT + role_name: redis + min_ansible_version: "2.18.4" + platforms: + - name: Ubuntu + versions: + - all + - name: Debian + versions: + - all + - name: Kali + versions: + - all + 
galaxy_tags: + - ares + - redis + - worker + - broker diff --git a/ansible/roles/redis/tasks/linux.yml b/ansible/roles/redis/tasks/linux.yml new file mode 100644 index 00000000..9583ac1c --- /dev/null +++ b/ansible/roles/redis/tasks/linux.yml @@ -0,0 +1,77 @@ +--- +- name: Install Redis server + ansible.builtin.apt: + name: redis-server + state: present + update_cache: true + become: true + +- name: Configure Redis bind address + ansible.builtin.lineinfile: + path: /etc/redis/redis.conf + regexp: '^bind ' + line: "bind {{ redis_bind_address }}" + become: true + notify: Restart redis + +- name: Configure Redis port + ansible.builtin.lineinfile: + path: /etc/redis/redis.conf + regexp: '^port ' + line: "port {{ redis_port }}" + become: true + notify: Restart redis + +- name: Configure Redis maxmemory + ansible.builtin.lineinfile: + path: /etc/redis/redis.conf + regexp: '^# ?maxmemory ' + line: "maxmemory {{ redis_maxmemory }}" + become: true + notify: Restart redis + +- name: Configure Redis maxmemory-policy + ansible.builtin.lineinfile: + path: /etc/redis/redis.conf + regexp: '^# ?maxmemory-policy ' + line: "maxmemory-policy {{ redis_maxmemory_policy }}" + become: true + notify: Restart redis + +- name: Enable and start Redis + ansible.builtin.systemd: + name: redis-server + enabled: true + state: started + become: true + +- name: Create Ares directories + ansible.builtin.file: + path: "{{ item }}" + state: directory + mode: '0755' + loop: + - "{{ redis_ares_log_dir }}" + - "{{ redis_ares_config_dir }}" + become: true + +- name: Install Ares worker systemd template unit + ansible.builtin.template: + src: ares-worker@.service.j2 + dest: /etc/systemd/system/ares-worker@.service + mode: '0644' + become: true + when: redis_install_ares_worker_unit + notify: Reload systemd + +- name: Verify Redis is responding + ansible.builtin.command: + cmd: redis-cli ping + register: redis_ping + changed_when: false + when: redis_verify_install + +- name: Display Redis verification + 
ansible.builtin.debug: + msg: "Redis ping response: {{ redis_ping.stdout }}" + when: redis_verify_install diff --git a/ansible/roles/redis/tasks/main.yml b/ansible/roles/redis/tasks/main.yml new file mode 100644 index 00000000..0f9cb2c3 --- /dev/null +++ b/ansible/roles/redis/tasks/main.yml @@ -0,0 +1,4 @@ +--- +- name: Include Linux tasks + ansible.builtin.include_tasks: linux.yml + when: ansible_os_family != 'Windows' diff --git a/ansible/roles/redis/templates/ares-worker@.service.j2 b/ansible/roles/redis/templates/ares-worker@.service.j2 new file mode 100644 index 00000000..61c57474 --- /dev/null +++ b/ansible/roles/redis/templates/ares-worker@.service.j2 @@ -0,0 +1,19 @@ +[Unit] +Description=Ares Worker (%i) +After=redis.service +Wants=redis.service + +[Service] +Type=simple +ExecStart={{ redis_ares_worker_binary }} +Environment=ARES_REDIS_URL=redis://{{ redis_bind_address }}:{{ redis_port }} +Environment=ARES_WORKER_ROLE=%i +Environment=ARES_WORKER_MODE=tool_exec +Environment=RUST_LOG=info +Restart=on-failure +RestartSec=5 +StandardOutput=append:{{ redis_ares_log_dir }}/%i.log +StandardError=append:{{ redis_ares_log_dir }}/%i.log + +[Install] +WantedBy=multi-user.target From 47357cfe23b66d56f2361e7462cf45b7f2f73850 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 09:00:13 -0600 Subject: [PATCH 02/17] ci: update python version for pre-commit workflow to 3.14 **Changed:** - Updated the Python version used in the pre-commit GitHub Actions workflow from 3.12 to 3.14 to test compatibility with the latest Python release --- .github/workflows/pre-commit.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index eca46e0e..04f33e3f 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -41,7 +41,7 @@ jobs: - name: Set up Python (for pre-commit) uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: - 
python-version: "3.12" + python-version: "3.14" - name: Install pre-commit run: python3 -m pip install pre-commit From 949df2b033ab6b68b3943493048593e50c2b42eb Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 09:06:16 -0600 Subject: [PATCH 03/17] refactor: reorganize ansible hooks and update pre-commit/ci config **Added:** - Introduced `.hooks/prettier.sh` for formatting files using Prettier **Changed:** - Moved all Ansible-related hooks and templates into `.hooks/ansible/` for better organization - Updated references in `.pre-commit-config.yaml` and hook scripts to reflect new paths for Ansible lint, docsible, and architecture diagram hooks - Modified `.github/workflows/pre-commit.yaml` to: - Update concurrency group naming for clarity - Set Python version via environment variable and bump to 3.14.3 - Add pip caching and requirements install for Python dependencies - Update Task version to 3.49.1 and enable remote taskfiles - Simplify pre-commit run command, removing redundant env var **Removed:** - Eliminated the outdated `linters` and `templates` directories under `.hooks` in favor of consolidated structure under `.hooks/ansible` --- .github/workflows/pre-commit.yaml | 19 ++++++----- .hooks/{linters => ansible}/ansible-lint.yaml | 0 .hooks/{ => ansible}/docsible-hook.sh | 2 +- .hooks/{ => ansible}/gen-arch-diagram.py | 0 .../lint-hook.sh} | 4 +-- .../templates/docsible-template.md.j2 | 0 .hooks/prettier.sh | 34 +++++++++++++++++++ .pre-commit-config.yaml | 6 ++-- 8 files changed, 51 insertions(+), 14 deletions(-) rename .hooks/{linters => ansible}/ansible-lint.yaml (100%) rename .hooks/{ => ansible}/docsible-hook.sh (96%) rename .hooks/{ => ansible}/gen-arch-diagram.py (100%) rename .hooks/{ansible-lint-hook.sh => ansible/lint-hook.sh} (75%) rename .hooks/{ => ansible}/templates/docsible-template.md.j2 (100%) create mode 100755 .hooks/prettier.sh diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 
04f33e3f..83663edf 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -18,11 +18,13 @@ on: workflow_dispatch: concurrency: - group: pre-commit-${{ github.ref }} cancel-in-progress: true + group: pre-commit-${{ github.workflow }}-${{ github.ref }} env: - TASK_VERSION: 3.38.0 + PYTHON_VERSION: "3.14.3" + TASK_X_REMOTE_TASKFILES: "1" + TASK_VERSION: 3.49.1 permissions: actions: read @@ -38,19 +40,20 @@ jobs: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Set up Python (for pre-commit) + - name: Set up Python uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: - python-version: "3.14" + python-version: ${{ env.PYTHON_VERSION }} + cache: 'pip' + cache-dependency-path: '.hooks/requirements.txt' - - name: Install pre-commit - run: python3 -m pip install pre-commit + - name: Install dependencies + run: python3 -m pip install -r .hooks/requirements.txt - name: Setup go-task - if: always() run: | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin v${{ env.TASK_VERSION }} task --version - name: Run pre-commit - run: TASK_X_REMOTE_TASKFILES=1 task run-pre-commit -y + run: task -y run-pre-commit diff --git a/.hooks/linters/ansible-lint.yaml b/.hooks/ansible/ansible-lint.yaml similarity index 100% rename from .hooks/linters/ansible-lint.yaml rename to .hooks/ansible/ansible-lint.yaml diff --git a/.hooks/docsible-hook.sh b/.hooks/ansible/docsible-hook.sh similarity index 96% rename from .hooks/docsible-hook.sh rename to .hooks/ansible/docsible-hook.sh index eceea1bc..cd3f58f6 100755 --- a/.hooks/docsible-hook.sh +++ b/.hooks/ansible/docsible-hook.sh @@ -19,7 +19,7 @@ if ! command -v docsible &>/dev/null; then fi # Check if template exists -TEMPLATE_PATH="$REPO_ROOT/.hooks/templates/docsible-template.md.j2" +TEMPLATE_PATH="$REPO_ROOT/.hooks/ansible/templates/docsible-template.md.j2" if [ ! 
-f "$TEMPLATE_PATH" ]; then echo -e "${RED}Error: Template file not found at $TEMPLATE_PATH${NC}" echo "Please ensure the template file exists before running this hook." diff --git a/.hooks/gen-arch-diagram.py b/.hooks/ansible/gen-arch-diagram.py similarity index 100% rename from .hooks/gen-arch-diagram.py rename to .hooks/ansible/gen-arch-diagram.py diff --git a/.hooks/ansible-lint-hook.sh b/.hooks/ansible/lint-hook.sh similarity index 75% rename from .hooks/ansible-lint-hook.sh rename to .hooks/ansible/lint-hook.sh index 541e2751..cc1a724e 100755 --- a/.hooks/ansible-lint-hook.sh +++ b/.hooks/ansible/lint-hook.sh @@ -6,8 +6,8 @@ set -eo pipefail # run in online mode to allow collection installation. if [ -z "${GITHUB_ACTIONS}" ]; then # Local pre-commit: use offline mode - exec ansible-lint -v --force-color --offline -c .hooks/linters/ansible-lint.yaml "$@" + exec ansible-lint -v --force-color --offline -c .hooks/ansible/ansible-lint.yaml "$@" else # CI: allow collection installation - exec ansible-lint -v --force-color -c .hooks/linters/ansible-lint.yaml "$@" + exec ansible-lint -v --force-color -c .hooks/ansible/ansible-lint.yaml "$@" fi diff --git a/.hooks/templates/docsible-template.md.j2 b/.hooks/ansible/templates/docsible-template.md.j2 similarity index 100% rename from .hooks/templates/docsible-template.md.j2 rename to .hooks/ansible/templates/docsible-template.md.j2 diff --git a/.hooks/prettier.sh b/.hooks/prettier.sh new file mode 100755 index 00000000..f905c885 --- /dev/null +++ b/.hooks/prettier.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -eo pipefail + +# Check if npm is installed +if ! [ -x "$(command -v npm)" ]; then + echo 'Error: npm is not installed.' >&2 + exit 1 +else + # Check if Prettier is installed + if ! [ -x "$(command -v prettier)" ]; then + echo 'Error: Prettier is not installed.' >&2 + echo 'Installing Prettier...' + npm install -g prettier + fi +fi + +# Check if Prettier is installed +if ! 
[ -x "$(command -v prettier)" ]; then + echo 'Error: Prettier is not installed.' >&2 + exit 1 +fi + +# If no files passed, exit successfully +if [ $# -eq 0 ]; then + echo "No files to format." + exit 0 +fi + +echo "Running Prettier on staged files..." + +prettier --write "$@" + +echo "Prettier formatting completed." +exit 0 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 51729d46..4653a0da 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -53,7 +53,7 @@ repos: # env -u GIT_INDEX_FILE prevents ansible-galaxy (called internally by # ansible-lint) from inheriting the commit-time index file and # corrupting it with stale blob references. - entry: env -u GIT_INDEX_FILE ansible-lint -v --force-color -c .hooks/linters/ansible-lint.yaml + entry: env -u GIT_INDEX_FILE ansible-lint -v --force-color -c .hooks/ansible/ansible-lint.yaml language: python always_run: true additional_dependencies: [".[community]"] @@ -104,7 +104,7 @@ repos: - id: docsible name: Generate Ansible documentation with docsible - entry: .hooks/docsible-hook.sh + entry: .hooks/ansible/docsible-hook.sh language: script always_run: true pass_filenames: false @@ -112,7 +112,7 @@ repos: - id: update-architecture-diagram name: Update Architecture Diagram - entry: python .hooks/gen-arch-diagram.py + entry: python .hooks/ansible/gen-arch-diagram.py language: python pass_filenames: false always_run: false From 733050f62f4221d3800b1a058f0879992543b896 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 09:18:45 -0600 Subject: [PATCH 04/17] ci: add ansible collection installation and local build to pre-commit workflow --- .github/workflows/pre-commit.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 83663edf..f47e3f72 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -50,6 +50,16 @@ jobs: - name: Install dependencies run: 
python3 -m pip install -r .hooks/requirements.txt + - name: Install Ansible collections + run: | + ansible-galaxy collection install -r ansible/requirements.yml --force + + - name: Build and install local collection + working-directory: ansible + run: | + ansible-galaxy collection build --force + ansible-galaxy collection install dreadnode-nimbus_range-*.tar.gz -p ~/.ansible/collections --force --pre + - name: Setup go-task run: | sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin v${{ env.TASK_VERSION }} From af9194e63be698cfff3778a9886b52634732c8be Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 09:25:13 -0600 Subject: [PATCH 05/17] ci: add go/shfmt setup to pre-commit workflow and exclude .tmpl from shell hooks --- .github/workflows/pre-commit.yaml | 10 ++++++++++ .pre-commit-config.yaml | 3 +++ 2 files changed, 13 insertions(+) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index f47e3f72..27ebae51 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -22,6 +22,7 @@ concurrency: group: pre-commit-${{ github.workflow }}-${{ github.ref }} env: + GO_VERSION: "1.26.1" PYTHON_VERSION: "3.14.3" TASK_X_REMOTE_TASKFILES: "1" TASK_VERSION: 3.49.1 @@ -50,6 +51,15 @@ jobs: - name: Install dependencies run: python3 -m pip install -r .hooks/requirements.txt + - name: Set up Go + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6 + with: + go-version: ${{ env.GO_VERSION }} + + - name: Install go module dependencies + run: | + go install mvdan.cc/sh/v3/cmd/shfmt@latest + - name: Install Ansible collections run: | ansible-galaxy collection install -r ansible/requirements.yml --force diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4653a0da..3170fd25 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,8 +37,11 @@ repos: - id: script-must-have-extension name: Ensure shell scripts end with .sh types: 
[shell] + exclude: '\.tmpl$' - id: shellcheck + exclude: '\.tmpl$' - id: shfmt + exclude: '\.tmpl$' - repo: https://github.com/igorshubovych/markdownlint-cli rev: v0.48.0 From 9ab5fc57958ab7c1e3ad2cc24bb87cc7283a1839 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 09:34:03 -0600 Subject: [PATCH 06/17] ci: update python versions and improve ansible-lint compatibility in pre-commit workflow **Added:** - Introduced PYTHON_VERSION_ANSIBLE_LINT environment variable for ansible-lint, allowing use of Python 3.14.3 specifically for the ansible-lint pre-commit hook - Added a dedicated Python setup step for ansible-lint using the new variable **Changed:** - Updated default PYTHON_VERSION from 3.14.3 to 3.13.5 to align with other tools - Adjusted Python setup steps to use both general and ansible-lint-specific Python versions for better tool compatibility --- .github/workflows/pre-commit.yaml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 27ebae51..562394b4 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -23,7 +23,9 @@ concurrency: env: GO_VERSION: "1.26.1" - PYTHON_VERSION: "3.14.3" + PYTHON_VERSION: "3.13.5" + # ansible-lint v26 requires Python 3.14 for its pre-commit virtualenv + PYTHON_VERSION_ANSIBLE_LINT: "3.14.3" TASK_X_REMOTE_TASKFILES: "1" TASK_VERSION: 3.49.1 @@ -41,7 +43,12 @@ jobs: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Set up Python + - name: Set up Python ${{ env.PYTHON_VERSION_ANSIBLE_LINT }} (for ansible-lint pre-commit hook) + uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 + with: + python-version: ${{ env.PYTHON_VERSION_ANSIBLE_LINT }} + + - name: Set up Python ${{ env.PYTHON_VERSION }} uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0 with: python-version: ${{ 
env.PYTHON_VERSION }} From 1cecede85afb71ce7d23efd65e05e22f6ec62c96 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 10:37:03 -0600 Subject: [PATCH 07/17] ci: add change detection to optimize rust workflow runs **Added:** - Introduced a `detect-changes` job using `dorny/paths-filter` to check for Rust-related file changes and output results for conditional job execution - Implemented conditional logic in `check`, `fmt`, and `clippy` jobs to run only when relevant Rust files are changed - Enhanced the `test` job to skip steps and output a message when no Rust changes are detected **Changed:** - Updated workflow dependencies: jobs now depend on `detect-changes` and use its output to determine execution, reducing unnecessary runs on unrelated changes - Refined job triggers by removing the previous `paths` filters from workflow events, delegating change detection to the new job and centralizing logic **Removed:** - Eliminated inline `paths` filters from pull_request and push event triggers to avoid redundant workflow executions and rely on job-level change detection --- .github/workflows/rust.yaml | 50 +++++++++++++++++++++++++++---------- 1 file changed, 37 insertions(+), 13 deletions(-) diff --git a/.github/workflows/rust.yaml b/.github/workflows/rust.yaml index 5b8820fd..28884c10 100644 --- a/.github/workflows/rust.yaml +++ b/.github/workflows/rust.yaml @@ -9,19 +9,9 @@ on: - opened - synchronize - reopened - paths: - - "ares-*/**" - - "Cargo.toml" - - "Cargo.lock" - - ".github/workflows/rust.yaml" push: branches: - main - paths: - - "ares-*/**" - - "Cargo.toml" - - "Cargo.lock" - - ".github/workflows/rust.yaml" workflow_dispatch: concurrency: @@ -39,9 +29,31 @@ permissions: pull-requests: write jobs: + detect-changes: + name: "\U0001F50E Detect changes" + runs-on: ubuntu-latest + outputs: + rust: ${{ steps.filter.outputs.rust }} + steps: + - name: Set up git repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + 
- name: Check for Rust changes + uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 + id: filter + with: + filters: | + rust: + - 'ares-*/**' + - 'Cargo.toml' + - 'Cargo.lock' + - '.github/workflows/rust.yaml' + check: name: "\U0001F50D Cargo check" runs-on: ubuntu-latest + needs: detect-changes + if: needs.detect-changes.outputs.rust == 'true' steps: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 @@ -66,15 +78,23 @@ jobs: test: name: "\U0001F9EA Tests" runs-on: ubuntu-latest - needs: check + needs: [detect-changes, check] + if: always() && (needs.detect-changes.outputs.rust != 'true' || needs.check.result == 'success') steps: + - name: Skip (no Rust changes) + if: needs.detect-changes.outputs.rust != 'true' + run: echo "No Rust changes detected, skipping tests" + - name: Set up git repository + if: needs.detect-changes.outputs.rust == 'true' uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust toolchain + if: needs.detect-changes.outputs.rust == 'true' uses: dtolnay/rust-toolchain@29eef336d9b2848a0b548edc03f92a220660cdb8 # stable - name: Cache cargo registry and build + if: needs.detect-changes.outputs.rust == 'true' uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: | @@ -86,10 +106,11 @@ jobs: ${{ runner.os }}-cargo-test- - name: Run tests + if: needs.detect-changes.outputs.rust == 'true' run: cargo test --workspace - name: Add test summary - if: always() + if: always() && needs.detect-changes.outputs.rust == 'true' run: | { echo "## \U0001F980 Rust Test Results" @@ -102,6 +123,8 @@ jobs: fmt: name: "\U0001F4D0 Format" runs-on: ubuntu-latest + needs: detect-changes + if: needs.detect-changes.outputs.rust == 'true' steps: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 @@ -117,7 +140,8 @@ jobs: clippy: name: "\U0001F4CE Clippy" runs-on: ubuntu-latest - needs: 
check + needs: [detect-changes, check] + if: needs.detect-changes.outputs.rust == 'true' steps: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 From 6c3ac7802e374db5b18318f0133e84d1e0b6e25e Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 10:42:03 -0600 Subject: [PATCH 08/17] ci: update rust workflow paths to avoid self-triggering **Changed:** - Modified the `paths` filter in the rust GitHub Actions workflow to exclude `.github/workflows/rust.yaml` to prevent the workflow from triggering itself on changes to its own configuration file --- .github/workflows/rust.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/rust.yaml b/.github/workflows/rust.yaml index 28884c10..006f76a0 100644 --- a/.github/workflows/rust.yaml +++ b/.github/workflows/rust.yaml @@ -47,7 +47,6 @@ jobs: - 'ares-*/**' - 'Cargo.toml' - 'Cargo.lock' - - '.github/workflows/rust.yaml' check: name: "\U0001F50D Cargo check" From c53f6a4d79999957cf7aebbaf87cc3791f2a09ac Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 10:55:35 -0600 Subject: [PATCH 09/17] ``` ci: install ansible collections without dependencies in pre-commit workflow **Changed:** - Updated ansible-galaxy install command in pre-commit workflow to use --no-deps flag, preventing installation of dependent collections and reducing setup time in CI ``` --- .github/workflows/pre-commit.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 562394b4..58acc9e4 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -69,7 +69,7 @@ jobs: - name: Install Ansible collections run: | - ansible-galaxy collection install -r ansible/requirements.yml --force + ansible-galaxy collection install -r ansible/requirements.yml --force --no-deps - name: Build and install local collection working-directory: ansible From 
a9c9115a0996c8625e282d6cdf49ddcffa8843cc Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 10:56:50 -0600 Subject: [PATCH 10/17] ``` chore: add ansible-related files to gitignore **Added:** - Ignore `.ansible/` directory to prevent Ansible-related files from being tracked by git ``` --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 741c09fa..dbc5ee6d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,9 @@ target/ *.pdb .cargo/config.toml +# Ansible +.ansible/ + # Reports and logs /reports/ /logs/ From f089bb90c2c789d36c1e3ea0023c116aa64f55ab Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 11:10:19 -0600 Subject: [PATCH 11/17] build: downgrade ansible-core version in pre-commit requirements **Changed:** - Updated `.hooks/requirements.txt` to require `ansible-core==2.19.8` instead of `2.20.4` to address compatibility or stability concerns with the newer version --- .hooks/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.hooks/requirements.txt b/.hooks/requirements.txt index 12964a67..fe28e838 100644 --- a/.hooks/requirements.txt +++ b/.hooks/requirements.txt @@ -1,4 +1,4 @@ -ansible-core==2.20.4 +ansible-core==2.19.8 ansible-lint==26.4.0 docker==7.1.0 docsible==0.8.0 From edb8acb3fc5c68ba64acfa222a69ce8bf2f60eb9 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 11:19:31 -0600 Subject: [PATCH 12/17] feat: add ansible.posix collection to requirements **Added:** - Added ansible.posix collection version 2.1.0 to Ansible requirements for extended support of POSIX-related modules and features --- ansible/requirements.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ansible/requirements.yml b/ansible/requirements.yml index 1f959009..5b01acb0 100644 --- a/ansible/requirements.yml +++ b/ansible/requirements.yml @@ -8,6 +8,8 @@ collections: version: 3.1.0 - name: community.docker version: 5.2.0 + - name: ansible.posix + version: 2.1.0 
- name: community.general version: 12.5.0 - name: grafana.grafana From 13100d6fe7e7e44115661b4e4cf26397d32079d2 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 11:44:25 -0600 Subject: [PATCH 13/17] ``` fix: set apt cache_valid_time to reduce unnecessary cache updates **Changed:** - Set `cache_valid_time: 3600` for apt cache updates to minimize redundant cache refreshes in Debian-based systems across base and mythic roles - Standardized `update_cache` parameter to `true` (from `yes`) for clarity in mythic packages task ``` --- ansible/roles/base/tasks/linux.yml | 1 + ansible/roles/mythic/tasks/docker.yml | 1 + ansible/roles/mythic/tasks/packages.yml | 3 ++- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/ansible/roles/base/tasks/linux.yml b/ansible/roles/base/tasks/linux.yml index b7813be8..833a9619 100644 --- a/ansible/roles/base/tasks/linux.yml +++ b/ansible/roles/base/tasks/linux.yml @@ -11,6 +11,7 @@ - name: Update apt cache ansible.builtin.apt: update_cache: true + cache_valid_time: 3600 become: true when: ansible_facts['os_family'] == 'Debian' diff --git a/ansible/roles/mythic/tasks/docker.yml b/ansible/roles/mythic/tasks/docker.yml index 43a9d464..5fbb42de 100644 --- a/ansible/roles/mythic/tasks/docker.yml +++ b/ansible/roles/mythic/tasks/docker.yml @@ -50,6 +50,7 @@ - name: Update apt cache after adding Docker repository ansible.builtin.apt: update_cache: true + cache_valid_time: 3600 become: true - name: Check available Docker versions diff --git a/ansible/roles/mythic/tasks/packages.yml b/ansible/roles/mythic/tasks/packages.yml index c6097477..7970a2ab 100644 --- a/ansible/roles/mythic/tasks/packages.yml +++ b/ansible/roles/mythic/tasks/packages.yml @@ -1,7 +1,8 @@ --- - name: Update apt cache ansible.builtin.apt: - update_cache: yes + update_cache: true + cache_valid_time: 3600 become: true - name: Install essential packages From 7db9caf78e62bf9bec58e132c0982ad6656a5d74 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 
16 Apr 2026 13:14:27 -0600 Subject: [PATCH 14/17] ci: enhance workflows for environment safety, test coverage, and security reporting **Added:** - Security warning comment to meta-labeler workflow explaining risks of `pull_request_target` and referencing relevant CVEs - Upload of Semgrep SARIF results to GitHub Security tab for improved vulnerability visibility - Additional Semgrep rules for Python and GitHub Actions security checks **Changed:** - Expanded workflow trigger paths in molecule and rust workflows to include relevant workflow and requirements files for more complete test coverage - Updated cache keys in molecule, rust, and clippy jobs to use `github.ref` for more accurate cache scoping - Improved environment variable usage and consistency across molecule, pre-commit, and related workflows by setting variables explicitly and referencing them uniformly in shell scripts and steps - Simplified and modernized event detection and changed files logic in molecule workflow, including use of explicit environment variables and more robust input handling - Adjusted permissions in molecule and meta-labeler workflows to only request necessary access for increased security - Improved error handling and validation messaging in molecule workflow when roles are missing or lack molecule tests - Refactored rust workflow to remove the `detect-changes` job, simplifying job dependencies and always running check, test, fmt, and clippy jobs, relying on path filters for execution control **Removed:** - Deprecated or redundant steps for permissions and change detection in meta-labeler and rust workflows to streamline configuration and reduce maintenance burden --- .github/workflows/meta-labeler.yaml | 5 +- .github/workflows/molecule.yaml | 74 +++++++++++++++++++---------- .github/workflows/pre-commit.yaml | 4 +- .github/workflows/rust.yaml | 64 +++++++++---------------- .github/workflows/semgrep.yaml | 10 ++++ 5 files changed, 86 insertions(+), 71 deletions(-) diff --git 
a/.github/workflows/meta-labeler.yaml b/.github/workflows/meta-labeler.yaml index 06c022eb..5878cf37 100644 --- a/.github/workflows/meta-labeler.yaml +++ b/.github/workflows/meta-labeler.yaml @@ -1,14 +1,15 @@ --- name: "Labeler" +# SECURITY: This workflow uses pull_request_target and has access to secrets. +# NEVER add an actions/checkout step here or run any code from the PR. +# See: CVE-2025-30066, CVE-2025-30154, CVE-2026-33634 on: pull_request_target: branches: ["main"] types: ["opened", "synchronize"] permissions: - actions: read contents: read - issues: write pull-requests: write jobs: diff --git a/.github/workflows/molecule.yaml b/.github/workflows/molecule.yaml index e8bfc6ac..2eb3541a 100644 --- a/.github/workflows/molecule.yaml +++ b/.github/workflows/molecule.yaml @@ -11,11 +11,15 @@ on: - reopened paths: - 'ansible/**' + - '.github/workflows/molecule.yaml' + - '.hooks/requirements.txt' push: branches: - main paths: - 'ansible/**' + - '.github/workflows/molecule.yaml' + - '.hooks/requirements.txt' schedule: # Runs every Sunday at 4 AM (see https://crontab.guru) - cron: "0 4 * * 0" @@ -50,9 +54,7 @@ env: ANSIBLE_COLLECTIONS_PATH: ~/.ansible/collections permissions: - actions: write - contents: write - issues: write + contents: read pull-requests: write jobs: @@ -71,12 +73,15 @@ jobs: - name: Check event type id: check-event + env: + EVENT_NAME: ${{ github.event_name }} + INPUT_ROLE: ${{ env.ROLE }} run: | # Test all on: push to main, schedule, workflow_dispatch without specific inputs, merge_group - if [[ "${{ github.event_name }}" == "push" ]] || \ - [[ "${{ github.event_name }}" == "schedule" ]] || \ - [[ "${{ github.event_name }}" == "merge_group" ]] || \ - [[ "${{ github.event_name }}" == "workflow_dispatch" && -z "${{ env.ROLE }}" ]]; then + if [[ "${EVENT_NAME}" == "push" ]] || \ + [[ "${EVENT_NAME}" == "schedule" ]] || \ + [[ "${EVENT_NAME}" == "merge_group" ]] || \ + [[ "${EVENT_NAME}" == "workflow_dispatch" && -z "${INPUT_ROLE}" ]]; then echo 
"test_all=true" >> "$GITHUB_OUTPUT" else echo "test_all=false" >> "$GITHUB_OUTPUT" @@ -85,10 +90,14 @@ jobs: - name: Detect changed files id: filter if: steps.check-event.outputs.test_all == 'false' + env: + EVENT_NAME: ${{ github.event_name }} + PR_BASE_SHA: ${{ github.event.pull_request.base.sha }} + PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }} run: | - if [[ "${{ github.event_name }}" == "pull_request" ]]; then - BASE="${{ github.event.pull_request.base.sha }}" - HEAD="${{ github.event.pull_request.head.sha }}" + if [[ "${EVENT_NAME}" == "pull_request" ]]; then + BASE="${PR_BASE_SHA}" + HEAD="${PR_HEAD_SHA}" else BASE="origin/main" HEAD="HEAD" @@ -106,6 +115,10 @@ jobs: - name: Generate test matrix id: generate-matrix + env: + EVENT_NAME: ${{ github.event_name }} + TEST_ALL: ${{ steps.check-event.outputs.test_all }} + CHANGED_ROLES: ${{ steps.filter.outputs.roles }} run: | # Define roles with molecule tests ROLES_WITH_MOLECULE=( @@ -123,14 +136,14 @@ jobs: # Define additional scenarios (role:scenario format) # These only run on schedule/workflow_dispatch (too slow for PR CI) declare -A ADDITIONAL_SCENARIOS=() - if [[ "${{ github.event_name }}" == "schedule" ]] || \ - [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then + if [[ "${EVENT_NAME}" == "schedule" ]] || \ + [[ "${EVENT_NAME}" == "workflow_dispatch" ]]; then ADDITIONAL_SCENARIOS=( ["cracking_tools"]="source-build" ) fi - if [[ "${{ steps.check-event.outputs.test_all }}" == "true" ]]; then + if [[ "${TEST_ALL}" == "true" ]]; then # Test all roles that have molecule tests MATRIX_JSON='[' FIRST=true @@ -153,7 +166,7 @@ jobs: MATRIX_JSON+=']' else # Test only changed roles that have molecule tests - ROLES="${{ steps.filter.outputs.roles }}" + ROLES="${CHANGED_ROLES}" MATRIX_JSON="[" FIRST=true @@ -206,6 +219,8 @@ jobs: uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Validate inputs + env: + INPUT_ROLE: ${{ env.ROLE }} run: | ROLES_WITH_MOLECULE=( 
"acl_tools" @@ -219,13 +234,13 @@ jobs: "recon_tools" ) - if [[ -n "${{ env.ROLE }}" ]]; then - if [[ ! -d "ansible/roles/${{ env.ROLE }}" ]]; then - echo "::error::Role '${{ env.ROLE }}' not found in ansible/roles/" + if [[ -n "${INPUT_ROLE}" ]]; then + if [[ ! -d "ansible/roles/${INPUT_ROLE}" ]]; then + echo "::error::Role '${INPUT_ROLE}' not found in ansible/roles/" exit 1 fi - if [[ ! " ${ROLES_WITH_MOLECULE[*]} " == *" ${{ env.ROLE }} "* ]]; then - echo "::error::Role '${{ env.ROLE }}' does not have molecule tests" + if [[ ! " ${ROLES_WITH_MOLECULE[*]} " == *" ${INPUT_ROLE} "* ]]; then + echo "::error::Role '${INPUT_ROLE}' does not have molecule tests" exit 1 fi fi @@ -269,12 +284,15 @@ jobs: uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: ~/.ansible/collections - key: ${{ runner.os }}-ansible-${{ hashFiles('**/requirements.yml') }} + key: ${{ runner.os }}-ansible-${{ github.ref }}-${{ hashFiles('**/requirements.yml') }} - name: Install dependencies shell: bash + env: + COLL_PATH: ${{ env.COLLECTION_PATH }} + REQS_FILE: ${{ env.REQUIREMENTS_FILE }} run: | - python3 -m pip install -r "${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}" + python3 -m pip install -r "${COLL_PATH}/${REQS_FILE}" - name: Install galaxy dependencies working-directory: ${{ env.COLLECTION_PATH }}/ansible @@ -297,12 +315,13 @@ jobs: ANSIBLE_CONFIG: ${{ env.COLLECTION_PATH }}/ansible/ansible.cfg ANSIBLE_ROLES_PATH: ${{ env.COLLECTION_PATH }}/ansible/roles MOLECULE_NO_LOG: "false" + MOLECULE_SCENARIO: ${{ env.SCENARIO }} run: | set -e molecule --version molecule list - if ! MOLECULE_DEBUG=1 molecule test -s "${{ env.SCENARIO }}"; then + if ! MOLECULE_DEBUG=1 molecule test -s "${MOLECULE_SCENARIO}"; then echo "Molecule test failed. Collecting debug information..." 
echo "Docker containers:" @@ -362,12 +381,15 @@ jobs: uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: ~/.ansible/collections - key: ${{ runner.os }}-ansible-${{ hashFiles('**/requirements.yml') }} + key: ${{ runner.os }}-ansible-${{ github.ref }}-${{ hashFiles('**/requirements.yml') }} - name: Install dependencies shell: bash + env: + COLL_PATH: ${{ env.COLLECTION_PATH }} + REQS_FILE: ${{ env.REQUIREMENTS_FILE }} run: | - python3 -m pip install -r "${{ env.COLLECTION_PATH }}/${{ env.REQUIREMENTS_FILE }}" + python3 -m pip install -r "${COLL_PATH}/${REQS_FILE}" - name: Install galaxy dependencies working-directory: ${{ env.COLLECTION_PATH }}/ansible @@ -388,13 +410,13 @@ jobs: shell: bash env: ANSIBLE_CONFIG: ${{ env.COLLECTION_PATH }}/ansible/ansible.cfg + MOLECULE_SCENARIO: ${{ matrix.scenario }} run: | set -e molecule --version molecule list - SCENARIO="${{ matrix.scenario }}" - SCENARIO="${SCENARIO:-default}" + SCENARIO="${MOLECULE_SCENARIO:-default}" if ! MOLECULE_DEBUG=1 molecule test -s "$SCENARIO"; then echo "Molecule test failed. Collecting debug information..." 
diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml index 58acc9e4..707274da 100644 --- a/.github/workflows/pre-commit.yaml +++ b/.github/workflows/pre-commit.yaml @@ -78,8 +78,10 @@ jobs: ansible-galaxy collection install dreadnode-nimbus_range-*.tar.gz -p ~/.ansible/collections --force --pre - name: Setup go-task + env: + TASK_VER: ${{ env.TASK_VERSION }} run: | - sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin v${{ env.TASK_VERSION }} + sh -c "$(curl --location https://taskfile.dev/install.sh)" -- -d -b /usr/local/bin "v${TASK_VER}" task --version - name: Run pre-commit diff --git a/.github/workflows/rust.yaml b/.github/workflows/rust.yaml index 006f76a0..1ab551c1 100644 --- a/.github/workflows/rust.yaml +++ b/.github/workflows/rust.yaml @@ -9,9 +9,19 @@ on: - opened - synchronize - reopened + paths: + - 'ares-*/**' + - 'Cargo.toml' + - 'Cargo.lock' + - '.github/workflows/rust.yaml' push: branches: - main + paths: + - 'ares-*/**' + - 'Cargo.toml' + - 'Cargo.lock' + - '.github/workflows/rust.yaml' workflow_dispatch: concurrency: @@ -29,30 +39,9 @@ permissions: pull-requests: write jobs: - detect-changes: - name: "\U0001F50E Detect changes" - runs-on: ubuntu-latest - outputs: - rust: ${{ steps.filter.outputs.rust }} - steps: - - name: Set up git repository - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Check for Rust changes - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2 - id: filter - with: - filters: | - rust: - - 'ares-*/**' - - 'Cargo.toml' - - 'Cargo.lock' - check: name: "\U0001F50D Cargo check" runs-on: ubuntu-latest - needs: detect-changes - if: needs.detect-changes.outputs.rust == 'true' steps: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 @@ -67,9 +56,10 @@ jobs: ~/.cargo/registry ~/.cargo/git target - key: ${{ runner.os }}-cargo-check-${{ hashFiles('Cargo.lock', 
'Cargo.toml') }} + key: ${{ runner.os }}-cargo-check-${{ github.ref }}-${{ hashFiles('Cargo.lock', 'Cargo.toml') }} restore-keys: | - ${{ runner.os }}-cargo-check- + ${{ runner.os }}-cargo-check-${{ github.ref }}- + ${{ runner.os }}-cargo-check-refs/heads/main- - name: Run cargo check run: cargo check --workspace @@ -77,39 +67,31 @@ jobs: test: name: "\U0001F9EA Tests" runs-on: ubuntu-latest - needs: [detect-changes, check] - if: always() && (needs.detect-changes.outputs.rust != 'true' || needs.check.result == 'success') + needs: check steps: - - name: Skip (no Rust changes) - if: needs.detect-changes.outputs.rust != 'true' - run: echo "No Rust changes detected, skipping tests" - - name: Set up git repository - if: needs.detect-changes.outputs.rust == 'true' uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - name: Install Rust toolchain - if: needs.detect-changes.outputs.rust == 'true' uses: dtolnay/rust-toolchain@29eef336d9b2848a0b548edc03f92a220660cdb8 # stable - name: Cache cargo registry and build - if: needs.detect-changes.outputs.rust == 'true' uses: actions/cache@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5 with: path: | ~/.cargo/registry ~/.cargo/git target - key: ${{ runner.os }}-cargo-test-${{ hashFiles('Cargo.lock', 'Cargo.toml') }} + key: ${{ runner.os }}-cargo-test-${{ github.ref }}-${{ hashFiles('Cargo.lock', 'Cargo.toml') }} restore-keys: | - ${{ runner.os }}-cargo-test- + ${{ runner.os }}-cargo-test-${{ github.ref }}- + ${{ runner.os }}-cargo-test-refs/heads/main- - name: Run tests - if: needs.detect-changes.outputs.rust == 'true' run: cargo test --workspace - name: Add test summary - if: always() && needs.detect-changes.outputs.rust == 'true' + if: always() run: | { echo "## \U0001F980 Rust Test Results" @@ -122,8 +104,6 @@ jobs: fmt: name: "\U0001F4D0 Format" runs-on: ubuntu-latest - needs: detect-changes - if: needs.detect-changes.outputs.rust == 'true' steps: - name: Set up git repository uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 @@ -139,8 +119,7 @@ jobs: clippy: name: "\U0001F4CE Clippy" runs-on: ubuntu-latest - needs: [detect-changes, check] - if: needs.detect-changes.outputs.rust == 'true' + needs: check steps: - name: Set up git repository uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 @@ -157,9 +136,10 @@ jobs: ~/.cargo/registry ~/.cargo/git target - key: ${{ runner.os }}-cargo-clippy-${{ hashFiles('Cargo.lock', 'Cargo.toml') }} + key: ${{ runner.os }}-cargo-clippy-${{ github.ref }}-${{ hashFiles('Cargo.lock', 'Cargo.toml') }} restore-keys: | - ${{ runner.os }}-cargo-clippy- + ${{ runner.os }}-cargo-clippy-${{ github.ref }}- + ${{ runner.os }}-cargo-clippy-refs/heads/main- - name: Run clippy run: cargo clippy --workspace -- -D warnings diff --git a/.github/workflows/semgrep.yaml b/.github/workflows/semgrep.yaml index ba8e1d4c..9420fa68 100644 --- a/.github/workflows/semgrep.yaml +++ b/.github/workflows/semgrep.yaml @@ -49,6 +49,8 @@ jobs: env: SEMGREP_RULES: >- p/rust + p/python + p/github-actions p/security-audit p/secrets p/owasp-top-ten @@ -60,3 +62,11 @@ jobs: --config="${SEMGREP_RULES}" \ --timeout="${SEMGREP_TIMEOUT}" \ --sarif --output=semgrep-results.sarif + + - name: Upload SARIF to GitHub Security tab + if: always() + uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39f72b # v3.28.19 + with: + sarif_file: semgrep-results.sarif + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From ee9753a3d7660bc87dce12ab5d5df8f81aafb9dc Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 13:59:41 -0600 Subject: [PATCH 15/17] refactor: simplify descending sort logic using sort_by_key with Reverse **Changed:** - Replaced manual descending sort closures with `sort_by_key` and `std::cmp::Reverse` in multiple locations to improve readability and consistency: - Query results in coverage queries - Investigation and operation selection logic in blue_operations and 
operations modules - Simplified match logic for extracting searchable values in lateral movement analyzer by using a match guard instead of an inner if statement - Corrected the pinned commit SHA for the github/codeql-action/upload-sarif action in the semgrep workflow --- .github/workflows/semgrep.yaml | 2 +- ares-core/src/correlation/lateral/analyzer.rs | 6 ++---- ares-core/src/persistent_store/queries/coverage.rs | 2 +- ares-core/src/state/blue_operations.rs | 4 ++-- ares-core/src/state/operations.rs | 2 +- 5 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/workflows/semgrep.yaml b/.github/workflows/semgrep.yaml index 9420fa68..8679b029 100644 --- a/.github/workflows/semgrep.yaml +++ b/.github/workflows/semgrep.yaml @@ -65,7 +65,7 @@ jobs: - name: Upload SARIF to GitHub Security tab if: always() - uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39f72b # v3.28.19 + uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19 with: sarif_file: semgrep-results.sarif env: diff --git a/ares-core/src/correlation/lateral/analyzer.rs b/ares-core/src/correlation/lateral/analyzer.rs index eec08694..17f0ac10 100644 --- a/ares-core/src/correlation/lateral/analyzer.rs +++ b/ares-core/src/correlation/lateral/analyzer.rs @@ -79,10 +79,8 @@ impl LateralMovementAnalyzer { /// Extract searchable string values from a JSON value. 
fn extract_searchable_values(value: &Value, out: &mut HashSet) { match value { - Value::String(s) => { - if looks_like_hostname(s) { - out.insert(s.to_lowercase()); - } + Value::String(s) if looks_like_hostname(s) => { + out.insert(s.to_lowercase()); } Value::Object(map) => { for v in map.values() { diff --git a/ares-core/src/persistent_store/queries/coverage.rs b/ares-core/src/persistent_store/queries/coverage.rs index ab6b11f4..3e1f3809 100644 --- a/ares-core/src/persistent_store/queries/coverage.rs +++ b/ares-core/src/persistent_store/queries/coverage.rs @@ -65,7 +65,7 @@ impl HistoricalQueryService { .collect(); // Sort by occurrence count descending - result.sort_by(|a, b| b.occurrence_count.cmp(&a.occurrence_count)); + result.sort_by_key(|a| std::cmp::Reverse(a.occurrence_count)); Ok(result) } diff --git a/ares-core/src/state/blue_operations.rs b/ares-core/src/state/blue_operations.rs index fc06a44d..b24db4df 100644 --- a/ares-core/src/state/blue_operations.rs +++ b/ares-core/src/state/blue_operations.rs @@ -120,13 +120,13 @@ pub async fn resolve_latest_investigation( .cloned() .collect(); if !running.is_empty() { - running.sort_by(|a, b| b.0.cmp(&a.0)); + running.sort_by_key(|x| std::cmp::Reverse(x.0)); return Ok(Some(running[0].1.clone())); } // Fall back to latest by started_at let mut all: Vec<_> = invs; - all.sort_by(|a, b| b.0.cmp(&a.0)); + all.sort_by_key(|x| std::cmp::Reverse(x.0)); Ok(Some(all[0].1.clone())) } diff --git a/ares-core/src/state/operations.rs b/ares-core/src/state/operations.rs index ec44b7c0..93e24948 100644 --- a/ares-core/src/state/operations.rs +++ b/ares-core/src/state/operations.rs @@ -215,7 +215,7 @@ pub(crate) fn pick_latest(items: &[&(Option>, String, bool)]) -> S // Prefer items with a timestamp, sort descending let mut with_time: Vec<_> = items.iter().filter(|(t, _, _)| t.is_some()).collect(); if !with_time.is_empty() { - with_time.sort_by(|a, b| b.0.cmp(&a.0)); + with_time.sort_by_key(|x| std::cmp::Reverse(x.0)); return 
with_time[0].1.clone(); } // Fallback: sort by op_id descending From 1f88e9b280fc34576f3d0d373809a8b37e4993aa Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 14:06:24 -0600 Subject: [PATCH 16/17] refactor: simplify sorting logic using sort_by_key with Reverse **Changed:** - Replaced manual sort_by closures with sort_by_key in detection/markdown.rs and ops/list.rs, using std::cmp::Reverse where descending order is required. This improves code readability and consistency in sorting logic. --- ares-cli/src/detection/markdown.rs | 4 ++-- ares-cli/src/ops/list.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ares-cli/src/detection/markdown.rs b/ares-cli/src/detection/markdown.rs index 5adcb690..3bbbdaa5 100644 --- a/ares-cli/src/detection/markdown.rs +++ b/ares-cli/src/detection/markdown.rs @@ -91,7 +91,7 @@ pub(crate) fn generate_detection_markdown(playbook: &DetectionPlaybook) -> Strin md.push_str("|------|-------|---------------|----------|\n"); let mut sorted_targets: Vec<_> = playbook.detection_targets.iter().collect(); - sorted_targets.sort_by(|a, b| b.pyramid_level.cmp(&a.pyramid_level)); + sorted_targets.sort_by_key(|b| std::cmp::Reverse(b.pyramid_level)); for target in sorted_targets.iter().take(20) { let value_display = truncate_str(&target.value, 40); @@ -111,7 +111,7 @@ pub(crate) fn generate_detection_markdown(playbook: &DetectionPlaybook) -> Strin // Technique-Specific Detections md.push_str("## Technique-Specific Detections\n\n"); let mut sorted_techniques: Vec<_> = playbook.technique_detections.iter().collect(); - sorted_techniques.sort_by(|(a, _), (b, _)| a.cmp(b)); + sorted_techniques.sort_by_key(|(a, _)| *a); for (tech_id, detection) in sorted_techniques { md.push_str(&format!( diff --git a/ares-cli/src/ops/list.rs b/ares-cli/src/ops/list.rs index 7e6e12a2..6cd628c9 100644 --- a/ares-cli/src/ops/list.rs +++ b/ares-cli/src/ops/list.rs @@ -48,7 +48,7 @@ pub(crate) async fn ops_list(redis_url: Option, latest:
bool) -> Result< } // Sort by started_at descending - ops.sort_by(|a, b| b.checkpoint_time.cmp(&a.checkpoint_time)); + ops.sort_by_key(|b| std::cmp::Reverse(b.checkpoint_time)); println!("Multi-Agent Operations:"); println!("{}", "=".repeat(70)); From 0021ba7506ede815640e55ed90f56f58d31f7e98 Mon Sep 17 00:00:00 2001 From: Jayson Grace Date: Thu, 16 Apr 2026 14:14:15 -0600 Subject: [PATCH 17/17] fix: correct logic and iterator usage in unconstrained exploitation and task queue **Changed:** - Refined logic for dispatching the dump action after coercion by consolidating the pattern match and ensuring the delay check occurs only when all conditions are met in the unconstrained exploitation flow - Updated task queue result batching to remove unnecessary `into_iter` on `raw`, iterating directly to improve clarity and efficiency --- ares-orchestrator/src/automation/unconstrained.rs | 13 +++++++------ ares-orchestrator/src/task_queue.rs | 2 +- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/ares-orchestrator/src/automation/unconstrained.rs b/ares-orchestrator/src/automation/unconstrained.rs index f468704e..162009f0 100644 --- a/ares-orchestrator/src/automation/unconstrained.rs +++ b/ares-orchestrator/src/automation/unconstrained.rs @@ -172,12 +172,13 @@ pub async fn auto_unconstrained_exploitation( } // Coercion dispatched, waiting for delay before dump. - Some(p) if p.coercion_dispatched_at.is_some() && p.dump_attempts == 0 => { - if p.coercion_dispatched_at.unwrap().elapsed() >= COERCE_TO_DUMP_DELAY { - Action::Dump - } else { - return None; - } + Some(p) + if p.coercion_dispatched_at.is_some() + && p.dump_attempts == 0 + && p.coercion_dispatched_at.unwrap().elapsed() + >= COERCE_TO_DUMP_DELAY => + { + Action::Dump } // Dump retry — previous attempt didn't yield TGTs. 
diff --git a/ares-orchestrator/src/task_queue.rs b/ares-orchestrator/src/task_queue.rs index 8d7f5858..b950886f 100644 --- a/ares-orchestrator/src/task_queue.rs +++ b/ares-orchestrator/src/task_queue.rs @@ -235,7 +235,7 @@ impl TaskQueue { .context("Pipeline check_results_batch failed")?; let mut out = HashMap::with_capacity(task_ids.len()); - for (tid, data) in task_ids.iter().zip(raw.into_iter()) { + for (tid, data) in task_ids.iter().zip(raw) { let parsed = match data { Some(json) => match serde_json::from_str::(&json) { Ok(r) => Some(r),