Skip to content

Commit 2b2ca56

Browse files
committed
feat(tools): Implement AI-driven tool orchestration layer
- Added ArjunRunner for HTTP parameter discovery
- Added DirsearchRunner for directory/endpoint enumeration
- Upgraded NucleiRunner with full API template support
- Upgraded SqlmapRunner with batch mode + output-dir
- Added _parse_arjun() and _parse_dirsearch() to normalized parser
- Created tool_orchestrator.py: AI decides which tools to run, executes concurrently, categorizes outputs into endpoints/params/findings
- Integrated Phase 1b: Tool Orchestration into build_pipeline_artifacts()
- Tool findings (Nuclei/SQLMap) seed the confirmed findings list directly
- Tool endpoints (Dirsearch/Arjun) enrich schema before AI test generation
- Added Dockerfile with nuclei binary + sqlmap/dirsearch/arjun via pip
- Added docker-compose.yml for one-command scanning
1 parent be7d055 commit 2b2ca56

11 files changed

Lines changed: 612 additions & 13 deletions

File tree

Dockerfile

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
# ── SecNode API Pentester — Multi-Stage Docker Image ──────────────────────────
# Stage 1: System tool installer
FROM python:3.12-slim AS base

ARG NUCLEI_VERSION=3.3.9

# NOTE: the base image already ships Python 3.12 and pip, so the Debian
# python3 / python3-pip packages are redundant here (they would install a
# second, unrelated interpreter). Only the download/unpack toolchain plus
# git (used by some tool installers) is required.
RUN apt-get update && apt-get install -y --no-install-recommends \
      curl unzip git ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# ── Install nuclei binary ──────────────────────────────────────────────────────
RUN ARCH=$(uname -m | sed 's/x86_64/amd64/;s/aarch64/arm64/') && \
    curl -sSfL \
      "https://github.com/projectdiscovery/nuclei/releases/download/v${NUCLEI_VERSION}/nuclei_${NUCLEI_VERSION}_linux_${ARCH}.zip" \
      -o /tmp/nuclei.zip && \
    unzip -q /tmp/nuclei.zip -d /tmp/nuclei && \
    mv /tmp/nuclei/nuclei /usr/local/bin/nuclei && \
    chmod +x /usr/local/bin/nuclei && \
    rm -rf /tmp/nuclei /tmp/nuclei.zip

# ── Install Python-based tools ─────────────────────────────────────────────────
RUN pip install --no-cache-dir \
      sqlmap \
      dirsearch \
      arjun

# Stage 2: Application image
FROM base AS app

WORKDIR /app

# Install the SecNode project with all Python dependencies
COPY pyproject.toml ./
COPY src/ ./src/
RUN pip install --no-cache-dir -e ".[dev]"

# Bake Nuclei templates into the image at build time so scans start fast;
# nuclei refreshes stale templates itself at runtime. `|| true` keeps an
# offline build from failing on this best-effort step.
RUN nuclei -update-templates -silent || true

# ── Runtime config ─────────────────────────────────────────────────────────────
# Environment variables expected at runtime:
#   NEBIUS_API_KEY  — Nebius LLM API key
#   NEBIUS_API_BASE — Nebius API base URL
#   SECNODE_LLM     — LiteLLM model string (e.g. nebius/deepseek-ai/DeepSeek-V3.2)
#   OPENAI_API_KEY  — (optional) OpenAI API key

VOLUME ["/app/results"]

ENTRYPOINT ["secnodeapi"]
CMD ["--help"]

docker-compose.yml

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
version: "3.9"
2+
3+
services:
4+
secnodeapi:
5+
build:
6+
context: .
7+
dockerfile: Dockerfile
8+
target: app
9+
image: secnodeapi:latest
10+
volumes:
11+
- ./results:/app/results
12+
env_file:
13+
- .env # optional — place NEBIUS_API_KEY etc. here
14+
environment:
15+
- NEBIUS_API_KEY=${NEBIUS_API_KEY:-}
16+
- NEBIUS_API_BASE=${NEBIUS_API_BASE:-https://api.tokenfactory.nebius.com/v1/}
17+
- SECNODE_LLM=${SECNODE_LLM:-nebius/deepseek-ai/DeepSeek-V3.2}
18+
- OPENAI_API_KEY=${OPENAI_API_KEY:-}
19+
# Default command — override at runtime with:
20+
# docker-compose run --rm secnodeapi --target https://example.com/swagger.json
21+
command: ["--help"]
22+
23+
# Usage examples:
24+
#
25+
# # Full agent scan
26+
# docker-compose run --rm secnodeapi \
27+
# --target http://vulnapi.testinvicti.com/swagger.json
28+
#
29+
# # Dry run only (show generated test cases)
30+
# docker-compose run --rm secnodeapi \
31+
# --target http://vulnapi.testinvicti.com/swagger.json \
32+
# --dry-run
33+
#
34+
# # With explicit model override
35+
# SECNODE_LLM=nebius/deepseek-ai/DeepSeek-R1-0528 \
36+
# docker-compose run --rm secnodeapi \
37+
# --target http://vulnapi.testinvicti.com/swagger.json

src/secnodeapi/cli.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -308,7 +308,7 @@ async def _run_schema_only(args, runtime: RuntimeConfig) -> None:
308308

309309
async def _run_dry_run(args, pipeline_input: PipelineInput) -> None:
310310
console.rule("[bold cyan]SecNode Dry-Run Mode")
311-
_, tests = await build_pipeline_artifacts(pipeline_input)
311+
_, tests, _orch = await build_pipeline_artifacts(pipeline_input)
312312
if not tests:
313313
logger.error("No tests generated. Aborting.")
314314
raise SystemExit(1)
@@ -319,7 +319,7 @@ async def _run_dry_run(args, pipeline_input: PipelineInput) -> None:
319319

320320
async def _run_full_pipeline(args, pipeline_input: PipelineInput) -> None:
321321
console.rule("[bold magenta]SecNode Legacy Pipeline")
322-
api_structure, tests = await build_pipeline_artifacts(pipeline_input)
322+
api_structure, tests, _orch = await build_pipeline_artifacts(pipeline_input)
323323
if not tests:
324324
logger.error("No tests generated. Aborting.")
325325
raise SystemExit(1)

src/secnodeapi/services/pipeline.py

Lines changed: 36 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -284,7 +284,7 @@ async def run_agent_pipeline(
284284
plan -> execute -> observe -> validate -> replan.
285285
"""
286286
console.rule("[bold cyan]Phase 1: Building Execution Plan")
287-
api_structure, seed_tests = await build_pipeline_artifacts(pipeline_input)
287+
api_structure, seed_tests, orch_result = await build_pipeline_artifacts(pipeline_input)
288288
identities = _resolve_identities(pipeline_input)
289289
queue = _deduplicate_test_cases(
290290
_apply_identity_variants(seed_tests, identities)
@@ -297,6 +297,11 @@ async def run_agent_pipeline(
297297
iteration = 0
298298
unique_actions = set()
299299

300+
# Seed confirmed findings from tool orchestration (Nuclei, SQLMap direct findings)
301+
if orch_result and orch_result.tool_findings:
302+
confirmed.extend(orch_result.tool_findings)
303+
logger.info("Seeded confirmed findings from tool orchestration", count=len(orch_result.tool_findings))
304+
300305
while queue and remaining_budget > 0 and iteration < pipeline_input.max_iterations:
301306
iteration += 1
302307
console.rule(f"[bold magenta]Phase 2: Agent Iteration {iteration}")
@@ -351,32 +356,56 @@ async def run_agent_pipeline(
351356

352357

353358
from .recon import perform_active_recon
359+
from .tool_orchestrator import run_tool_orchestration_phase
354360

355361
async def build_pipeline_artifacts(
    pipeline_input: PipelineInput,
) -> tuple:
    """Run schema load, active recon, tool orchestration, understanding, and test generation.

    Args:
        pipeline_input: Target URL, proxy/SSL settings, and optional
            user instructions for test generation.

    Returns:
        A 3-tuple ``(api_structure, tests, orch_result)`` where
        ``orch_result`` is ``None`` when the external-tool phase failed.
    """
    raw_schema = await fetch_schema(
        pipeline_input.target,
        proxy=pipeline_input.proxy,
        verify_ssl=pipeline_input.verify_ssl,
    )
    api_structure = analyze_api_structure(raw_schema)

    # Phase 1a — Active Reconnaissance (built-in fuzzer)
    discovered_endpoints = await perform_active_recon(api_structure, pipeline_input)
    if discovered_endpoints:
        api_structure.endpoints.extend(discovered_endpoints)
    api_structure.endpoints = _dedupe_endpoints(api_structure.endpoints)

    # Phase 1b — External Tool Orchestration (Dirsearch, Arjun, Nuclei, SQLMap).
    # Deliberately best-effort: any failure is logged and the pipeline
    # continues without tool results (orch_result degrades to None).
    try:
        orch_result = await run_tool_orchestration_phase(api_structure, api_structure.base_url)

        # Merge tool-discovered endpoints into schema
        if orch_result.discovered_endpoints:
            api_structure.endpoints.extend(orch_result.discovered_endpoints)
            api_structure.endpoints = _dedupe_endpoints(api_structure.endpoints)
            logger.info(
                "Merged tool-discovered endpoints",
                total=len(api_structure.endpoints),
                added=len(orch_result.discovered_endpoints),
            )
    except Exception as e:
        logger.warning("Tool orchestration phase failed, continuing without tools", error=str(e))
        orch_result = None

    understanding = await understand_api_with_ai(api_structure)
    tests = await generate_test_cases(
        understanding,
        api_structure,
        instructions=pipeline_input.instructions[0].model_dump() if pipeline_input.instructions else None
    )
    return api_structure, tests, orch_result


def _dedupe_endpoints(endpoints: list) -> list:
    """Deduplicate endpoints by (path, method), keeping the last occurrence.

    Later entries win so tool-discovered endpoints can refresh schema ones.
    """
    unique = {}
    for ep in endpoints:
        unique[(ep.path, ep.method)] = ep
    return list(unique.values())
380409

381410

382411
def build_output_dir(target: str) -> str:

0 commit comments

Comments
 (0)