Skip to content

Commit aeb3eb6

Browse files
authored
feat: 🎸 local model support (#330)
1 parent 96316c7 commit aeb3eb6

File tree

6 files changed

+157
-41
lines changed

6 files changed

+157
-41
lines changed

‎Dockerfile‎

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,7 @@ ENV PATH="/home/pentester/.local/bin:$PATH"
8383
COPY --chown=pentester:pentester pyproject.toml README.md /app/
8484
COPY --chown=pentester:pentester pentestgpt/ /app/pentestgpt/
8585
COPY --chown=pentester:pentester scripts/entrypoint.sh /home/pentester/entrypoint.sh
86+
COPY --chown=pentester:pentester scripts/ccr-config-template.json /app/scripts/ccr-config-template.json
8687

8788
# Install Python dependencies as root to system Python
8889
# Allow pip to override system packages in Docker

‎Makefile‎

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ help:
1313
@echo "Docker Workflow (Primary Usage):"
1414
@echo " make install Install dependencies (uv sync) and build Docker image"
1515
@echo " make config Configure authentication (interactive)"
16-
@echo " Options: OpenRouter, Anthropic API, Manual Login"
16+
@echo " Options: Claude Login, OpenRouter, Anthropic API, Local LLM"
1717
@echo " make connect Connect to container (main entry point)"
1818
@echo " make start Start container in background"
1919
@echo " make stop Stop container (keeps config)"

‎README.md‎

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,10 +78,11 @@
7878
### Prerequisites
7979

8080
- **Docker** (required) - [Install Docker](https://docs.docker.com/get-docker/)
81-
- **Claude Model** (prioritized) - PentestGPT is optimized for Claude models via:
81+
- **LLM Provider** (choose one):
8282
- Anthropic API Key from [console.anthropic.com](https://console.anthropic.com/)
8383
- Claude OAuth Login (requires Claude subscription)
8484
- OpenRouter for alternative models at [openrouter.ai](https://openrouter.ai/keys)
85+
- Local LLM via LM Studio, Ollama, or compatible server (see [Using Local LLMs](#using-local-llms))
8586

8687
### Installation
8788

@@ -142,6 +143,52 @@ pentestgpt --target 10.10.11.50 --instruction "WordPress site, focus on plugin v
142143

143144
---
144145

146+
## Using Local LLMs
147+
148+
PentestGPT supports routing requests to local LLM servers (LM Studio, Ollama, text-generation-webui, etc.) running on your host machine.
149+
150+
### Prerequisites
151+
152+
- Local LLM server with an OpenAI-compatible API endpoint
153+
- **LM Studio**: Enable server mode (default port 1234)
154+
- **Ollama**: Run `ollama serve` (default port 11434 — note the bundled template targets port 1234, so update `localLLM.api_base_url` in `scripts/ccr-config-template.json` to port 11434)
155+
156+
### Setup
157+
158+
```bash
159+
# Configure PentestGPT for local LLM
160+
make config
161+
# Select option 4: Local LLM
162+
163+
# Start your local LLM server on the host machine
164+
# Then connect to the container
165+
make connect
166+
```
167+
168+
### Customizing Models
169+
170+
Edit `scripts/ccr-config-template.json` to customize:
171+
172+
- **`localLLM.api_base_url`**: Your LLM server URL (default: `http://host.docker.internal:1234/v1/chat/completions`)
173+
- **`localLLM.models`**: Available model names on your server
174+
- **Router section**: Which models handle which operations
175+
176+
| Route | Purpose | Default Model |
177+
|-------|---------|---------------|
178+
| `default` | General tasks | openai/gpt-oss-20b |
179+
| `background` | Background operations | openai/gpt-oss-20b |
180+
| `think` | Reasoning-heavy tasks | qwen/qwen3-coder-30b |
181+
| `longContext` | Large context handling | qwen/qwen3-coder-30b |
182+
| `webSearch` | Web search operations | openai/gpt-oss-20b |
183+
184+
### Troubleshooting
185+
186+
- **Connection refused**: Ensure your LLM server is running and listening on the configured port
187+
- **Docker networking**: Use `host.docker.internal` (not `localhost`) to access host services from Docker
188+
- **Check CCR logs**: Inside the container, run `cat /tmp/ccr.log`
189+
190+
---
191+
145192
## Telemetry
146193

147194
PentestGPT collects anonymous usage data to help improve the tool. This data is sent to our [Langfuse](https://langfuse.com) project and includes:
Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
{
2+
"LOG": false,
3+
"LOG_LEVEL": "debug",
4+
"CLAUDE_PATH": "",
5+
"HOST": "127.0.0.1",
6+
"PORT": 3456,
7+
"APIKEY": "",
8+
"API_TIMEOUT_MS": "600000",
9+
"PROXY_URL": "",
10+
"transformers": [],
11+
"Providers": [
12+
{
13+
"name": "openrouter",
14+
"api_base_url": "https://openrouter.ai/api/v1/chat/completions",
15+
"api_key": "__OPENROUTER_API_KEY__",
16+
"models": [
17+
"google/gemini-2.5-pro-preview",
18+
"google/gemini-3-pro-preview",
19+
"openai/gpt-5.1"
20+
],
21+
"transformer": {
22+
"use": ["openrouter"]
23+
}
24+
},
25+
{
26+
"name": "localLLM",
27+
"api_base_url": "http://host.docker.internal:1234/v1/chat/completions",
28+
"api_key": "not-needed",
29+
"models": ["qwen/qwen3-coder-30b", "openai/gpt-oss-20b"]
30+
}
31+
],
32+
"StatusLine": {
33+
"enabled": false,
34+
"currentStyle": "default",
35+
"default": {
36+
"modules": []
37+
},
38+
"powerline": {
39+
"modules": []
40+
}
41+
},
42+
"Router": "__ROUTER_CONFIG__",
43+
"CUSTOM_ROUTER_PATH": ""
44+
}

‎scripts/config.sh‎

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,11 @@ echo ""
3939
echo -e " ${GREEN}[3]${NC} Anthropic API Key"
4040
echo -e " Use Anthropic's Claude directly with your API key"
4141
echo ""
42+
echo -e " ${GREEN}[4]${NC} Local LLM (via LM Studio, Ollama, etc.)"
43+
echo -e " Route requests to a local LLM server on your host machine"
44+
echo ""
4245

43-
read -p "Enter your choice [1-3] (default: 1): " choice
46+
read -p "Enter your choice [1-4] (default: 1): " choice
4447
choice="${choice:-1}"
4548

4649
case $choice in
@@ -111,6 +114,26 @@ EOF
111114
echo -e "${GREEN}Anthropic API key saved!${NC}"
112115
;;
113116

117+
4)
118+
# Save auth mode for Local LLM
119+
cat > "$ENV_FILE" << EOF
120+
# PentestGPT Authentication Configuration
121+
# Generated by make config
122+
PENTESTGPT_AUTH_MODE=local
123+
EOF
124+
125+
echo -e "${GREEN}Local LLM mode selected!${NC}"
126+
echo ""
127+
echo -e "${BLUE}Setup Instructions:${NC}"
128+
echo " 1. Start your local LLM server (e.g., LM Studio) on your host machine"
129+
echo " Default expected URL: http://localhost:1234/v1/chat/completions"
130+
echo ""
131+
echo " 2. To customize models or URL, edit:"
132+
echo " scripts/ccr-config-template.json"
133+
echo ""
134+
echo " 3. Run 'make connect' to start PentestGPT"
135+
;;
136+
114137
*)
115138
echo -e "${RED}Invalid choice. Exiting.${NC}"
116139
exit 1

‎scripts/entrypoint.sh‎

Lines changed: 39 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -15,44 +15,40 @@ BLUE='\033[0;34m'
1515
YELLOW='\033[0;33m'
1616
NC='\033[0m'
1717

18-
setup_openrouter() {
19-
local api_key="$1"
18+
# Router configurations for different modes
19+
OPENROUTER_ROUTER='{"default":"openrouter,openai/gpt-5.1","background":"openrouter,openai/gpt-5.1","think":"openrouter,openai/gpt-5.1","longContext":"openrouter,openai/gpt-5.1","longContextThreshold":60000,"webSearch":"openrouter,google/gemini-3-pro-preview"}'
20+
LOCAL_ROUTER='{"default":"localLLM,openai/gpt-oss-20b","background":"localLLM,openai/gpt-oss-20b","think":"localLLM,qwen/qwen3-coder-30b","longContext":"localLLM,qwen/qwen3-coder-30b","longContextThreshold":60000,"webSearch":"localLLM,openai/gpt-oss-20b"}'
21+
22+
setup_ccr() {
23+
local mode="$1"
24+
local api_key="$2"
25+
local template_file="/app/scripts/ccr-config-template.json"
2026

2127
# Create CCR config directory if needed
2228
mkdir -p "$CCR_CONFIG_DIR"
2329

24-
# Generate CCR config with OpenRouter
25-
cat > "$CCR_CONFIG_FILE" << EOF
26-
{
27-
"LOG": false,
28-
"HOST": "127.0.0.1",
29-
"PORT": 3456,
30-
"API_TIMEOUT_MS": "600000",
31-
"Providers": [
32-
{
33-
"name": "openrouter",
34-
"api_base_url": "https://openrouter.ai/api/v1/chat/completions",
35-
"api_key": "${api_key}",
36-
"models": [
37-
"google/gemini-2.5-pro-preview",
38-
"google/gemini-3-pro-preview",
39-
"openai/gpt-5.1"
40-
],
41-
"transformer": {
42-
"use": ["openrouter"]
43-
}
44-
}
45-
],
46-
"Router": {
47-
"default": "openrouter,openai/gpt-5.1",
48-
"background": "openrouter,openai/gpt-5.1",
49-
"think": "openrouter,openai/gpt-5.1",
50-
"longContext": "openrouter,openai/gpt-5.1",
51-
"longContextThreshold": 60000,
52-
"webSearch": "openrouter,google/gemini-3-pro-preview"
53-
}
54-
}
55-
EOF
30+
# Check if template exists
31+
if [ ! -f "$template_file" ]; then
32+
echo -e "${YELLOW}Error: CCR config template not found at $template_file${NC}"
33+
exit 1
34+
fi
35+
36+
# Copy template and substitute placeholders
37+
cp "$template_file" "$CCR_CONFIG_FILE"
38+
39+
# Substitute API key (for openrouter mode)
40+
if [ -n "$api_key" ]; then
41+
sed -i "s/__OPENROUTER_API_KEY__/${api_key}/g" "$CCR_CONFIG_FILE"
42+
fi
43+
44+
# Substitute Router config based on mode (use | as delimiter to avoid conflicts with /)
45+
if [ "$mode" = "openrouter" ]; then
46+
sed -i "s|\"__ROUTER_CONFIG__\"|${OPENROUTER_ROUTER}|g" "$CCR_CONFIG_FILE"
47+
local display_model="openai/gpt-5.1"
48+
else
49+
sed -i "s|\"__ROUTER_CONFIG__\"|${LOCAL_ROUTER}|g" "$CCR_CONFIG_FILE"
50+
local display_model="localLLM (qwen/qwen3-coder-30b, openai/gpt-oss-20b)"
51+
fi
5652

5753
echo -e "${BLUE}Starting Claude Code Router...${NC}"
5854

@@ -75,14 +71,14 @@ EOF
7571
sed -i '/eval "$(ccr activate)"/d' "$BASHRC_FILE" 2>/dev/null || true
7672

7773
# Add ccr activation to bashrc
78-
echo '# CCR activation for OpenRouter' >> "$BASHRC_FILE"
74+
echo "# CCR activation for ${mode}" >> "$BASHRC_FILE"
7975
echo 'eval "$(ccr activate 2>/dev/null)" || true' >> "$BASHRC_FILE"
8076

8177
# Also export for the current session (will be inherited by exec'd shell)
8278
eval "$(ccr activate 2>/dev/null)" || true
8379

84-
echo -e "${GREEN}CCR activated with OpenRouter backend${NC}"
85-
echo -e "${BLUE}Default model: openai/gpt-5.1${NC}"
80+
echo -e "${GREEN}CCR activated with ${mode} backend${NC}"
81+
echo -e "${BLUE}Default model: ${display_model}${NC}"
8682
}
8783

8884
echo ""
@@ -95,7 +91,12 @@ case "$AUTH_MODE" in
9591
echo "Please run 'make config' and select OpenRouter option"
9692
exit 1
9793
fi
98-
setup_openrouter "$OPENROUTER_API_KEY"
94+
setup_ccr "openrouter" "$OPENROUTER_API_KEY"
95+
;;
96+
local)
97+
echo -e "${GREEN}Local LLM mode${NC}"
98+
echo -e "Ensure your local LLM server is running on host.docker.internal:1234"
99+
setup_ccr "local" ""
99100
;;
100101
anthropic)
101102
if [ -z "$ANTHROPIC_API_KEY" ]; then

0 commit comments

Comments
 (0)