# File Processor Demo - Environment Configuration
# Copy this file to .env and fill in your values

# ==============================================================================
# LLM Provider Configuration (choose ONE)
# ==============================================================================

# Option 1: Anthropic Claude (recommended)
ANTHROPIC_API_KEY=your-anthropic-api-key-here
ANTHROPIC_MODEL=claude-sonnet-4-20250514

# Option 2: OpenAI
# OPENAI_API_KEY=your-openai-api-key-here
# OPENAI_MODEL=gpt-4o

# Option 3: Local LLM (Ollama or LM Studio)
# LOCAL_LLM_BASE_URL=http://host.docker.internal:11434/v1
# LOCAL_LLM_MODEL=llama3.2
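# Note: the URL above uses Ollama's default port (11434). LM Studio's
# OpenAI-compatible server defaults to port 1234 instead, so an LM Studio
# setup would likely look like this (adjust to your local setup):
# LOCAL_LLM_BASE_URL=http://host.docker.internal:1234/v1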

# Force a specific provider (optional; auto-detected from the keys above if unset)
# LLM_PROVIDER=anthropic  # or: openai, local, ollama, lmstudio
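
# Example: if both ANTHROPIC_API_KEY and LOCAL_LLM_BASE_URL are set,
# auto-detection may not pick the provider you intend; forcing it removes the
# ambiguity (hypothetical scenario; the detection order is not documented here):
# LLM_PROVIDER=local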

# ==============================================================================
# Sidecar Configuration
# ==============================================================================

# Sidecar URL (defaults to the Docker service name)
PREDICATE_SIDECAR_URL=http://predicate-sidecar:8787
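
# If the agent runs outside Docker Compose (e.g. directly on the host), point
# at the published port instead (assumes the sidecar maps 8787 to the host):
# PREDICATE_SIDECAR_URL=http://localhost:8787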

# Agent principal identity
SECURECLAW_PRINCIPAL=agent:file-processor

# ==============================================================================
# Optional Settings
# ==============================================================================

# Enable verbose logging
SECURECLAW_VERBOSE=true

# Cloud tracing (optional)
# PREDICATE_API_KEY=your-predicate-api-key-here