-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path.env.example
More file actions
57 lines (49 loc) · 2.35 KB
/
.env.example
File metadata and controls
57 lines (49 loc) · 2.35 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
# ============================================================
# SpecForge — Environment Configuration
# ============================================================
# Backend port
BACKEND_PORT=8000
# ============================================================
# Inference Provider
# ============================================================
# "remote" — Cloud or enterprise OpenAI-compatible API
# "ollama" — Local Ollama running natively on the host machine
INFERENCE_PROVIDER=remote
# ============================================================
# Option A: Remote OpenAI-compatible API (INFERENCE_PROVIDER=remote)
# ============================================================
# INFERENCE_API_ENDPOINT: Base URL of your inference service (omit the trailing /v1 suffix)
# - OpenAI: https://api.openai.com
# - Groq: https://api.groq.com/openai
# - OpenRouter: https://openrouter.ai/api
# - Custom Gateway: https://your-gateway.example.com
INFERENCE_API_ENDPOINT=https://api.openai.com
INFERENCE_API_TOKEN=your-api-token-here
INFERENCE_MODEL_NAME=gpt-4o
# ============================================================
# Option B: Ollama — native host inference (INFERENCE_PROVIDER=ollama)
# ============================================================
# INFERENCE_PROVIDER=ollama
# INFERENCE_API_ENDPOINT=http://host.docker.internal:11434
# (host.docker.internal resolves to the host machine only from inside a Docker
#  container; if the backend runs natively, use http://localhost:11434 instead)
# INFERENCE_MODEL_NAME=codellama:34b
# Note: INFERENCE_API_TOKEN is not required when using Ollama.
# ============================================================
# LLM Settings
# ============================================================
LLM_TEMPERATURE=0.7
LLM_MAX_TOKENS=8000
# ============================================================
# CORS Configuration
# ============================================================
CORS_ALLOW_ORIGINS=http://localhost:3000,http://localhost:5173,http://localhost:5174
# ============================================================
# Local URL Endpoint
# ============================================================
# Only needed if your remote API endpoint is a private domain mapped in /etc/hosts.
# Otherwise leave as "not-needed".
LOCAL_URL_ENDPOINT=not-needed
# ============================================================
# SSL Verification
# ============================================================
# Set to false only for local development with self-signed certificates.
# Disabling verification exposes API traffic to interception — never use false in production.
VERIFY_SSL=true