-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path.env.example
More file actions
111 lines (80 loc) · 2.69 KB
/
.env.example
File metadata and controls
111 lines (80 loc) · 2.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
# Heinrich: The Inventing Machine - Environment Variables Template
# Copy this file to .env and fill in your actual values
# =====================================================
# LLM Provider Configuration
# =====================================================
# Choose your LLM provider: openai, anthropic, ollama, vllm
LLM_PROVIDER=ollama
# OpenAI Configuration (if using openai)
OPENAI_API_KEY=your_openai_api_key_here
OPENAI_MODEL=gpt-4
OPENAI_TEMPERATURE=0.7
OPENAI_MAX_TOKENS=2000
# Anthropic Configuration (if using anthropic)
ANTHROPIC_API_KEY=your_anthropic_api_key_here
ANTHROPIC_MODEL=claude-3-opus-20240229
ANTHROPIC_TEMPERATURE=0.7
ANTHROPIC_MAX_TOKENS=2000
# Ollama Configuration (if using ollama for local models)
OLLAMA_BASE_URL=http://localhost:11434
OLLAMA_MODEL=llama2
OLLAMA_TEMPERATURE=0.7
# vLLM Configuration (if using vllm)
VLLM_BASE_URL=http://localhost:8000
VLLM_MODEL=your_model_name
VLLM_TEMPERATURE=0.7
# =====================================================
# Heinrich Configuration
# =====================================================
# Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL
LOG_LEVEL=INFO
# Output format: text, json, markdown
OUTPUT_FORMAT=markdown
# Enable interactive mode by default
INTERACTIVE_MODE=true
# Language preference: en, zh, ru, ar
LANGUAGE=en
# Path to custom knowledge base (optional)
CUSTOM_KNOWLEDGE_PATH=
# =====================================================
# API Server Configuration (when running in API mode)
# =====================================================
# API server host (0.0.0.0 listens on all network interfaces; use 127.0.0.1 for local-only access)
API_HOST=0.0.0.0
# API server port
API_PORT=8080
# Enable CORS
API_CORS_ENABLED=true
# CORS origins (comma-separated)
API_CORS_ORIGINS=http://localhost:3000,http://localhost:8080
# =====================================================
# Security & Privacy
# =====================================================
# Enable content filtering
ENABLE_CONTENT_FILTER=true
# Enable ethics validation
ENABLE_ETHICS_CHECK=true
# Data retention policy (in days, 0 = no retention)
DATA_RETENTION_DAYS=0
# =====================================================
# Performance & Optimization
# =====================================================
# Enable caching
ENABLE_CACHE=true
# Cache time-to-live (TTL), in seconds
CACHE_TTL=3600
# Maximum concurrent requests
MAX_CONCURRENT_REQUESTS=10
# Request timeout (in seconds)
REQUEST_TIMEOUT=60
# =====================================================
# Development Settings
# =====================================================
# Enable debug mode
DEBUG=false
# Enable profiling
ENABLE_PROFILING=false
# Save intermediate results
SAVE_INTERMEDIATE_RESULTS=false
# Intermediate results directory
INTERMEDIATE_RESULTS_DIR=/tmp/heinrich_results