Add Ollama local model provider
This commit is contained in:
@@ -59,6 +59,13 @@ ANTHROPIC_API_URL = os.getenv(
|
||||
)
|
||||
# API version header value sent to Anthropic; overridable via environment.
ANTHROPIC_API_VERSION = os.getenv("ANTHROPIC_API_VERSION", "2023-06-01")


# Ollama (local models; OpenAI-compatible endpoint)
# Ollama normally listens on http://localhost:11434 and exposes /v1/chat/completions.
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "llama3.1:8b")
OLLAMA_API_URL = os.getenv(
    "OLLAMA_API_URL", "http://localhost:11434/v1/chat/completions"
)


# --- Speech recognition settings (Deepgram) ---
# API key for the cloud STT (Speech-to-Text) service.
# NOTE(review): no default — stays None when the env var is unset; callers
# presumably treat that as "STT disabled". Confirm against usage sites.
DEEPGRAM_API_KEY = os.getenv("DEEPGRAM_API_KEY")
Reference in New Issue
Block a user