mirror of
https://github.com/GH05TCREW/pentestagent.git
synced 2026-03-21 16:40:26 +00:00
ollama: honor OLLAMA_API_BASE and support remote Ollama hosts; add .env.example entry

- Map OLLAMA_BASE_URL into LiteLLM-friendly environment variables before importing litellm, so remote Ollama hosts work
- Use environment-driven debug for litellm logging
- Add an OLLAMA_API_BASE example entry to .env.example
This commit is contained in:
@@ -15,6 +15,11 @@ TAVILY_API_KEY=
 # Other providers: azure/, bedrock/, groq/, ollama/, together_ai/ (see litellm docs)
 PENTESTAGENT_MODEL=gpt-5

+# Ollama local/remote API base
+# Example: http://127.0.0.1:11434 or http://192.168.0.165:11434
+# Set this when using Ollama as the provider so LiteLLM/clients point to the correct host
+# OLLAMA_API_BASE=http://127.0.0.1:11434
+
 # Embeddings (for RAG knowledge base)
 # Options: openai, local (default: openai if OPENAI_API_KEY set, else local)
 # PENTESTAGENT_EMBEDDINGS=local
||||
@@ -50,6 +50,23 @@ class LLM:

         # Ensure litellm is available
         try:
+            # If user provided an Ollama base URL (e.g. via .env), map it to
+            # several common environment variable names that LiteLLM or
+            # underlying Ollama clients may read. This helps when different
+            # naming conventions are used (OLLAMA_BASE_URL vs LITELLM_OLLAMA_*).
+            ollama_base = os.getenv("OLLAMA_BASE_URL") or os.getenv("OLLAMA_URL")
+            if ollama_base:
+                # Populate a few possible names without overwriting any that
+                # are already set by the environment.
+                os.environ.setdefault("OLLAMA_BASE_URL", ollama_base)
+                os.environ.setdefault("OLLAMA_URL", ollama_base)
+                os.environ.setdefault("OLLAMA_API_URL", ollama_base)
+                os.environ.setdefault("LITELLM_OLLAMA_BASE_URL", ollama_base)
+                os.environ.setdefault("LITELLM_OLLAMA_URL", ollama_base)
+                # Some clients expect a host without scheme
+                host_only = ollama_base.replace("http://", "").replace("https://", "")
+                os.environ.setdefault("OLLAMA_HOST", host_only)
+
             import litellm

             # Drop unsupported params for models that don't support them
||||
Reference in New Issue
Block a user