fix: resolve Docker networking issue for Ollama connectivity

- Modified OllamaClient to read OLLAMA_HOST environment variable
- Updated docker-compose.yml to pass OLLAMA_HOST to backend service
- Changed docker.env to use Docker gateway IP (172.18.0.1:11434)
- Configured Ollama service to bind to 0.0.0.0:11434 for container access
- Added test script to verify Ollama connectivity from within container
- All backend tests now pass including chat functionality

Co-Authored-By: PromptEngineer <jnfarooq@outlook.com>
commit fb75541eb3 (parent f21686f51c)
Devin AI, 2025-07-15 21:34:17 +00:00
4 changed files with 48 additions and 6 deletions
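The gateway IP used below (172.18.0.1) is the host's address on the Compose-created bridge network, which is how a container reaches services listening on the Linux host; host.docker.internal does not resolve on Linux by default. Rather than hard-coding it, the gateway can be discovered from Docker itself. A minimal sketch, assuming the docker CLI is on PATH and that the network is the rag-network defined in docker-compose.yml below (the helper name is hypothetical, not part of this commit):

import json
import subprocess

def bridge_gateway(network: str = "rag-network") -> str:
    """Return the gateway IP of a Docker bridge network via `docker network inspect`.

    Note: Compose usually prefixes network names with the project name
    (e.g. myproject_rag-network), so adjust the argument accordingly.
    """
    out = subprocess.check_output(["docker", "network", "inspect", network])
    ipam = json.loads(out)[0]["IPAM"]["Config"][0]
    return ipam["Gateway"]

if __name__ == "__main__":
    print(f"OLLAMA_HOST=http://{bridge_gateway()}:11434")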

ollama_client.py

@@ -1,9 +1,12 @@
 import requests
 import json
-from typing import List, Dict
+import os
+from typing import List, Dict, Optional
 
 
 class OllamaClient:
-    def __init__(self, base_url: str = "http://localhost:11434"):
+    def __init__(self, base_url: Optional[str] = None):
+        if base_url is None:
+            base_url = os.getenv("OLLAMA_HOST", "http://localhost:11434")
         self.base_url = base_url
         self.api_url = f"{base_url}/api"
@@ -196,4 +199,4 @@ def main():
     print(f"AI: {response}")
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
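The net effect of the constructor change above is a three-step resolution order: an explicit base_url argument wins, then the OLLAMA_HOST environment variable, then the localhost default. An illustrative sketch of that behavior (assumes ollama_client.py is importable, e.g. run from the backend directory):

import os

os.environ["OLLAMA_HOST"] = "http://172.18.0.1:11434"
from ollama_client import OllamaClient

print(OllamaClient().base_url)                        # env var: http://172.18.0.1:11434
print(OllamaClient("http://ollama:11434").base_url)   # explicit argument wins
del os.environ["OLLAMA_HOST"]
print(OllamaClient().base_url)                        # default: http://localhost:11434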

Ollama connectivity test script (new file)

@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+import os
+import sys
+
+
+def test_ollama_connectivity():
+    """Test Ollama connectivity from within Docker container"""
+    print("🧪 Testing Ollama Connectivity")
+    print("=" * 40)
+
+    ollama_host = os.getenv('OLLAMA_HOST', 'Not set')
+    print(f"OLLAMA_HOST environment variable: {ollama_host}")
+
+    try:
+        from ollama_client import OllamaClient
+        client = OllamaClient()
+        print(f"OllamaClient base_url: {client.base_url}")
+
+        is_running = client.is_ollama_running()
+        print(f"Ollama running: {is_running}")
+
+        if is_running:
+            models = client.list_models()
+            print(f"Available models: {models}")
+            print("✅ Ollama connectivity test passed!")
+            return True
+        else:
+            print("❌ Ollama connectivity test failed!")
+            return False
+    except Exception as e:
+        print(f"❌ Error testing Ollama connectivity: {e}")
+        return False
+
+
+if __name__ == "__main__":
+    success = test_ollama_connectivity()
+    sys.exit(0 if success else 1)
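The test leans on two client methods this diff does not show, is_ollama_running() and list_models(). Their real implementations live in ollama_client.py and may differ; a plausible sketch against Ollama's public GET /api/tags endpoint (which lists installed models) would be:

import requests

# Sketch only: the repo's actual methods may use different timeouts or parsing.
def is_ollama_running(base_url: str = "http://172.18.0.1:11434") -> bool:
    try:
        return requests.get(f"{base_url}/api/tags", timeout=5).ok
    except requests.RequestException:
        return False

def list_models(base_url: str = "http://172.18.0.1:11434") -> list[str]:
    tags = requests.get(f"{base_url}/api/tags", timeout=5).json()
    return [m["name"] for m in tags.get("models", [])]

Because the script exits non-zero on failure, it can also serve as a container healthcheck or a CI gate.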

docker-compose.yml

@@ -56,6 +56,7 @@ services:
     environment:
       - NODE_ENV=production
       - RAG_API_URL=http://rag-api:8001
+      - OLLAMA_HOST=${OLLAMA_HOST:-http://172.18.0.1:11434}
     volumes:
       - ./backend:/app/backend
       - ./shared_uploads:/app/shared_uploads
@@ -100,4 +101,4 @@ volumes:
 
 networks:
   rag-network:
-    driver: bridge
\ No newline at end of file
+    driver: bridge
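Compose's ${OLLAMA_HOST:-...} syntax substitutes the variable from the host environment (or an env file) and falls back to the gateway address when it is unset or empty, so docker.env below can still override the default. Roughly equivalent Python, for illustration only, since Compose performs this substitution itself:

import os

def compose_lookup(name: str, default: str) -> str:
    value = os.environ.get(name)
    return value if value else default  # ':-' treats unset AND empty as missing

print(compose_lookup("OLLAMA_HOST", "http://172.18.0.1:11434"))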

docker.env

@@ -1,6 +1,7 @@
 # Docker environment configuration
 # Set this to use local Ollama instance running on host
-OLLAMA_HOST=http://host.docker.internal:11434
+# Note: Using Docker gateway IP instead of host.docker.internal for Linux compatibility
+OLLAMA_HOST=http://172.18.0.1:11434
 
 # Alternative: Use containerized Ollama (uncomment and run with --profile with-ollama)
 # OLLAMA_HOST=http://ollama:11434
@@ -8,4 +9,4 @@ OLLAMA_HOST=http://host.docker.internal:11434
 # Other configuration
 NODE_ENV=production
 NEXT_PUBLIC_API_URL=http://localhost:8000
-RAG_API_URL=http://rag-api:8001
\ No newline at end of file
+RAG_API_URL=http://rag-api:8001
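The one step this diff cannot show is the commit message's fourth bullet: Ollama on the host must be bound to 0.0.0.0:11434 (for example via the OLLAMA_HOST environment variable of ollama serve), because traffic from the container arrives on the bridge interface rather than the loopback, and an Ollama bound to 127.0.0.1 will refuse it. A quick check from inside the container, assuming the gateway IP from docker.env above:

import socket

# Probe the host-side Ollama port from inside the container. If Ollama is
# bound to 127.0.0.1 instead of 0.0.0.0, this raises ConnectionRefusedError.
with socket.create_connection(("172.18.0.1", 11434), timeout=3):
    print("Ollama port reachable from the container")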