mirror of
https://github.com/zebrajr/localGPT.git
synced 2025-12-06 12:20:53 +01:00
- Modified OllamaClient to read the OLLAMA_HOST environment variable
- Updated docker-compose.yml to pass OLLAMA_HOST to the backend service
- Changed docker.env to use the Docker gateway IP (172.18.0.1:11434)
- Configured the Ollama service to bind to 0.0.0.0:11434 for container access
- Added a test script to verify Ollama connectivity from within the container
- All backend tests now pass, including chat functionality

Co-Authored-By: PromptEngineer <jnfarooq@outlook.com>
38 lines
1.1 KiB
Python
38 lines
1.1 KiB
Python
#!/usr/bin/env python3
|
|
|
|
import os
|
|
import sys
|
|
|
|
def test_ollama_connectivity():
    """Test Ollama connectivity from within Docker container.

    Prints a step-by-step diagnostic (environment variable, client base URL,
    server liveness, available models) and returns True on success, False on
    any failure. Never raises: all errors are reported on stdout.
    """
    print("🧪 Testing Ollama Connectivity")
    print("=" * 40)

    # Show the raw environment setting before constructing any client,
    # so a misconfigured container is visible even if the import fails.
    host_setting = os.getenv('OLLAMA_HOST', 'Not set')
    print(f"OLLAMA_HOST environment variable: {host_setting}")

    try:
        # Imported lazily so a missing/broken module is reported like any
        # other connectivity failure instead of crashing the script.
        from ollama_client import OllamaClient

        ollama = OllamaClient()
        print(f"OllamaClient base_url: {ollama.base_url}")

        running = ollama.is_ollama_running()
        print(f"Ollama running: {running}")

        # Guard clause: bail out early when the server is unreachable.
        if not running:
            print("❌ Ollama connectivity test failed!")
            return False

        print(f"Available models: {ollama.list_models()}")
        print("✅ Ollama connectivity test passed!")
        return True

    except Exception as exc:  # broad on purpose: this is a diagnostic script
        print(f"❌ Error testing Ollama connectivity: {exc}")
        return False
|
if __name__ == "__main__":
    # Exit status mirrors the test outcome (0 = pass, 1 = fail) so shell
    # scripts and CI can check connectivity with a plain `$?` test.
    sys.exit(0 if test_ollama_connectivity() else 1)