Files
nimue/test_ollama.py
arch_agent 27dcaf6552 Initial commit: Nimue AI Companion v1.0
- Langzeit- und Kurzzeitgedächtnis mit SQLite
- Ollama-Integration für lokale LLMs
- Flask-Webinterface mit Stream-Response
- Persona-System mit konfigurierbarem Charakter
- Auto-Zusammenfassung bei Token-Limit
- Rate Limiting und Sicherheitsfeatures
- Uncensored Modell-Support
2026-04-14 07:44:36 +02:00

83 lines
2.4 KiB
Python

#!/usr/bin/env python3
"""Test script for local Ollama connection"""
import requests
import sys
# Base URL of the local Ollama HTTP API (Ollama's default port is 11434).
OLLAMA_HOST = "http://localhost:11434"
# Model tag this script expects to find installed; checked by check_model()
# and used for the test prompt in test_generate().
MODEL = "HammerAI/rocinante-v1.1:12b-q4_K_M"
def test_connection():
    """Check that the Ollama server is reachable and list its models.

    Queries the /api/tags endpoint with a short timeout and prints a
    human-readable status line for each outcome.

    Returns:
        list[str]: names of available models on success.
        []: the server answered but with a non-200 status.
        None: the server could not be reached at all.
    """
    try:
        resp = requests.get(f"{OLLAMA_HOST}/api/tags", timeout=5)
        if resp.status_code == 200:
            data = resp.json()
            models = [m['name'] for m in data.get('models', [])]
            print("✓ Ollama is running")
            print(f" Available models: {models}")
            return models
        print(f"✗ Ollama returned status {resp.status_code}")
        return []
    except requests.exceptions.ConnectionError:
        print(f"✗ Cannot connect to Ollama at {OLLAMA_HOST}")
        print(" Start Ollama with: ollama serve")
        return None
    except Exception as e:
        # Catch-all so the script always prints a diagnostic instead of a traceback.
        print(f"✗ Error: {e}")
        return None
def check_model(models, model=None):
    """Report whether the target model is among the installed models.

    Args:
        models: list of model names, as returned by test_connection().
        model: model tag to look for; defaults to the module-level MODEL
            (backward-compatible — existing callers pass only *models*).

    Returns:
        bool: True if the model is available, False otherwise (with a
        hint printed on how to pull it).
    """
    if model is None:
        model = MODEL
    if model in models:
        print(f"✓ Model {model} is available")
        return True
    print(f"✗ Model {model} not found")
    print(f" Available models: {models}")
    print("\n To download, run:")
    print(f" ollama pull {model}")
    return False
def test_generate():
    """Send one non-streaming prompt to /api/generate and report the result.

    Uses the module-level MODEL and a fixed greeting prompt; prints a
    preview of the generated text on success.

    Returns:
        bool: True when the server answers HTTP 200, False on any failure
        (non-200 status, connection problem, or timeout).
    """
    try:
        resp = requests.post(
            f"{OLLAMA_HOST}/api/generate",
            json={
                "model": MODEL,
                "prompt": "Hello, who are you?",
                "stream": False,
            },
            # Generous timeout: the first generation may need to load the model.
            timeout=30,
        )
        if resp.status_code == 200:
            data = resp.json()
            print("✓ Test generation successful")
            print(f" Response preview: {data.get('response', '')[:100]}...")
            return True
        print(f"✗ Generation failed: {resp.status_code}")
        print(f" {resp.text}")
        return False
    except Exception as e:
        print(f"✗ Generation error: {e}")
        return False
if __name__ == "__main__":
    # Banner with the target host/model so failures are easy to interpret.
    print("=" * 50)
    print("Nimue Ollama Test")
    print("=" * 50)
    print(f"Target: {OLLAMA_HOST}")
    print(f"Model: {MODEL}")
    print("-" * 50)

    models = test_connection()
    if models is None:
        # Server unreachable — nothing further can be tested.
        sys.exit(1)

    if not check_model(models):
        print("\n" + "=" * 50)
        print("SETUP REQUIRED:")
        print("=" * 50)
    else:
        print("\n" + "-" * 50)
        test_generate()