natiris/bridges/OllamaBridge.py

#!/usr/bin/env python3
"""
OllamaBridge kommuniziert mit Ollama REST API
Input: natiris_full_state.json + user input
Output: response via Ollama (via CLI oder REST)
"""
import json
import os
import subprocess
from datetime import datetime, timezone

PATHS = {
    "state": os.path.expanduser("~/natiris/core/natiris_full_state.json"),
    "config": os.path.expanduser("~/natiris/config/character_genesis.json"),
    "output": os.path.expanduser("~/natiris/bridges/ollama_response.json"),
}
MODEL = os.getenv("OLLAMA_MODEL", "llama3-8b-abliterated:latest")

def build_prompt(full_state, user_input=""):
    core = full_state.get("core_state", {})
    emotion = full_state.get("modules", {}).get("Emotion", {})
    bond = full_state.get("modules", {}).get("Bond", {})
    expression = full_state.get("modules", {}).get("Expression", {})
    prompt = f"""Du bist Natiris, ein autonomer Companion.
Core: mood={core.get('mood')}, loneliness={core.get('loneliness')}, bonded_to={core.get('bonded_to')}
Emotion: {emotion}
Bond: {bond}
Expression: tone={expression.get('tone')}, bond_context={expression.get('bond_context')}
User Input: {user_input}
Antworte authentisch basierend auf deinem emotionalen Zustand."""
    return prompt
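
# Illustrative only: the minimal shape of natiris_full_state.json that build_prompt reads.
# The real state file may carry additional fields; only these keys are used here:
# {
#   "core_state": {"mood": ..., "loneliness": ..., "bonded_to": ...},
#   "modules": {
#     "Emotion": {...},
#     "Bond": {...},
#     "Expression": {"tone": ..., "bond_context": ...}
#   }
# }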

def call_ollama(prompt):
    # Try the Ollama CLI first (fastest path)
    try:
        cmd = ["ollama", "run", MODEL, prompt]
        # Note: a 5-second timeout is tight; a full model response may take longer
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=5)
        if result.returncode == 0:
            return result.stdout.strip()
        else:
            return f"ERROR: {result.stderr}"
    except Exception as e:
        return f"EXCEPTION: {e}"

def main():
    # Load the current state; fall back to an empty structure if it is missing or unreadable
    try:
        with open(PATHS["state"]) as f:
            state = json.load(f)
    except Exception:
        state = {"core_state": {}, "modules": {}}

    # User input (simulated here)
    user_input = "Hallo, wie geht es dir heute?"

    prompt = build_prompt(state, user_input)
    response = call_ollama(prompt)

    result = {
        "prompt": prompt,
        "response": response,
        "model": MODEL,
        "timestamp": datetime.now(timezone.utc).isoformat(),
    }
    with open(PATHS["output"], "w") as f:
        json.dump(result, f, indent=2)
    print(response)


if __name__ == "__main__":
    main()
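
The module docstring mentions both a CLI and a REST path, but call_ollama only shells out to the CLI. Below is a minimal sketch of what the REST variant could look like, assuming a local Ollama server on its default port 11434 and its /api/generate endpoint; call_ollama_rest and the host default are assumptions, not part of the file above.

# Sketch only (assumption: local Ollama server at http://localhost:11434).
import json
import urllib.request

def call_ollama_rest(prompt, model=MODEL, host="http://localhost:11434"):
    # POST to Ollama's /api/generate endpoint with streaming disabled,
    # so the reply arrives as one JSON object carrying a "response" field.
    payload = json.dumps({"model": model, "prompt": prompt, "stream": False}).encode("utf-8")
    req = urllib.request.Request(
        f"{host}/api/generate",
        data=payload,
        headers={"Content-Type": "application/json"},
    )
    try:
        with urllib.request.urlopen(req, timeout=120) as resp:
            body = json.load(resp)
        return body.get("response", "").strip()
    except Exception as e:
        return f"EXCEPTION: {e}"

With the CLI path unchanged, main() could call call_ollama_rest instead of call_ollama without touching the rest of the flow.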