MenuBase/services/llm/ollama_service.py

# services/llm/ollama_service.py
"""
Ollama service implementation
"""
import ollama
import json
from typing import Dict, List, Optional
from .base import LLMService


class OllamaService(LLMService):
    def __init__(self, model: str = "llama3.1"):
        self.model = model

    def generate_text(self, prompt: str) -> Optional[str]:
        """Generate a completion for the given prompt, or None on failure."""
        try:
            response = ollama.generate(
                model=self.model,
                prompt=prompt
            )
            return response["response"]
        except Exception as e:
            print(f"Error in Ollama API call: {e}")
            return None

    def get_similarity_scores(self, texts_pairs: Dict[str, List[str]]) -> Optional[List[float]]:
        """Score the semantic similarity of each text pair on a 0-1 scale.

        The pairs are serialized to JSON, sent to the model with an
        instruction to reply with a bare JSON list of scores, and the
        reply is parsed back into a list of floats. Returns None on failure.
        """
        system_prompt = (
            "Evaluate the semantic similarity between the following table of pairs of texts "
            "in JSON format on a scale from 0 to 1. "
            "Return the similarity scores for every row in JSON format as a list of numbers, "
            "without any additional text or formatting."
        )
        request_payload = json.dumps(texts_pairs)
        prompt = f"{system_prompt}\n\n{request_payload}"
        try:
            response = ollama.generate(
                model=self.model,
                prompt=prompt
            )
            try:
                scores = json.loads(response["response"].strip())
                # Accept either {"similarity_scores": [...]} or a bare list of numbers.
                if isinstance(scores, dict) and "similarity_scores" in scores:
                    return scores["similarity_scores"]
                elif isinstance(scores, list):
                    return scores
                else:
                    raise ValueError("Unexpected response format")
            except json.JSONDecodeError:
                raise ValueError("Could not decode response as JSON")
        except Exception as e:
            print(f"Error in Ollama similarity calculation: {e}")
            return None
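

# A minimal usage sketch, not part of the original module. It assumes a local
# Ollama server is running and that the "llama3.1" model has been pulled.
# The dictionary keys below are illustrative only; the method just requires a
# Dict[str, List[str]] payload. Because of the relative import above, run this
# as a module, e.g. `python -m MenuBase.services.llm.ollama_service`.
if __name__ == "__main__":
    service = OllamaService(model="llama3.1")

    # Plain text generation.
    print(service.generate_text("Say hello in one short sentence."))

    # Pairwise similarity: row i of "text1" is compared against row i of "text2".
    pairs = {
        "text1": ["The cat sat on the mat.", "Stock prices fell sharply."],
        "text2": ["A cat is resting on a rug.", "The market dropped today."],
    }
    print(service.get_similarity_scores(pairs))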