DAMHelper / repository /ollama.py
enricorampazzo's picture
first implementation for LLM integration
7b864ba
raw
history blame
855 Bytes
import ollama
from ollama import Options
from schema import ModelRoles
class OllamaRepository:
    """Thin wrapper around the ollama chat API that maintains a conversation history.

    The history is seeded with a single system message and grows only when the
    caller explicitly opts in via ``send_prompt(..., add_to_history=True)``.
    """

    def __init__(self, model: str, system_msg: str, roles: ModelRoles):
        self.model = model
        self.system_msg = system_msg
        self.roles = roles
        # Seed the conversation with the system prompt using the
        # project-configured role names.
        self.message_history: list[dict[str, str]] = [
            {"role": self.roles.system_role, "content": system_msg}
        ]

    def send_prompt(self, prompt: str, add_to_history: bool = False) -> dict[str, str]:
        """Send ``prompt`` to the model and return the assistant's reply.

        :param prompt: the user message to send.
        :param add_to_history: when True, record both the prompt and the
            reply in ``self.message_history``; otherwise the history is
            left untouched.
        :return: a ``{"role": ..., "content": ...}`` dict with the reply.
        """
        # Low temperature for near-deterministic answers.
        options: Options = Options(temperature=0.1)
        user_msg = {"role": self.roles.user_role, "content": prompt}
        # BUGFIX: the original appended the prompt to the history
        # unconditionally, so with add_to_history=False the history filled
        # up with user turns whose answers were dropped. Send a temporary
        # message list instead and mutate state only on opt-in.
        response = ollama.chat(self.model, self.message_history + [user_msg], options=options)
        answer = {"role": self.roles.ai_role, "content": response["message"]["content"]}
        if add_to_history:
            # Keep prompt and answer paired so the stored transcript stays
            # a valid alternating conversation.
            self.message_history.append(user_msg)
            self.message_history.append(answer)
        return answer