Spaces:
Sleeping
Sleeping
File size: 1,102 Bytes
d015f0c 37de7f1 d015f0c 062179e d005419 0fadcb9 d005419 d015f0c 062179e d005419 0fadcb9 d005419 7b864ba 0fadcb9 d015f0c 062179e d005419 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 |
import ollama
from ollama import Options
from dam_helper.repository import Model, Repository
class OllamaRepository(Repository):
    """Repository that talks to a locally running Ollama server.

    Maintains a running chat transcript (``message_history``) seeded with a
    system message, and forwards it on every call so the model sees the full
    conversation context.
    """

    def __init__(self, model_info: Model, system_msg: str):
        """Initialize the repository.

        Args:
            model_info: Model descriptor; provides the model ``name`` and the
                role labels (``system_role``, ``user_role``, ``ai_role``) used
                when building chat messages.
            system_msg: System prompt that seeds the conversation.
        """
        self.model_info: Model = model_info
        self.system_msg: str = system_msg
        # Seed the transcript with the system message so every subsequent
        # ollama.chat call carries the model's standing instructions.
        self.message_history: list[dict[str, str]] = [
            {"role": self.model_info.roles.system_role, "content": system_msg}]

    def send_prompt(self, prompt: str, add_to_history: bool = True) -> dict[str, str]:
        """Send a user prompt to the model and return its reply message.

        Args:
            prompt: The user's message text.
            add_to_history: When True, both the prompt and the model's answer
                are kept in ``message_history``; when False, the transcript is
                restored to its previous state after the call.

        Returns:
            A ``{"role": ..., "content": ...}`` dict holding the model's answer.
        """
        print(f"Prompt to be sent: {prompt}")
        # temperature=0 keeps responses deterministic for repeatable behavior.
        options: Options = Options(temperature=0)
        self.message_history.append({"role": self.model_info.roles.user_role, "content": prompt})
        response = ollama.chat(self.model_info.name, self.message_history, options=options)
        answer = {"role": self.model_info.roles.ai_role, "content": response["message"]["content"]}
        if add_to_history:
            self.message_history.append(answer)
        else:
            # Discard the just-appended user message so the transcript is
            # unchanged by this throwaway exchange.
            self.message_history.pop()
        return answer

    def get_model_info(self) -> Model:
        """Return the model descriptor this repository was built with."""
        return self.model_info
|