import os
from pathlib import Path

import requests

from repository import ModelRoles, Model, Repository


class OndemandRepository(Repository):
    """Repository backed by the on-demand.io chat API."""

    session_url = "https://api.on-demand.io/chat/v1/sessions"

    def __init__(self, model_info: Model, system_message: str = None, log_to_file: Path = None):
        self.model_info = model_info
        self.system_message = system_message
        self.log_to_file = log_to_file
        self.session_id = None

    def init(self):
        self._start_new_session_if_needed()

    def _start_new_session_if_needed(self):
        # Create a chat session once and reuse its id for all subsequent queries.
        if not self.session_id:
            headers = {"apiKey": os.getenv("API_KEY")}
            session_body = {
                "pluginIds": [],
                "externalUserId": "virtualDAM",
                "modelConfigs": {"temperature": 0, "fulfillmentPrompt": self.system_message},
            }
            response = requests.post(self.session_url, headers=headers, json=session_body)
            response_data = response.json()
            self.session_id = response_data["data"]["id"]

    def get_model_roles(self) -> ModelRoles:
        return self.model_info.roles

    def get_model_info(self) -> Model:
        return self.model_info

    def send_prompt(self, prompt: str, add_to_history: bool = None) -> dict[str, str]:
        self._start_new_session_if_needed()
        headers = {"apiKey": os.getenv("API_KEY")}
        body = {
            "endpointId": "predefined-openai-gpt4o",
            "query": prompt,
            "pluginIds": [],
            "responseMode": "sync",
        }
        url = f"{self.session_url}/{self.session_id}/query"
        response = requests.post(url, headers=headers, json=body)
        return {"content": response.json()["data"]["answer"]}

    def get_message_history(self) -> list[dict[str, str]]:
        # No local message history is kept for this repository.
        return []
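
# Usage sketch (hypothetical; assumes a `Model` instance from repository.py and an
# on-demand.io key exported as the API_KEY environment variable):
#
#   repo = OndemandRepository(model, system_message="You are a helpful assistant.")
#   repo.init()
#   reply = repo.send_prompt("Summarise the last upload.")
#   print(reply["content"])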