ctp-slack-bot/src/ctp_slack_bot/services/language_model_service.py
from datetime import datetime
from loguru import logger
from openai import AsyncOpenAI
from pydantic import ConfigDict
from typing import Collection, Self
from ctp_slack_bot.core import ApplicationComponentBase, Settings
from ctp_slack_bot.models import Chunk


class LanguageModelService(ApplicationComponentBase):
"""
Service for language model operations.
"""
model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
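
    # Dependencies declared as pydantic fields; presumably the application’s component
    # wiring supplies the shared Settings and AsyncOpenAI client at construction time.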
settings: Settings
open_ai_client: AsyncOpenAI
    async def answer_question(self, asker: str, question: str, context: Collection[Chunk]) -> str: # TODO: generify into just another agent.
        """Generate a response using OpenAI’s API with retrieved context.

        Args:
            asker (str): The name of the user asking the question
            question (str): The user’s question
            context (Collection[Chunk]): The context chunks retrieved for answering the question

        Returns:
            str: Generated answer
        """
logger.debug("Generating response for question “{}” using {} context chunks…", question, len(context))
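        # Build the chat payload: the configured system prompt plus a single user message
        # combining the asker’s name, the current time, the question, and the retrieved context.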
        # Join the retrieved chunks outside the f-string so the expression contains no
        # backslashes, which keeps it valid on Python versions before 3.12.
        context_text = "\n\n".join(chunk.text for chunk in context)
        messages = [
            {"role": "system", "content": self.settings.system_prompt},
            {"role": "user", "content": (
                f"""Inquirer Name: {asker}
Current Time: {datetime.now().isoformat(" ", "seconds")}
Question:
{question}
Context from class materials and transcripts:
{context_text}""")}
        ]
        response = await self.open_ai_client.chat.completions.create(
            model=self.settings.chat_model,
            messages=messages,
            max_tokens=self.settings.max_tokens,
            temperature=self.settings.temperature
        )
        # The first choice’s content may be None; fall back to an empty string to honor the declared return type.
        return response.choices[0].message.content or ""

    @property
    def name(self: Self) -> str:
        return "language_model_service"