from datetime import datetime
from typing import Collection, Self

from loguru import logger
from openai import AsyncOpenAI
from pydantic import ConfigDict

from ctp_slack_bot.core import ApplicationComponentBase, Settings
from ctp_slack_bot.models import Chunk


class LanguageModelService(ApplicationComponentBase):
    """
    Service for language model operations.
    """

    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)

    settings: Settings
    open_ai_client: AsyncOpenAI

    async def answer_question(self, asker: str, question: str, context: Collection[Chunk]) -> str:  # TODO: generify into just another agent.
        """Generate a response using OpenAI’s API with retrieved context.

        Args:
            asker (str): The name of the user asking the question
            question (str): The user’s question
            context (Collection[Chunk]): The context chunks retrieved for answering the question

        Returns:
            str: The generated answer
        """
        logger.debug("Generating response for question “{}” using {} context chunks…", question, len(context))
        # Join the retrieved chunks up front: a backslash inside an f-string
        # expression is a syntax error on Python versions before 3.12.
        context_text = "\n\n".join(chunk.text for chunk in context)
        messages = [
            {"role": "system", "content": self.settings.system_prompt},
            {"role": "user", "content": (
                f"""Inquirer Name: {asker}
Current Time: {datetime.now().isoformat(" ", "seconds")}
Question:
{question}
Context from class materials and transcripts:
{context_text}""")}
        ]
        response = await self.open_ai_client.chat.completions.create(
            model=self.settings.chat_model,
            messages=messages,
            max_tokens=self.settings.max_tokens,
            temperature=self.settings.temperature
        )
        return response.choices[0].message.content

    @property
    def name(self: Self) -> str:
        return "language_model_service"