KingZack committed on
Commit
3799925
·
1 Parent(s): ccde5ae

adding answering service and config params, and content model

Browse files
src/ctp_slack_bot/core/config.py CHANGED
@@ -36,8 +36,20 @@ class Settings(BaseSettings):
36
  # OpenAI Configuration
37
  OPENAI_API_KEY: Optional[SecretStr] = None
38
 
39
-
40
-
 
 
 
 
 
 
 
 
 
 
 
 
41
  # Logging Configuration
42
  LOG_LEVEL: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO"
43
  LOG_FORMAT: Literal["text", "json"] = "json"
 
36
  # OpenAI Configuration
37
  OPENAI_API_KEY: Optional[SecretStr] = None
38
 
39
+ # Chat Model Configuration
40
+ CHAT_MODEL: str = "gpt-3.5-turbo"
41
+ MAX_TOKENS: int = 150
42
+ TEMPERATURE: float = 0.8 # Maximum tokens for response generation
43
+ SYSTEM_PROMPT: str = """
44
+ You are a helpful teaching assistant for a data science class.
45
+ Based on the students question, you will be given context retreived from class transcripts and materials to answer their question.
46
+ Your responses should be:
47
+ 1. Accurate and based on the class content
48
+ 2. Clear and educational
49
+ 3. Concise but complete
50
+ If you're unsure about something, acknowledge it and suggest asking the professor.
51
+ """
52
+
53
  # Logging Configuration
54
  LOG_LEVEL: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO"
55
  LOG_FORMAT: Literal["text", "json"] = "json"
src/ctp_slack_bot/models/content.py CHANGED
@@ -14,5 +14,6 @@ class RetreivedContext(BaseModel):
14
  contextual_text: str
15
  metadata_source: str
16
  similarity_score: float
17
- in_reation_to_question: str = None
18
-
 
 
14
  contextual_text: str
15
  metadata_source: str
16
  similarity_score: float
17
+
18
+ said_by: str = Optional[None]
19
+ in_reation_to_question: str = Optional[None]
src/ctp_slack_bot/services/AnswerQuestionService.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel, validator
2
+ from typing import List, Optional, Tuple
3
+ from ctp_slack_bot.core.config import settings
4
+ import numpy as np
5
+ from openai import OpenAI
6
+ from ctp_slack_bot.models.slack import SlackMessage
7
+ from ctp_slack_bot.models.content import RetreivedContext
8
+
9
+ class GenerateAnswer():
10
+ """
11
+ Service for language model operations.
12
+ """
13
+ def __init__(self):
14
+ self.client = OpenAI(api_key=settings.OPENAI_API_KEY)
15
+
16
+ def generate_answer(self, question: SlackMessage, context: List[RetreivedContext]) -> str:
17
+ """Generate a response using OpenAI's API with retrieved context.
18
+
19
+ Args:
20
+ question (str): The user's question
21
+ context (List[RetreivedContext]): List of RetreivedContext
22
+
23
+ Returns:
24
+ str: Generated answer
25
+ """
26
+ # Prepare context string from retrieved chunks
27
+ context_str = ""
28
+ for c in context:
29
+ context_str += f"{c.contextual_text}\n"
30
+
31
+
32
+ # logger.info(f"Generating response for question: {question}")
33
+ # logger.info(f"Using {len(context)} context chunks")
34
+
35
+ # Create messages for the chat completion
36
+ messages = [
37
+ {"role": "system", "content": settings.SYSTEM_PROMPT},
38
+ {"role": "user", "content":
39
+ f"""Student Auestion: {question.text}
40
+ Context from class materials and transcripts: {context_str}
41
+ Please answer the Student Auestion based on the Context from class materials and transcripts. If the context doesn't contain relevant information, acknowledge that and suggest asking the professor."""}
42
+ ]
43
+
44
+ # Generate response
45
+ response = self.client.chat.completions.create(
46
+ model=settings.CHAT_MODEL,
47
+ messages=messages,
48
+ max_tokens=settings.MAX_TOKENS,
49
+ temperature=settings.TEMPERATURE
50
+ )
51
+
52
+ return response.choices[0].message.content
53
+
54
+
55
+
56
+ ### REMOVE BELOW, PUT SOMEWHERE IN TESTS BUT IDK WHERE YET
57
+ # sm = SlackMessage(text="What is the capital of France?", channel_id="123", user_id="456", timestamp="789")
58
+ # context = [RetreivedContext(contextual_text="The capital of France is Paris", metadata_source="class materials", similarity_score=0.95)]
59
+ # a = GenerateAnswer()
60
+ # a.generate_answer(sm, context)