Update app.py
app.py
CHANGED
@@ -29,11 +29,6 @@ from pydantic import BaseModel, Field
 import litellm
 from langchain.tools import Tool
 
-LLM._get_litellm_model_name = lambda self, model_name: f"gemini/{model_name}" if not "/" in model_name else model_name
-os.environ["LITELLM_MODEL_DEFAULT_PROVIDER"] = "gemini"
-
-os.environ["OPENAI_API_KEY"] = "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"
-
 # Configure logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -74,13 +69,22 @@ st.write("---")
 # Sidebar for API key configuration
 with st.sidebar:
     st.title("⚙️ Configuration")
+    api_key_source = st.radio("Select API Key Provider:",
+                              ["Google (Gemini)", "OpenAI"],
+                              help="Choose which AI provider to use")
 
-
-
-
-
-
-
+    if api_key_source == "Google (Gemini)":
+        api_key = st.text_input("Enter your Gemini API Key", type="password",
+                                help="Required for the AI model to function")
+        if api_key:
+            os.environ["GEMINI_API_KEY"] = api_key
+            os.environ["GOOGLE_API_KEY"] = api_key
+    else:
+        api_key = st.text_input("Enter your OpenAI API Key", type="password",
+                                help="Required for the AI model to function")
+        if api_key:
+            os.environ["OPENAI_API_KEY"] = api_key
+
     st.divider()
 
     # Reset button
@@ -89,6 +93,7 @@ with st.sidebar:
             del st.session_state[key]
         st.rerun()
 
+
 #---------------------------- Utility Functions ----------------------------#
 
 def extract_text_from_pdf(file):
@@ -393,7 +398,6 @@ class CaseBreakdownCrew:
         self.api_key = api_key
 
     def create_metadata_agent(self):
-        llm = LLM(model='gemini/gemini-2.0-flash', api_key=self.api_key)
         return Agent(
             role="Metadata Analyzer",
             goal="Extract title and author information from document content",
@@ -405,7 +409,6 @@ class CaseBreakdownCrew:
         )
 
     def create_content_generator_agent(self):
-        llm = LLM(model='gemini/gemini-2.0-flash', api_key=self.api_key)
         return Agent(
             role="Case Study Content Generator",
             goal="Generate comprehensive case analysis content based on section requirements",
@@ -417,7 +420,6 @@ class CaseBreakdownCrew:
         )
 
     def create_content_reviewer_agent(self):
-        llm = LLM(model='gemini/gemini-2.0-flash', api_key=self.api_key)
         return Agent(
             role="Content Quality Reviewer",
             goal="Evaluate and score content for quality, relevance, and depth",
@@ -503,7 +505,6 @@ class CaseBreakdownCrew:
             agents=[self.create_metadata_agent()],
             tasks=[metadata_task],
             process=Process.sequential,
-            llm=LLM(model='gemini/gemini-2.0-flash', api_key=self.api_key),
             verbose=False
         )
         result = crew.kickoff()
@@ -606,9 +607,16 @@ def create_teaching_plan_crew(file_paths, llm_provider="gemini"):
     tracker.set_placeholder(st.empty())
 
     # Initialize LLM based on provider
-
-
-
+    if llm_provider == "gemini":
+        my_llm = LLM(
+            model='gemini/gemini-2.0-flash',
+            api_key=os.environ.get("GEMINI_API_KEY")
+        )
+    else:
+        my_llm = LLM(
+            model='gpt-4-turbo',
+            api_key=os.environ.get("OPENAI_API_KEY")
+        )
 
     # Define agents with callbacks for UI updates
     pdf_analyzer = Agent(
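The provider-to-model branch added here reappears almost verbatim in BoardPlanAnalyzer.__init__ in the next hunk. A small shared helper would keep the two call sites in sync; the sketch below is not part of the commit, it assumes the LLM class is crewai's (as already constructed elsewhere in app.py), and the name build_llm is hypothetical.

import os
from crewai import LLM  # assumed import; app.py already builds LLM objects this way

# Hypothetical helper (not in this commit): map a provider name to its model id
# and the matching API key environment variable.
def build_llm(provider: str = "gemini") -> LLM:
    if provider == "gemini":
        return LLM(model="gemini/gemini-2.0-flash",
                   api_key=os.environ.get("GEMINI_API_KEY"))
    return LLM(model="gpt-4-turbo",
               api_key=os.environ.get("OPENAI_API_KEY"))

With such a helper, both this hunk and the constructor below could reduce to my_llm = build_llm(llm_provider).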
@@ -700,20 +708,24 @@ def create_teaching_plan_crew(file_paths, llm_provider="gemini"):
 #---------------------------- Board Plan Generator ----------------------------#
 
 class BoardPlanAnalyzer:
-    def __init__(self):
-
-
+    def __init__(self, llm_provider="gemini"):
+        if llm_provider == "gemini":
+            api_key = os.environ.get('GEMINI_API_KEY')
+            self.model = "gemini/gemini-2.0-flash"
+        else:
+            api_key = os.environ.get('OPENAI_API_KEY')
+            self.model = "gpt-4-turbo"
 
         if not api_key:
-            raise ValueError("
+            raise ValueError(f"{llm_provider.capitalize()} API key not found")
 
-
-
+        if llm_provider == "gemini":
+            os.environ['GEMINI_API_KEY'] = api_key
+        else:
+            os.environ['OPENAI_API_KEY'] = api_key
 
         litellm.set_verbose = True
-
-        os.environ["LITELLM_MODEL_DEFAULT_PROVIDER"] = "gemini"
-
+
         # Create agents
         self.create_agents()
 
@@ -733,7 +745,6 @@ class BoardPlanAnalyzer:
                 description="Extracts text content from PDF files"
             )],
             allow_delegation=False,
-            llm=self.model,
             verbose=True
         )
 
@@ -750,7 +761,6 @@ class BoardPlanAnalyzer:
                 description="Analyzes case study and creates structured board plan"
             )],
             allow_delegation=False,
-            llm=self.model,
             verbose=True
         )
 
@@ -1088,7 +1098,7 @@ if st.session_state.uploaded_files:
             progress_bar = progress_placeholder.progress(0)
 
             # Select LLM provider
-            llm_provider = "gemini"
+            llm_provider = "gemini" if api_key_source == "Google (Gemini)" else "openai"
 
             # Update progress
             progress_bar.progress(10)
@@ -1179,7 +1189,7 @@ if st.session_state.uploaded_files:
         if st.button("Generate Board Plan", key="board_plan_button"):
             try:
                 # Select LLM provider
-                llm_provider = "gemini"
+                llm_provider = "gemini" if api_key_source == "Google (Gemini)" else "openai"
 
                 # Initialize the board plan analyzer
                 analyzer = BoardPlanAnalyzer(llm_provider=llm_provider)
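For a quick check that the key captured by the new sidebar works before launching a full crew, a one-off call like the following can help. It is illustrative only, not part of this commit, and assumes crewai's LLM.call accepts a list of chat-style message dicts.

import os
from crewai import LLM

# Illustrative check (not in the commit): send a single message through the
# configured Gemini model and print the reply.
llm = LLM(model="gemini/gemini-2.0-flash",
          api_key=os.environ.get("GEMINI_API_KEY"))
print(llm.call([{"role": "user", "content": "Reply with the single word OK."}]))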