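"""AI Research Assistant - Key Points Extractor.

A CrewAI pipeline (a research agent plus a writer agent) that searches Google
Scholar via SerperDevTool, summarizes relevant papers with a Gemini model
through litellm, and serves the resulting markdown report in a Gradio UI.
"""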
import os
import litellm
from crewai import Agent, Task, Crew, Process
from crewai_tools import SerperDevTool
import gradio as gr
# Error handling for API keys
try:
    # Set up API keys (read both first, so a missing key raises a clear error instead of a TypeError)
    google_api_key = os.getenv('GOOGLE_API_KEY')
    serper_api_key = os.getenv('SERPER_API_KEY')
    if not google_api_key or not serper_api_key:
        raise ValueError("API keys are missing. Please ensure both GOOGLE_API_KEY and SERPER_API_KEY are set.")
    litellm.api_key = google_api_key
    os.environ['SERPER_API_KEY'] = serper_api_key
except Exception as e:
    print(f"Error setting up API keys: {e}")
    exit()
# Define the LLM
llm = "gemini/gemini-1.5-flash-exp-0827" # Your LLM model
# Initialize the tool for internet searching capabilities
try:
    tool = SerperDevTool(search_url="https://google.serper.dev/scholar", n_results=10)
except Exception as e:
    print(f"Error initializing search tool: {e}")
    exit()
# Research agent
research_agent = Agent(
    role="Research Assistant",
    goal='Discover and retrieve the latest groundbreaking papers and publications on {topic}.',
    verbose=True,
    memory=True,
    backstory=(
        "You are an expert researcher who specializes in locating the most recent and relevant research papers. "
        "You focus on analyzing research from credible sources like Google Scholar, ensuring they are closely aligned with the {topic}. "
        "Your insights help refine ongoing research by identifying gaps and suggesting areas for improvement."
    ),
    llm=llm,
    allow_delegation=True
)
# Writer agent
writer_agent = Agent(
    role="Research Key Points Writer",
    goal="Extract and present the key points of relevant research papers, including publication links.",
    verbose=True,
    memory=True,
    backstory=(
        "As a skilled research writer, your task is to extract key information such as objectives, methodologies, findings, and future improvements. "
        "You will list the publication links in an organized manner."
    ),
    tools=[tool],
    llm=llm,
    allow_delegation=False
)
# Research task
research_task = Task(
    description=(
        "Identify all relevant research papers on {topic}. "
        "For each paper, extract key points such as the main objectives, methodology, findings, and any significant flaws in the study. "
        "Highlight gaps in the research and suggest possible improvements."
    ),
    expected_output='A structured list of key points from relevant papers, including strengths, weaknesses, and improvement suggestions.',
    tools=[tool],
    agent=research_agent,
)
# Writer task
writer_task = Task(
    description=(
        "Compose a report highlighting the key points from {topic}-related publications. "
        "The report should include the main objectives, methodologies, and findings of each paper, along with a link to the publication. "
        "Ensure that the information is accurate, clear and well-organized."
    ),
    expected_output='A markdown file (.md) containing key points and publication links for each paper.',
    tools=[tool],
    agent=writer_agent,
    async_execution=True,
    output_file='key_points_report.md'
)
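# output_file tells CrewAI to persist this task's final answer to key_points_report.md,
# which generate_report() reads back below.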
# Create a Crew for processing
crew = Crew(
    agents=[research_agent, writer_agent],
    tasks=[research_task, writer_task],
    process=Process.sequential,
)
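# Process.sequential executes the tasks in the order listed: research_task first, then writer_task.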
# Define a function that will take the research topic as input and return the markdown output
def generate_report(topic):
    try:
        # Kick off the Crew process with the provided topic
        result = crew.kickoff(inputs={'topic': topic})
        # Read the generated markdown file (the report is saved as 'key_points_report.md')
        with open('key_points_report.md', 'r') as file:
            markdown_output = file.read()
        return markdown_output
    except Exception as e:
        return f"Error during processing: {e}"
# Gradio Interface
def gradio_interface():
    # Use Column to organize input and output in vertical layout
    with gr.Blocks() as interface:
        gr.Markdown("<center><h1>AI Research Assistant Agent-Key Points Extractor</h1></center>")
        with gr.Column():
            topic_input = gr.Textbox(lines=2, placeholder="Enter your research topic/keywords", label="Research Topic/Keywords")
            result_output = gr.Markdown(label="Key Points Output")
            submit_button = gr.Button("Generate Report")
        submit_button.click(generate_report, inputs=topic_input, outputs=result_output)
    interface.launch(debug=True)
# Run the Gradio interface
if __name__ == "__main__":
    gradio_interface()
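# To run locally (assuming this file is saved as app.py and both keys are exported in the shell):
#     export GOOGLE_API_KEY=...
#     export SERPER_API_KEY=...
#     python app.py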