import os
import requests
import openai
import gradio as gr
# Fetch API keys from environment variables
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PROXYCURL_API_KEY = os.getenv("PROXYCURL_API_KEY")
FIRECRAWL_API_KEY = os.getenv("FIRECRAWL_API_KEY")
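
# Optional sanity check (a small addition, not required by the logic below):
# warn early if a required key is missing, so failures don't surface later as
# opaque 401 responses from the downstream APIs.
for _name, _key in [("OPENAI_API_KEY", OPENAI_API_KEY),
                    ("PROXYCURL_API_KEY", PROXYCURL_API_KEY),
                    ("FIRECRAWL_API_KEY", FIRECRAWL_API_KEY)]:
    if not _key:
        print(f"Warning: {_name} is not set; calls that depend on it will fail.")
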
# Function to fetch LinkedIn data using the Proxycurl API
def fetch_linkedin_data(linkedin_url):
    headers = {'Authorization': f'Bearer {PROXYCURL_API_KEY}'}
    api_endpoint = 'https://nubela.co/proxycurl/api/v2/linkedin'
    response = requests.get(api_endpoint,
                            params={'url': linkedin_url},
                            headers=headers)
    if response.status_code == 200:
        return response.json()
    else:
        return {"error": f"Error fetching LinkedIn data: {response.text}"}

# Function to fetch company information using the Firecrawl API
def fetch_company_info(company_url):
    headers = {
        'Authorization': f'Bearer {FIRECRAWL_API_KEY}',
        'Content-Type': 'application/json'
    }
    api_endpoint = 'https://api.firecrawl.dev/v1/crawl'
    data = {
        "url": company_url,
        "limit": 100,
        "scrapeOptions": {
            "formats": ["markdown", "html"]
        }
    }
    response = requests.post(api_endpoint, json=data, headers=headers)
    if response.status_code == 200:
        return response.json()
    else:
        return {"error": f"Error fetching company information: {response.text}"}
# Function to structure the email using the "Start with Why" model
def structure_email(user_data, linkedin_info, company_info):
    why = f"I am passionate about {company_info.get('mission', 'your mission')} because it aligns with my experience as {linkedin_info.get('current_role', 'a professional')}."
    how = f"My skills in {user_data['role']} match the requirements and goals of your organization."
    what = f"I can bring my experience in {linkedin_info.get('skills', 'relevant skills')} to help achieve {company_info.get('goal', 'your company goals')}."
    structured_input = f"{why}\n\n{how}\n\n{what}"
    return structured_input

# Function to generate email content using the Nvidia Nemotron LLM
def generate_email_content(api_key, prompt):
    # Uses the openai>=1.0 client interface (the legacy openai.Completion API
    # was removed). When calling NVIDIA's hosted Nemotron, point the client at
    # an OpenAI-compatible endpoint, e.g. base_url="https://integrate.api.nvidia.com/v1",
    # with the matching model id; "nemotron-70b" is kept from the original script.
    client = openai.OpenAI(api_key=api_key)
    response = client.chat.completions.create(
        model="nemotron-70b",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=500
    )
    return response.choices[0].message.content

# Function to validate the structure of the generated email
def validate_email(email_content):
    # Simple keyword heuristic: checks that the Why/How/What framing survives
    # in the generated text; it does not assess tone or completeness.
    return "Why" in email_content and "How" in email_content and "What" in email_content

# Custom Agent class to simulate behavior similar to OpenAI's Swarm framework
class Agent:
    def __init__(self, name, instructions, user_data):
        self.name = name
        self.instructions = instructions
        self.user_data = user_data

    def act(self):
        if self.name == "Data Collection Agent":
            linkedin_info = fetch_linkedin_data(self.user_data['linkedin_url'])
            company_info = fetch_company_info(self.user_data['company_url'])
            return linkedin_info, company_info
        elif self.name == "Email Generation Agent":
            # user_data holds the (user_data, linkedin_info, company_info)
            # tuple assembled in run_agent, so unpack all three elements
            user_data, linkedin_info, company_info = self.user_data
            prompt = structure_email(user_data, linkedin_info, company_info)
            email_content = generate_email_content(OPENAI_API_KEY, prompt)
            return email_content

# Simulated Swarm class to manage agents
class Swarm:
    def __init__(self):
        self.agents = []

    def add_agent(self, agent):
        self.agents.append(agent)

    def run(self):
        for agent in self.agents:
            if agent.name == "Data Collection Agent":
                linkedin_info, company_info = agent.act()
                if "error" in linkedin_info or "error" in company_info:
                    return "Error fetching data. Please check the LinkedIn and company URLs."
                return linkedin_info, company_info

# Function that integrates the agents and manages iterations
def run_agent(name, email, phone, linkedin_url, company_url, role):
    user_data = {
        "name": name,
        "email": email,
        "phone": phone,
        "linkedin_url": linkedin_url,
        "company_url": company_url,
        "role": role
    }

    # Create a Swarm and add the Data Collection Agent
    email_swarm = Swarm()
    data_collection_agent = Agent("Data Collection Agent", "Collect user inputs and relevant data", user_data)
    email_swarm.add_agent(data_collection_agent)

    # Get data from the Data Collection Agent; run() returns an error string
    # instead of a (linkedin_info, company_info) tuple when a fetch fails
    result = email_swarm.run()
    if isinstance(result, str):
        return result
    linkedin_info, company_info = result

    # Pass the collected data to the Email Generation Agent
    email_agent = Agent("Email Generation Agent", "Generate the email content", (user_data, linkedin_info, company_info))
    email_content = email_agent.act()

    # Validate and refine the email using a ReAct pattern with a maximum of 3 iterations
    for _ in range(3):
        if validate_email(email_content):
            return email_content
        refined_prompt = f"Refine: {structure_email(user_data, linkedin_info, company_info)}"
        email_content = generate_email_content(OPENAI_API_KEY, refined_prompt)
    return "Unable to generate a valid email after 3 attempts."
# Set up the Gradio interface
final_interface = gr.Interface(
    fn=run_agent,
    inputs=[
        gr.Textbox(label="Name"),
        gr.Textbox(label="Email"),
        gr.Textbox(label="Phone Number"),
        gr.Textbox(label="LinkedIn Profile URL"),
        gr.Textbox(label="Company URL or Name"),
        gr.Textbox(label="Role Being Applied For")
    ],
    outputs="text",
    title="Email Writing AI Agent",
    description="Autonomously generate a professional email tailored to the job application."
)

if __name__ == "__main__":
    final_interface.launch()