import os
import requests
import gradio as gr
from openai import OpenAI
import logging

# Configure logging
logging.basicConfig(level=logging.INFO)

# Fetch API keys from environment variables.
# Note: OPENAI_API_KEY is passed to NVIDIA's OpenAI-compatible endpoint below,
# so it should hold your NVIDIA API key.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
PROXYCURL_API_KEY = os.getenv("PROXYCURL_API_KEY")
FIRECRAWL_API_KEY = os.getenv("FIRECRAWL_API_KEY")


# Function to fetch LinkedIn profile data using the Proxycurl API
def fetch_linkedin_data(linkedin_url):
    api_key = os.getenv("PROXYCURL_API_KEY")
    headers = {'Authorization': f'Bearer {api_key}'}
    api_endpoint = 'https://nubela.co/proxycurl/api/v2/linkedin'
    logging.info("Fetching LinkedIn data...")
    response = requests.get(api_endpoint, params={'url': linkedin_url}, headers=headers, timeout=10)
    if response.status_code == 200:
        logging.info("LinkedIn data fetched successfully.")
        return response.json()
    else:
        logging.error(f"Error fetching LinkedIn data: {response.text}")
        return {"error": f"Error fetching LinkedIn data: {response.text}"}


# Function to fetch company information using the Firecrawl API
def fetch_company_info(company_url):
    api_key = os.getenv("FIRECRAWL_API_KEY")
    headers = {
        'Authorization': f'Bearer {api_key}',
        'Content-Type': 'application/json'
    }
    api_endpoint = 'https://api.firecrawl.dev/v1/crawl'
    data = {
        "url": company_url,
        "limit": 100,
        "scrapeOptions": {
            "formats": ["markdown", "html"]
        }
    }
    logging.info("Fetching company information...")
    # Note: the crawl endpoint runs asynchronously, so the immediate response may
    # contain a job reference rather than the crawled page content.
    response = requests.post(api_endpoint, json=data, headers=headers, timeout=15)
    if response.status_code == 200:
        logging.info("Company information fetched successfully.")
        return response.json()
    else:
        logging.error(f"Error fetching company information: {response.text}")
        return {"error": f"Error fetching company information: {response.text}"}


# Function to structure the email dynamically based on inputs and fetched data
def structure_email(user_data, linkedin_info, company_info):
    # Pull relevant fields from the LinkedIn profile and company information,
    # falling back to generic phrasing when a field is missing
    linkedin_role = linkedin_info.get('current_role', 'a professional')
    linkedin_skills = linkedin_info.get('skills', 'various relevant skills')
    company_mission = company_info.get('mission', 'your mission')
    company_goal = company_info.get('goal', 'achieving excellence in the field')

    # Construct the dynamic email content based on the provided and fetched information
    email_content = (
        f"Dear Hiring Manager,\n\n"
        f"I am writing to express my enthusiasm for the {user_data['role']} position at {user_data['company_url']}. "
        f"The mission of {company_mission} resonates deeply with me, as my professional experience aligns closely with this vision.\n\n"
        f"Having worked as {linkedin_role}, I have developed skills in {linkedin_skills}. These skills match the needs of your organization, "
        f"and I am confident in my ability to contribute effectively to {company_goal}.\n\n"
        f"I am eager to bring my expertise in {linkedin_skills} to your team, focusing on achieving key objectives and supporting your projects. "
        f"My goal is to make a meaningful impact and collaborate with like-minded professionals committed to excellence.\n\n"
        f"I would appreciate the opportunity to discuss how my background and skills align with the needs of {user_data['company_url']}. "
        f"Please find my resume attached for a more detailed overview of my qualifications.\n\n"
        f"Thank you for your time and consideration. I look forward to the possibility of contributing to your team.\n\n"
        f"Best regards,\n"
        f"{user_data['name']}"
    )
    return email_content


# Function to validate the generated email for professional tone and completeness
def validate_email(email_content):
    logging.info("Validating email content...")
    logging.info(f"Email Content for Validation: {email_content}")
    # Check whether the generated email contains the essential keywords
    if ("enthusiasm" in email_content and
            "skills" in email_content and
            "contribute" in email_content):
        logging.info("Email content validation passed.")
        return True
    else:
        logging.info("Email content validation failed.")
        return False


# Function to generate email content using the NVIDIA Nemotron LLM (non-streaming for simplicity)
def generate_email_content(api_key, prompt):
    client = OpenAI(
        base_url="https://integrate.api.nvidia.com/v1",
        api_key=api_key
    )
    logging.info("Generating email content...")
    try:
        response = client.chat.completions.create(
            model="nvidia/llama-3.1-nemotron-70b-instruct",
            messages=[
                {"role": "user", "content": prompt}
            ],
            temperature=0.5,
            top_p=1,
            max_tokens=1024,
            stream=False  # Disable streaming for simplicity
        )
        if hasattr(response, 'choices') and len(response.choices) > 0:
            email_content = response.choices[0].message.content
            logging.info("Email content generated successfully.")
            logging.info(f"Generated Email Content: {email_content}")
            return email_content
        else:
            logging.error("Error: No choices found in the response.")
            return "Error generating email content: No valid choices."
    except Exception as e:
        logging.error(f"Error generating email content: {e}")
        return "Error generating email content."


# Custom Agent class to simulate behavior similar to OpenAI's Swarm framework
class Agent:
    def __init__(self, name, instructions, user_data):
        self.name = name
        self.instructions = instructions
        self.user_data = user_data

    def act(self):
        if self.name == "Data Collection Agent":
            linkedin_info = fetch_linkedin_data(self.user_data['linkedin_url'])
            company_info = fetch_company_info(self.user_data['company_url'])
            return linkedin_info, company_info
        elif self.name == "Email Generation Agent":
            user_data = self.user_data['user_data']
            linkedin_info = self.user_data['linkedin_info']
            company_info = self.user_data['company_info']
            prompt = structure_email(user_data, linkedin_info, company_info)
            email_content = generate_email_content(OPENAI_API_KEY, prompt)
            return email_content


# Simulated Swarm class to manage agents
class Swarm:
    def __init__(self):
        self.agents = []

    def add_agent(self, agent):
        self.agents.append(agent)

    def run(self):
        for agent in self.agents:
            if agent.name == "Data Collection Agent":
                linkedin_info, company_info = agent.act()
                # Either fetch may return a dict with an "error" key on failure
                if "error" in linkedin_info or "error" in company_info:
                    return "Error fetching data. Please check the LinkedIn and company URLs."
                return linkedin_info, company_info


# Function that integrates the agents and manages iterations
def run_agent(name, email, phone, linkedin_url, company_url, role):
    user_data = {
        "name": name,
        "email": email,
        "phone": phone,
        "linkedin_url": linkedin_url,
        "company_url": company_url,
        "role": role
    }

    email_swarm = Swarm()
    data_collection_agent = Agent("Data Collection Agent", "Collect user inputs and relevant data", user_data)
    email_swarm.add_agent(data_collection_agent)

    # run() returns a single error string when data collection fails,
    # so check before unpacking the (linkedin_info, company_info) tuple
    swarm_result = email_swarm.run()
    if isinstance(swarm_result, str):
        return swarm_result
    linkedin_info, company_info = swarm_result

    agent_data = {
        "user_data": user_data,
        "linkedin_info": linkedin_info,
        "company_info": company_info
    }
    email_agent = Agent("Email Generation Agent", "Generate the email content", agent_data)
    email_content = email_agent.act()

    # Validate the draft; regenerate with a refinement prompt on failure, up to 3 attempts
    for i in range(3):
        if validate_email(email_content):
            return email_content
        else:
            refined_prompt = f"Refine: {structure_email(user_data, linkedin_info, company_info)}"
            email_content = generate_email_content(OPENAI_API_KEY, refined_prompt)

    return "Unable to generate a valid email after 3 attempts."


# Set up the Gradio interface
final_interface = gr.Interface(
    fn=run_agent,
    inputs=[
        gr.Textbox(label="Name"),
        gr.Textbox(label="Email"),
        gr.Textbox(label="Phone Number"),
        gr.Textbox(label="LinkedIn Profile URL"),
        gr.Textbox(label="Company URL or Name"),
        gr.Textbox(label="Role Being Applied For")
    ],
    outputs="text",
    title="Email Writing AI Agent",
    description="Autonomously generate a professional email tailored to the job application."
)

if __name__ == "__main__":
    final_interface.launch()
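
# Example run (a sketch, not part of the app itself). Assumptions: the script is
# saved as app.py, the three environment variables hold valid keys, and the
# required packages are installed:
#
#   pip install gradio openai requests
#   export OPENAI_API_KEY="<your NVIDIA API key>"    # used for the Nemotron endpoint above
#   export PROXYCURL_API_KEY="<your Proxycurl key>"
#   export FIRECRAWL_API_KEY="<your Firecrawl key>"
#   python app.py
#
# Gradio then serves the form locally (by default at http://127.0.0.1:7860).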