from smolagents.tools import Tool
import requests
from typing import List, Dict
import os
from dotenv import load_dotenv
from bs4 import BeautifulSoup

load_dotenv()


def scrape_indeed(position: str, location: str) -> List[Dict]:
    """
    Scrapes job postings from Indeed.
    """
    url = f"https://www.indeed.com/jobs?q=Odoo+{position}&l={location}"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }
    try:
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')

        jobs = []
        # Parse each job card on the results page into a structured dict.
        for div in soup.find_all('div', class_='jobsearch-SerpJobCard'):
            title_element = div.find('a', title=True)
            title = title_element.text if title_element else 'N/A'

            company_element = div.find('span', class_='company')
            company = company_element.text.strip() if company_element else 'N/A'

            location_element = div.find('div', class_='location')
            job_location = location_element.text if location_element else 'N/A'

            link_element = div.find('a', href=True)
            link = ('https://www.indeed.com' + link_element['href']) if link_element else 'N/A'

            jobs.append({
                "Title": title,
                "Company": company,
                "Location": job_location,
                "Link": link,
                "Source": "Indeed"
            })
        return jobs
    except requests.exceptions.RequestException as e:
        print(f"Indeed scraping failed: {e}")
        return []


class LinkedInJobSearchTool(Tool):
    name = "linkedin_job_search"
    description = (
        "Searches for job postings on LinkedIn and Indeed based on job title, "
        "location, and work mode (remote, hybrid, in-office) for Odoo profiles."
    )
    inputs = {
        "position": {"type": "string", "description": "Job title (e.g., Data Scientist)"},
        "location": {"type": "string", "description": "City or country (e.g., Germany)"},
        "work_mode": {"type": "string", "description": "remote, hybrid, in-office"}
    }
    output_type = "array"

    def forward(self, position: str, location: str, work_mode: str) -> List[Dict]:
        """
        Fetches job listings from LinkedIn (via the Brave API) and Indeed and
        returns a list containing a formatted summary of the combined results.
        """
        BRAVE_API_KEY = os.getenv("BRAVE_API_KEY")
        if not BRAVE_API_KEY:
            return [{"Error": "Brave API key not found in .env file."}]

        linkedin_results = []
        indeed_results = []

        # LinkedIn Job Search
        base_url = "https://api.brave.com/v1/jobs"
        params = {
            "q": f"Odoo {position} {work_mode} jobs",
            "location": location,
            "api_key": BRAVE_API_KEY
        }
        try:
            response = requests.get(base_url, params=params, timeout=10)
            response.raise_for_status()
            data = response.json()
            linkedin_jobs = data.get("jobs", [])
            for job in linkedin_jobs:
                linkedin_results.append({
                    "Title": job.get('title', 'N/A'),
                    "Company": job.get('company', 'N/A'),
                    "Location": job.get('location', 'N/A'),
                    "Posted": job.get('posted_date', 'N/A'),
                    "Link": job.get('url', 'N/A'),
                    "Source": "LinkedIn"
                })
        except requests.exceptions.RequestException as e:
            linkedin_results = [{"Error": f"LinkedIn Error: {str(e)}"}]

        # Indeed Job Search
        indeed_results = scrape_indeed(position, location)

        # Combine results, prioritizing LinkedIn
        combined_results = linkedin_results + indeed_results

        # Format the combined results into a single human-readable string
        formatted_results = ""
        if combined_results:
            for job in combined_results:
                formatted_results += f"Title: {job.get('Title', 'N/A')}\n"
                formatted_results += f"Company: {job.get('Company', 'N/A')}\n"
                formatted_results += f"Location: {job.get('Location', 'N/A')}\n"
                formatted_results += f"Posted: {job.get('Posted', 'N/A')}\n"
                formatted_results += f"Link: {job.get('Link', 'N/A')}\n"
                formatted_results += f"Source: {job.get('Source', 'N/A')}\n"
                formatted_results += "---\n"
        else:
            formatted_results = "No jobs found. Try different keywords."

        return [{"Results": formatted_results}]