Update app.py
app.py CHANGED

@@ -2,39 +2,45 @@ import requests
 from bs4 import BeautifulSoup
 import pandas as pd
 import gradio as gr
-import os
 from groq import Groq
 import creds # Assuming creds.py holds your API key as creds.api_key

 # Step 1: Scrape the free courses from Analytics Vidhya
-# Extracting course title, image, and course link
-for course_card in soup.find_all('header', class_='course-card__img-container'):
-    if img_tag:
-        title = img_tag.get('alt')
-        image_url = img_tag.get('src')
+def scrape_courses():
+    url = "https://courses.analyticsvidhya.com/pages/all-free-courses"
+    try:
+        response = requests.get(url)
+        response.raise_for_status() # Raise an error for bad status codes
+    except requests.RequestException as e:
+        print(f"Error fetching data: {e}")
+        return []
+
+    soup = BeautifulSoup(response.content, 'html.parser')
+    courses = []
+
+    # Extracting course title, image, and course link
+    for course_card in soup.find_all('header', class_='course-card__img-container'):
+        img_tag = course_card.find('img', class_='course-card__img')
+
+        if img_tag:
+            title = img_tag.get('alt')
+            image_url = img_tag.get('src')
+
+            link_tag = course_card.find_previous('a')
+            if link_tag:
+                course_link = link_tag.get('href')
+                if not course_link.startswith('http'):
+                    course_link = 'https://courses.analyticsvidhya.com' + course_link
+
+                courses.append({
+                    'title': title,
+                    'image_url': image_url,
+                    'course_link': course_link
+                })
+    return courses

 # Step 2: Create DataFrame
-df = pd.DataFrame(
+df = pd.DataFrame(scrape_courses())

 # Step 3: Initialize the Groq client and set the API key
 client = Groq(api_key=creds.api_key) # Properly passing the API key
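A quick way to sanity-check the selectors the new scrape_courses() relies on, before trusting it inside the app, is the minimal standalone sketch below: it fetches the same listing page and prints a few titles. It assumes network access; the URL and CSS class names are taken from the hunk above, and the local names (cards, img) are illustrative only.

import requests
from bs4 import BeautifulSoup

# Same listing page and selectors as scrape_courses() above.
url = "https://courses.analyticsvidhya.com/pages/all-free-courses"
response = requests.get(url, timeout=30)
response.raise_for_status()

soup = BeautifulSoup(response.content, 'html.parser')
cards = soup.find_all('header', class_='course-card__img-container')
print(f"Found {len(cards)} course cards")

# Titles come from the <img alt="..."> attribute inside each card header,
# mirroring the extraction done in scrape_courses().
for card in cards[:5]:
    img = card.find('img', class_='course-card__img')
    if img:
        print('-', img.get('alt'))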
@@ -202,12 +208,10 @@ iface = gr.Interface(
     description="Find the most relevant courses from Analytics Vidhya based on your query.",
     theme="huggingface",
     css=custom_css,
-    examples=[
-        ["Business Analytics"]
-    ],
+    examples=[["machine learning for beginners"],
+              ["advanced data visualization techniques"],
+              ["python programming basics"],
+              ["Business Analytics"]]
 )

 if __name__ == "__main__":
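For context on the reworked examples argument: Gradio treats each inner list as one pre-filled set of input values, so a single-textbox interface takes one string per example, rendered as a clickable suggestion under the input box. Below is a minimal sketch with a stand-in function; echo_query and demo are illustrative names, and the app's real search function, custom CSS, and Groq-backed ranking sit outside this diff.

import gradio as gr

def echo_query(query: str) -> str:
    # Stand-in for the app's course-search function (illustrative only).
    return f"You searched for: {query}"

demo = gr.Interface(
    fn=echo_query,
    inputs=gr.Textbox(label="What do you want to learn?"),
    outputs="text",
    # Each inner list fills the interface inputs in order.
    examples=[["machine learning for beginners"],
              ["Business Analytics"]],
)

if __name__ == "__main__":
    demo.launch()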