Upload app.py
app.py
CHANGED
@@ -10,6 +10,35 @@ import yt_dlp
 import requests
 from bs4 import BeautifulSoup
 import re
+import json
+
+try:
+    with open('youtube.json', 'r') as f:
+        cookies = json.load(f)
+
+    cookie_content = """# Netscape HTTP Cookie File
+# https://curl.haxx.se/docs/http-cookies.html
+# This file is generated by yt-dlp! Edit at your own risk.
+
+"""
+    for cookie in cookies:
+        domain = cookie.get('domain', '')
+        if not domain.startswith('.'):  # Ensure domain starts with a dot
+            domain = '.' + domain
+        path = cookie.get('path', '/')
+        secure = "TRUE" if cookie.get('secure', False) else "FALSE"
+        expires = str(int(cookie.get('expirationDate', 2147483647)))
+        name = cookie.get('name', '')
+        value = cookie.get('value', '')
+
+        if domain and name and value:
+            cookie_line = f"{domain}\tTRUE\t{path}\t{secure}\t{expires}\t{name}\t{value}\n"
+            cookie_content += cookie_line
+
+    with open('youtube_cookies.txt', 'w', encoding='utf-8') as f:
+        f.write(cookie_content)
+except Exception as e:
+    print(f"Error processing cookies: {e}")
 
 # Load environment variables
 load_dotenv()
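The new startup block assumes youtube.json is a browser-style cookie export: a JSON list of objects with domain, name, value, path, secure and expirationDate fields, which it rewrites into the tab-separated Netscape format that yt-dlp can read. As a rough illustration only (the field values below are invented, not taken from the app), one such entry and the line the loop would emit:

import json

# Hypothetical entry in youtube.json, matching the fields the conversion loop reads
# (domain, name, value, path, secure, expirationDate). Values are invented.
sample_cookie = {
    "domain": "youtube.com",       # the loop prepends a leading dot if missing
    "name": "PREF",                # illustrative cookie name/value only
    "value": "hl=en",
    "path": "/",
    "secure": True,
    "expirationDate": 1900000000,  # Unix timestamp; the loop defaults to 2147483647
}
print(json.dumps([sample_cookie], indent=2))

# The conversion above would turn this entry into one Netscape-format line in
# youtube_cookies.txt (fields separated by tabs):
# .youtube.com  TRUE  /  TRUE  1900000000  PREF  hl=en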
@@ -47,12 +76,7 @@ def get_youtube_content(url):
         'extract_flat': True,
         'quiet': True,
         'no_warnings': True,
-        'extractor_args': {
-            'youtube': {
-                'skip': ['dash', 'hls'],
-            }
-        },
-        'cookiesfrombrowser': ('chrome', ),  # Get cookies from Chrome
+        'cookiefile': 'youtube_cookies.txt'  # Use the cookies file we created
     }
 
     with yt_dlp.YoutubeDL(ydl_opts) as ydl:
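For reference, a minimal sketch of how yt-dlp consumes the new 'cookiefile' option; this is not the app's full get_youtube_content, and the URL is a placeholder:

import yt_dlp

ydl_opts = {
    'extract_flat': True,
    'quiet': True,
    'no_warnings': True,
    'cookiefile': 'youtube_cookies.txt',  # Netscape-format file written at import time
}

# Placeholder URL; extract_info with download=False returns only the metadata dict.
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    info = ydl.extract_info('https://www.youtube.com/watch?v=VIDEO_ID', download=False)
    print(info.get('title'))

The removed 'cookiesfrombrowser': ('chrome', ) option reads cookies straight from a local Chrome profile, which is typically unavailable where this app is deployed; pointing 'cookiefile' at the file generated from youtube.json keeps authenticated requests working without a browser on the host.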
@@ -78,7 +102,7 @@ Description:
     except Exception as e:
         st.error(f"Error getting YouTube content: {str(e)}")
         return None
-
+
 def get_website_content(url):
     """Get content from website using requests and BeautifulSoup"""
     try: