Commit 8f79bc5
1 Parent(s): cd23862
12-09-2024

Files changed:

- LICENSE.md +24 -0
- backend/api/__pycache__/web_scrape.cpython-312.pyc +0 -0
- backend/api/queue.py +1 -1
- backend/api/web_scrape.py +75 -45
- backend/invoke_worker/__init__.py +2 -0
- backend/invoke_worker/__pycache__/__init__.cpython-312.pyc +0 -0
- backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc +0 -0
- backend/invoke_worker/__pycache__/webscrape_get.cpython-312.pyc +0 -0
- backend/invoke_worker/__pycache__/webscrape_get_cover.cpython-312.pyc +0 -0
- backend/invoke_worker/chapter_queue.py +9 -0
- backend/invoke_worker/webscrape_get.py +116 -0
- backend/invoke_worker/webscrape_get_cover.py +121 -0
- backend/migrations/0001_initial.py +16 -3
- backend/migrations/0002_alter_requestwebscrapegetcovercache_id.py +19 -0
- backend/migrations/0003_requestwebscrapegetcache.py +23 -0
- backend/migrations/0004_remove_requestwebscrapegetcache_page_and_more.py +23 -0
- backend/migrations/{0002_webscrapegetcovercache.py → 0005_webscrapegetcache.py} +3 -4
- backend/migrations/__pycache__/0001_initial.cpython-312.pyc +0 -0
- backend/migrations/__pycache__/0002_alter_requestwebscrapegetcovercache_id.cpython-312.pyc +0 -0
- backend/migrations/__pycache__/0002_webscrapegetcovercache.cpython-312.pyc +0 -0
- backend/migrations/__pycache__/0003_requestwebscrapegetcache.cpython-312.pyc +0 -0
- backend/migrations/__pycache__/0004_remove_requestwebscrapegetcache_page_and_more.cpython-312.pyc +0 -0
- backend/migrations/__pycache__/0005_webscrapegetcache.cpython-312.pyc +0 -0
- backend/models/__pycache__/model_1.cpython-312.pyc +0 -0
- backend/models/__pycache__/model_cache.cpython-312.pyc +0 -0
- backend/models/model_1.py +11 -1
- backend/models/model_cache.py +11 -2
- core/__pycache__/middleware.cpython-312.pyc +0 -0
- core/__pycache__/routers.cpython-312.pyc +0 -0
- core/middleware.py +5 -2
- core/routers.py +1 -2
- frontend/app/bookmark/components/comic_component.tsx +7 -3
- frontend/app/bookmark/components/widgets/bookmark.tsx +12 -5
- frontend/app/bookmark/index.tsx +108 -82
- frontend/app/read/[source]/[comic_id]/[chapter_idx].tsx +1 -0
- frontend/app/view/[source]/[comic_id].tsx +3 -2
- frontend/app/view/componenets/widgets/bookmark.tsx +12 -5
- frontend/app/view/modules/content.tsx +10 -3
LICENSE.md
ADDED
@@ -0,0 +1,24 @@
+# Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)
+
+## Permissions
+- **Share** — copy and redistribute the material in any medium or format
+- **Adapt** — remix, transform, and build upon the material
+
+The licensor cannot revoke these freedoms as long as you follow the license terms.
+
+## Limitations
+- **No Commercial Use** — You may not use the material for commercial purposes.
+- **No Additional Restrictions** — You may not apply legal terms or technological measures that legally restrict others from doing anything the license permits.
+- **No Endorsement** — Nothing in this license constitutes or may be construed as permission to assert or imply that you are, or that your use of the material is, connected with, or sponsored, endorsed, or granted official status by, the licensor or others designated to receive attribution.
+
+## Conditions
+- **Attribution** — You must give appropriate credit, provide a link to the license, and indicate if changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
+- **Indicate Changes** — If you modify the material, you must indicate that changes were made. You may do so in any reasonable manner, but not in any way that suggests the licensor endorses you or your use.
+- **Include License** — You must include the text of, or the URI or hyperlink to, this license in any copies or substantial portions of the material.
+
+## Notices
+You do not have to comply with the license for elements of the material in the public domain or where your use is permitted by an applicable exception or limitation.
+
+No warranties are given. The license may not give you all of the permissions necessary for your intended use. For example, other rights such as publicity, privacy, or moral rights may limit how you use the material.
+
+For more details, see the full license text [here](https://creativecommons.org/licenses/by-nc/4.0/legalcode).
backend/api/__pycache__/web_scrape.cpython-312.pyc
CHANGED
Binary files a/backend/api/__pycache__/web_scrape.cpython-312.pyc and b/backend/api/__pycache__/web_scrape.cpython-312.pyc differ
backend/api/queue.py
CHANGED
@@ -19,7 +19,7 @@ env = environ.Env()
 
 
 @csrf_exempt
-@ratelimit(key='ip', rate='
+@ratelimit(key='ip', rate='30/m')
 def request_chapter(request):
     try:
         if request.method != "POST": return HttpResponseBadRequest('Allowed POST request only!', status=400)
backend/api/web_scrape.py
CHANGED
@@ -1,6 +1,6 @@
 
 import json, environ, requests, os, subprocess
-import asyncio, uuid, shutil
+import asyncio, uuid, shutil, time
 
 from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest, StreamingHttpResponse
 from django_ratelimit.decorators import ratelimit
@@ -9,10 +9,10 @@ from asgiref.sync import sync_to_async
 
 from backend.module import web_scrap
 from backend.module.utils import manage_image
-from backend.models.model_cache import RequestCache
 from core.settings import BASE_DIR
 from backend.module.utils import cloudflare_turnstile
-from backend.models.model_1 import
+from backend.models.model_1 import WebScrapeGetCoverCache, WebScrapeGetCache
+from backend.models.model_cache import RequestWebScrapeGetCoverCache, RequestWebScrapeGetCache
 
 from backend.module.utils import directory_info, date_utils
 
@@ -23,7 +23,7 @@ STORAGE_DIR = os.path.join(BASE_DIR,"storage")
 if not os.path.exists(STORAGE_DIR): os.makedirs(STORAGE_DIR)
 
 @csrf_exempt
-@ratelimit(key='ip', rate='
+@ratelimit(key='ip', rate='30/m')
 def get_list(request):
     if request.method != "POST": return HttpResponseBadRequest('Allowed POST request only!', status=400)
     token = request.META.get('HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN')
@@ -34,11 +34,6 @@ def get_list(request):
     page = payload.get("page")
     source = payload.get("source")
 
-
-
-
-
-
     if search.get("text"): DATA = web_scrap.source_control[source].search.scrap(search=search,page=page)
     else: DATA = web_scrap.source_control["colamanga"].get_list.scrap(page=page)
 
@@ -47,7 +42,7 @@
 
 
 @csrf_exempt
-@ratelimit(key='ip', rate='
+@ratelimit(key='ip', rate='30/m')
 def get(request):
     if request.method != "POST": return HttpResponseBadRequest('Allowed POST request only!', status=400)
     token = request.META.get('HTTP_X_CLOUDFLARE_TURNSTILE_TOKEN')
@@ -56,12 +51,57 @@ def get(request):
     payload = json.loads(request.body)
     id = payload.get("id")
     source = payload.get("source")
-
+
+    file_path = ""
+    file_name = ""
+    chunk_size = 8192
+
     try:
-
-
-
+        query_result = WebScrapeGetCache.objects.filter(source=source,comic_id=id).first()
+        if (
+            query_result
+            and query_result.datetime >= date_utils.utc_time().add(-5,'hour').get()
+            and os.path.exists(query_result.file_path)
+        ):
+            file_path = query_result.file_path
+            file_name = os.path.basename(file_path)
+
+        else:
+            request_query = RequestWebScrapeGetCache.objects.filter(source=source,comic_id=id).first()
+            if not request_query:
+                RequestWebScrapeGetCache(
+                    source=source,
+                    comic_id=id,
+                ).save()
+
+            timeout = date_utils.utc_time().add(30,'second').get()
+            while True:
+                if date_utils.utc_time().get() >= timeout: return HttpResponseBadRequest('Request timeout!', status=408)
+                count = RequestWebScrapeGetCache.objects.filter(source=source,comic_id=id).count()
+                if count: time.sleep(1)
+                else: break
+            query_result = WebScrapeGetCache.objects.filter(source=source,comic_id=id).first()
+
+            if (query_result):
+                file_path = query_result.file_path
+                if not os.path.exists(file_path): return HttpResponseBadRequest('Worker is done but item not found.!', status=404)
+                file_name = os.path.basename(file_path)
+            else:
+                return HttpResponseBadRequest('Worker is done but item not found.!', status=404)
+
+        def file_iterator():
+            with open(file_path, 'r') as f:
+                while chunk := f.read(chunk_size):
+                    yield chunk
+
+        response = StreamingHttpResponse(file_iterator(), content_type='application/json')
+        response['Content-Length'] = os.path.getsize(file_path)
+        response['Content-Disposition'] = f'attachment; filename="{file_name}"'
+        return response
     except Exception as e:
+        print(e)
         return HttpResponseBadRequest(str(e), status=500)
 
 
@@ -73,10 +113,9 @@ def get_cover(request,source,id,cover_id):
     file_path = ""
     file_name = ""
     chunk_size = 8192
-    MAX_COVER_STORAGE_SIZE = 10 * 1024 * 1024 * 1024
 
     try:
-        query_result =
+        query_result = WebScrapeGetCoverCache.objects.filter(source=source,comic_id=id,cover_id=cover_id).first()
         if (
             query_result
             and os.path.exists(query_result.file_path)
@@ -86,37 +125,28 @@ def get_cover(request,source,id,cover_id):
             file_name = os.path.basename(file_path)
 
         else:
-
+            request_query = RequestWebScrapeGetCoverCache.objects.filter(source=source,comic_id=id,cover_id=cover_id).first()
+            if not request_query:
+                RequestWebScrapeGetCoverCache(
+                    source=source,
+                    comic_id=id,
+                    cover_id=cover_id
+                ).save()
 
+            timeout = date_utils.utc_time().add(30,'second').get()
             while True:
-
-
-
-                if (query_result):
-                    file_path = query_result.file_path
-                    if os.path.exists(file_path): shutil.rmtree(file_path)
-                    WebscrapeGetCoverCache.objects.filter(file_path=query_result.file_path).delete()
-                else:
-                    shutil.rmtree(os.path.join(STORAGE_DIR,"covers"))
-                    break
+                if date_utils.utc_time().get() >= timeout: return HttpResponseBadRequest('Request timeout!', status=408)
+                count = RequestWebScrapeGetCoverCache.objects.filter(source=source,comic_id=id,cover_id=cover_id).count()
+                if count: time.sleep(1)
                 else: break
-
-
-            DATA = web_scrap.source_control[source].get_cover.scrap(id=id,cover_id=cover_id)
-            if not DATA: HttpResponseBadRequest('Image Not found!', status=404)
-
-            file_path = os.path.join(STORAGE_DIR,"covers",f'{source}-{id}-{cover_id}.png')
-            file_name = os.path.basename(file_path)
-
-            with open(file_path, "wb") as f: f.write(DATA)
-
-            WebscrapeGetCoverCache(
-                file_path=file_path,
-                source=source,
-                comic_id=id,
-                cover_id=cover_id,
-            ).save()
+            query_result = WebScrapeGetCoverCache.objects.filter(source=source,comic_id=id,cover_id=cover_id).first()
+
+            if (query_result):
+                file_path = query_result.file_path
+                if not os.path.exists(file_path): return HttpResponseBadRequest('Worker is done but item not found.!', status=404)
+                file_name = os.path.basename(file_path)
+            else:
+                return HttpResponseBadRequest('Worker is done but item not found.!', status=404)
 
 
         def file_iterator():
@@ -124,12 +154,12 @@ def get_cover(request,source,id,cover_id):
             while chunk := f.read(chunk_size):
                 yield chunk
 
-        response = StreamingHttpResponse(file_iterator())
-        response['Content-Type'] = 'application/octet-stream'
+        response = StreamingHttpResponse(file_iterator(), content_type='image/png')
         response['Content-Length'] = os.path.getsize(file_path)
         response['Content-Disposition'] = f'attachment; filename="{file_name}"'
         return response
     except Exception as e:
+        print(e)
         return HttpResponseBadRequest(str(e), status=500)
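Note on the hunk above: the reworked `get` view no longer scrapes inline. It serves a cached JSON file when one is fresh (written less than five hours ago), otherwise it inserts a `RequestWebScrapeGetCache` row and polls until the background worker clears it, returning 408 after 30 seconds. A minimal standalone sketch of this cache-or-enqueue-and-poll flow, with hypothetical `cache_lookup`, `enqueue_request`, and `request_pending` callables standing in for the ORM queries:

    import time
    from datetime import datetime, timedelta, timezone

    FRESHNESS = timedelta(hours=5)      # matches date_utils.utc_time().add(-5,'hour')
    WAIT_BUDGET = timedelta(seconds=30) # matches the view's timeout

    def fetch_or_wait(cache_lookup, enqueue_request, request_pending, key):
        # 1) Fresh cache hit: serve immediately.
        entry = cache_lookup(key)
        now = datetime.now(timezone.utc)
        if entry and now - entry["datetime"] < FRESHNESS:
            return entry["file_path"]

        # 2) Miss: ask the worker to (re)build it, then poll the request queue.
        enqueue_request(key)
        deadline = now + WAIT_BUDGET
        while request_pending(key):
            if datetime.now(timezone.utc) >= deadline:
                raise TimeoutError("Request timeout!")  # the view returns 408 here
            time.sleep(1)

        # 3) The worker cleared the request; the result row should now exist.
        entry = cache_lookup(key)
        if entry is None:
            raise FileNotFoundError("Worker is done but item not found.")  # 404 in the view
        return entry["file_path"]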
backend/invoke_worker/__init__.py
CHANGED
@@ -5,5 +5,7 @@ if len(sys.argv) > 1 and not (sys.argv[1] in ['migrate', "makemigrations", "clea
 from backend.invoke_worker import (
     session,
     chapter_queue,
+    webscrape_get_cover,
+    webscrape_get,
 )
backend/invoke_worker/__pycache__/__init__.cpython-312.pyc
CHANGED
Binary files a/backend/invoke_worker/__pycache__/__init__.cpython-312.pyc and b/backend/invoke_worker/__pycache__/__init__.cpython-312.pyc differ
backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc
CHANGED
Binary files a/backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc and b/backend/invoke_worker/__pycache__/chapter_queue.cpython-312.pyc differ
backend/invoke_worker/__pycache__/webscrape_get.cpython-312.pyc
ADDED
Binary file (6.62 kB)
backend/invoke_worker/__pycache__/webscrape_get_cover.cpython-312.pyc
ADDED
Binary file (6.94 kB)
backend/invoke_worker/chapter_queue.py
CHANGED
@@ -73,8 +73,10 @@ class Job(Thread):
                     if (stored):
                         SocketRequestChapterQueueCache.objects.filter(id=query_result.id).delete()
                         connections['cache'].close()
+                        connections['DB1'].close()
                     else:
                         connections['cache'].close()
+                        connections['DB1'].close()
 
                     if (options.get("colorize") or options.get("translate").get("state")):
                         script = []
@@ -169,9 +171,11 @@ class Job(Thread):
                                 }
                             })
                             connections['cache'].close()
+                            connections['DB1'].close()
 
                         else:
                             connections['cache'].close()
+                            connections['DB1'].close()
                             raise Exception("#1 Dowload chapter error!")
                     else:
                         input_dir = os.path.join(COMIC_STORAGE_DIR,source,comic_id,str(chapter_idx),"original")
@@ -226,10 +230,13 @@ class Job(Thread):
                             })
                         else:
                             connections['cache'].close()
+                            connections['DB1'].close()
                             raise Exception("#2 Dowload chapter error!")
                     connections['cache'].close()
+                    connections['DB1'].close()
                 else:
                     connections['cache'].close()
+                    connections['DB1'].close()
                     sleep(5)
             except Exception as e:
                 print("[Error] Chapter Queue Socket:", e)
@@ -256,6 +263,7 @@ class Job(Thread):
 
 
             connections['cache'].close()
+            connections['DB1'].close()
             sleep(10)
 
 thread = Job()
@@ -303,6 +311,7 @@ class UpdateSocketQueue(Thread):
                 print(e)
 
                 connections['cache'].close()
+                connections['DB1'].close()
                 sleep(10)
 thread = UpdateSocketQueue()
 thread.daemon = True
backend/invoke_worker/webscrape_get.py
ADDED
@@ -0,0 +1,116 @@
+
+import json, environ, requests, os, subprocess
+import asyncio, uuid, shutil, sys
+
+from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest, StreamingHttpResponse
+from django_ratelimit.decorators import ratelimit
+from django.views.decorators.csrf import csrf_exempt
+from asgiref.sync import sync_to_async
+from django.db import connections
+
+from backend.module import web_scrap
+from backend.module.utils import manage_image
+from backend.models.model_cache import RequestWebScrapeGetCache
+from core.settings import BASE_DIR
+from backend.module.utils import cloudflare_turnstile
+from backend.models.model_1 import WebScrapeGetCache
+
+from backend.module.utils import directory_info, date_utils
+
+from django_thread import Thread
+from time import sleep
+
+MAX_GET_STORAGE_SIZE = 10 * 1024 * 1024 * 1024 # GB
+
+STORAGE_DIR = os.path.join(BASE_DIR,"storage")
+if not os.path.exists(STORAGE_DIR): os.makedirs(STORAGE_DIR)
+
+class Get(Thread):
+    def run(self):
+        while True:
+            connections['cache'].close()
+            connections['DB1'].close()
+            is_request = False
+
+            request_query = RequestWebScrapeGetCache.objects.order_by("datetime").first()
+            try:
+                if request_query:
+                    query_result = WebScrapeGetCache.objects.filter(source=request_query.source,comic_id=request_query.comic_id).first()
+                    if (
+                        query_result
+                        and os.path.exists(query_result.file_path)
+                        and query_result.datetime >= date_utils.utc_time().add(-5,'hour').get()
+
+                    ):
+                        RequestWebScrapeGetCache.objects.filter(
+                            source=request_query.source,
+                            comic_id=request_query.comic_id,
+                        ).delete()
+                        is_request = False
+                    elif query_result:
+                        WebScrapeGetCache.objects.filter(
+                            file_path=query_result.file_path
+                        ).delete()
+                        is_request = True
+                    else: is_request = True
+                else: is_request = False
+
+                if is_request:
+                    if not os.path.exists(os.path.join(STORAGE_DIR,"get",request_query.source)): os.makedirs(os.path.join(STORAGE_DIR,"get",request_query.source))
+
+                    connections['cache'].close()
+                    connections['DB1'].close()
+
+                    while True:
+                        storage_size = directory_info.GetDirectorySize(directory=os.path.join(STORAGE_DIR,"get"),max_threads=5)
+                        if (storage_size >= MAX_GET_STORAGE_SIZE ):
+                            request_query = WebScrapeGetCache.objects.order_by("datetime").first()
+                            if (request_query):
+                                file_path = request_query.file_path
+                                if os.path.exists(file_path): os.remove(file_path)
+                                WebScrapeGetCache.objects.filter(file_path=request_query.file_path).delete()
+                            else:
+                                shutil.rmtree(os.path.join(STORAGE_DIR,"get"))
+                                break
+                        else: break
+
+
+
+                    DATA = web_scrap.source_control[request_query.source].get.scrap(id=request_query.comic_id)
+                    if not DATA: raise Exception('Image Not found!')
+
+                    file_path = os.path.join(STORAGE_DIR,"get",request_query.source,f'{request_query.comic_id}.json')
+
+                    with open(file_path, "w") as f: json.dump(DATA, f, indent=None, separators=(',', ':'))
+
+                    connections['cache'].close()
+                    connections['DB1'].close()
+                    WebScrapeGetCache(
+                        file_path=file_path,
+                        source=request_query.source,
+                        comic_id=request_query.comic_id,
+                    ).save()
+                    RequestWebScrapeGetCache.objects.filter(
+                        source=request_query.source,
+                        comic_id=request_query.comic_id,
+                    ).delete()
+                else:
+                    sleep(5)
+            except Exception as e:
+                connections['cache'].close()
+                connections['DB1'].close()
+                RequestWebScrapeGetCache.objects.filter(
+                    source=request_query.source,
+                    comic_id=request_query.comic_id,
+                ).delete()
+                print("[Error 'Webscrape-Get' Worker]: ")
+                exc_type, exc_obj, exc_tb = sys.exc_info()
+                line_number = exc_tb.tb_lineno
+                print(f"Error on line {line_number}: {e}")
+                sleep(5)
+
+thread = Get()
+thread.daemon = True
+thread.start()
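The worker's inner `while True` loop above is a size-capped eviction pass: while `storage/get` exceeds `MAX_GET_STORAGE_SIZE`, it deletes the oldest `WebScrapeGetCache` row (ordered by `datetime`) together with its file, and wipes the whole directory if no rows remain. The same policy in isolation, assuming cache rows arrive as an oldest-first list of file paths and `dir_size` measures the directory:

    import os, shutil

    def evict_until_under(limit_bytes, storage_dir, oldest_first_paths, dir_size):
        """Drop the oldest cached files until the directory fits under the cap."""
        while dir_size(storage_dir) >= limit_bytes:
            if oldest_first_paths:
                file_path = oldest_first_paths.pop(0)  # oldest datetime first
                if os.path.exists(file_path):
                    os.remove(file_path)               # file first, then its DB row
            else:
                # Nothing left to evict row-by-row: remove the directory wholesale.
                shutil.rmtree(storage_dir)
                break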
backend/invoke_worker/webscrape_get_cover.py
ADDED
@@ -0,0 +1,121 @@
+
+import json, environ, requests, os, subprocess
+import asyncio, uuid, shutil, sys
+
+from django.http import HttpResponse, JsonResponse, HttpResponseBadRequest, StreamingHttpResponse
+from django_ratelimit.decorators import ratelimit
+from django.views.decorators.csrf import csrf_exempt
+from asgiref.sync import sync_to_async
+from django.db import connections
+
+from backend.module import web_scrap
+from backend.module.utils import manage_image
+from backend.models.model_cache import RequestWebScrapeGetCoverCache
+from core.settings import BASE_DIR
+from backend.module.utils import cloudflare_turnstile
+from backend.models.model_1 import WebScrapeGetCoverCache
+
+from backend.module.utils import directory_info, date_utils
+
+from django_thread import Thread
+from time import sleep
+
+MAX_COVER_STORAGE_SIZE = 20 * 1024 * 1024 * 1024 # GB
+
+STORAGE_DIR = os.path.join(BASE_DIR,"storage")
+if not os.path.exists(STORAGE_DIR): os.makedirs(STORAGE_DIR)
+
+class GetCover(Thread):
+    def run(self):
+        while True:
+            connections['cache'].close()
+            connections['DB1'].close()
+            is_request = False
+
+            request_query = RequestWebScrapeGetCoverCache.objects.order_by("datetime").first()
+            try:
+
+
+                if request_query:
+                    query_result = WebScrapeGetCoverCache.objects.filter(source=request_query.source,comic_id=request_query.comic_id,cover_id=request_query.cover_id).first()
+                    if (
+                        query_result
+                        and os.path.exists(query_result.file_path)
+                        and query_result.datetime >= date_utils.utc_time().add(-5,'hour').get()
+                    ):
+                        RequestWebScrapeGetCoverCache.objects.filter(
+                            source=request_query.source,
+                            comic_id=request_query.comic_id,
+                            cover_id=request_query.cover_id
+                        ).delete()
+                        is_request = False
+                    elif query_result:
+                        WebScrapeGetCoverCache.objects.filter(
+                            file_path=query_result.file_path
+                        ).delete()
+                        is_request = True
+                    else: is_request = True
+                else: is_request = False
+
+                if is_request:
+                    if not os.path.exists(os.path.join(STORAGE_DIR,"covers",request_query.source)): os.makedirs(os.path.join(STORAGE_DIR,"covers",request_query.source))
+
+                    connections['cache'].close()
+                    connections['DB1'].close()
+
+                    while True:
+                        storage_size = directory_info.GetDirectorySize(directory=os.path.join(STORAGE_DIR,"covers"),max_threads=5)
+                        if (storage_size >= MAX_COVER_STORAGE_SIZE):
+                            request_query = WebScrapeGetCoverCache.objects.order_by("datetime").first()
+                            if (request_query):
+                                file_path = request_query.file_path
+                                if os.path.exists(file_path): os.remove(file_path)
+                                WebScrapeGetCoverCache.objects.filter(file_path=request_query.file_path).delete()
+                            else:
+                                shutil.rmtree(os.path.join(STORAGE_DIR,"covers"))
+                                break
+                        else: break
+
+
+
+                    DATA = web_scrap.source_control[request_query.source].get_cover.scrap(id=request_query.comic_id,cover_id=request_query.cover_id)
+                    if not DATA: raise Exception('Image Not found!')
+
+                    file_path = os.path.join(STORAGE_DIR,"covers",request_query.source,f'{request_query.comic_id}-{request_query.cover_id}.png')
+
+                    with open(file_path, "wb") as f: f.write(DATA)
+
+                    connections['cache'].close()
+                    connections['DB1'].close()
+                    WebScrapeGetCoverCache(
+                        file_path=file_path,
+                        source=request_query.source,
+                        comic_id=request_query.comic_id,
+                        cover_id=request_query.cover_id,
+                    ).save()
+                    RequestWebScrapeGetCoverCache.objects.filter(
+                        source=request_query.source,
+                        comic_id=request_query.comic_id,
+                        cover_id=request_query.cover_id
+                    ).delete()
+                else:
+                    sleep(5)
+            except Exception as e:
+                connections['cache'].close()
+                connections['DB1'].close()
+                RequestWebScrapeGetCoverCache.objects.filter(
+                    source=request_query.source,
+                    comic_id=request_query.comic_id,
+                    cover_id=request_query.cover_id
+                ).delete()
+                print("[Error 'Webscrape-Get-Cover' Worker]: ")
+                exc_type, exc_obj, exc_tb = sys.exc_info()
+                line_number = exc_tb.tb_lineno
+                print(f"Error on line {line_number}: {e}")
+                sleep(5)
+
+thread = GetCover()
+thread.daemon = True
+thread.start()
backend/migrations/0001_initial.py
CHANGED
@@ -1,4 +1,4 @@
-# Generated by Django 5.1.1 on 2024-12-
+# Generated by Django 5.1.1 on 2024-12-07 07:22
 
 import backend.models.model_1
 import backend.models.model_cache
@@ -37,9 +37,12 @@ class Migration(migrations.Migration):
         ],
     ),
     migrations.CreateModel(
-        name='
+        name='RequestWebScrapeGetCoverCache',
         fields=[
-            ('
+            ('id', models.UUIDField(primary_key=True, serialize=False)),
+            ('source', models.TextField()),
+            ('comic_id', models.TextField()),
+            ('cover_id', models.TextField()),
             ('datetime', models.DateTimeField(default=backend.models.model_cache.get_current_utc_time)),
         ],
     ),
@@ -57,4 +60,14 @@ class Migration(migrations.Migration):
             ('datetime', models.DateTimeField(default=backend.models.model_cache.get_current_utc_time)),
         ],
     ),
+    migrations.CreateModel(
+        name='WebScrapeGetCoverCache',
+        fields=[
+            ('file_path', models.TextField(primary_key=True, serialize=False)),
+            ('source', models.TextField()),
+            ('comic_id', models.TextField()),
+            ('cover_id', models.TextField()),
+            ('datetime', models.DateTimeField(default=backend.models.model_1.get_current_utc_time)),
+        ],
+    ),
 ]
backend/migrations/0002_alter_requestwebscrapegetcovercache_id.py
ADDED
@@ -0,0 +1,19 @@
+# Generated by Django 5.1.1 on 2024-12-07 07:58
+
+import uuid
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('backend', '0001_initial'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='requestwebscrapegetcovercache',
+            name='id',
+            field=models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False),
+        ),
+    ]
backend/migrations/0003_requestwebscrapegetcache.py
ADDED
@@ -0,0 +1,23 @@
+# Generated by Django 5.1.1 on 2024-12-07 15:52
+
+import backend.models.model_cache
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('backend', '0002_alter_requestwebscrapegetcovercache_id'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='RequestWebScrapeGetCache',
+            fields=[
+                ('page', models.IntegerField(primary_key=True, serialize=False)),
+                ('source', models.TextField()),
+                ('comic_id', models.TextField()),
+                ('datetime', models.DateTimeField(default=backend.models.model_cache.get_current_utc_time)),
+            ],
+        ),
+    ]
backend/migrations/0004_remove_requestwebscrapegetcache_page_and_more.py
ADDED
@@ -0,0 +1,23 @@
+# Generated by Django 5.1.1 on 2024-12-07 15:53
+
+import uuid
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('backend', '0003_requestwebscrapegetcache'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='requestwebscrapegetcache',
+            name='page',
+        ),
+        migrations.AddField(
+            model_name='requestwebscrapegetcache',
+            name='id',
+            field=models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False),
+        ),
+    ]
backend/migrations/{0002_webscrapegetcovercache.py → 0005_webscrapegetcache.py}
RENAMED
@@ -1,4 +1,4 @@
-# Generated by Django 5.1.1 on 2024-12-
+# Generated by Django 5.1.1 on 2024-12-07 16:01
 
 import backend.models.model_1
 from django.db import migrations, models
@@ -7,17 +7,16 @@ from django.db import migrations, models
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('backend', '
+        ('backend', '0004_remove_requestwebscrapegetcache_page_and_more'),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='
+            name='WebScrapeGetCache',
             fields=[
                 ('file_path', models.TextField(primary_key=True, serialize=False)),
                 ('source', models.TextField()),
                 ('comic_id', models.TextField()),
-                ('cover_id', models.TextField()),
                 ('datetime', models.DateTimeField(default=backend.models.model_1.get_current_utc_time)),
             ],
         ),
backend/migrations/__pycache__/0001_initial.cpython-312.pyc
CHANGED
Binary files a/backend/migrations/__pycache__/0001_initial.cpython-312.pyc and b/backend/migrations/__pycache__/0001_initial.cpython-312.pyc differ
backend/migrations/__pycache__/0002_alter_requestwebscrapegetcovercache_id.cpython-312.pyc
ADDED
Binary file (835 Bytes)
backend/migrations/__pycache__/0002_webscrapegetcovercache.cpython-312.pyc
DELETED
Binary file (1.2 kB)
backend/migrations/__pycache__/0003_requestwebscrapegetcache.cpython-312.pyc
ADDED
Binary file (1.19 kB)
backend/migrations/__pycache__/0004_remove_requestwebscrapegetcache_page_and_more.cpython-312.pyc
ADDED
Binary file (945 Bytes)
backend/migrations/__pycache__/0005_webscrapegetcache.cpython-312.pyc
ADDED
Binary file (1.16 kB)
backend/models/__pycache__/model_1.cpython-312.pyc
CHANGED
Binary files a/backend/models/__pycache__/model_1.cpython-312.pyc and b/backend/models/__pycache__/model_1.cpython-312.pyc differ
backend/models/__pycache__/model_cache.cpython-312.pyc
CHANGED
Binary files a/backend/models/__pycache__/model_cache.cpython-312.pyc and b/backend/models/__pycache__/model_cache.cpython-312.pyc differ
backend/models/model_1.py
CHANGED
@@ -20,9 +20,19 @@ class ComicStorageCache(models.Model):
     target_lang = models.TextField()
     datetime = models.DateTimeField(default=get_current_utc_time)
 
-
+
+class WebScrapeGetCache(models.Model):
+    file_path = models.TextField(primary_key=True)
+    source = models.TextField()
+    comic_id = models.TextField()
+    datetime = models.DateTimeField(default=get_current_utc_time)
+
+
+class WebScrapeGetCoverCache(models.Model):
     file_path = models.TextField(primary_key=True)
     source = models.TextField()
     comic_id = models.TextField()
     cover_id = models.TextField()
     datetime = models.DateTimeField(default=get_current_utc_time)
+
+
backend/models/model_cache.py
CHANGED
@@ -4,8 +4,17 @@ import uuid
 
 def get_current_utc_time(): return date_utils.utc_time().get()
 
-class
-
+class RequestWebScrapeGetCoverCache(models.Model):
+    id = models.UUIDField(primary_key=True, default = uuid.uuid4)
+    source = models.TextField()
+    comic_id = models.TextField()
+    cover_id = models.TextField()
+    datetime = models.DateTimeField(default=get_current_utc_time)
+
+class RequestWebScrapeGetCache(models.Model):
+    id = models.UUIDField(primary_key=True, default = uuid.uuid4)
+    source = models.TextField()
+    comic_id = models.TextField()
     datetime = models.DateTimeField(default=get_current_utc_time)
 
 class CloudflareTurnStileCache(models.Model):
core/__pycache__/middleware.cpython-312.pyc
CHANGED
Binary files a/core/__pycache__/middleware.cpython-312.pyc and b/core/__pycache__/middleware.cpython-312.pyc differ
core/__pycache__/routers.cpython-312.pyc
CHANGED
Binary files a/core/__pycache__/routers.cpython-312.pyc and b/core/__pycache__/routers.cpython-312.pyc differ
core/middleware.py
CHANGED
@@ -36,8 +36,11 @@ class SequentialRequestMiddleware:
         request_type = request.scope.get("type")
         request_path = request.path
         print(request_path)
-        if
-
+        if (
+            request_type == "http"
+            and request_path in ["/api/web_scrap/get_list/"]
+        ):
+            print("IT THREAD")
             with TimeoutContext(30) as executor:
                 self.__Lock.acquire()
                 try:
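The middleware change scopes the serialization to HTTP requests for `/api/web_scrap/get_list/` only: each such request must acquire a shared lock inside a 30-second `TimeoutContext` before proceeding, so scrapes run one at a time. The underlying idea, sketched with just the standard library (`TimeoutContext` is a project utility, assumed here to behave like a lock acquisition timeout):

    import threading

    _lock = threading.Lock()  # shared across all requests, like self.__Lock

    def run_sequentially(handler, timeout=30):
        # Wait for our turn; give up if a previous request holds the lock too long.
        if not _lock.acquire(timeout=timeout):
            raise TimeoutError("previous request still running")
        try:
            return handler()
        finally:
            _lock.release()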
core/routers.py
CHANGED
@@ -7,7 +7,6 @@ CACHE_MODELS = [cls.__name__.lower() for name, cls in inspect.getmembers(model_cache, inspect.isclass) if cls.__module__ == model_cache.__name__]
 DB1_MODELS = [cls.__name__.lower() for name, cls in inspect.getmembers(model_1, inspect.isclass) if cls.__module__ == model_1.__name__]
 DB2_MODELS = [cls.__name__.lower() for name, cls in inspect.getmembers(model_2, inspect.isclass) if cls.__module__ == model_2.__name__]
 
-
 class Router:
     def db_for_read(self, model, **hints):
         if model._meta.model_name in CACHE_MODELS:
@@ -26,7 +25,7 @@ class Router:
         elif model._meta.model_name in DB2_MODELS:
             return 'DB2'
         return 'default'
-
+
     def allow_relation(self, obj1, obj2, **hints):
         return True
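For context, this database router only takes effect once registered in the Django settings, and the aliases it returns ('cache', 'DB1', 'DB2') must exist as configured databases. A settings sketch, with engines and file names assumed purely for illustration:

    # settings.py (illustrative values only)
    DATABASES = {
        "default": {"ENGINE": "django.db.backends.sqlite3", "NAME": "default.sqlite3"},
        "cache":   {"ENGINE": "django.db.backends.sqlite3", "NAME": "cache.sqlite3"},  # model_cache models
        "DB1":     {"ENGINE": "django.db.backends.sqlite3", "NAME": "db1.sqlite3"},    # model_1 models
        "DB2":     {"ENGINE": "django.db.backends.sqlite3", "NAME": "db2.sqlite3"},    # model_2 models
    }
    DATABASE_ROUTERS = ["core.routers.Router"]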
frontend/app/bookmark/components/comic_component.tsx
CHANGED
@@ -49,7 +49,6 @@ const ComicComponent = ({item, SELECTED_BOOKMARK, SET_SELECTED_BOOKMARK}:any) =>
         setIsLoading(true)
         setStyles(__styles(themeTypeContext,Dimensions))
         const stored_bookmark = await Storage.get("bookmark") || []
-        console.log(stored_bookmark)
         cover.current = await CoverStorage.get(`${item.source}-${item.id}`) || ""
         setIsLoading(false)
     })()
@@ -60,7 +59,12 @@ const ComicComponent = ({item, SELECTED_BOOKMARK, SET_SELECTED_BOOKMARK}:any) =>
         };
     },[]))
 
-    return (<>{styles && !isLoading &&
+    return (<>{styles && !isLoading && <View
+        style={{
+            marginHorizontal: Math.max((Dimensions.width+Dimensions.height)/2*0.015,8),
+            marginVertical: 16,
+        }}
+    >
         <TouchableRipple
             rippleColor={Theme[themeTypeContext].ripple_color_outlined}
             onPress={()=>{router.navigate(`/view/${item.source}/${item.id}/?mode=local`)}}
@@ -76,7 +80,7 @@ const ComicComponent = ({item, SELECTED_BOOKMARK, SET_SELECTED_BOOKMARK}:any) =>
             <Text style={styles.item_title}>{item.info.title}</Text>
         </>
         </TouchableRipple>
-
+    </View>}</>)
 
 }
frontend/app/bookmark/components/widgets/bookmark.tsx
CHANGED
@@ -81,13 +81,20 @@ const BookmarkWidget: React.FC<BookmarkWidgetProps> = ({
         gap:18,
     }}
 >
-    <
+    <View
         style={{
-
-            fontFamily:"roboto-medium",
-            fontSize:(Dimensions.width+Dimensions.height)/2*0.025
+            flex:1,
         }}
     >
+        <Text
+            style={{
+                color:"white",
+                fontFamily:"roboto-medium",
+                fontSize:(Dimensions.width+Dimensions.height)/2*0.025,
+                width:"100%"
+            }}
+        >{item.label}</Text>
+    </View>
     <View
         style={{
             width:"auto",
frontend/app/bookmark/index.tsx
CHANGED
@@ -51,23 +51,50 @@ const Index = ({}:any) => {
 
     const [COMIC_DATA, SET_COMIC_DATA] = useState<any>([])
 
-
+    const [reRender, setReRender] = useState(false)
+    const max_column = useRef(0)
+
+    useFocusEffect(useCallback(() => {
+        if (!reRender) return
+        else setReRender(false)
+    },[reRender]))
+
+    useFocusEffect(useCallback(() => {
+        const item_box = Math.max(((Dimensions.width+Dimensions.height)/2)*0.225,100) + (Math.max((Dimensions.width+Dimensions.height)/2*0.015,8))*2;
+
+        const number_of_column = Math.floor((Dimensions.width-(((Dimensions.width+Dimensions.height)/2)*0.035)-32)/item_box)
+        if (max_column.current !== number_of_column) {
+            max_column.current = number_of_column
+            setReRender(true)
+        }
+    }, [Dimensions]))
+
+    // First load + Refresh ->
+    useFocusEffect(useCallback(() => {
+        (async ()=>{
+            if (!onRefresh) return
+            else setOnRefresh(false)
+        })()
+    },[onRefresh]))
+
+    useFocusEffect(useCallback(() => {
         (async ()=>{
             if (!SELECTED_BOOKMARK) return
             const stored_comic = await ComicStorage.getByTag(SELECTED_BOOKMARK)
-            console.log(stored_comic)
             SET_COMIC_DATA(stored_comic)
         })()
-    },[SELECTED_BOOKMARK
+    },[SELECTED_BOOKMARK]))
 
     useFocusEffect(useCallback(() => {
         (async ()=>{
             setIsLoading(true)
             const stored_bookmark = await Storage.get("bookmark") || []
-            console.log(stored_bookmark)
 
             SET_BOOKMARK_DATA(stored_bookmark)
-            console.log("AA",stored_bookmark.length )
             if (stored_bookmark.length) {
                 SET_SELECTED_BOOKMARK(stored_bookmark[0])
             }
@@ -91,11 +118,11 @@ const Index = ({}:any) => {
             setIsLoading(true)
             setShowMenuContext(true)
             setStyles(__styles(themeTypeContext,Dimensions))
-
             return () => {
                 controller.abort();
             };
     },[]))
+    // <- End
 
     return (<>{styles && ! isLoading
         ? <>
@@ -167,7 +194,6 @@ const Index = ({}:any) => {
             contentContainerStyle={{
                 flex:1,
                 flexGrow: 1,
-                justifyContent: 'center',
             }}
             horizontal={true}
             data={BOOKMARK_DATA}
@@ -188,7 +214,7 @@ const Index = ({}:any) => {
             setWidgetContext({state:true,component:
                 <BookmarkWidget
                     setIsLoading={setIsLoading}
-                    onRefresh={()=>{setOnRefresh(
+                    onRefresh={()=>{setOnRefresh(true)}}
 
                 />
             })
@@ -197,85 +223,85 @@ const Index = ({}:any) => {
                 <Icon source={require("@/assets/icons/tag-edit-outline.png")} size={((Dimensions.width+Dimensions.height)/2)*0.0325} color={Theme[themeTypeContext].icon_color}/>
             </TouchableRipple>
         </View>
-                backgroundColor:"transparent",
-                display:"flex",
-                justifyContent:"center",
-                alignItems:"center",
-                flexDirection:"row",
-                gap:12,
-            }}
-        >
-            <>{BOOKMARK_DATA.length
-                ? <>
-                    {search.text && COMIC_DATA.length
-                        ? <>
-                            <Icon source={"magnify-scan"} color={Theme[themeTypeContext].icon_color} size={((Dimensions.width+Dimensions.height)/2)*0.03}/>
-                            <Text selectable={false}
-                                style={{
-                                    fontFamily:"roboto-bold",
-                                    fontSize:((Dimensions.width+Dimensions.height)/2)*0.025,
-                                    color:Theme[themeTypeContext].text_color,
-                                }}
-                            >Search no result</Text>
-                        </>
-                        : <>
-                            <Icon source={require("@/assets/icons/tag-hidden.png")} color={Theme[themeTypeContext].icon_color} size={((Dimensions.width+Dimensions.height)/2)*0.03}/>
-                            <Text selectable={false}
-                                style={{
-                                    fontFamily:"roboto-bold",
-                                    fontSize:((Dimensions.width+Dimensions.height)/2)*0.025,
-                                    color:Theme[themeTypeContext].text_color,
-                                }}
-                            >This tag is empty.</Text>
-                        </>
-                    }
-                </>
-                : <View style={{
-                    display:"flex",
-                    flexDirection:"column",
-                    justifyContent:"center",
-                    alignItems:"center",
-            }}
-            />
+        <>{!reRender && (
+            <FlatList
+                contentContainerStyle={{
+                    padding:12,
+                    marginHorizontal:"auto",
+                }}
+                numColumns={max_column.current}
+                renderItem={renderComicComponent}
+                ItemSeparatorComponent={undefined}
+                data={COMIC_DATA.filter((item:any) => item.info.title.toLowerCase().includes(search.text.toLowerCase()))}
+                ListEmptyComponent={
+                    <View
+                        style={{
+                            width:"100%",
+                            height:"100%",
+                            backgroundColor:"transparent",
+                            display:"flex",
+                            justifyContent:"center",
+                            alignItems:"center",
+                            flexDirection:"row",
+                            gap:12,
+                        }}
+                    >
+                        <>{BOOKMARK_DATA.length
+                            ? <>
+                                {search.text && COMIC_DATA.length
+                                    ? <>
+                                        <Icon source={"magnify-scan"} color={Theme[themeTypeContext].icon_color} size={((Dimensions.width+Dimensions.height)/2)*0.03}/>
+                                        <Text selectable={false}
+                                            style={{
+                                                fontFamily:"roboto-bold",
+                                                fontSize:((Dimensions.width+Dimensions.height)/2)*0.025,
+                                                color:Theme[themeTypeContext].text_color,
+                                            }}
+                                        >Search no result</Text>
+                                    </>
+                                    : <>
+                                        <Icon source={require("@/assets/icons/tag-hidden.png")} color={Theme[themeTypeContext].icon_color} size={((Dimensions.width+Dimensions.height)/2)*0.03}/>
+                                        <Text selectable={false}
+                                            style={{
+                                                fontFamily:"roboto-bold",
+                                                fontSize:((Dimensions.width+Dimensions.height)/2)*0.025,
+                                                color:Theme[themeTypeContext].text_color,
+                                            }}
+                                        >This tag is empty.</Text>
+                                    </>
+                                }
+                            </>
+                            : <View style={{
+                                display:"flex",
+                                flexDirection:"column",
+                                justifyContent:"center",
+                                alignItems:"center",
+                                width:"100%",
+                                height:"auto",
+                            }}>
+
+                                <Text selectable={false}
+                                    style={{
+                                        fontFamily:"roboto-bold",
+                                        fontSize:((Dimensions.width+Dimensions.height)/2)*0.025,
+                                        color:Theme[themeTypeContext].text_color,
+                                        textAlign:"center",
+                                    }}
+                                >
+                                    No tag found. {"\n"}Press{" "}
+                                    <Icon source={require("@/assets/icons/tag-edit-outline.png")} color={Theme[themeTypeContext].icon_color} size={((Dimensions.width+Dimensions.height)/2)*0.03}/>
+                                    {" "}to create bookmark tag.
+                                </Text>
+                            </View>
+                        }
+                        </>
+                    </View>
+                }
+            />
+        )}</>
     </View>
 
 </>
frontend/app/read/[source]/[comic_id]/[chapter_idx].tsx
CHANGED
@@ -58,6 +58,7 @@ const Index = ({}:any) => {
     useFocusEffect(useCallback(() => {
         setShowMenuContext(null)
         return () => {
+            SET_DATA([])
         }
     },[]))
 
frontend/app/view/[source]/[comic_id].tsx
CHANGED
@@ -181,6 +181,7 @@ const Index = ({}:any) => {
         setShowMenuContext(null)
         return () => {
             controller.abort();
+            Toast.hide()
         }
     },[]))
 
@@ -188,8 +189,8 @@ const Index = ({}:any) => {
     const Load_Local = async () => {
         Toast.show({
             type: 'info',
-            text1: '
-            text2: `Press refresh button to fetch new updates
+            text1: '⚡ Loaded in Local Mode',
+            text2: `Press refresh button to fetch new updates`,
 
             position: "bottom",
             visibilityTime: 6000,
frontend/app/view/componenets/widgets/bookmark.tsx
CHANGED
@@ -90,13 +90,20 @@ const BookmarkWidget: React.FC<BookmarkWidgetProps> = ({
         gap:18,
     }}
 >
-    <
+    <View
        style={{
-
-            fontFamily:"roboto-medium",
-            fontSize:(Dimensions.width+Dimensions.height)/2*0.025
+            flex:1,
         }}
     >
+        <Text
+            style={{
+                color:"white",
+                fontFamily:"roboto-medium",
+                fontSize:(Dimensions.width+Dimensions.height)/2*0.025,
+                width:"100%"
+            }}
+        >{item.label}</Text>
+    </View>
     <View
         style={{
             width:"auto",
frontend/app/view/modules/content.tsx
CHANGED
@@ -32,7 +32,8 @@ export const get = async (setShowCloudflareTurnstile:any,setIsLoading:any,signal
     timeout: 60000,
     signal:signal,
 }).then((response) => {(async () =>{
     const DATA = response.data
+    console.log("EEE",response)
     if (Object.keys(DATA).length) setFeedBack("")
     else{
         setFeedBack("No content found!")
@@ -243,10 +244,15 @@ export const download_chapter = async (
     signal:signal,
 }).then(async (response) => {
     const DATA = response.data
+
+
+
     if (Platform.OS === "web"){
 
         const zip = new JSZip();
         const zipContent = await zip.loadAsync(DATA);
+        const MAX_PAGE = Object.keys(zipContent.files).length
+
         let page = 0;
         for (const fileName of Object.keys(zipContent.files).sort((a,b) => parseInt(a, 10) - parseInt(b, 10))) {
             if (zipContent.files[fileName].dir) {
@@ -256,7 +262,8 @@ export const download_chapter = async (
             const fileData = await zipContent.files[fileName].async('blob');
             const layout = await getImageLayout(await blobToBase64(fileData, "image/png"));
             await ChapterDataStorage.store(`${source}-${comic_id}-${request_info.chapter_idx}-${page}`,comic_id,request_info.chapter_idx, fileData, layout)
-
+            const current_progress = progress_lenth + ((total_length-progress_lenth)*page)/MAX_PAGE
+            download_progress.current = {...download_progress.current, [chapter_id]:{progress:current_progress, total:total_length}}
         }
 
         await ChapterStorage.update(`${source}-${comic_id}`,chapter_id, "completed", page)
@@ -271,7 +278,7 @@ export const download_chapter = async (
     // await ChapterStorage.update(`${source}-${comic_id}`,chapter_id,{type:"file_path", value:chapter_dir + `${request_info.chapter_idx}.zip`}, "completed")
 }
 
-download_progress.current = {...download_progress.current, [chapter_id]:{progress:total_length, total:total_length}}
+// download_progress.current = {...download_progress.current, [chapter_id]:{progress:total_length, total:total_length}}
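The new per-page update linearly interpolates the progress bar between the bytes counted during download (`progress_lenth`, as spelled in the source) and the final total while pages are unpacked from the zip, instead of jumping straight to complete at the end (the old one-shot update is now commented out). The arithmetic, checked in Python:

    def current_progress(progress_lenth, total_length, page, max_page):
        # page 0 -> progress_lenth; page == max_page -> total_length
        return progress_lenth + (total_length - progress_lenth) * page / max_page

    assert current_progress(40, 100, 0, 10) == 40.0    # unpacking just started
    assert current_progress(40, 100, 5, 10) == 70.0    # halfway through the pages
    assert current_progress(40, 100, 10, 10) == 100.0  # all pages stored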