Commit · 0a1b571
1 Parent(s): 8c0d9cd
DF.

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full changeset.
- Dockerfile +18 -0
- LICENSE +201 -0
- docker-compose.yml +41 -0
- hibiapi/__init__.py +15 -0
- hibiapi/__main__.py +118 -0
- hibiapi/api/__init__.py +0 -0
- hibiapi/api/bika/__init__.py +3 -0
- hibiapi/api/bika/api.py +206 -0
- hibiapi/api/bika/constants.py +19 -0
- hibiapi/api/bika/net.py +73 -0
- hibiapi/api/bilibili/__init__.py +4 -0
- hibiapi/api/bilibili/api/__init__.py +4 -0
- hibiapi/api/bilibili/api/base.py +278 -0
- hibiapi/api/bilibili/api/v2.py +124 -0
- hibiapi/api/bilibili/api/v3.py +79 -0
- hibiapi/api/bilibili/constants.py +32 -0
- hibiapi/api/bilibili/net.py +13 -0
- hibiapi/api/netease/__init__.py +4 -0
- hibiapi/api/netease/api.py +326 -0
- hibiapi/api/netease/constants.py +33 -0
- hibiapi/api/netease/net.py +13 -0
- hibiapi/api/pixiv/__init__.py +13 -0
- hibiapi/api/pixiv/api.py +613 -0
- hibiapi/api/pixiv/constants.py +19 -0
- hibiapi/api/pixiv/net.py +85 -0
- hibiapi/api/qrcode.py +160 -0
- hibiapi/api/sauce/__init__.py +4 -0
- hibiapi/api/sauce/api.py +140 -0
- hibiapi/api/sauce/constants.py +16 -0
- hibiapi/api/sauce/net.py +11 -0
- hibiapi/api/tieba/__init__.py +3 -0
- hibiapi/api/tieba/api.py +142 -0
- hibiapi/api/tieba/net.py +5 -0
- hibiapi/api/wallpaper/__init__.py +3 -0
- hibiapi/api/wallpaper/api.py +125 -0
- hibiapi/api/wallpaper/constants.py +8 -0
- hibiapi/api/wallpaper/net.py +8 -0
- hibiapi/app/__init__.py +4 -0
- hibiapi/app/application.py +170 -0
- hibiapi/app/handlers.py +59 -0
- hibiapi/app/middlewares.py +97 -0
- hibiapi/app/routes/__init__.py +52 -0
- hibiapi/app/routes/bika.py +36 -0
- hibiapi/app/routes/bilibili/__init__.py +10 -0
- hibiapi/app/routes/bilibili/v2.py +6 -0
- hibiapi/app/routes/bilibili/v3.py +5 -0
- hibiapi/app/routes/netease.py +7 -0
- hibiapi/app/routes/pixiv.py +26 -0
- hibiapi/app/routes/qrcode.py +76 -0
- hibiapi/app/routes/sauce.py +120 -0
Dockerfile
ADDED
@@ -0,0 +1,18 @@
FROM python:bullseye

EXPOSE 7860

ENV PORT=7860 \
    PROCS=1 \
    GENERAL_SERVER_HOST=0.0.0.0

COPY . /hibi

WORKDIR /hibi

RUN pip install .

CMD hibiapi run --port $PORT --workers $PROCS

HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
    CMD httpx --verbose --follow-redirects http://127.0.0.1:${PORT}
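
The HEALTHCHECK above shells out to the optional httpx command-line tool. As a rough, hedged equivalent (useful if the image lacks the httpx CLI extra), the same probe can be expressed with the httpx Python library; the port default of 7860 mirrors the ENV line above.

# Hypothetical equivalent of the HEALTHCHECK probe, using the httpx library
# instead of its CLI (assumes the container listens on PORT, default 7860).
import os
import sys

import httpx

try:
    response = httpx.get(
        f"http://127.0.0.1:{os.environ.get('PORT', '7860')}",
        follow_redirects=True,
        timeout=30,
    )
    response.raise_for_status()
except httpx.HTTPError as error:
    print(f"health check failed: {error}", file=sys.stderr)
    sys.exit(1)
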
LICENSE
ADDED
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2020-2021 Mix Technology

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
docker-compose.yml
ADDED
@@ -0,0 +1,41 @@
version: "3.9"

volumes:
  hibi_redis: {}

networks:
  hibi_net: {}

services:
  redis:
    image: redis:alpine
    container_name: hibi_redis
    healthcheck:
      test: ["CMD-SHELL", "redis-cli ping"]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - hibi_net
    volumes:
      - hibi_redis:/data
    expose: [6379]

  api:
    container_name: hibiapi
    build:
      dockerfile: Dockerfile
      context: .
    restart: on-failure
    networks:
      - hibi_net
    depends_on:
      redis:
        condition: service_healthy
    ports:
      - "8080:8080"
    environment:
      PORT: "8080"
      FORWARDED_ALLOW_IPS: "*"
      GENERAL_CACHE_URI: "redis://redis:6379"
      GENERAL_SERVER_HOST: "0.0.0.0"
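
The compose file gates API startup on the Redis health check (`redis-cli ping`) and hands the same instance to the app through GENERAL_CACHE_URI. A minimal sketch of the equivalent readiness probe from Python, assuming the third-party "redis" package (not a HibiAPI dependency) and that the hostname `redis` resolves on the `hibi_net` network:

# Minimal sketch of the readiness probe the compose healthcheck performs.
import redis

client = redis.Redis.from_url("redis://redis:6379")
assert client.ping()  # same effect as `redis-cli ping` returning PONG
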
hibiapi/__init__.py
ADDED
@@ -0,0 +1,15 @@
r"""
 _   _ _ _     _          _____ _____
| | | (_) |   (_)   /\   |  __ \_   _|
| |__| |_| |__  _   /  \  | |__) || |
|  __  | | '_ \| | / /\ \ |  ___/ | |
| |  | | | |_) | |/ ____ \| |   _| |_
|_|  |_|_|_.__/|_/_/    \_\_| |_____|

A program that implements easy-to-use APIs for a variety of commonly used sites
Repository: https://github.com/mixmoe/HibiAPI
"""  # noqa:W291,W293

from importlib.metadata import version

__version__ = version("hibiapi")
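
`__version__` is read from installed package metadata, so the module resolves its version only after `pip install .` (which the Dockerfile runs). A hedged sketch of that lookup, including the error raised when the distribution is absent:

# Sketch of the metadata lookup __version__ relies on; PackageNotFoundError
# is raised when hibiapi is imported from a source tree without installation.
from importlib.metadata import PackageNotFoundError, version

try:
    print(version("hibiapi"))
except PackageNotFoundError:
    print("hibiapi is not installed; run `pip install .` first")
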
hibiapi/__main__.py
ADDED
@@ -0,0 +1,118 @@
import os
from pathlib import Path

import typer
import uvicorn

from hibiapi import __file__ as root_file
from hibiapi import __version__
from hibiapi.utils.config import CONFIG_DIR, DEFAULT_DIR, Config
from hibiapi.utils.log import LOG_LEVEL, logger

COPYRIGHT = r"""
<b><g>
 _   _ _ _     _          _____ _____
| | | (_) |   (_)   /\   |  __ \_   _|
| |__| |_| |__  _   /  \  | |__) || |
|  __  | | '_ \| | / /\ \ |  ___/ | |
| |  | | | |_) | |/ ____ \| |   _| |_
|_|  |_|_|_.__/|_/_/    \_\_| |_____|
</g><e>
A program that implements easy-to-use APIs for a variety of commonly used sites
Repository: https://github.com/mixmoe/HibiAPI
</e></b>""".strip()  # noqa:W291


LOG_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "default": {
            "class": "hibiapi.utils.log.LoguruHandler",
        },
    },
    "loggers": {
        "uvicorn.error": {
            "handlers": ["default"],
            "level": LOG_LEVEL,
        },
        "uvicorn.access": {
            "handlers": ["default"],
            "level": LOG_LEVEL,
        },
    },
}

RELOAD_CONFIG = {
    "reload": True,
    "reload_dirs": [
        *map(str, [Path(root_file).parent.absolute(), CONFIG_DIR.absolute()])
    ],
    "reload_includes": ["*.py", "*.yml"],
}


cli = typer.Typer()


@cli.callback(invoke_without_command=True)
@cli.command()
def run(
    ctx: typer.Context,
    host: str = Config["server"]["host"].as_str(),
    port: int = Config["server"]["port"].as_number(),
    workers: int = 1,
    reload: bool = False,
):
    if ctx.invoked_subcommand is not None:
        return

    if ctx.info_name != (func_name := run.__name__):
        logger.warning(
            f"Directly usage of command <r>{ctx.info_name}</r> is <b>deprecated</b>, "
            f"please use <g>{ctx.info_name} {func_name}</g> instead."
        )

    try:
        terminal_width, _ = os.get_terminal_size()
    except OSError:
        terminal_width = 0
    logger.warning(
        "\n".join(i.center(terminal_width) for i in COPYRIGHT.splitlines()),
    )
    logger.info(f"HibiAPI version: <g><b>{__version__}</b></g>")

    uvicorn.run(
        "hibiapi.app:app",
        host=host,
        port=port,
        access_log=False,
        log_config=LOG_CONFIG,
        workers=workers,
        forwarded_allow_ips=Config["server"]["allowed-forward"].get_optional(str),
        **(RELOAD_CONFIG if reload else {}),
    )


@cli.command()
def config(force: bool = False):
    total_written = 0
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    for file in os.listdir(DEFAULT_DIR):
        default_path = DEFAULT_DIR / file
        config_path = CONFIG_DIR / file
        if not (existed := config_path.is_file()) or force:
            total_written += config_path.write_text(
                default_path.read_text(encoding="utf-8"),
                encoding="utf-8",
            )
            typer.echo(
                typer.style(("Overwritten" if existed else "Created") + ": ", fg="blue")
                + typer.style(str(config_path), fg="yellow")
            )
    if total_written > 0:
        typer.echo(f"Config folder generated, {total_written=}")


if __name__ == "__main__":
    cli()
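
The Typer app above registers `run` both as the default callback and as a named command, plus `config` for materializing the default configuration files. A hedged sketch of exercising the CLI in-process with Typer's test runner (the `--force` flag is generated from the `force: bool = False` parameter shown above):

# Hedged sketch: driving the HibiAPI CLI in-process via Typer's test runner.
from typer.testing import CliRunner

from hibiapi.__main__ import cli

runner = CliRunner()
# Generate the default config folder without starting the server.
result = runner.invoke(cli, ["config", "--force"])
print(result.exit_code, result.output)
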
hibiapi/api/__init__.py
ADDED
File without changes
hibiapi/api/bika/__init__.py
ADDED
@@ -0,0 +1,3 @@
from .api import BikaEndpoints, ImageQuality, ResultSort  # noqa: F401
from .constants import BikaConstants  # noqa: F401
from .net import BikaLogin, NetRequest  # noqa: F401
hibiapi/api/bika/api.py
ADDED
@@ -0,0 +1,206 @@
import hashlib
import hmac
from datetime import timedelta
from enum import Enum
from time import time
from typing import Any, Optional, cast

from httpx import URL

from hibiapi.api.bika.constants import BikaConstants
from hibiapi.api.bika.net import NetRequest
from hibiapi.utils.cache import cache_config
from hibiapi.utils.decorators import enum_auto_doc
from hibiapi.utils.net import catch_network_error
from hibiapi.utils.routing import BaseEndpoint, dont_route, request_headers


@enum_auto_doc
class ImageQuality(str, Enum):
    """Image quality returned by the Bika API"""

    low = "low"
    """low quality"""
    medium = "medium"
    """medium quality"""
    high = "high"
    """high quality"""
    original = "original"
    """original image"""


@enum_auto_doc
class ResultSort(str, Enum):
    """Sort order of search results returned by the Bika API"""

    date_descending = "dd"
    """newest first"""
    date_ascending = "da"
    """oldest first"""
    like_descending = "ld"
    """most liked"""
    views_descending = "vd"
    """most viewed"""


class BikaEndpoints(BaseEndpoint):
    @staticmethod
    def _sign(url: URL, timestamp_bytes: bytes, nonce: bytes, method: bytes):
        return hmac.new(
            BikaConstants.DIGEST_KEY,
            (
                url.raw_path.lstrip(b"/")
                + timestamp_bytes
                + nonce
                + method
                + BikaConstants.API_KEY
            ).lower(),
            hashlib.sha256,
        ).hexdigest()

    @dont_route
    @catch_network_error
    async def request(
        self,
        endpoint: str,
        *,
        params: Optional[dict[str, Any]] = None,
        body: Optional[dict[str, Any]] = None,
        no_token: bool = False,
    ):
        net_client = cast(NetRequest, self.client.net_client)
        if not no_token:
            async with net_client.auth_lock:
                if net_client.token is None:
                    await net_client.login(self)

        headers = {
            "Authorization": net_client.token or "",
            "Time": (current_time := f"{time():.0f}".encode()),
            "Image-Quality": request_headers.get().get(
                "X-Image-Quality", ImageQuality.medium
            ),
            "Nonce": (nonce := hashlib.md5(current_time).hexdigest().encode()),
            "Signature": self._sign(
                request_url := self._join(
                    base=BikaConstants.API_HOST,
                    endpoint=endpoint,
                    params=params or {},
                ),
                current_time,
                nonce,
                b"GET" if body is None else b"POST",
            ),
        }

        response = await (
            self.client.get(request_url, headers=headers)
            if body is None
            else self.client.post(request_url, headers=headers, json=body)
        )
        return response.json()

    @cache_config(ttl=timedelta(days=1))
    async def collections(self):
        return await self.request("collections")

    @cache_config(ttl=timedelta(days=3))
    async def categories(self):
        return await self.request("categories")

    @cache_config(ttl=timedelta(days=3))
    async def keywords(self):
        return await self.request("keywords")

    async def advanced_search(
        self,
        *,
        keyword: str,
        page: int = 1,
        sort: ResultSort = ResultSort.date_descending,
    ):
        return await self.request(
            "comics/advanced-search",
            body={
                "keyword": keyword,
                "sort": sort,
            },
            params={
                "page": page,
                "s": sort,
            },
        )

    async def category_list(
        self,
        *,
        category: str,
        page: int = 1,
        sort: ResultSort = ResultSort.date_descending,
    ):
        return await self.request(
            "comics",
            params={
                "page": page,
                "c": category,
                "s": sort,
            },
        )

    async def author_list(
        self,
        *,
        author: str,
        page: int = 1,
        sort: ResultSort = ResultSort.date_descending,
    ):
        return await self.request(
            "comics",
            params={
                "page": page,
                "a": author,
                "s": sort,
            },
        )

    @cache_config(ttl=timedelta(days=3))
    async def comic_detail(self, *, id: str):
        return await self.request("comics/{id}", params={"id": id})

    async def comic_recommendation(self, *, id: str):
        return await self.request("comics/{id}/recommendation", params={"id": id})

    async def comic_episodes(self, *, id: str, page: int = 1):
        return await self.request(
            "comics/{id}/eps",
            params={
                "id": id,
                "page": page,
            },
        )

    async def comic_page(self, *, id: str, order: int = 1, page: int = 1):
        return await self.request(
            "comics/{id}/order/{order}/pages",
            params={
                "id": id,
                "order": order,
                "page": page,
            },
        )

    async def comic_comments(self, *, id: str, page: int = 1):
        return await self.request(
            "comics/{id}/comments",
            params={
                "id": id,
                "page": page,
            },
        )

    async def games(self, *, page: int = 1):
        return await self.request("games", params={"page": page})

    @cache_config(ttl=timedelta(days=3))
    async def game_detail(self, *, id: str):
        return await self.request("games/{id}", params={"id": id})
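
The `Signature` header built in `BikaEndpoints.request` is an HMAC-SHA256 over the lower-cased concatenation of path, timestamp, nonce, HTTP method, and API key. A standalone sketch of that calculation with the standard library only; the key values below are placeholders, the real ones live in BikaConstants:

# Standalone sketch of the signing scheme used by BikaEndpoints._sign.
# DIGEST_KEY and API_KEY here are placeholders, not the real constants.
import hashlib
import hmac
from time import time

DIGEST_KEY = b"<digest-key>"
API_KEY = b"<api-key>"


def sign(path: bytes, method: bytes = b"GET") -> dict[str, bytes | str]:
    timestamp = f"{time():.0f}".encode()
    nonce = hashlib.md5(timestamp).hexdigest().encode()
    message = (path.lstrip(b"/") + timestamp + nonce + method + API_KEY).lower()
    signature = hmac.new(DIGEST_KEY, message, hashlib.sha256).hexdigest()
    return {"Time": timestamp, "Nonce": nonce, "Signature": signature}


print(sign(b"/comics/advanced-search", b"POST"))
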
hibiapi/api/bika/constants.py
ADDED
@@ -0,0 +1,19 @@
from hibiapi.utils.config import APIConfig


class BikaConstants:
    DIGEST_KEY = b"~d}$Q7$eIni=V)9\\RK/P.RM4;9[7|@/CA}b~OW!3?EV`:<>M7pddUBL5n|0/*Cn"
    API_KEY = b"C69BAF41DA5ABD1FFEDC6D2FEA56B"
    DEFAULT_HEADERS = {
        "API-Key": API_KEY,
        "App-Channel": "2",
        "App-Version": "2.2.1.2.3.3",
        "App-Build-Version": "44",
        "App-UUID": "defaultUuid",
        "Accept": "application/vnd.picacomic.com.v1+json",
        "App-Platform": "android",
        "User-Agent": "okhttp/3.8.1",
        "Content-Type": "application/json; charset=UTF-8",
    }
    API_HOST = "https://picaapi.picacomic.com/"
    CONFIG = APIConfig("bika")
hibiapi/api/bika/net.py
ADDED
@@ -0,0 +1,73 @@
import asyncio
from base64 import urlsafe_b64decode
from datetime import datetime, timezone
from functools import lru_cache
from typing import TYPE_CHECKING, Any, Literal, Optional

from pydantic import BaseModel, Field

from hibiapi.api.bika.constants import BikaConstants
from hibiapi.utils.net import BaseNetClient

if TYPE_CHECKING:
    from .api import BikaEndpoints


class BikaLogin(BaseModel):
    email: str
    password: str


class JWTHeader(BaseModel):
    alg: str
    typ: Literal["JWT"]


class JWTBody(BaseModel):
    id: str = Field(alias="_id")
    iat: datetime
    exp: datetime


@lru_cache(maxsize=4)
def load_jwt(token: str):
    def b64pad(data: str):
        return data + "=" * (-len(data) % 4)

    head, body, _ = token.split(".")
    head_data = JWTHeader.parse_raw(urlsafe_b64decode(b64pad(head)))
    body_data = JWTBody.parse_raw(urlsafe_b64decode(b64pad(body)))
    return head_data, body_data


class NetRequest(BaseNetClient):
    _token: Optional[str] = None

    def __init__(self):
        super().__init__(
            headers=BikaConstants.DEFAULT_HEADERS.copy(),
            proxies=BikaConstants.CONFIG["proxy"].as_dict(),
        )
        self.auth_lock = asyncio.Lock()

    @property
    def token(self) -> Optional[str]:
        if self._token is None:
            return None
        _, body = load_jwt(self._token)
        return None if body.exp < datetime.now(timezone.utc) else self._token

    async def login(self, endpoint: "BikaEndpoints"):
        login_data = BikaConstants.CONFIG["account"].get(BikaLogin)
        login_result: dict[str, Any] = await endpoint.request(
            "auth/sign-in",
            body=login_data.dict(),
            no_token=True,
        )
        assert login_result["code"] == 200, login_result["message"]
        if not (
            isinstance(login_data := login_result.get("data"), dict)
            and "token" in login_data
        ):
            raise ValueError("failed to read Bika account token.")
        self._token = login_data["token"]
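
`load_jwt` only decodes the token's header and payload to read the expiry; it does not verify the signature. A hedged sketch of the same base64url padding-and-decode step on a made-up, unsigned token:

# Sketch of the padding + base64url decoding performed by load_jwt.
# The token below is an illustrative, unsigned example, not a real Bika token.
import json
from base64 import urlsafe_b64decode, urlsafe_b64encode


def b64pad(data: str) -> str:
    return data + "=" * (-len(data) % 4)


payload = urlsafe_b64encode(json.dumps({"_id": "demo", "iat": 0, "exp": 0}).encode())
token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9." + payload.decode().rstrip("=") + ".sig"

_, body, _ = token.split(".")
print(json.loads(urlsafe_b64decode(b64pad(body))))
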
hibiapi/api/bilibili/__init__.py
ADDED
@@ -0,0 +1,4 @@
# flake8:noqa:F401
from .api import *  # noqa: F401, F403
from .constants import BilibiliConstants
from .net import NetRequest
hibiapi/api/bilibili/api/__init__.py
ADDED
@@ -0,0 +1,4 @@
# flake8:noqa:F401
from .base import BaseBilibiliEndpoint, TimelineType, VideoFormatType, VideoQualityType
from .v2 import BilibiliEndpointV2, SearchType
from .v3 import BilibiliEndpointV3
hibiapi/api/bilibili/api/base.py
ADDED
@@ -0,0 +1,278 @@
import hashlib
import json
from enum import Enum, IntEnum
from time import time
from typing import Any, Optional, overload

from httpx import URL

from hibiapi.api.bilibili.constants import BilibiliConstants
from hibiapi.utils.decorators import enum_auto_doc
from hibiapi.utils.net import catch_network_error
from hibiapi.utils.routing import BaseEndpoint, dont_route


@enum_auto_doc
class TimelineType(str, Enum):
    """Bangumi timeline type"""

    CN = "cn"
    """domestic (Chinese) animation"""
    GLOBAL = "global"
    """bangumi"""


@enum_auto_doc
class VideoQualityType(IntEnum):
    """Video quality type"""

    VIDEO_240P = 6
    VIDEO_360P = 16
    VIDEO_480P = 32
    VIDEO_720P = 64
    VIDEO_720P_60FPS = 74
    VIDEO_1080P = 80
    VIDEO_1080P_PLUS = 112
    VIDEO_1080P_60FPS = 116
    VIDEO_4K = 120


@enum_auto_doc
class VideoFormatType(IntEnum):
    """Video format type"""

    FLV = 0
    MP4 = 2
    DASH = 16


class BaseBilibiliEndpoint(BaseEndpoint):
    def _sign(self, base: str, endpoint: str, params: dict[str, Any]) -> URL:
        params.update(
            {
                **BilibiliConstants.DEFAULT_PARAMS,
                "access_key": BilibiliConstants.ACCESS_KEY,
                "appkey": BilibiliConstants.APP_KEY,
                "ts": int(time()),
            }
        )
        params = {k: params[k] for k in sorted(params.keys())}
        url = self._join(base=base, endpoint=endpoint, params=params)
        params["sign"] = hashlib.md5(url.query + BilibiliConstants.SECRET).hexdigest()
        return URL(url, params=params)

    @staticmethod
    def _parse_json(content: str) -> dict[str, Any]:
        try:
            return json.loads(content)
        except json.JSONDecodeError:
            # NOTE: this is used to parse jsonp response
            right, left = content.find("("), content.rfind(")")
            return json.loads(content[right + 1 : left].strip())

    @overload
    async def request(
        self,
        endpoint: str,
        *,
        sign: bool = True,
        params: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]: ...

    @overload
    async def request(
        self,
        endpoint: str,
        source: str,
        *,
        sign: bool = True,
        params: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]: ...

    @dont_route
    @catch_network_error
    async def request(
        self,
        endpoint: str,
        source: Optional[str] = None,
        *,
        sign: bool = True,
        params: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]:
        host = BilibiliConstants.SERVER_HOST[source or "app"]
        url = (self._sign if sign else self._join)(
            base=host, endpoint=endpoint, params=params or {}
        )
        response = await self.client.get(url)
        response.raise_for_status()
        return self._parse_json(response.text)

    async def playurl(
        self,
        *,
        aid: int,
        cid: int,
        quality: VideoQualityType = VideoQualityType.VIDEO_480P,
        type: VideoFormatType = VideoFormatType.FLV,
    ):
        return await self.request(
            "x/player/playurl",
            "api",
            sign=False,
            params={
                "avid": aid,
                "cid": cid,
                "qn": quality,
                "fnval": type,
                "fnver": 0,
                "fourk": 0 if quality >= VideoQualityType.VIDEO_4K else 1,
            },
        )

    async def view(self, *, aid: int):
        return await self.request(
            "x/v2/view",
            params={
                "aid": aid,
            },
        )

    async def search(self, *, keyword: str, page: int = 1, pagesize: int = 20):
        return await self.request(
            "x/v2/search",
            params={
                "duration": 0,
                "keyword": keyword,
                "pn": page,
                "ps": pagesize,
            },
        )

    async def search_hot(self, *, limit: int = 50):
        return await self.request(
            "x/v2/search/hot",
            params={
                "limit": limit,
            },
        )

    async def search_suggest(self, *, keyword: str, type: str = "accurate"):
        return await self.request(
            "x/v2/search/suggest",
            params={
                "keyword": keyword,
                "type": type,
            },
        )

    async def space(self, *, vmid: int, page: int = 1, pagesize: int = 10):
        return await self.request(
            "x/v2/space",
            params={
                "vmid": vmid,
                "ps": pagesize,
                "pn": page,
            },
        )

    async def space_archive(self, *, vmid: int, page: int = 1, pagesize: int = 10):
        return await self.request(
            "x/v2/space/archive",
            params={
                "vmid": vmid,
                "ps": pagesize,
                "pn": page,
            },
        )

    async def favorite_video(
        self,
        *,
        fid: int,
        vmid: int,
        page: int = 1,
        pagesize: int = 20,
    ):
        return await self.request(
            "x/v2/fav/video",
            "api",
            params={
                "fid": fid,
                "pn": page,
                "ps": pagesize,
                "vmid": vmid,
                "order": "ftime",
            },
        )

    async def event_list(
        self,
        *,
        fid: int,
        vmid: int,
        page: int = 1,
        pagesize: int = 20,
    ):  # NOTE: this endpoint is not used
        return await self.request(
            "event/getlist",
            "api",
            params={
                "fid": fid,
                "pn": page,
                "ps": pagesize,
                "vmid": vmid,
                "order": "ftime",
            },
        )

    async def season_info(self, *, season_id: int):
        return await self.request(
            "pgc/view/web/season",
            "api",
            params={
                "season_id": season_id,
            },
        )

    async def bangumi_source(self, *, episode_id: int):
        return await self.request(
            "api/get_source",
            "bgm",
            params={
                "episode_id": episode_id,
            },
        )

    async def season_recommend(self, *, season_id: int):
        return await self.request(
            "pgc/season/web/related/recommend",
            "api",
            sign=False,
            params={
                "season_id": season_id,
            },
        )

    async def timeline(self, *, type: TimelineType = TimelineType.GLOBAL):
        return await self.request(
            "web_api/timeline_{type}",
            "bgm",
            sign=False,
            params={
                "type": type,
            },
        )

    async def suggest(self, *, keyword: str):  # NOTE: this endpoint is not used
        return await self.request(
            "main/suggest",
            "search",
            sign=False,
            params={
                "func": "suggest",
                "suggest_type": "accurate",
                "sug_type": "tag",
                "main_ver": "v1",
                "keyword": keyword,
            },
        )
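
`_sign` above appends the app credentials and timestamp, sorts the query parameters, and derives `sign` as the MD5 of the encoded query string plus the app secret. A standalone sketch of that calculation using urllib as a stand-in for httpx's query encoding; APP_KEY and SECRET are placeholders, the real values live in BilibiliConstants:

# Standalone sketch of the query-string signing done in BaseBilibiliEndpoint._sign.
import hashlib
from time import time
from urllib.parse import urlencode

APP_KEY = "<appkey>"      # placeholder
SECRET = b"<app-secret>"  # placeholder


def sign_params(params: dict) -> dict:
    params = {**params, "appkey": APP_KEY, "ts": int(time())}
    ordered = {k: params[k] for k in sorted(params)}
    query = urlencode(ordered).encode()
    ordered["sign"] = hashlib.md5(query + SECRET).hexdigest()
    return ordered


print(sign_params({"aid": 170001}))
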
hibiapi/api/bilibili/api/v2.py
ADDED
@@ -0,0 +1,124 @@
from collections.abc import Coroutine
from enum import Enum
from functools import wraps
from typing import Callable, Optional, TypeVar

from hibiapi.api.bilibili.api.base import (
    BaseBilibiliEndpoint,
    TimelineType,
    VideoFormatType,
    VideoQualityType,
)
from hibiapi.utils.decorators import enum_auto_doc
from hibiapi.utils.exceptions import ClientSideException
from hibiapi.utils.net import AsyncHTTPClient
from hibiapi.utils.routing import BaseEndpoint

_AnyCallable = TypeVar("_AnyCallable", bound=Callable[..., Coroutine])


def process_keyerror(function: _AnyCallable) -> _AnyCallable:
    @wraps(function)
    async def wrapper(*args, **kwargs):
        try:
            return await function(*args, **kwargs)
        except (KeyError, IndexError) as e:
            raise ClientSideException(detail=str(e)) from None

    return wrapper  # type:ignore


@enum_auto_doc
class SearchType(str, Enum):
    """Search type"""

    search = "search"
    """general search"""

    suggest = "suggest"
    """search suggestions"""

    hot = "hot"
    """trending searches"""


class BilibiliEndpointV2(BaseEndpoint, cache_endpoints=False):
    def __init__(self, client: AsyncHTTPClient):
        super().__init__(client)
        self.base = BaseBilibiliEndpoint(client)

    @process_keyerror
    async def playurl(
        self,
        *,
        aid: int,
        page: Optional[int] = None,
        quality: VideoQualityType = VideoQualityType.VIDEO_480P,
        type: VideoFormatType = VideoFormatType.MP4,
    ):  # NOTE: not completely same with origin
        video_view = await self.base.view(aid=aid)
        if page is None:
            return video_view
        cid: int = video_view["data"]["pages"][page - 1]["cid"]
        return await self.base.playurl(
            aid=aid,
            cid=cid,
            quality=quality,
            type=type,
        )

    async def seasoninfo(self, *, season_id: int):  # NOTE: not same with origin
        return await self.base.season_info(season_id=season_id)

    async def source(self, *, episode_id: int):
        return await self.base.bangumi_source(episode_id=episode_id)

    async def seasonrecommend(self, *, season_id: int):  # NOTE: not same with origin
        return await self.base.season_recommend(season_id=season_id)

    async def search(
        self,
        *,
        keyword: str = "",
        type: SearchType = SearchType.search,
        page: int = 1,
        pagesize: int = 20,
        limit: int = 50,
    ):
        if type == SearchType.suggest:
            return await self.base.search_suggest(keyword=keyword)
        elif type == SearchType.hot:
            return await self.base.search_hot(limit=limit)
        else:
            return await self.base.search(
                keyword=keyword,
                page=page,
                pagesize=pagesize,
            )

    async def timeline(
        self, *, type: TimelineType = TimelineType.GLOBAL
    ):  # NOTE: not same with origin
        return await self.base.timeline(type=type)

    async def space(self, *, vmid: int, page: int = 1, pagesize: int = 10):
        return await self.base.space(
            vmid=vmid,
            page=page,
            pagesize=pagesize,
        )

    async def archive(self, *, vmid: int, page: int = 1, pagesize: int = 10):
        return await self.base.space_archive(
            vmid=vmid,
            page=page,
            pagesize=pagesize,
        )

    async def favlist(self, *, fid: int, vmid: int, page: int = 1, pagesize: int = 20):
        return await self.base.favorite_video(
            fid=fid,
            vmid=vmid,
            page=page,
            pagesize=pagesize,
        )
hibiapi/api/bilibili/api/v3.py
ADDED
@@ -0,0 +1,79 @@
from hibiapi.api.bilibili.api.base import (
    BaseBilibiliEndpoint,
    TimelineType,
    VideoFormatType,
    VideoQualityType,
)
from hibiapi.utils.net import AsyncHTTPClient
from hibiapi.utils.routing import BaseEndpoint


class BilibiliEndpointV3(BaseEndpoint, cache_endpoints=False):
    def __init__(self, client: AsyncHTTPClient):
        super().__init__(client)
        self.base = BaseBilibiliEndpoint(client)

    async def video_info(self, *, aid: int):
        return await self.base.view(aid=aid)

    async def video_address(
        self,
        *,
        aid: int,
        cid: int,
        quality: VideoQualityType = VideoQualityType.VIDEO_480P,
        type: VideoFormatType = VideoFormatType.FLV,
    ):
        return await self.base.playurl(
            aid=aid,
            cid=cid,
            quality=quality,
            type=type,
        )

    async def user_info(self, *, uid: int, page: int = 1, size: int = 10):
        return await self.base.space(
            vmid=uid,
            page=page,
            pagesize=size,
        )

    async def user_uploaded(self, *, uid: int, page: int = 1, size: int = 10):
        return await self.base.space_archive(
            vmid=uid,
            page=page,
            pagesize=size,
        )

    async def user_favorite(self, *, uid: int, fid: int, page: int = 1, size: int = 10):
        return await self.base.favorite_video(
            fid=fid,
            vmid=uid,
            page=page,
            pagesize=size,
        )

    async def season_info(self, *, season_id: int):
        return await self.base.season_info(season_id=season_id)

    async def season_recommend(self, *, season_id: int):
        return await self.base.season_recommend(season_id=season_id)

    async def season_episode(self, *, episode_id: int):
        return await self.base.bangumi_source(episode_id=episode_id)

    async def season_timeline(self, *, type: TimelineType = TimelineType.GLOBAL):
        return await self.base.timeline(type=type)

    async def search(self, *, keyword: str, page: int = 1, size: int = 20):
        return await self.base.search(
            keyword=keyword,
            page=page,
            pagesize=size,
        )

    async def search_recommend(self, *, limit: int = 50):
        return await self.base.search_hot(limit=limit)

    async def search_suggestion(self, *, keyword: str):
        return await self.base.search_suggest(keyword=keyword)
hibiapi/api/bilibili/constants.py
ADDED
@@ -0,0 +1,32 @@
from http.cookies import SimpleCookie
from typing import Any

from hibiapi.utils.config import APIConfig

_CONFIG = APIConfig("bilibili")


class BilibiliConstants:
    SERVER_HOST: dict[str, str] = {
        "app": "https://app.bilibili.com",
        "api": "https://api.bilibili.com",
        "interface": "https://interface.bilibili.com",
        "main": "https://www.bilibili.com",
        "bgm": "https://bangumi.bilibili.com",
        "comment": "https://comment.bilibili.com",
        "search": "https://s.search.bilibili.com",
        "mobile": "https://m.bilibili.com",
    }
    APP_HOST: str = "http://app.bilibili.com"
    DEFAULT_PARAMS: dict[str, Any] = {
        "build": 507000,
        "device": "android",
        "platform": "android",
        "mobi_app": "android",
    }
    APP_KEY: str = "1d8b6e7d45233436"
    SECRET: bytes = b"560c52ccd288fed045859ed18bffd973"
    ACCESS_KEY: str = "5271b2f0eb92f5f89af4dc39197d8e41"
    COOKIES: SimpleCookie = SimpleCookie(_CONFIG["net"]["cookie"].as_str())
    USER_AGENT: str = _CONFIG["net"]["user-agent"].as_str()
    CONFIG: APIConfig = _CONFIG
hibiapi/api/bilibili/net.py
ADDED
@@ -0,0 +1,13 @@
from httpx import Cookies

from hibiapi.utils.net import BaseNetClient

from .constants import BilibiliConstants


class NetRequest(BaseNetClient):
    def __init__(self):
        super().__init__(
            headers={"user-agent": BilibiliConstants.USER_AGENT},
            cookies=Cookies({k: v.value for k, v in BilibiliConstants.COOKIES.items()}),
        )
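
NetRequest converts the configured cookie string (parsed into a SimpleCookie in BilibiliConstants) into an httpx.Cookies jar. A hedged sketch of that conversion on an illustrative cookie string; the real value comes from the bilibili config file:

# Sketch of the SimpleCookie -> httpx.Cookies conversion used by NetRequest,
# applied to a made-up cookie string for illustration.
from http.cookies import SimpleCookie

from httpx import Cookies

raw = "SESSDATA=example-session; bili_jct=example-csrf"
jar = Cookies({key: morsel.value for key, morsel in SimpleCookie(raw).items()})
print(jar)
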
hibiapi/api/netease/__init__.py
ADDED
@@ -0,0 +1,4 @@
# flake8:noqa:F401
from .api import BitRateType, NeteaseEndpoint, RecordPeriodType, SearchType
from .constants import NeteaseConstants
from .net import NetRequest
hibiapi/api/netease/api.py
ADDED
@@ -0,0 +1,326 @@
import base64
import json
import secrets
import string
from datetime import timedelta
from enum import IntEnum
from ipaddress import IPv4Address
from random import randint
from typing import Annotated, Any, Optional

from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad
from fastapi import Query

from hibiapi.api.netease.constants import NeteaseConstants
from hibiapi.utils.cache import cache_config
from hibiapi.utils.decorators import enum_auto_doc
from hibiapi.utils.exceptions import UpstreamAPIException
from hibiapi.utils.net import catch_network_error
from hibiapi.utils.routing import BaseEndpoint, dont_route


@enum_auto_doc
class SearchType(IntEnum):
    """Search content type"""

    SONG = 1
    """song"""
    ALBUM = 10
    """album"""
    ARTIST = 100
    """artist"""
    PLAYLIST = 1000
    """playlist"""
    USER = 1002
    """user"""
    MV = 1004
    """MV"""
    LYRICS = 1006
    """lyrics"""
    DJ = 1009
    """DJ radio"""
    VIDEO = 1014
    """video"""


@enum_auto_doc
class BitRateType(IntEnum):
    """Song bit rate"""

    LOW = 64000
    MEDIUM = 128000
    STANDARD = 198000
    HIGH = 320000


@enum_auto_doc
class MVResolutionType(IntEnum):
    """MV resolution"""

    QVGA = 240
    VGA = 480
    HD = 720
    FHD = 1080


@enum_auto_doc
class RecordPeriodType(IntEnum):
    """Listening record period type"""

    WEEKLY = 1
    """this week"""
    ALL = 0
    """all time"""


class _EncryptUtil:
    alphabets = bytearray(ord(char) for char in string.ascii_letters + string.digits)

    @staticmethod
    def _aes(data: bytes, key: bytes) -> bytes:
        data = pad(data, 16) if len(data) % 16 else data
        return base64.encodebytes(
            AES.new(
                key=key,
                mode=AES.MODE_CBC,
                iv=NeteaseConstants.AES_IV,
            ).encrypt(data)
        )

    @staticmethod
    def _rsa(data: bytes):
        result = pow(
            base=int(data.hex(), 16),
            exp=NeteaseConstants.RSA_PUBKEY,
            mod=NeteaseConstants.RSA_MODULUS,
        )
        return f"{result:0>256x}"

    @classmethod
    def encrypt(cls, data: dict[str, Any]) -> dict[str, str]:
        secret = bytes(secrets.choice(cls.alphabets) for _ in range(16))
        secure_key = cls._rsa(bytes(reversed(secret)))
        return {
            "params": cls._aes(
                data=cls._aes(
                    data=json.dumps(data).encode(),
                    key=NeteaseConstants.AES_KEY,
                ),
                key=secret,
            ).decode("ascii"),
            "encSecKey": secure_key,
        }


class NeteaseEndpoint(BaseEndpoint):
    def _construct_headers(self):
        headers = self.client.headers.copy()
        headers["X-Real-IP"] = str(
            IPv4Address(
                randint(
                    int(NeteaseConstants.SOURCE_IP_SEGMENT.network_address),
                    int(NeteaseConstants.SOURCE_IP_SEGMENT.broadcast_address),
                )
            )
        )
        return headers

    @dont_route
    @catch_network_error
    async def request(
        self, endpoint: str, *, params: Optional[dict[str, Any]] = None
    ) -> dict[str, Any]:
        params = {
            **(params or {}),
            "csrf_token": self.client.cookies.get("__csrf", ""),
        }
        response = await self.client.post(
            self._join(
                NeteaseConstants.HOST,
                endpoint=endpoint,
                params=params,
            ),
            headers=self._construct_headers(),
            data=_EncryptUtil.encrypt(params),
        )
        response.raise_for_status()
        if not response.text.strip():
            raise UpstreamAPIException(
                f"Upstream API {endpoint=} returns blank content"
            )
        return response.json()

    async def search(
        self,
        *,
        s: str,
        search_type: SearchType = SearchType.SONG,
        limit: int = 20,
        offset: int = 0,
    ):
        return await self.request(
            "api/cloudsearch/pc",
            params={
                "s": s,
                "type": search_type,
                "limit": limit,
                "offset": offset,
                "total": True,
            },
        )

    async def artist(self, *, id: int):
        return await self.request(
            "weapi/v1/artist/{artist_id}",
            params={
                "artist_id": id,
            },
        )

    async def album(self, *, id: int):
        return await self.request(
            "weapi/v1/album/{album_id}",
            params={
                "album_id": id,
            },
        )

    async def detail(
        self,
        *,
        id: Annotated[list[int], Query()],
    ):
        return await self.request(
            "api/v3/song/detail",
            params={
                "c": json.dumps(
                    [{"id": str(i)} for i in id],
                ),
            },
        )

    @cache_config(ttl=timedelta(minutes=20))
    async def song(
        self,
        *,
        id: Annotated[list[int], Query()],
        br: BitRateType = BitRateType.STANDARD,
    ):
        return await self.request(
            "weapi/song/enhance/player/url",
            params={
                "ids": [str(i) for i in id],
                "br": br,
            },
        )

    async def playlist(self, *, id: int):
        return await self.request(
            "weapi/v6/playlist/detail",
            params={
                "id": id,
                "total": True,
                "offset": 0,
                "limit": 1000,
                "n": 1000,
            },
        )

    async def lyric(self, *, id: int):
        return await self.request(
            "weapi/song/lyric",
            params={
                "id": id,
                "os": "pc",
                "lv": -1,
                "kv": -1,
                "tv": -1,
            },
        )

    async def mv(self, *, id: int):
        return await self.request(
            "api/v1/mv/detail",
            params={
                "id": id,
            },
        )

    async def mv_url(
        self,
        *,
        id: int,
        res: MVResolutionType = MVResolutionType.FHD,
    ):
        return await self.request(
            "weapi/song/enhance/play/mv/url",
            params={
                "id": id,
                "r": res,
            },
        )

    async def comments(self, *, id: int, offset: int = 0, limit: int = 1):
        return await self.request(
            "weapi/v1/resource/comments/R_SO_4_{song_id}",
            params={
                "song_id": id,
                "offset": offset,
                "total": True,
                "limit": limit,
            },
        )

    async def record(self, *, id: int, period: RecordPeriodType = RecordPeriodType.ALL):
|
276 |
+
return await self.request(
|
277 |
+
"weapi/v1/play/record",
|
278 |
+
params={
|
279 |
+
"uid": id,
|
280 |
+
"type": period,
|
281 |
+
},
|
282 |
+
)
|
283 |
+
|
284 |
+
async def djradio(self, *, id: int):
|
285 |
+
return await self.request(
|
286 |
+
"api/djradio/v2/get",
|
287 |
+
params={
|
288 |
+
"id": id,
|
289 |
+
},
|
290 |
+
)
|
291 |
+
|
292 |
+
async def dj(self, *, id: int, offset: int = 0, limit: int = 20, asc: bool = False):
|
293 |
+
# NOTE: possibly not identical to the original API response
|
294 |
+
return await self.request(
|
295 |
+
"weapi/dj/program/byradio",
|
296 |
+
params={
|
297 |
+
"radioId": id,
|
298 |
+
"offset": offset,
|
299 |
+
"limit": limit,
|
300 |
+
"asc": asc,
|
301 |
+
},
|
302 |
+
)
|
303 |
+
|
304 |
+
async def detail_dj(self, *, id: int):
|
305 |
+
return await self.request(
|
306 |
+
"api/dj/program/detail",
|
307 |
+
params={
|
308 |
+
"id": id,
|
309 |
+
},
|
310 |
+
)
|
311 |
+
|
312 |
+
async def user(self, *, id: int):
|
313 |
+
return await self.request(
|
314 |
+
"weapi/v1/user/detail/{id}",
|
315 |
+
params={"id": id},
|
316 |
+
)
|
317 |
+
|
318 |
+
async def user_playlist(self, *, id: int, limit: int = 50, offset: int = 0):
|
319 |
+
return await self.request(
|
320 |
+
"weapi/user/playlist",
|
321 |
+
params={
|
322 |
+
"uid": id,
|
323 |
+
"limit": limit,
|
324 |
+
"offset": offset,
|
325 |
+
},
|
326 |
+
)
|
hibiapi/api/netease/constants.py
ADDED
@@ -0,0 +1,33 @@
1 |
+
from http.cookies import SimpleCookie
|
2 |
+
from ipaddress import IPv4Network
|
3 |
+
|
4 |
+
from hibiapi.utils.config import APIConfig
|
5 |
+
|
6 |
+
_Config = APIConfig("netease")
|
7 |
+
|
8 |
+
|
9 |
+
class NeteaseConstants:
|
10 |
+
AES_KEY: bytes = b"0CoJUm6Qyw8W8jud"
|
11 |
+
AES_IV: bytes = b"0102030405060708"
|
12 |
+
RSA_PUBKEY: int = int("010001", 16)
|
13 |
+
RSA_MODULUS: int = int(
|
14 |
+
"00e0b509f6259df8642dbc3566290147"
|
15 |
+
"7df22677ec152b5ff68ace615bb7b725"
|
16 |
+
"152b3ab17a876aea8a5aa76d2e417629"
|
17 |
+
"ec4ee341f56135fccf695280104e0312"
|
18 |
+
"ecbda92557c93870114af6c9d05c4f7f"
|
19 |
+
"0c3685b7a46bee255932575cce10b424"
|
20 |
+
"d813cfe4875d3e82047b97ddef52741d"
|
21 |
+
"546b8e289dc6935b3ece0462db0a22b8e7",
|
22 |
+
16,
|
23 |
+
)
|
24 |
+
|
25 |
+
HOST: str = "http://music.163.com"
|
26 |
+
COOKIES: SimpleCookie = SimpleCookie(_Config["net"]["cookie"].as_str())
|
27 |
+
SOURCE_IP_SEGMENT: IPv4Network = _Config["net"]["source"].get(IPv4Network)
|
28 |
+
DEFAULT_HEADERS: dict[str, str] = {
|
29 |
+
"user-agent": _Config["net"]["user-agent"].as_str(),
|
30 |
+
"referer": "http://music.163.com",
|
31 |
+
}
|
32 |
+
|
33 |
+
CONFIG: APIConfig = _Config
|
hibiapi/api/netease/net.py
ADDED
@@ -0,0 +1,13 @@
1 |
+
from httpx import Cookies
|
2 |
+
|
3 |
+
from hibiapi.utils.net import BaseNetClient
|
4 |
+
|
5 |
+
from .constants import NeteaseConstants
|
6 |
+
|
7 |
+
|
8 |
+
class NetRequest(BaseNetClient):
|
9 |
+
def __init__(self):
|
10 |
+
super().__init__(
|
11 |
+
headers=NeteaseConstants.DEFAULT_HEADERS,
|
12 |
+
cookies=Cookies({k: v.value for k, v in NeteaseConstants.COOKIES.items()}),
|
13 |
+
)
|
hibiapi/api/pixiv/__init__.py
ADDED
@@ -0,0 +1,13 @@
1 |
+
# flake8:noqa:F401
|
2 |
+
from .api import (
|
3 |
+
IllustType,
|
4 |
+
PixivEndpoints,
|
5 |
+
RankingDate,
|
6 |
+
RankingType,
|
7 |
+
SearchDurationType,
|
8 |
+
SearchModeType,
|
9 |
+
SearchNovelModeType,
|
10 |
+
SearchSortType,
|
11 |
+
)
|
12 |
+
from .constants import PixivConstants
|
13 |
+
from .net import NetRequest, PixivAuthData
|
hibiapi/api/pixiv/api.py
ADDED
@@ -0,0 +1,613 @@
1 |
+
import json
|
2 |
+
import re
|
3 |
+
from datetime import date, timedelta
|
4 |
+
from enum import Enum
|
5 |
+
from typing import Any, Literal, Optional, Union, cast, overload
|
6 |
+
|
7 |
+
from hibiapi.api.pixiv.constants import PixivConstants
|
8 |
+
from hibiapi.api.pixiv.net import NetRequest as PixivNetClient
|
9 |
+
from hibiapi.utils.cache import cache_config
|
10 |
+
from hibiapi.utils.decorators import enum_auto_doc
|
11 |
+
from hibiapi.utils.net import catch_network_error
|
12 |
+
from hibiapi.utils.routing import BaseEndpoint, dont_route, request_headers
|
13 |
+
|
14 |
+
|
15 |
+
@enum_auto_doc
|
16 |
+
class IllustType(str, Enum):
    """Artwork type"""

    illust = "illust"
    """Illustration"""
    manga = "manga"
    """Manga"""
|
23 |
+
|
24 |
+
|
25 |
+
@enum_auto_doc
|
26 |
+
class RankingType(str, Enum):
    """Ranking content type"""

    day = "day"
    """Daily ranking"""
    week = "week"
    """Weekly ranking"""
    month = "month"
    """Monthly ranking"""
    day_male = "day_male"
    """Popular among male users"""
    day_female = "day_female"
    """Popular among female users"""
    week_original = "week_original"
    """Weekly original works"""
    week_rookie = "week_rookie"
    """Weekly rookie works"""
    day_ai = "day_ai"
    """Daily AI-generated works"""
    day_manga = "day_manga"
    """Daily manga"""
    week_manga = "week_manga"
    """Weekly manga"""
    month_manga = "month_manga"
    """Monthly manga"""
    week_rookie_manga = "week_rookie_manga"
    """Weekly rookie manga"""
    day_r18 = "day_r18"
    day_male_r18 = "day_male_r18"
    day_female_r18 = "day_female_r18"
    week_r18 = "week_r18"
    week_r18g = "week_r18g"
    day_r18_ai = "day_r18_ai"
    day_r18_manga = "day_r18_manga"
    week_r18_manga = "week_r18_manga"
|
61 |
+
|
62 |
+
|
63 |
+
@enum_auto_doc
|
64 |
+
class SearchModeType(str, Enum):
    """Search match type"""

    partial_match_for_tags = "partial_match_for_tags"
    """Partial tag match"""
    exact_match_for_tags = "exact_match_for_tags"
    """Exact tag match"""
    title_and_caption = "title_and_caption"
    """Title and caption"""
|
73 |
+
|
74 |
+
|
75 |
+
@enum_auto_doc
|
76 |
+
class SearchNovelModeType(str, Enum):
    """Novel search match type"""

    partial_match_for_tags = "partial_match_for_tags"
    """Partial tag match"""
    exact_match_for_tags = "exact_match_for_tags"
    """Exact tag match"""
    text = "text"
    """Full text"""
    keyword = "keyword"
    """Keyword"""
|
87 |
+
|
88 |
+
|
89 |
+
@enum_auto_doc
|
90 |
+
class SearchSortType(str, Enum):
    """Search sort type"""

    date_desc = "date_desc"
    """Date descending"""
    date_asc = "date_asc"
    """Date ascending"""
    popular_desc = "popular_desc"
    """Popularity descending (Premium feature)"""
|
99 |
+
|
100 |
+
|
101 |
+
@enum_auto_doc
|
102 |
+
class SearchDurationType(str, Enum):
    """Search duration type"""

    within_last_day = "within_last_day"
    """Within the last day"""
    within_last_week = "within_last_week"
    """Within the last week"""
    within_last_month = "within_last_month"
    """Within the last month"""
|
111 |
+
|
112 |
+
|
113 |
+
class RankingDate(date):
|
114 |
+
@classmethod
|
115 |
+
def yesterday(cls) -> "RankingDate":
|
116 |
+
yesterday = cls.today() - timedelta(days=1)
|
117 |
+
return cls(yesterday.year, yesterday.month, yesterday.day)
|
118 |
+
|
119 |
+
def toString(self) -> str:
|
120 |
+
return self.strftime(r"%Y-%m-%d")
|
121 |
+
|
122 |
+
@classmethod
|
123 |
+
def new(cls, date: date) -> "RankingDate":
|
124 |
+
return cls(date.year, date.month, date.day)
|
125 |
+
|
126 |
+
|
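# Quick illustration of the RankingDate helper above: ranking queries default to
# "yesterday" because pixiv only publishes a day's ranking after the day closes.
#   >>> RankingDate.yesterday().toString()
#   '2024-05-01'            # whatever yesterday happens to be
#   >>> RankingDate.new(date(2023, 1, 15)).toString()
#   '2023-01-15'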
127 |
+
class PixivEndpoints(BaseEndpoint):
|
128 |
+
@staticmethod
|
129 |
+
def _parse_accept_language(accept_language: str) -> str:
|
130 |
+
first_language, *_ = accept_language.partition(",")
|
131 |
+
language_code, *_ = first_language.partition(";")
|
132 |
+
return language_code.lower().strip()
|
133 |
+
|
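    # e.g. _parse_accept_language("zh-CN,zh;q=0.9,en;q=0.8") -> "zh-cn"
    #      _parse_accept_language("en-US")                   -> "en-us"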
134 |
+
@overload
|
135 |
+
async def request(
|
136 |
+
self,
|
137 |
+
endpoint: str,
|
138 |
+
*,
|
139 |
+
params: Optional[dict[str, Any]] = None,
|
140 |
+
return_text: Literal[False] = False,
|
141 |
+
) -> dict[str, Any]: ...
|
142 |
+
|
143 |
+
@overload
|
144 |
+
async def request(
|
145 |
+
self,
|
146 |
+
endpoint: str,
|
147 |
+
*,
|
148 |
+
params: Optional[dict[str, Any]] = None,
|
149 |
+
return_text: Literal[True],
|
150 |
+
) -> str: ...
|
151 |
+
|
152 |
+
@dont_route
|
153 |
+
@catch_network_error
|
154 |
+
async def request(
|
155 |
+
self,
|
156 |
+
endpoint: str,
|
157 |
+
*,
|
158 |
+
params: Optional[dict[str, Any]] = None,
|
159 |
+
return_text: bool = False,
|
160 |
+
) -> Union[dict[str, Any], str]:
|
161 |
+
headers = self.client.headers.copy()
|
162 |
+
|
163 |
+
net_client = cast(PixivNetClient, self.client.net_client)
|
164 |
+
async with net_client.auth_lock:
|
165 |
+
auth, token = net_client.get_available_user()
|
166 |
+
if auth is None:
|
167 |
+
auth = await net_client.auth(token)
|
168 |
+
headers["Authorization"] = f"Bearer {auth.access_token}"
|
169 |
+
|
170 |
+
if language := request_headers.get().get("Accept-Language"):
|
171 |
+
language = self._parse_accept_language(language)
|
172 |
+
headers["Accept-Language"] = language
|
173 |
+
|
174 |
+
response = await self.client.get(
|
175 |
+
self._join(
|
176 |
+
base=PixivConstants.APP_HOST,
|
177 |
+
endpoint=endpoint,
|
178 |
+
params=params or {},
|
179 |
+
),
|
180 |
+
headers=headers,
|
181 |
+
)
|
182 |
+
if return_text:
|
183 |
+
return response.text
|
184 |
+
return response.json()
|
185 |
+
|
186 |
+
@cache_config(ttl=timedelta(days=3))
|
187 |
+
async def illust(self, *, id: int):
|
188 |
+
return await self.request("v1/illust/detail", params={"illust_id": id})
|
189 |
+
|
190 |
+
@cache_config(ttl=timedelta(days=1))
|
191 |
+
async def member(self, *, id: int):
|
192 |
+
return await self.request("v1/user/detail", params={"user_id": id})
|
193 |
+
|
194 |
+
async def member_illust(
|
195 |
+
self,
|
196 |
+
*,
|
197 |
+
id: int,
|
198 |
+
illust_type: IllustType = IllustType.illust,
|
199 |
+
page: int = 1,
|
200 |
+
size: int = 30,
|
201 |
+
):
|
202 |
+
return await self.request(
|
203 |
+
"v1/user/illusts",
|
204 |
+
params={
|
205 |
+
"user_id": id,
|
206 |
+
"type": illust_type,
|
207 |
+
"offset": (page - 1) * size,
|
208 |
+
},
|
209 |
+
)
|
210 |
+
|
211 |
+
async def favorite(
|
212 |
+
self,
|
213 |
+
*,
|
214 |
+
id: int,
|
215 |
+
tag: Optional[str] = None,
|
216 |
+
max_bookmark_id: Optional[int] = None,
|
217 |
+
):
|
218 |
+
return await self.request(
|
219 |
+
"v1/user/bookmarks/illust",
|
220 |
+
params={
|
221 |
+
"user_id": id,
|
222 |
+
"tag": tag,
|
223 |
+
"restrict": "public",
|
224 |
+
"max_bookmark_id": max_bookmark_id or None,
|
225 |
+
},
|
226 |
+
)
|
227 |
+
|
228 |
+
# Novels bookmarked by the user
|
229 |
+
async def favorite_novel(
|
230 |
+
self,
|
231 |
+
*,
|
232 |
+
id: int,
|
233 |
+
tag: Optional[str] = None,
|
234 |
+
):
|
235 |
+
return await self.request(
|
236 |
+
"v1/user/bookmarks/novel",
|
237 |
+
params={
|
238 |
+
"user_id": id,
|
239 |
+
"tag": tag,
|
240 |
+
"restrict": "public",
|
241 |
+
},
|
242 |
+
)
|
243 |
+
|
244 |
+
async def following(self, *, id: int, page: int = 1, size: int = 30):
|
245 |
+
return await self.request(
|
246 |
+
"v1/user/following",
|
247 |
+
params={
|
248 |
+
"user_id": id,
|
249 |
+
"offset": (page - 1) * size,
|
250 |
+
},
|
251 |
+
)
|
252 |
+
|
253 |
+
async def follower(self, *, id: int, page: int = 1, size: int = 30):
|
254 |
+
return await self.request(
|
255 |
+
"v1/user/follower",
|
256 |
+
params={
|
257 |
+
"user_id": id,
|
258 |
+
"offset": (page - 1) * size,
|
259 |
+
},
|
260 |
+
)
|
261 |
+
|
262 |
+
@cache_config(ttl=timedelta(hours=12))
|
263 |
+
async def rank(
|
264 |
+
self,
|
265 |
+
*,
|
266 |
+
mode: RankingType = RankingType.week,
|
267 |
+
date: Optional[RankingDate] = None,
|
268 |
+
page: int = 1,
|
269 |
+
size: int = 30,
|
270 |
+
):
|
271 |
+
return await self.request(
|
272 |
+
"v1/illust/ranking",
|
273 |
+
params={
|
274 |
+
"mode": mode,
|
275 |
+
"date": RankingDate.new(date or RankingDate.yesterday()).toString(),
|
276 |
+
"offset": (page - 1) * size,
|
277 |
+
},
|
278 |
+
)
|
279 |
+
|
280 |
+
async def search(
|
281 |
+
self,
|
282 |
+
*,
|
283 |
+
word: str,
|
284 |
+
mode: SearchModeType = SearchModeType.partial_match_for_tags,
|
285 |
+
order: SearchSortType = SearchSortType.date_desc,
|
286 |
+
duration: Optional[SearchDurationType] = None,
|
287 |
+
page: int = 1,
|
288 |
+
size: int = 30,
|
289 |
+
include_translated_tag_results: bool = True,
|
290 |
+
search_ai_type: bool = True,  # whether search results include AI-generated works
|
291 |
+
):
|
292 |
+
return await self.request(
|
293 |
+
"v1/search/illust",
|
294 |
+
params={
|
295 |
+
"word": word,
|
296 |
+
"search_target": mode,
|
297 |
+
"sort": order,
|
298 |
+
"duration": duration,
|
299 |
+
"offset": (page - 1) * size,
|
300 |
+
"include_translated_tag_results": include_translated_tag_results,
|
301 |
+
"search_ai_type": 1 if search_ai_type else 0,
|
302 |
+
},
|
303 |
+
)
|
304 |
+
|
305 |
+
# Popular illustration works preview
|
306 |
+
async def popular_preview(
|
307 |
+
self,
|
308 |
+
*,
|
309 |
+
word: str,
|
310 |
+
mode: SearchModeType = SearchModeType.partial_match_for_tags,
|
311 |
+
merge_plain_keyword_results: bool = True,
|
312 |
+
include_translated_tag_results: bool = True,
|
313 |
+
filter: str = "for_ios",
|
314 |
+
):
|
315 |
+
return await self.request(
|
316 |
+
"v1/search/popular-preview/illust",
|
317 |
+
params={
|
318 |
+
"word": word,
|
319 |
+
"search_target": mode,
|
320 |
+
"merge_plain_keyword_results": merge_plain_keyword_results,
|
321 |
+
"include_translated_tag_results": include_translated_tag_results,
|
322 |
+
"filter": filter,
|
323 |
+
},
|
324 |
+
)
|
325 |
+
|
326 |
+
async def search_user(
|
327 |
+
self,
|
328 |
+
*,
|
329 |
+
word: str,
|
330 |
+
page: int = 1,
|
331 |
+
size: int = 30,
|
332 |
+
):
|
333 |
+
return await self.request(
|
334 |
+
"v1/search/user",
|
335 |
+
params={"word": word, "offset": (page - 1) * size},
|
336 |
+
)
|
337 |
+
|
338 |
+
async def tags_autocomplete(
|
339 |
+
self,
|
340 |
+
*,
|
341 |
+
word: str,
|
342 |
+
merge_plain_keyword_results: bool = True,
|
343 |
+
):
|
344 |
+
return await self.request(
|
345 |
+
"/v2/search/autocomplete",
|
346 |
+
params={
|
347 |
+
"word": word,
|
348 |
+
"merge_plain_keyword_results": merge_plain_keyword_results,
|
349 |
+
},
|
350 |
+
)
|
351 |
+
|
352 |
+
@cache_config(ttl=timedelta(hours=12))
|
353 |
+
async def tags(self):
|
354 |
+
return await self.request("v1/trending-tags/illust")
|
355 |
+
|
356 |
+
@cache_config(ttl=timedelta(minutes=15))
|
357 |
+
async def related(self, *, id: int, page: int = 1, size: int = 30):
|
358 |
+
return await self.request(
|
359 |
+
"v2/illust/related",
|
360 |
+
params={
|
361 |
+
"illust_id": id,
|
362 |
+
"offset": (page - 1) * size,
|
363 |
+
},
|
364 |
+
)
|
365 |
+
|
366 |
+
@cache_config(ttl=timedelta(days=3))
|
367 |
+
async def ugoira_metadata(self, *, id: int):
|
368 |
+
return await self.request(
|
369 |
+
"v1/ugoira/metadata",
|
370 |
+
params={
|
371 |
+
"illust_id": id,
|
372 |
+
},
|
373 |
+
)
|
374 |
+
|
375 |
+
# Everyone's new works (illustrations)
|
376 |
+
async def illust_new(
|
377 |
+
self,
|
378 |
+
*,
|
379 |
+
content_type: str = "illust",
|
380 |
+
):
|
381 |
+
return await self.request(
|
382 |
+
"v1/illust/new",
|
383 |
+
params={
|
384 |
+
"content_type": content_type,
|
385 |
+
"filter": "for_ios",
|
386 |
+
},
|
387 |
+
)
|
388 |
+
|
389 |
+
# pixivision (spotlight / special feature) list
|
390 |
+
async def spotlights(
|
391 |
+
self,
|
392 |
+
*,
|
393 |
+
category: str = "all",
|
394 |
+
page: int = 1,
|
395 |
+
size: int = 10,
|
396 |
+
):
|
397 |
+
return await self.request(
|
398 |
+
"v1/spotlight/articles",
|
399 |
+
params={
|
400 |
+
"filter": "for_ios",
|
401 |
+
"category": category,
|
402 |
+
"offset": (page - 1) * size,
|
403 |
+
},
|
404 |
+
)
|
405 |
+
|
406 |
+
# Illustration comments
|
407 |
+
async def illust_comments(
|
408 |
+
self,
|
409 |
+
*,
|
410 |
+
id: int,
|
411 |
+
page: int = 1,
|
412 |
+
size: int = 30,
|
413 |
+
):
|
414 |
+
return await self.request(
|
415 |
+
"v3/illust/comments",
|
416 |
+
params={
|
417 |
+
"illust_id": id,
|
418 |
+
"offset": (page - 1) * size,
|
419 |
+
},
|
420 |
+
)
|
421 |
+
|
422 |
+
# Illustration comment replies
|
423 |
+
async def illust_comment_replies(
|
424 |
+
self,
|
425 |
+
*,
|
426 |
+
id: int,
|
427 |
+
):
|
428 |
+
return await self.request(
|
429 |
+
"v2/illust/comment/replies",
|
430 |
+
params={
|
431 |
+
"comment_id": id,
|
432 |
+
},
|
433 |
+
)
|
434 |
+
|
435 |
+
# Novel comments
|
436 |
+
async def novel_comments(
|
437 |
+
self,
|
438 |
+
*,
|
439 |
+
id: int,
|
440 |
+
page: int = 1,
|
441 |
+
size: int = 30,
|
442 |
+
):
|
443 |
+
return await self.request(
|
444 |
+
"v3/novel/comments",
|
445 |
+
params={
|
446 |
+
"novel_id": id,
|
447 |
+
"offset": (page - 1) * size,
|
448 |
+
},
|
449 |
+
)
|
450 |
+
|
451 |
+
# Novel comment replies
|
452 |
+
async def novel_comment_replies(
|
453 |
+
self,
|
454 |
+
*,
|
455 |
+
id: int,
|
456 |
+
):
|
457 |
+
return await self.request(
|
458 |
+
"v2/novel/comment/replies",
|
459 |
+
params={
|
460 |
+
"comment_id": id,
|
461 |
+
},
|
462 |
+
)
|
463 |
+
|
464 |
+
# Novel ranking
|
465 |
+
async def rank_novel(
|
466 |
+
self,
|
467 |
+
*,
|
468 |
+
mode: str = "day",
|
469 |
+
date: Optional[RankingDate] = None,
|
470 |
+
page: int = 1,
|
471 |
+
size: int = 30,
|
472 |
+
):
|
473 |
+
return await self.request(
|
474 |
+
"v1/novel/ranking",
|
475 |
+
params={
|
476 |
+
"mode": mode,
|
477 |
+
"date": RankingDate.new(date or RankingDate.yesterday()).toString(),
|
478 |
+
"offset": (page - 1) * size,
|
479 |
+
},
|
480 |
+
)
|
481 |
+
|
482 |
+
async def member_novel(self, *, id: int, page: int = 1, size: int = 30):
|
483 |
+
return await self.request(
|
484 |
+
"/v1/user/novels",
|
485 |
+
params={
|
486 |
+
"user_id": id,
|
487 |
+
"offset": (page - 1) * size,
|
488 |
+
},
|
489 |
+
)
|
490 |
+
|
491 |
+
async def novel_series(self, *, id: int):
|
492 |
+
return await self.request("/v2/novel/series", params={"series_id": id})
|
493 |
+
|
494 |
+
async def novel_detail(self, *, id: int):
|
495 |
+
return await self.request("/v2/novel/detail", params={"novel_id": id})
|
496 |
+
|
497 |
+
# Removed from the official API; handled via webview/v2/novel for compatibility
|
498 |
+
async def novel_text(self, *, id: int):
|
499 |
+
# return await self.request("/v1/novel/text", params={"novel_id": id})
|
500 |
+
response = await self.webview_novel(id=id)
|
501 |
+
return {"novel_text": response["text"] or ""}
|
502 |
+
|
503 |
+
# Fetch the novel HTML and parse the embedded JSON
|
504 |
+
async def webview_novel(self, *, id: int):
|
505 |
+
response = await self.request(
|
506 |
+
"webview/v2/novel",
|
507 |
+
params={
|
508 |
+
"id": id,
|
509 |
+
"viewer_version": "20221031_ai",
|
510 |
+
},
|
511 |
+
return_text=True,
|
512 |
+
)
|
513 |
+
|
514 |
+
novel_match = re.search(r"novel:\s+(?P<data>{.+?}),\s+isOwnWork", response)
|
515 |
+
return json.loads(novel_match["data"] if novel_match else response)
|
516 |
+
|
517 |
+
@cache_config(ttl=timedelta(hours=12))
|
518 |
+
async def tags_novel(self):
|
519 |
+
return await self.request("v1/trending-tags/novel")
|
520 |
+
|
521 |
+
async def search_novel(
|
522 |
+
self,
|
523 |
+
*,
|
524 |
+
word: str,
|
525 |
+
mode: SearchNovelModeType = SearchNovelModeType.partial_match_for_tags,
|
526 |
+
sort: SearchSortType = SearchSortType.date_desc,
|
527 |
+
merge_plain_keyword_results: bool = True,
|
528 |
+
include_translated_tag_results: bool = True,
|
529 |
+
duration: Optional[SearchDurationType] = None,
|
530 |
+
page: int = 1,
|
531 |
+
size: int = 30,
|
532 |
+
search_ai_type: bool = True,  # whether search results include AI-generated works
|
533 |
+
):
|
534 |
+
return await self.request(
|
535 |
+
"/v1/search/novel",
|
536 |
+
params={
|
537 |
+
"word": word,
|
538 |
+
"search_target": mode,
|
539 |
+
"sort": sort,
|
540 |
+
"merge_plain_keyword_results": merge_plain_keyword_results,
|
541 |
+
"include_translated_tag_results": include_translated_tag_results,
|
542 |
+
"duration": duration,
|
543 |
+
"offset": (page - 1) * size,
|
544 |
+
"search_ai_type": 1 if search_ai_type else 0,
|
545 |
+
},
|
546 |
+
)
|
547 |
+
|
548 |
+
# Popular novel works preview
|
549 |
+
async def popular_preview_novel(
|
550 |
+
self,
|
551 |
+
*,
|
552 |
+
word: str,
|
553 |
+
mode: SearchNovelModeType = SearchNovelModeType.partial_match_for_tags,
|
554 |
+
merge_plain_keyword_results: bool = True,
|
555 |
+
include_translated_tag_results: bool = True,
|
556 |
+
filter: str = "for_ios",
|
557 |
+
):
|
558 |
+
return await self.request(
|
559 |
+
"v1/search/popular-preview/novel",
|
560 |
+
params={
|
561 |
+
"word": word,
|
562 |
+
"search_target": mode,
|
563 |
+
"merge_plain_keyword_results": merge_plain_keyword_results,
|
564 |
+
"include_translated_tag_results": include_translated_tag_results,
|
565 |
+
"filter": filter,
|
566 |
+
},
|
567 |
+
)
|
568 |
+
|
569 |
+
async def novel_new(self, *, max_novel_id: Optional[int] = None):
|
570 |
+
return await self.request(
|
571 |
+
"/v1/novel/new", params={"max_novel_id": max_novel_id}
|
572 |
+
)
|
573 |
+
|
574 |
+
# Popular live stream list
|
575 |
+
async def live_list(self, *, page: int = 1, size: int = 30):
|
576 |
+
params = {"list_type": "popular", "offset": (page - 1) * size}
|
577 |
+
if not params["offset"]:
|
578 |
+
del params["offset"]
|
579 |
+
return await self.request("v1/live/list", params=params)
|
580 |
+
|
581 |
+
# Related novel works
|
582 |
+
async def related_novel(self, *, id: int, page: int = 1, size: int = 30):
|
583 |
+
return await self.request(
|
584 |
+
"v1/novel/related",
|
585 |
+
params={
|
586 |
+
"novel_id": id,
|
587 |
+
"offset": (page - 1) * size,
|
588 |
+
},
|
589 |
+
)
|
590 |
+
|
591 |
+
# Related users
|
592 |
+
async def related_member(self, *, id: int):
|
593 |
+
return await self.request("v1/user/related", params={"seed_user_id": id})
|
594 |
+
|
595 |
+
# Manga series
|
596 |
+
async def illust_series(self, *, id: int, page: int = 1, size: int = 30):
|
597 |
+
return await self.request(
|
598 |
+
"v1/illust/series",
|
599 |
+
params={"illust_series_id": id, "offset": (page - 1) * size},
|
600 |
+
)
|
601 |
+
|
602 |
+
# The user's manga series
|
603 |
+
async def member_illust_series(self, *, id: int, page: int = 1, size: int = 30):
|
604 |
+
return await self.request(
|
605 |
+
"v1/user/illust-series",
|
606 |
+
params={"user_id": id, "offset": (page - 1) * size},
|
607 |
+
)
|
608 |
+
|
609 |
+
# The user's novel series
|
610 |
+
async def member_novel_series(self, *, id: int, page: int = 1, size: int = 30):
|
611 |
+
return await self.request(
|
612 |
+
"v1/user/novel-series", params={"user_id": id, "offset": (page - 1) * size}
|
613 |
+
)
|
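# Hypothetical usage sketch (not part of the diff): PixivEndpoints is normally
# mounted by hibiapi.app.routes with a NetRequest holding one or more refresh
# tokens; the wiring below mirrors that, but the constructor details are assumed.
import asyncio

from hibiapi.api.pixiv.api import PixivEndpoints, RankingType
from hibiapi.api.pixiv.net import NetRequest


async def demo():
    async with NetRequest(tokens=["<your pixiv refresh token>"]) as client:
        pixiv = PixivEndpoints(client)  # assumption: endpoint wraps the client
        ranking = await pixiv.rank(mode=RankingType.day, page=1, size=10)
        print(len(ranking.get("illusts", [])))


asyncio.run(demo())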
hibiapi/api/pixiv/constants.py
ADDED
@@ -0,0 +1,19 @@
1 |
+
from typing import Any
|
2 |
+
|
3 |
+
from hibiapi.utils.config import APIConfig
|
4 |
+
|
5 |
+
|
6 |
+
class PixivConstants:
|
7 |
+
DEFAULT_HEADERS: dict[str, Any] = {
|
8 |
+
"App-OS": "ios",
|
9 |
+
"App-OS-Version": "14.6",
|
10 |
+
"User-Agent": "PixivIOSApp/7.13.3 (iOS 14.6; iPhone13,2)",
|
11 |
+
}
|
12 |
+
CLIENT_ID: str = "MOBrBDS8blbauoSck0ZfDbtuzpyT"
|
13 |
+
CLIENT_SECRET: str = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"
|
14 |
+
HASH_SECRET: bytes = (
|
15 |
+
b"28c1fdd170a5204386cb1313c7077b34f83e4aaf4aa829ce78c231e05b0bae2c"
|
16 |
+
)
|
17 |
+
CONFIG: APIConfig = APIConfig("pixiv")
|
18 |
+
APP_HOST: str = "https://app-api.pixiv.net"
|
19 |
+
AUTH_HOST: str = "https://oauth.secure.pixiv.net"
|
hibiapi/api/pixiv/net.py
ADDED
@@ -0,0 +1,85 @@
1 |
+
import asyncio
|
2 |
+
import hashlib
|
3 |
+
from datetime import datetime, timedelta, timezone
|
4 |
+
from itertools import cycle
|
5 |
+
|
6 |
+
from httpx import URL
|
7 |
+
from pydantic import BaseModel, Extra, Field
|
8 |
+
|
9 |
+
from hibiapi.utils.log import logger
|
10 |
+
from hibiapi.utils.net import BaseNetClient
|
11 |
+
|
12 |
+
from .constants import PixivConstants
|
13 |
+
|
14 |
+
|
15 |
+
class AccountDataModel(BaseModel):
|
16 |
+
class Config:
|
17 |
+
extra = Extra.allow
|
18 |
+
|
19 |
+
|
20 |
+
class PixivUserData(AccountDataModel):
|
21 |
+
account: str
|
22 |
+
id: int
|
23 |
+
is_premium: bool
|
24 |
+
mail_address: str
|
25 |
+
name: str
|
26 |
+
|
27 |
+
|
28 |
+
class PixivAuthData(AccountDataModel):
|
29 |
+
time: datetime = Field(default_factory=datetime.now)
|
30 |
+
expires_in: int
|
31 |
+
access_token: str
|
32 |
+
refresh_token: str
|
33 |
+
user: PixivUserData
|
34 |
+
|
35 |
+
|
36 |
+
class NetRequest(BaseNetClient):
|
37 |
+
def __init__(self, tokens: list[str]):
|
38 |
+
super().__init__(
|
39 |
+
headers=PixivConstants.DEFAULT_HEADERS.copy(),
|
40 |
+
proxies=PixivConstants.CONFIG["proxy"].as_dict(),
|
41 |
+
)
|
42 |
+
self.user_tokens = cycle(tokens)
|
43 |
+
self.auth_lock = asyncio.Lock()
|
44 |
+
self.user_tokens_dict: dict[str, PixivAuthData] = {}
|
45 |
+
self.headers["accept-language"] = PixivConstants.CONFIG["language"].as_str()
|
46 |
+
|
47 |
+
def get_available_user(self):
|
48 |
+
token = next(self.user_tokens)
|
49 |
+
if (auth_data := self.user_tokens_dict.get(token)) and (
|
50 |
+
auth_data.time + timedelta(minutes=1, seconds=auth_data.expires_in)
|
51 |
+
> datetime.now()
|
52 |
+
):
|
53 |
+
return auth_data, token
|
54 |
+
return None, token
|
55 |
+
|
56 |
+
async def auth(self, refresh_token: str):
|
57 |
+
url = URL(PixivConstants.AUTH_HOST).join("/auth/token")
|
58 |
+
time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S+00:00")
|
59 |
+
headers = {
|
60 |
+
**self.headers,
|
61 |
+
"X-Client-Time": time,
|
62 |
+
"X-Client-Hash": hashlib.md5(
|
63 |
+
time.encode() + PixivConstants.HASH_SECRET
|
64 |
+
).hexdigest(),
|
65 |
+
}
|
66 |
+
payload = {
|
67 |
+
"get_secure_url": 1,
|
68 |
+
"client_id": PixivConstants.CLIENT_ID,
|
69 |
+
"client_secret": PixivConstants.CLIENT_SECRET,
|
70 |
+
"grant_type": "refresh_token",
|
71 |
+
"refresh_token": refresh_token,
|
72 |
+
}
|
73 |
+
|
74 |
+
async with self as client:
|
75 |
+
response = await client.post(url, data=payload, headers=headers)
|
76 |
+
response.raise_for_status()
|
77 |
+
|
78 |
+
self.user_tokens_dict[refresh_token] = PixivAuthData.parse_obj(response.json())
|
79 |
+
user_data = self.user_tokens_dict[refresh_token].user
|
80 |
+
logger.opt(colors=True).info(
|
81 |
+
f"Pixiv account <m>{user_data.id}</m> info <b>Updated</b>: "
|
82 |
+
f"<b><e>{user_data.name}</e>({user_data.account})</b>."
|
83 |
+
)
|
84 |
+
|
85 |
+
return self.user_tokens_dict[refresh_token]
|
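# Standalone illustration of the X-Client-Hash header computed in auth() above:
# MD5 over the client time string concatenated with the shared hash secret.
import hashlib
from datetime import datetime, timezone

from hibiapi.api.pixiv.constants import PixivConstants

client_time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S+00:00")
client_hash = hashlib.md5(client_time.encode() + PixivConstants.HASH_SECRET).hexdigest()
print(client_time, client_hash)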
hibiapi/api/qrcode.py
ADDED
@@ -0,0 +1,160 @@
1 |
+
from datetime import datetime
|
2 |
+
from enum import Enum
|
3 |
+
from io import BytesIO
|
4 |
+
from os import fdopen
|
5 |
+
from pathlib import Path
|
6 |
+
from typing import Literal, Optional, cast
|
7 |
+
|
8 |
+
from PIL import Image
|
9 |
+
from pydantic import AnyHttpUrl, BaseModel, Field, validate_arguments
|
10 |
+
from pydantic.color import Color
|
11 |
+
from qrcode import constants
|
12 |
+
from qrcode.image.pil import PilImage
|
13 |
+
from qrcode.main import QRCode
|
14 |
+
|
15 |
+
from hibiapi.utils.config import APIConfig
|
16 |
+
from hibiapi.utils.decorators import ToAsync, enum_auto_doc
|
17 |
+
from hibiapi.utils.exceptions import ClientSideException
|
18 |
+
from hibiapi.utils.net import BaseNetClient
|
19 |
+
from hibiapi.utils.routing import BaseHostUrl
|
20 |
+
from hibiapi.utils.temp import TempFile
|
21 |
+
|
22 |
+
Config = APIConfig("qrcode")
|
23 |
+
|
24 |
+
|
25 |
+
class HostUrl(BaseHostUrl):
|
26 |
+
allowed_hosts = Config["qrcode"]["icon-site"].get(list[str])
|
27 |
+
|
28 |
+
|
29 |
+
@enum_auto_doc
|
30 |
+
class QRCodeLevel(str, Enum):
    """QR code error correction level"""

    LOW = "L"
    """Lowest error correction"""
    MEDIUM = "M"
    """Medium error correction"""
    QUARTILE = "Q"
    """High error correction"""
    HIGH = "H"
    """Highest error correction"""
|
41 |
+
|
42 |
+
|
43 |
+
@enum_auto_doc
|
44 |
+
class ReturnEncode(str, Enum):
    """Encoding of the returned QR code response"""

    raw = "raw"
    """Redirect directly to the QR code image"""
    json = "json"
    """Return the QR code information as JSON"""
    js = "js"
    jsc = "jsc"
|
53 |
+
|
54 |
+
|
55 |
+
COLOR_WHITE = Color("FFFFFF")
|
56 |
+
COLOR_BLACK = Color("000000")
|
57 |
+
|
58 |
+
|
59 |
+
class QRInfo(BaseModel):
|
60 |
+
url: Optional[AnyHttpUrl] = None
|
61 |
+
path: Path
|
62 |
+
time: datetime = Field(default_factory=datetime.now)
|
63 |
+
data: str
|
64 |
+
logo: Optional[HostUrl] = None
|
65 |
+
level: QRCodeLevel = QRCodeLevel.MEDIUM
|
66 |
+
size: int = 200
|
67 |
+
code: Literal[0] = 0
|
68 |
+
status: Literal["success"] = "success"
|
69 |
+
|
70 |
+
@classmethod
|
71 |
+
@validate_arguments
|
72 |
+
async def new(
|
73 |
+
cls,
|
74 |
+
text: str,
|
75 |
+
*,
|
76 |
+
size: int = Field(
|
77 |
+
200,
|
78 |
+
gt=Config["qrcode"]["min-size"].as_number(),
|
79 |
+
lt=Config["qrcode"]["max-size"].as_number(),
|
80 |
+
),
|
81 |
+
logo: Optional[HostUrl] = None,
|
82 |
+
level: QRCodeLevel = QRCodeLevel.MEDIUM,
|
83 |
+
bgcolor: Color = COLOR_WHITE,
|
84 |
+
fgcolor: Color = COLOR_BLACK,
|
85 |
+
):
|
86 |
+
icon_stream = None
|
87 |
+
if logo is not None:
|
88 |
+
async with BaseNetClient() as client:
|
89 |
+
response = await client.get(
|
90 |
+
logo, headers={"user-agent": "HibiAPI@GitHub"}, timeout=6
|
91 |
+
)
|
92 |
+
response.raise_for_status()
|
93 |
+
icon_stream = BytesIO(response.content)
|
94 |
+
return cls(
|
95 |
+
data=text,
|
96 |
+
logo=logo,
|
97 |
+
level=level,
|
98 |
+
size=size,
|
99 |
+
path=await cls._generate(
|
100 |
+
text,
|
101 |
+
size=size,
|
102 |
+
level=level,
|
103 |
+
icon_stream=icon_stream,
|
104 |
+
bgcolor=bgcolor.as_hex(),
|
105 |
+
fgcolor=fgcolor.as_hex(),
|
106 |
+
),
|
107 |
+
)
|
108 |
+
|
109 |
+
@classmethod
|
110 |
+
@ToAsync
|
111 |
+
def _generate(
|
112 |
+
cls,
|
113 |
+
text: str,
|
114 |
+
*,
|
115 |
+
size: int = 200,
|
116 |
+
level: QRCodeLevel = QRCodeLevel.MEDIUM,
|
117 |
+
icon_stream: Optional[BytesIO] = None,
|
118 |
+
bgcolor: str = "#FFFFFF",
|
119 |
+
fgcolor: str = "#000000",
|
120 |
+
) -> Path:
|
121 |
+
qr = QRCode(
|
122 |
+
error_correction={
|
123 |
+
QRCodeLevel.LOW: constants.ERROR_CORRECT_L,
|
124 |
+
QRCodeLevel.MEDIUM: constants.ERROR_CORRECT_M,
|
125 |
+
QRCodeLevel.QUARTILE: constants.ERROR_CORRECT_Q,
|
126 |
+
QRCodeLevel.HIGH: constants.ERROR_CORRECT_H,
|
127 |
+
}[level],
|
128 |
+
border=2,
|
129 |
+
box_size=8,
|
130 |
+
)
|
131 |
+
qr.add_data(text)
|
132 |
+
image = cast(
|
133 |
+
Image.Image,
|
134 |
+
qr.make_image(
|
135 |
+
PilImage,
|
136 |
+
back_color=bgcolor,
|
137 |
+
fill_color=fgcolor,
|
138 |
+
).get_image(),
|
139 |
+
)
|
140 |
+
image = image.resize((size, size))
|
141 |
+
if icon_stream is not None:
|
142 |
+
try:
|
143 |
+
icon = Image.open(icon_stream)
|
144 |
+
except ValueError as e:
|
145 |
+
raise ClientSideException("Invalid image format.") from e
|
146 |
+
icon_width, icon_height = icon.size
|
147 |
+
image.paste(
|
148 |
+
icon,
|
149 |
+
box=(
|
150 |
+
int(size / 2 - icon_width / 2),
|
151 |
+
int(size / 2 - icon_height / 2),
|
152 |
+
int(size / 2 + icon_width / 2),
|
153 |
+
int(size / 2 + icon_height / 2),
|
154 |
+
),
|
155 |
+
mask=icon if icon.mode == "RGBA" else None,
|
156 |
+
)
|
157 |
+
descriptor, path = TempFile.create(".png")
|
158 |
+
with fdopen(descriptor, "wb") as f:
|
159 |
+
image.save(f, format="PNG")
|
160 |
+
return path
|
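# Hypothetical usage sketch: generate a QR code PNG through the model above.
# QRInfo.new is async because it may download the optional logo first; the
# requested size must fall inside the configured min-size/max-size bounds.
import asyncio

from hibiapi.api.qrcode import QRCodeLevel, QRInfo


async def demo():
    info = await QRInfo.new("https://example.org", size=300, level=QRCodeLevel.HIGH)
    print(info.path)  # temporary PNG written by _generate()


asyncio.run(demo())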
hibiapi/api/sauce/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
# flake8:noqa:F401
|
2 |
+
from .api import DeduplicateType, HostUrl, SauceEndpoint, UploadFileIO
|
3 |
+
from .constants import SauceConstants
|
4 |
+
from .net import NetRequest
|
hibiapi/api/sauce/api.py
ADDED
@@ -0,0 +1,140 @@
1 |
+
import random
|
2 |
+
from enum import IntEnum
|
3 |
+
from io import BytesIO
|
4 |
+
from typing import Any, Optional, overload
|
5 |
+
|
6 |
+
from httpx import HTTPError
|
7 |
+
|
8 |
+
from hibiapi.api.sauce.constants import SauceConstants
|
9 |
+
from hibiapi.utils.decorators import enum_auto_doc
|
10 |
+
from hibiapi.utils.exceptions import ClientSideException
|
11 |
+
from hibiapi.utils.net import catch_network_error
|
12 |
+
from hibiapi.utils.routing import BaseEndpoint, BaseHostUrl
|
13 |
+
|
14 |
+
|
15 |
+
class UnavailableSourceException(ClientSideException):
|
16 |
+
code = 422
|
17 |
+
detail = "given image is not avaliable to fetch"
|
18 |
+
|
19 |
+
|
20 |
+
class ImageSourceOversizedException(UnavailableSourceException):
|
21 |
+
code = 413
|
22 |
+
detail = (
|
23 |
+
"given image size is rather than maximum limit "
|
24 |
+
f"{SauceConstants.IMAGE_MAXIMUM_SIZE} bytes"
|
25 |
+
)
|
26 |
+
|
27 |
+
|
28 |
+
class HostUrl(BaseHostUrl):
|
29 |
+
allowed_hosts = SauceConstants.IMAGE_ALLOWED_HOST
|
30 |
+
|
31 |
+
|
32 |
+
class UploadFileIO(BytesIO):
|
33 |
+
@classmethod
|
34 |
+
def __get_validators__(cls):
|
35 |
+
yield cls.validate
|
36 |
+
|
37 |
+
@classmethod
|
38 |
+
def validate(cls, v: Any) -> BytesIO:
|
39 |
+
if not isinstance(v, BytesIO):
|
40 |
+
raise ValueError(f"Expected UploadFile, received: {type(v)}")
|
41 |
+
return v
|
42 |
+
|
43 |
+
|
44 |
+
@enum_auto_doc
|
45 |
+
class DeduplicateType(IntEnum):
|
46 |
+
DISABLED = 0
|
47 |
+
"""no result deduplicating"""
|
48 |
+
IDENTIFIER = 1
|
49 |
+
"""consolidate search results and deduplicate by item identifier"""
|
50 |
+
ALL = 2
|
51 |
+
"""all implemented deduplicate methods such as by series name"""
|
52 |
+
|
53 |
+
|
54 |
+
class SauceEndpoint(BaseEndpoint, cache_endpoints=False):
|
55 |
+
base = "https://saucenao.com"
|
56 |
+
|
57 |
+
async def fetch(self, host: HostUrl) -> UploadFileIO:
|
58 |
+
try:
|
59 |
+
response = await self.client.get(
|
60 |
+
url=host,
|
61 |
+
headers=SauceConstants.IMAGE_HEADERS,
|
62 |
+
timeout=SauceConstants.IMAGE_TIMEOUT,
|
63 |
+
)
|
64 |
+
response.raise_for_status()
|
65 |
+
if len(response.content) > SauceConstants.IMAGE_MAXIMUM_SIZE:
|
66 |
+
raise ImageSourceOversizedException
|
67 |
+
return UploadFileIO(response.content)
|
68 |
+
except HTTPError as e:
|
69 |
+
raise UnavailableSourceException(detail=str(e)) from e
|
70 |
+
|
71 |
+
@catch_network_error
|
72 |
+
async def request(
|
73 |
+
self, *, file: UploadFileIO, params: dict[str, Any]
|
74 |
+
) -> dict[str, Any]:
|
75 |
+
response = await self.client.post(
|
76 |
+
url=self._join(
|
77 |
+
self.base,
|
78 |
+
"search.php",
|
79 |
+
params={
|
80 |
+
**params,
|
81 |
+
"api_key": random.choice(SauceConstants.API_KEY),
|
82 |
+
"output_type": 2,
|
83 |
+
},
|
84 |
+
),
|
85 |
+
files={"file": file},
|
86 |
+
)
|
87 |
+
if response.status_code >= 500:
|
88 |
+
response.raise_for_status()
|
89 |
+
return response.json()
|
90 |
+
|
91 |
+
@overload
|
92 |
+
async def search(
|
93 |
+
self,
|
94 |
+
*,
|
95 |
+
url: HostUrl,
|
96 |
+
size: int = 30,
|
97 |
+
deduplicate: DeduplicateType = DeduplicateType.ALL,
|
98 |
+
database: Optional[int] = None,
|
99 |
+
enabled_mask: Optional[int] = None,
|
100 |
+
disabled_mask: Optional[int] = None,
|
101 |
+
) -> dict[str, Any]:
|
102 |
+
...
|
103 |
+
|
104 |
+
@overload
|
105 |
+
async def search(
|
106 |
+
self,
|
107 |
+
*,
|
108 |
+
file: UploadFileIO,
|
109 |
+
size: int = 30,
|
110 |
+
deduplicate: DeduplicateType = DeduplicateType.ALL,
|
111 |
+
database: Optional[int] = None,
|
112 |
+
enabled_mask: Optional[int] = None,
|
113 |
+
disabled_mask: Optional[int] = None,
|
114 |
+
) -> dict[str, Any]:
|
115 |
+
...
|
116 |
+
|
117 |
+
async def search(
|
118 |
+
self,
|
119 |
+
*,
|
120 |
+
url: Optional[HostUrl] = None,
|
121 |
+
file: Optional[UploadFileIO] = None,
|
122 |
+
size: int = 30,
|
123 |
+
deduplicate: DeduplicateType = DeduplicateType.ALL,
|
124 |
+
database: Optional[int] = None,
|
125 |
+
enabled_mask: Optional[int] = None,
|
126 |
+
disabled_mask: Optional[int] = None,
|
127 |
+
):
|
128 |
+
if url is not None:
|
129 |
+
file = await self.fetch(url)
|
130 |
+
assert file is not None
|
131 |
+
return await self.request(
|
132 |
+
file=file,
|
133 |
+
params={
|
134 |
+
"dbmask": enabled_mask,
|
135 |
+
"dbmaski": disabled_mask,
|
136 |
+
"db": database,
|
137 |
+
"numres": size,
|
138 |
+
"dedupe": deduplicate,
|
139 |
+
},
|
140 |
+
)
|
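# For reference, the form that request() above submits to saucenao.com/search.php
# boils down to the following (values here are illustrative; the API key is a
# placeholder drawn from SauceConstants.API_KEY at runtime):
SAUCENAO_QUERY = {
    "db": None,          # single database index, if restricted
    "dbmask": None,      # bitmask of enabled databases
    "dbmaski": None,     # bitmask of disabled databases
    "numres": 30,        # the `size` argument
    "dedupe": 2,         # DeduplicateType.ALL
    "api_key": "<one of SauceConstants.API_KEY>",
    "output_type": 2,    # always request JSON
}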
hibiapi/api/sauce/constants.py
ADDED
@@ -0,0 +1,16 @@
1 |
+
from typing import Any
|
2 |
+
|
3 |
+
from hibiapi.utils.config import APIConfig
|
4 |
+
|
5 |
+
_Config = APIConfig("sauce")
|
6 |
+
|
7 |
+
|
8 |
+
class SauceConstants:
|
9 |
+
CONFIG: APIConfig = _Config
|
10 |
+
API_KEY: list[str] = _Config["net"]["api-key"].as_str_seq()
|
11 |
+
USER_AGENT: str = _Config["net"]["user-agent"].as_str()
|
12 |
+
PROXIES: dict[str, str] = _Config["proxy"].as_dict()
|
13 |
+
IMAGE_HEADERS: dict[str, Any] = _Config["image"]["headers"].as_dict()
|
14 |
+
IMAGE_ALLOWED_HOST: list[str] = _Config["image"]["allowed"].get(list[str])
|
15 |
+
IMAGE_MAXIMUM_SIZE: int = _Config["image"]["max-size"].as_number() * 1024
|
16 |
+
IMAGE_TIMEOUT: int = _Config["image"]["timeout"].as_number()
|
hibiapi/api/sauce/net.py
ADDED
@@ -0,0 +1,11 @@
1 |
+
from hibiapi.utils.net import BaseNetClient
|
2 |
+
|
3 |
+
from .constants import SauceConstants
|
4 |
+
|
5 |
+
|
6 |
+
class NetRequest(BaseNetClient):
|
7 |
+
def __init__(self):
|
8 |
+
super().__init__(
|
9 |
+
headers={"user-agent": SauceConstants.USER_AGENT},
|
10 |
+
proxies=SauceConstants.PROXIES,
|
11 |
+
)
|
hibiapi/api/tieba/__init__.py
ADDED
@@ -0,0 +1,3 @@
1 |
+
# flake8:noqa:F401
|
2 |
+
from .api import Config, TiebaEndpoint
|
3 |
+
from .net import NetRequest
|
hibiapi/api/tieba/api.py
ADDED
@@ -0,0 +1,142 @@
1 |
+
import hashlib
|
2 |
+
from enum import Enum
|
3 |
+
from random import randint
|
4 |
+
from typing import Any, Optional
|
5 |
+
|
6 |
+
from hibiapi.utils.config import APIConfig
|
7 |
+
from hibiapi.utils.net import catch_network_error
|
8 |
+
from hibiapi.utils.routing import BaseEndpoint, dont_route
|
9 |
+
|
10 |
+
Config = APIConfig("tieba")
|
11 |
+
|
12 |
+
|
13 |
+
class TiebaSignUtils:
|
14 |
+
salt = b"tiebaclient!!!"
|
15 |
+
|
16 |
+
@staticmethod
|
17 |
+
def random_digit(length: int) -> str:
|
18 |
+
return "".join(map(str, [randint(0, 9) for _ in range(length)]))
|
19 |
+
|
20 |
+
@staticmethod
|
21 |
+
def construct_content(params: dict[str, Any]) -> bytes:
|
22 |
+
# NOTE: this function is used to construct form content WITHOUT urlencode
|
23 |
+
# Don't ask me why this is necessary, ask Tieba's programmers instead
|
24 |
+
return b"&".join(
|
25 |
+
map(
|
26 |
+
lambda k, v: (
|
27 |
+
k.encode()
|
28 |
+
+ b"="
|
29 |
+
+ str(v.value if isinstance(v, Enum) else v).encode()
|
30 |
+
),
|
31 |
+
params.keys(),
|
32 |
+
params.values(),
|
33 |
+
)
|
34 |
+
)
|
35 |
+
|
36 |
+
@classmethod
|
37 |
+
def sign(cls, params: dict[str, Any]) -> bytes:
|
38 |
+
params.update(
|
39 |
+
{
|
40 |
+
"_client_id": (
|
41 |
+
"wappc_" + cls.random_digit(13) + "_" + cls.random_digit(3)
|
42 |
+
),
|
43 |
+
"_client_type": 2,
|
44 |
+
"_client_version": "9.9.8.32",
|
45 |
+
**{
|
46 |
+
k.upper(): str(v).strip()
|
47 |
+
for k, v in Config["net"]["params"].as_dict().items()
|
48 |
+
if v
|
49 |
+
},
|
50 |
+
}
|
51 |
+
)
|
52 |
+
params = {k: params[k] for k in sorted(params.keys())}
|
53 |
+
params["sign"] = (
|
54 |
+
hashlib.md5(cls.construct_content(params).replace(b"&", b"") + cls.salt)
|
55 |
+
.hexdigest()
|
56 |
+
.upper()
|
57 |
+
)
|
58 |
+
return cls.construct_content(params)
|
59 |
+
|
60 |
+
|
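# Illustrative helper (not in the original file): reproduce the signature the
# way TiebaSignUtils.sign does after sorting the fields — concatenate the
# "key=value" pairs without separators, append the salt, and take the uppercase
# MD5 hex digest.  This ignores the extra _client_* / config fields that sign()
# injects first, and assumes values contain no "&".
def _example_sign(params: dict[str, object]) -> str:
    ordered = {k: params[k] for k in sorted(params)}
    joined = "".join(f"{k}={v}" for k, v in ordered.items()).encode()
    return hashlib.md5(joined + TiebaSignUtils.salt).hexdigest().upper()
    # _example_sign({"kw": "python", "pn": 1, "rn": 50}) -> 32-char uppercase hex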
61 |
+
class TiebaEndpoint(BaseEndpoint):
|
62 |
+
base = "http://c.tieba.baidu.com"
|
63 |
+
|
64 |
+
@dont_route
|
65 |
+
@catch_network_error
|
66 |
+
async def request(
|
67 |
+
self, endpoint: str, *, params: Optional[dict[str, Any]] = None
|
68 |
+
) -> dict[str, Any]:
|
69 |
+
response = await self.client.post(
|
70 |
+
url=self._join(self.base, endpoint, {}),
|
71 |
+
content=TiebaSignUtils.sign(params or {}),
|
72 |
+
)
|
73 |
+
response.raise_for_status()
|
74 |
+
return response.json()
|
75 |
+
|
76 |
+
async def post_list(self, *, name: str, page: int = 1, size: int = 50):
|
77 |
+
return await self.request(
|
78 |
+
"c/f/frs/page",
|
79 |
+
params={
|
80 |
+
"kw": name,
|
81 |
+
"pn": page,
|
82 |
+
"rn": size,
|
83 |
+
},
|
84 |
+
)
|
85 |
+
|
86 |
+
async def post_detail(
|
87 |
+
self,
|
88 |
+
*,
|
89 |
+
tid: int,
|
90 |
+
page: int = 1,
|
91 |
+
size: int = 50,
|
92 |
+
reversed: bool = False,
|
93 |
+
):
|
94 |
+
return await self.request(
|
95 |
+
"c/f/pb/page",
|
96 |
+
params={
|
97 |
+
**({"last": 1, "r": 1} if reversed else {}),
|
98 |
+
"kz": tid,
|
99 |
+
"pn": page,
|
100 |
+
"rn": size,
|
101 |
+
},
|
102 |
+
)
|
103 |
+
|
104 |
+
async def subpost_detail(
|
105 |
+
self,
|
106 |
+
*,
|
107 |
+
tid: int,
|
108 |
+
pid: int,
|
109 |
+
page: int = 1,
|
110 |
+
size: int = 50,
|
111 |
+
):
|
112 |
+
return await self.request(
|
113 |
+
"c/f/pb/floor",
|
114 |
+
params={
|
115 |
+
"kz": tid,
|
116 |
+
"pid": pid,
|
117 |
+
"pn": page,
|
118 |
+
"rn": size,
|
119 |
+
},
|
120 |
+
)
|
121 |
+
|
122 |
+
async def user_profile(self, *, uid: int):
|
123 |
+
return await self.request(
|
124 |
+
"c/u/user/profile",
|
125 |
+
params={
|
126 |
+
"uid": uid,
|
127 |
+
"need_post_count": 1,
|
128 |
+
"has_plist": 1,
|
129 |
+
},
|
130 |
+
)
|
131 |
+
|
132 |
+
async def user_subscribed(
|
133 |
+
self, *, uid: int, page: int = 1
|
134 |
+
): # XXX This API required user login!
|
135 |
+
return await self.request(
|
136 |
+
"c/f/forum/like",
|
137 |
+
params={
|
138 |
+
"is_guest": 0,
|
139 |
+
"uid": uid,
|
140 |
+
"page_no": page,
|
141 |
+
},
|
142 |
+
)
|
hibiapi/api/tieba/net.py
ADDED
@@ -0,0 +1,5 @@
1 |
+
from hibiapi.utils.net import BaseNetClient
|
2 |
+
|
3 |
+
|
4 |
+
class NetRequest(BaseNetClient):
|
5 |
+
pass
|
hibiapi/api/wallpaper/__init__.py
ADDED
@@ -0,0 +1,3 @@
1 |
+
# flake8:noqa:F401
|
2 |
+
from .api import Config, WallpaperCategoryType, WallpaperEndpoint, WallpaperOrderType
|
3 |
+
from .net import NetRequest
|
hibiapi/api/wallpaper/api.py
ADDED
@@ -0,0 +1,125 @@
1 |
+
from datetime import timedelta
|
2 |
+
from enum import Enum
|
3 |
+
from typing import Any, Optional
|
4 |
+
|
5 |
+
from hibiapi.utils.cache import cache_config
|
6 |
+
from hibiapi.utils.config import APIConfig
|
7 |
+
from hibiapi.utils.decorators import enum_auto_doc
|
8 |
+
from hibiapi.utils.net import catch_network_error
|
9 |
+
from hibiapi.utils.routing import BaseEndpoint, dont_route
|
10 |
+
|
11 |
+
Config = APIConfig("wallpaper")
|
12 |
+
|
13 |
+
|
14 |
+
@enum_auto_doc
|
15 |
+
class WallpaperCategoryType(str, Enum):
    """Wallpaper category"""

    girl = "girl"
    """Girls"""
    animal = "animal"
    """Animals"""
    landscape = "landscape"
    """Nature"""
    anime = "anime"
    """Anime"""
    drawn = "drawn"
    """Hand-drawn"""
    mechanics = "mechanics"
    """Mechanical"""
    boy = "boy"
    """Boys"""
    game = "game"
    """Games"""
    text = "text"
    """Text"""
|
36 |
+
|
37 |
+
|
38 |
+
CATEGORY: dict[WallpaperCategoryType, str] = {
|
39 |
+
WallpaperCategoryType.girl: "4e4d610cdf714d2966000000",
|
40 |
+
WallpaperCategoryType.animal: "4e4d610cdf714d2966000001",
|
41 |
+
WallpaperCategoryType.landscape: "4e4d610cdf714d2966000002",
|
42 |
+
WallpaperCategoryType.anime: "4e4d610cdf714d2966000003",
|
43 |
+
WallpaperCategoryType.drawn: "4e4d610cdf714d2966000004",
|
44 |
+
WallpaperCategoryType.mechanics: "4e4d610cdf714d2966000005",
|
45 |
+
WallpaperCategoryType.boy: "4e4d610cdf714d2966000006",
|
46 |
+
WallpaperCategoryType.game: "4e4d610cdf714d2966000007",
|
47 |
+
WallpaperCategoryType.text: "5109e04e48d5b9364ae9ac45",
|
48 |
+
}
|
49 |
+
|
50 |
+
|
51 |
+
@enum_auto_doc
|
52 |
+
class WallpaperOrderType(str, Enum):
    """Wallpaper sort order"""

    hot = "hot"
    """Popular"""
    new = "new"
    """Newest"""
|
59 |
+
|
60 |
+
|
61 |
+
class WallpaperEndpoint(BaseEndpoint):
|
62 |
+
base = "http://service.aibizhi.adesk.com"
|
63 |
+
|
64 |
+
@dont_route
|
65 |
+
@catch_network_error
|
66 |
+
async def request(
|
67 |
+
self, endpoint: str, *, params: Optional[dict[str, Any]] = None
|
68 |
+
) -> dict[str, Any]:
|
69 |
+
|
70 |
+
response = await self.client.get(
|
71 |
+
self._join(
|
72 |
+
base=WallpaperEndpoint.base,
|
73 |
+
endpoint=endpoint,
|
74 |
+
params=params or {},
|
75 |
+
)
|
76 |
+
)
|
77 |
+
return response.json()
|
78 |
+
|
79 |
+
# Wallpaper URLs carry an anti-hotlinking token; long cache durations are not recommended
|
80 |
+
@cache_config(ttl=timedelta(hours=2))
|
81 |
+
async def wallpaper(
|
82 |
+
self,
|
83 |
+
*,
|
84 |
+
category: WallpaperCategoryType,
|
85 |
+
limit: int = 20,
|
86 |
+
skip: int = 0,
|
87 |
+
adult: bool = True,
|
88 |
+
order: WallpaperOrderType = WallpaperOrderType.hot,
|
89 |
+
):
|
90 |
+
|
91 |
+
return await self.request(
|
92 |
+
"v1/wallpaper/category/{category}/wallpaper",
|
93 |
+
params={
|
94 |
+
"limit": limit,
|
95 |
+
"skip": skip,
|
96 |
+
"adult": adult,
|
97 |
+
"order": order,
|
98 |
+
"first": 0,
|
99 |
+
"category": CATEGORY[category],
|
100 |
+
},
|
101 |
+
)
|
102 |
+
|
103 |
+
# Wallpaper URLs carry an anti-hotlinking token; long cache durations are not recommended
|
104 |
+
@cache_config(ttl=timedelta(hours=2))
|
105 |
+
async def vertical(
|
106 |
+
self,
|
107 |
+
*,
|
108 |
+
category: WallpaperCategoryType,
|
109 |
+
limit: int = 20,
|
110 |
+
skip: int = 0,
|
111 |
+
adult: bool = True,
|
112 |
+
order: WallpaperOrderType = WallpaperOrderType.hot,
|
113 |
+
):
|
114 |
+
|
115 |
+
return await self.request(
|
116 |
+
"v1/vertical/category/{category}/vertical",
|
117 |
+
params={
|
118 |
+
"limit": limit,
|
119 |
+
"skip": skip,
|
120 |
+
"adult": adult,
|
121 |
+
"order": order,
|
122 |
+
"first": 0,
|
123 |
+
"category": CATEGORY[category],
|
124 |
+
},
|
125 |
+
)
|
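# For reference, a wallpaper() call above resolves the human-readable category
# to adesk's internal id via CATEGORY before the request goes out; the path
# placeholder filling itself is handled by BaseEndpoint._join (assumed here).
from hibiapi.api.wallpaper.api import CATEGORY, WallpaperCategoryType

print(CATEGORY[WallpaperCategoryType.anime])  # "4e4d610cdf714d2966000003"
print(
    "v1/wallpaper/category/{category}/wallpaper".format(
        category=CATEGORY[WallpaperCategoryType.anime]
    )
)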
hibiapi/api/wallpaper/constants.py
ADDED
@@ -0,0 +1,8 @@
1 |
+
from hibiapi.utils.config import APIConfig
|
2 |
+
|
3 |
+
_CONFIG = APIConfig("wallpaper")
|
4 |
+
|
5 |
+
|
6 |
+
class WallpaperConstants:
|
7 |
+
CONFIG: APIConfig = _CONFIG
|
8 |
+
USER_AGENT: str = _CONFIG["net"]["user-agent"].as_str()
|
hibiapi/api/wallpaper/net.py
ADDED
@@ -0,0 +1,8 @@
1 |
+
from hibiapi.utils.net import BaseNetClient
|
2 |
+
|
3 |
+
from .constants import WallpaperConstants
|
4 |
+
|
5 |
+
|
6 |
+
class NetRequest(BaseNetClient):
|
7 |
+
def __init__(self):
|
8 |
+
super().__init__(headers={"user-agent": WallpaperConstants.USER_AGENT})
|
hibiapi/app/__init__.py
ADDED
@@ -0,0 +1,4 @@
1 |
+
# flake8:noqa:F401
|
2 |
+
from . import application, handlers, middlewares
|
3 |
+
|
4 |
+
app = application.app
|
hibiapi/app/application.py
ADDED
@@ -0,0 +1,170 @@
1 |
+
import asyncio
|
2 |
+
import re
|
3 |
+
from contextlib import asynccontextmanager
|
4 |
+
from ipaddress import ip_address
|
5 |
+
from secrets import compare_digest
|
6 |
+
from typing import Annotated
|
7 |
+
|
8 |
+
import sentry_sdk
|
9 |
+
from fastapi import Depends, FastAPI, Request, Response
|
10 |
+
from fastapi.responses import RedirectResponse
|
11 |
+
from fastapi.security import HTTPBasic, HTTPBasicCredentials
|
12 |
+
from fastapi.staticfiles import StaticFiles
|
13 |
+
from pydantic import BaseModel
|
14 |
+
from sentry_sdk.integrations.logging import LoggingIntegration
|
15 |
+
|
16 |
+
from hibiapi import __version__
|
17 |
+
from hibiapi.app.routes import router as ImplRouter
|
18 |
+
from hibiapi.utils.cache import cache
|
19 |
+
from hibiapi.utils.config import Config
|
20 |
+
from hibiapi.utils.exceptions import ClientSideException, RateLimitReachedException
|
21 |
+
from hibiapi.utils.log import logger
|
22 |
+
from hibiapi.utils.net import BaseNetClient
|
23 |
+
from hibiapi.utils.temp import TempFile
|
24 |
+
|
25 |
+
DESCRIPTION = (
|
26 |
+
"""
|
27 |
+
**A program that implements easy-to-use APIs for a variety of commonly used sites**
|
28 |
+
|
29 |
+
- *Documents*:
|
30 |
+
- [Redoc](/docs) (Easier to read and more beautiful)
|
31 |
+
- [Swagger UI](/docs/test) (Integrated interactive testing function)
|
32 |
+
|
33 |
+
Project: [mixmoe/HibiAPI](https://github.com/mixmoe/HibiAPI)
|
34 |
+
|
35 |
+
"""
|
36 |
+
+ Config["content"]["slogan"].as_str().strip()
|
37 |
+
).strip()
|
38 |
+
|
39 |
+
|
40 |
+
if Config["log"]["sentry"]["enabled"].as_bool():
|
41 |
+
sentry_sdk.init(
|
42 |
+
dsn=Config["log"]["sentry"]["dsn"].as_str(),
|
43 |
+
send_default_pii=Config["log"]["sentry"]["pii"].as_bool(),
|
44 |
+
integrations=[LoggingIntegration(level=None, event_level=None)],
|
45 |
+
traces_sample_rate=Config["log"]["sentry"]["sample"].get(float),
|
46 |
+
)
|
47 |
+
else:
|
48 |
+
sentry_sdk.init()
|
49 |
+
|
50 |
+
|
51 |
+
class AuthorizationModel(BaseModel):
|
52 |
+
username: str
|
53 |
+
password: str
|
54 |
+
|
55 |
+
|
56 |
+
AUTHORIZATION_ENABLED = Config["authorization"]["enabled"].as_bool()
|
57 |
+
AUTHORIZATION_ALLOWED = Config["authorization"]["allowed"].get(list[AuthorizationModel])
|
58 |
+
|
59 |
+
security = HTTPBasic()
|
60 |
+
|
61 |
+
|
62 |
+
async def basic_authorization_depend(
|
63 |
+
credentials: Annotated[HTTPBasicCredentials, Depends(security)],
|
64 |
+
):
|
65 |
+
# NOTE: We use `compare_digest` to avoid timing attacks.
|
66 |
+
# Ref: https://fastapi.tiangolo.com/advanced/security/http-basic-auth/
|
67 |
+
for allowed in AUTHORIZATION_ALLOWED:
|
68 |
+
if compare_digest(credentials.username, allowed.username) and compare_digest(
|
69 |
+
credentials.password, allowed.password
|
70 |
+
):
|
71 |
+
return credentials.username, credentials.password
|
72 |
+
raise ClientSideException(
|
73 |
+
f"Invalid credentials for user {credentials.username!r}",
|
74 |
+
status_code=401,
|
75 |
+
headers={"WWW-Authenticate": "Basic"},
|
76 |
+
)
|
77 |
+
|
78 |
+
|
79 |
+
RATE_LIMIT_ENABLED = Config["limit"]["enabled"].as_bool()
|
80 |
+
RATE_LIMIT_MAX = Config["limit"]["max"].as_number()
|
81 |
+
RATE_LIMIT_INTERVAL = Config["limit"]["interval"].as_number()
|
82 |
+
|
83 |
+
|
84 |
+
async def rate_limit_depend(request: Request):
|
85 |
+
if not request.client:
|
86 |
+
return
|
87 |
+
|
88 |
+
try:
|
89 |
+
client_ip = ip_address(request.client.host)
|
90 |
+
client_ip_hex = client_ip.packed.hex()
|
91 |
+
limit_key = f"rate_limit:IPv{client_ip.version}-{client_ip_hex:x}"
|
92 |
+
except ValueError:
|
93 |
+
limit_key = f"rate_limit:fallback-{request.client.host}"
|
94 |
+
|
95 |
+
request_count = await cache.incr(limit_key)
|
96 |
+
if request_count <= 1:
|
97 |
+
await cache.expire(limit_key, timeout=RATE_LIMIT_INTERVAL)
|
98 |
+
elif request_count > RATE_LIMIT_MAX:
|
99 |
+
limit_remain: int = await cache.get_expire(limit_key)
|
100 |
+
raise RateLimitReachedException(headers={"Retry-After": limit_remain})
|
101 |
+
|
102 |
+
return
|
103 |
+
|
104 |
+
|
105 |
+
async def flush_sentry():
|
106 |
+
client = sentry_sdk.Hub.current.client
|
107 |
+
if client is not None:
|
108 |
+
client.close()
|
109 |
+
sentry_sdk.flush()
|
110 |
+
logger.debug("Sentry client has been closed")
|
111 |
+
|
112 |
+
|
113 |
+
async def cleanup_clients():
|
114 |
+
opened_clients = [
|
115 |
+
client for client in BaseNetClient.clients if not client.is_closed
|
116 |
+
]
|
117 |
+
if opened_clients:
|
118 |
+
await asyncio.gather(
|
119 |
+
*map(lambda client: client.aclose(), opened_clients),
|
120 |
+
return_exceptions=True,
|
121 |
+
)
|
122 |
+
logger.debug(f"Cleaned <r>{len(opened_clients)}</r> unclosed HTTP clients")
|
123 |
+
|
124 |
+
|
125 |
+
@asynccontextmanager
|
126 |
+
async def fastapi_lifespan(app: FastAPI):
|
127 |
+
yield
|
128 |
+
await asyncio.gather(cleanup_clients(), flush_sentry())
|
129 |
+
|
130 |
+
|
131 |
+
app = FastAPI(
|
132 |
+
title="HibiAPI",
|
133 |
+
version=__version__,
|
134 |
+
description=DESCRIPTION,
|
135 |
+
docs_url="/docs/test",
|
136 |
+
redoc_url="/docs",
|
137 |
+
lifespan=fastapi_lifespan,
|
138 |
+
)
|
139 |
+
app.include_router(
|
140 |
+
ImplRouter,
|
141 |
+
prefix="/api",
|
142 |
+
dependencies=(
|
143 |
+
([Depends(basic_authorization_depend)] if AUTHORIZATION_ENABLED else [])
|
144 |
+
+ ([Depends(rate_limit_depend)] if RATE_LIMIT_ENABLED else [])
|
145 |
+
),
|
146 |
+
)
|
147 |
+
app.mount("/temp", StaticFiles(directory=TempFile.path, check_dir=False))
|
148 |
+
|
149 |
+
|
150 |
+
@app.get("/", include_in_schema=False)
|
151 |
+
async def redirect():
|
152 |
+
return Response(status_code=302, headers={"Location": "/docs"})
|
153 |
+
|
154 |
+
|
155 |
+
@app.get("/robots.txt", include_in_schema=False)
|
156 |
+
async def robots():
|
157 |
+
content = Config["content"]["robots"].as_str().strip()
|
158 |
+
return Response(content, status_code=200)
|
159 |
+
|
160 |
+
|
161 |
+
@app.middleware("http")
|
162 |
+
async def redirect_workaround_middleware(request: Request, call_next):
|
163 |
+
"""Temporary redirection workaround for #12"""
|
164 |
+
if matched := re.match(
|
165 |
+
r"^/(qrcode|pixiv|netease|bilibili)/(\w*)$", request.url.path
|
166 |
+
):
|
167 |
+
service, path = matched.groups()
|
168 |
+
redirect_url = request.url.replace(path=f"/api/{service}/{path}")
|
169 |
+
return RedirectResponse(redirect_url, status_code=301)
|
170 |
+
return await call_next(request)
|
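From a client's perspective, the two optional dependencies above surface as HTTP Basic credentials and a Retry-After header once the limit is hit. A rough sketch, assuming a local instance, placeholder credentials, an arbitrary route, and a 429 mapping for the rate-limit exception:

# Illustrative sketch: URL, credentials and the 429 status mapping are assumptions.
import httpx

resp = httpx.get(
    "http://localhost:8080/api/qrcode/",
    params={"text": "ping", "encode": "json"},
    auth=("username", "password"),  # validated by basic_authorization_depend
)
if resp.status_code == 429:  # assuming RateLimitReachedException maps to 429
    print("rate limited, retry after", resp.headers.get("Retry-After"), "seconds")
else:
    print(resp.status_code)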
hibiapi/app/handlers.py
ADDED
@@ -0,0 +1,59 @@
from fastapi import Request, Response
from fastapi.exceptions import HTTPException as FastAPIHTTPException
from fastapi.exceptions import RequestValidationError as FastAPIValidationError
from pydantic.error_wrappers import ValidationError as PydanticValidationError
from starlette.exceptions import HTTPException as StarletteHTTPException

from hibiapi.utils import exceptions
from hibiapi.utils.log import logger

from .application import app


@app.exception_handler(exceptions.BaseServerException)
async def exception_handler(
    request: Request,
    exc: exceptions.BaseServerException,
) -> Response:
    if isinstance(exc, exceptions.UncaughtException):
        logger.opt(exception=exc).exception(f"Uncaught exception raised {exc.data=}:")

    exc.data.url = str(request.url)  # type:ignore
    return Response(
        content=exc.data.json(),
        status_code=exc.data.code,
        headers=exc.data.headers,
        media_type="application/json",
    )


@app.exception_handler(StarletteHTTPException)
async def override_handler(
    request: Request,
    exc: StarletteHTTPException,
):
    return await exception_handler(
        request,
        exceptions.BaseHTTPException(
            exc.detail,
            code=exc.status_code,
            headers={} if not isinstance(exc, FastAPIHTTPException) else exc.headers,
        ),
    )


@app.exception_handler(AssertionError)
async def assertion_handler(request: Request, exc: AssertionError):
    return await exception_handler(
        request,
        exceptions.ClientSideException(detail=f"Assertion: {exc}"),
    )


@app.exception_handler(FastAPIValidationError)
@app.exception_handler(PydanticValidationError)
async def validation_handler(request: Request, exc: PydanticValidationError):
    return await exception_handler(
        request,
        exceptions.ValidationException(detail=str(exc), validation=exc.errors()),
    )
hibiapi/app/middlewares.py
ADDED
@@ -0,0 +1,97 @@
from collections.abc import Awaitable
from datetime import datetime
from typing import Callable

from fastapi import Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.middleware.trustedhost import TrustedHostMiddleware
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
from sentry_sdk.integrations.httpx import HttpxIntegration
from starlette.datastructures import MutableHeaders

from hibiapi.utils.config import Config
from hibiapi.utils.exceptions import BaseServerException, UncaughtException
from hibiapi.utils.log import LoguruHandler, logger
from hibiapi.utils.routing import request_headers, response_headers

from .application import app
from .handlers import exception_handler

RequestHandler = Callable[[Request], Awaitable[Response]]


if Config["server"]["gzip"].as_bool():
    app.add_middleware(GZipMiddleware)
app.add_middleware(
    CORSMiddleware,
    allow_origins=Config["server"]["cors"]["origins"].get(list[str]),
    allow_credentials=Config["server"]["cors"]["credentials"].as_bool(),
    allow_methods=Config["server"]["cors"]["methods"].get(list[str]),
    allow_headers=Config["server"]["cors"]["headers"].get(list[str]),
)
app.add_middleware(
    TrustedHostMiddleware,
    allowed_hosts=Config["server"]["allowed"].get(list[str]),
)
app.add_middleware(SentryAsgiMiddleware)

HttpxIntegration.setup_once()


@app.middleware("http")
async def request_logger(request: Request, call_next: RequestHandler) -> Response:
    start_time = datetime.now()
    host, port = request.client or (None, None)
    response = await call_next(request)
    process_time = (datetime.now() - start_time).total_seconds() * 1000
    response_headers.get().setdefault("X-Process-Time", f"{process_time:.3f}")
    bg, fg = (
        ("green", "red")
        if response.status_code < 400
        else ("yellow", "blue")
        if response.status_code < 500
        else ("red", "green")
    )
    status_code, method = response.status_code, request.method.upper()
    user_agent = (
        LoguruHandler.escape_tag(request.headers["user-agent"])
        if "user-agent" in request.headers
        else "<d>Unknown</d>"
    )
    logger.info(
        f"<m><b>{host}</b>:{port}</m>"
        f" | <{bg.upper()}><b><{fg}>{method}</{fg}></b></{bg.upper()}>"
        f" | <n><b>{str(request.url)!r}</b></n>"
        f" | <c>{process_time:.3f}ms</c>"
        f" | <e>{user_agent}</e>"
        f" | <b><{bg}>{status_code}</{bg}></b>"
    )
    return response


@app.middleware("http")
async def contextvar_setter(request: Request, call_next: RequestHandler):
    request_headers.set(request.headers)
    response_headers.set(MutableHeaders())
    response = await call_next(request)
    response.headers.update({**response_headers.get()})
    return response


@app.middleware("http")
async def uncaught_exception_handler(
    request: Request, call_next: RequestHandler
) -> Response:
    try:
        response = await call_next(request)
    except Exception as error:
        response = await exception_handler(
            request,
            exc=(
                error
                if isinstance(error, BaseServerException)
                else UncaughtException.with_exception(error)
            ),
        )
    return response
hibiapi/app/routes/__init__.py
ADDED
@@ -0,0 +1,52 @@
from typing import Protocol, cast

from hibiapi.app.routes import (
    bika,
    bilibili,
    netease,
    pixiv,
    qrcode,
    sauce,
    tieba,
    wallpaper,
)
from hibiapi.utils.config import APIConfig
from hibiapi.utils.exceptions import ExceptionReturn
from hibiapi.utils.log import logger
from hibiapi.utils.routing import SlashRouter

router = SlashRouter(
    responses={
        code: {
            "model": ExceptionReturn,
        }
        for code in (400, 422, 500, 502)
    }
)


class RouteInterface(Protocol):
    router: SlashRouter
    __mount__: str
    __config__: APIConfig


modules = cast(
    list[RouteInterface],
    [bilibili, netease, pixiv, qrcode, sauce, tieba, wallpaper, bika],
)

for module in modules:
    mount = (
        mount_point
        if (mount_point := module.__mount__).startswith("/")
        else f"/{mount_point}"
    )

    if not module.__config__["enabled"].as_bool():
        logger.warning(
            f"API Route <y><b>{mount}</b></y> has been "
            "<r><b>disabled</b></r> in config."
        )
        continue
    router.include_router(module.router, prefix=mount)
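Every module listed above is expected to satisfy the RouteInterface protocol, i.e. to expose router, __mount__ and __config__ at module level. A hypothetical minimal module following that contract might look like the sketch below; the "example" name and its config section are placeholders, not files in this commit.

# Hypothetical route module sketch satisfying RouteInterface; the "example"
# name and its config section are assumptions for illustration only.
from hibiapi.utils.config import APIConfig
from hibiapi.utils.routing import SlashRouter

__mount__ = "example"              # mounted under /api/example
__config__ = APIConfig("example")  # must provide an "enabled" switch

router = SlashRouter(tags=["Example"])


@router.get("/ping")
async def ping():
    return {"pong": True}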
hibiapi/app/routes/bika.py
ADDED
@@ -0,0 +1,36 @@
from typing import Annotated

from fastapi import Depends, Header

from hibiapi.api.bika import (
    BikaConstants,
    BikaEndpoints,
    BikaLogin,
    ImageQuality,
    NetRequest,
)
from hibiapi.utils.log import logger
from hibiapi.utils.routing import EndpointRouter

try:
    BikaConstants.CONFIG["account"].get(BikaLogin)
except Exception as e:
    logger.warning(f"Bika account misconfigured: {e}")
    BikaConstants.CONFIG["enabled"].set(False)


async def x_image_quality(
    x_image_quality: Annotated[ImageQuality, Header()] = ImageQuality.medium,
):
    if x_image_quality is None:
        return BikaConstants.CONFIG["image_quality"].get(ImageQuality)
    return x_image_quality


__mount__, __config__ = "bika", BikaConstants.CONFIG
router = EndpointRouter(tags=["Bika"], dependencies=[Depends(x_image_quality)])

BikaAPIRoot = NetRequest()


router.include_endpoint(BikaEndpoints, BikaAPIRoot)
hibiapi/app/routes/bilibili/__init__.py
ADDED
@@ -0,0 +1,10 @@
from hibiapi.api.bilibili import BilibiliConstants
from hibiapi.app.routes.bilibili.v2 import router as RouterV2
from hibiapi.app.routes.bilibili.v3 import router as RouterV3
from hibiapi.utils.routing import SlashRouter

__mount__, __config__ = "bilibili", BilibiliConstants.CONFIG

router = SlashRouter()
router.include_router(RouterV2, prefix="/v2")
router.include_router(RouterV3, prefix="/v3")
hibiapi/app/routes/bilibili/v2.py
ADDED
@@ -0,0 +1,6 @@
from hibiapi.api.bilibili.api import BilibiliEndpointV2
from hibiapi.api.bilibili.net import NetRequest
from hibiapi.utils.routing import EndpointRouter

router = EndpointRouter(tags=["Bilibili V2"])
router.include_endpoint(BilibiliEndpointV2, NetRequest())
hibiapi/app/routes/bilibili/v3.py
ADDED
@@ -0,0 +1,5 @@
from hibiapi.api.bilibili import BilibiliEndpointV3, NetRequest
from hibiapi.utils.routing import EndpointRouter

router = EndpointRouter(tags=["Bilibili V3"])
router.include_endpoint(BilibiliEndpointV3, NetRequest())
hibiapi/app/routes/netease.py
ADDED
@@ -0,0 +1,7 @@
from hibiapi.api.netease import NeteaseConstants, NeteaseEndpoint, NetRequest
from hibiapi.utils.routing import EndpointRouter

__mount__, __config__ = "netease", NeteaseConstants.CONFIG

router = EndpointRouter(tags=["Netease"])
router.include_endpoint(NeteaseEndpoint, NetRequest())
hibiapi/app/routes/pixiv.py
ADDED
@@ -0,0 +1,26 @@
from typing import Optional

from fastapi import Depends, Header

from hibiapi.api.pixiv import NetRequest, PixivConstants, PixivEndpoints
from hibiapi.utils.log import logger
from hibiapi.utils.routing import EndpointRouter

if not (refresh_tokens := PixivConstants.CONFIG["account"]["token"].as_str_seq()):
    logger.warning("Pixiv API token is not set, pixiv endpoint will be unavailable.")
    PixivConstants.CONFIG["enabled"].set(False)


async def accept_language(
    accept_language: Optional[str] = Header(
        None,
        description="Accepted tag translation language",
    )
):
    return accept_language


__mount__, __config__ = "pixiv", PixivConstants.CONFIG

router = EndpointRouter(tags=["Pixiv"], dependencies=[Depends(accept_language)])
router.include_endpoint(PixivEndpoints, api_root := NetRequest(refresh_tokens))
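The accept_language dependency above only forwards the Accept-Language header, which the Pixiv endpoints use for tag translation. A brief client sketch; the host, port, concrete route path and illust id are assumptions:

# Illustrative sketch: request translated tags via the Accept-Language header.
# Host, route path and the illust id are placeholder assumptions.
import httpx

resp = httpx.get(
    "http://localhost:8080/api/pixiv/illust",
    params={"id": 1234567},
    headers={"Accept-Language": "en-US"},
)
print(resp.status_code)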
hibiapi/app/routes/qrcode.py
ADDED
@@ -0,0 +1,76 @@
from typing import Optional

from fastapi import Request, Response
from pydantic.color import Color

from hibiapi.api.qrcode import (
    COLOR_BLACK,
    COLOR_WHITE,
    Config,
    HostUrl,
    QRCodeLevel,
    QRInfo,
    ReturnEncode,
)
from hibiapi.utils.routing import SlashRouter
from hibiapi.utils.temp import TempFile

QR_CALLBACK_TEMPLATE = (
    r"""function {fun}(){document.write('<img class="qrcode" src="{url}"/>');}"""
)

__mount__, __config__ = "qrcode", Config
router = SlashRouter(tags=["QRCode"])


@router.get(
    "/",
    responses={
        200: {
            "content": {"image/png": {}, "text/javascript": {}, "application/json": {}},
            "description": "Available to return a javascript, image or json.",
        }
    },
    response_model=QRInfo,
)
async def qrcode_api(
    request: Request,
    *,
    text: str,
    size: int = 200,
    logo: Optional[HostUrl] = None,
    encode: ReturnEncode = ReturnEncode.raw,
    level: QRCodeLevel = QRCodeLevel.MEDIUM,
    bgcolor: Color = COLOR_BLACK,
    fgcolor: Color = COLOR_WHITE,
    fun: str = "qrcode",
):
    qr = await QRInfo.new(
        text, size=size, logo=logo, level=level, bgcolor=bgcolor, fgcolor=fgcolor
    )
    qr.url = TempFile.to_url(request, qr.path)  # type:ignore
    return (
        qr
        if encode == ReturnEncode.json
        else Response(
            content=qr.json(),
            media_type="application/json",
            headers={"Location": qr.url},
            status_code=302,
        )
        if encode == ReturnEncode.raw
        else Response(
            content=f"{fun}({qr.json()})",
            media_type="text/javascript",
        )
        if encode == ReturnEncode.jsc
        else Response(
            content="function "
            + fun
            + '''(){document.write('<img class="qrcode" src="'''
            + qr.url
            + """"/>');}""",
            media_type="text/javascript",
        )
    )
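The encode parameter above selects between a JSON body, a 302 redirect to the rendered image, and two JavaScript callback formats. A client-side sketch of the first two variants; the host and port are assumptions:

# Illustrative sketch of the qrcode route's encode variants; host is an assumption.
import httpx

base = "http://localhost:8080/api/qrcode/"

# encode=json returns the QRInfo model as an application/json body.
info = httpx.get(base, params={"text": "HibiAPI", "encode": "json"}).json()

# encode=raw (the default) answers with a 302 redirect to the rendered image.
raw = httpx.get(base, params={"text": "HibiAPI"}, follow_redirects=False)

print(info.get("url"), raw.headers.get("Location"))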
hibiapi/app/routes/sauce.py
ADDED
@@ -0,0 +1,120 @@
from typing import Annotated, Optional

from fastapi import Depends, File, Form
from loguru import logger

from hibiapi.api.sauce import (
    DeduplicateType,
    HostUrl,
    NetRequest,
    SauceConstants,
    SauceEndpoint,
    UploadFileIO,
)
from hibiapi.utils.routing import SlashRouter

if (not SauceConstants.API_KEY) or (not all(map(str.strip, SauceConstants.API_KEY))):
    logger.warning("Sauce API key not set, SauceNAO endpoint will be unavailable")
    SauceConstants.CONFIG["enabled"].set(False)

__mount__, __config__ = "sauce", SauceConstants.CONFIG
router = SlashRouter(tags=["SauceNAO"])

SauceAPIRoot = NetRequest()


async def request_client():
    async with SauceAPIRoot as client:
        yield SauceEndpoint(client)


@router.get("/")
async def sauce_url(
    endpoint: Annotated[SauceEndpoint, Depends(request_client)],
    url: HostUrl,
    size: int = 30,
    deduplicate: DeduplicateType = DeduplicateType.ALL,
    database: Optional[int] = None,
    enabled_mask: Optional[int] = None,
    disabled_mask: Optional[int] = None,
):
    """
    ## Name: `sauce_url`

    > Search for a web image with SauceNAO

    ---

    ### Required:

    - ***HostUrl*** **`url`**
        - Description: Image URL

    ---

    ### Optional:
    - ***int*** `size` = `30`
        - Description: Number of search results
    - ***DeduplicateType*** `deduplicate` = `DeduplicateType.ALL`
        - Description: Result deduplication mode
    - ***Optional[int]*** `database` = `None`
        - Description: ID of the database to search, 999 searches all databases
    - ***Optional[int]*** `enabled_mask` = `None`
        - Description: Enabled search databases
    - ***Optional[int]*** `disabled_mask` = `None`
        - Description: Disabled search databases
    """
    return await endpoint.search(
        url=url,
        size=size,
        deduplicate=deduplicate,
        database=database,
        enabled_mask=enabled_mask,
        disabled_mask=disabled_mask,
    )


@router.post("/")
async def sauce_form(
    endpoint: Annotated[SauceEndpoint, Depends(request_client)],
    file: bytes = File(..., max_length=SauceConstants.IMAGE_MAXIMUM_SIZE),
    size: int = Form(30),
    deduplicate: Annotated[DeduplicateType, Form()] = DeduplicateType.ALL,
    database: Optional[int] = Form(None),
    enabled_mask: Optional[int] = Form(None),
    disabled_mask: Optional[int] = Form(None),
):
    """
    ## Name: `sauce_form`

    > Search for an image uploaded via form with SauceNAO

    ---

    ### Required:
    - ***bytes*** `file`
        - Description: The uploaded image

    ---

    ### Optional:
    - ***int*** `size` = `30`
        - Description: Number of search results
    - ***DeduplicateType*** `deduplicate` = `DeduplicateType.ALL`
        - Description: Result deduplication mode
    - ***Optional[int]*** `database` = `None`
        - Description: ID of the database to search, 999 searches all databases
    - ***Optional[int]*** `enabled_mask` = `None`
        - Description: Enabled search databases
    - ***Optional[int]*** `disabled_mask` = `None`
        - Description: Disabled search databases

    """
    return await endpoint.search(
        file=UploadFileIO(file),
        size=size,
        deduplicate=deduplicate,
        database=database,
        disabled_mask=disabled_mask,
        enabled_mask=enabled_mask,
    )
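For reference, the form endpoint above can be exercised with a multipart upload; a rough sketch in which the host, port and sample file are assumptions:

# Illustrative sketch: upload a local image to the SauceNAO form endpoint.
# Host, port and the file path are placeholder assumptions.
import httpx

with open("sample.jpg", "rb") as image:
    resp = httpx.post(
        "http://localhost:8080/api/sauce/",
        files={"file": image},
        data={"size": 10},
    )
print(resp.json())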