diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..53ae6d20ef0bfb6dbb2cd35540309d9d177f5ff6
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,18 @@
+FROM python:bullseye
+
+EXPOSE 7860
+
+ENV PORT=7860 \
+ PROCS=1 \
+ GENERAL_SERVER_HOST=0.0.0.0
+
+COPY . /hibi
+
+WORKDIR /hibi
+
+RUN pip install .
+
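+# Shell-form CMD so that $PORT and $PROCS are expanded by the shell at container start.
+# Example (illustrative): docker run -p 7860:7860 <image-tag>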
+CMD hibiapi run --port $PORT --workers $PROCS
+
+HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
+ CMD httpx --verbose --follow-redirects http://127.0.0.1:${PORT}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..54273b66850af8a4079de8da86b9693c4caad847
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2020-2021 Mix Technology
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..f9d675f72e45272d25ae278223834a73670f597f
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,41 @@
+version: "3.9"
+
+volumes:
+ hibi_redis: {}
+
+networks:
+ hibi_net: {}
+
+services:
+ redis:
+ image: redis:alpine
+ container_name: hibi_redis
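+    # The API container waits for this check to pass (see depends_on: service_healthy below).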
+ healthcheck:
+ test: ["CMD-SHELL", "redis-cli ping"]
+ interval: 10s
+ timeout: 5s
+ retries: 5
+ networks:
+ - hibi_net
+ volumes:
+ - hibi_redis:/data
+ expose: [6379]
+
+ api:
+ container_name: hibiapi
+ build:
+ dockerfile: Dockerfile
+ context: .
+ restart: on-failure
+ networks:
+ - hibi_net
+ depends_on:
+ redis:
+ condition: service_healthy
+ ports:
+ - "8080:8080"
+ environment:
+ PORT: "8080"
+ FORWARDED_ALLOW_IPS: "*"
+ GENERAL_CACHE_URI: "redis://redis:6379"
+ GENERAL_SERVER_HOST: "0.0.0.0"
diff --git a/hibiapi/__init__.py b/hibiapi/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3703157bfb8ae1debc7b54e641f85a651097cc32
--- /dev/null
+++ b/hibiapi/__init__.py
@@ -0,0 +1,15 @@
+r"""
+ _ _ _ _ _ _____ _____
+ | | | (_) | (_) /\ | __ \_ _|
+ | |__| |_| |__ _ / \ | |__) || |
+ | __ | | '_ \| | / /\ \ | ___/ | |
+ | | | | | |_) | |/ ____ \| | _| |_
+ |_| |_|_|_.__/|_/_/ \_\_| |_____|
+
+A program that implements easy-to-use APIs for a variety of commonly used sites
+Repository: https://github.com/mixmoe/HibiAPI
+""" # noqa:W291,W293
+
+from importlib.metadata import version
+
+__version__ = version("hibiapi")
diff --git a/hibiapi/__main__.py b/hibiapi/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..79bd92abc842e2028de7729b735d1d795a371578
--- /dev/null
+++ b/hibiapi/__main__.py
@@ -0,0 +1,118 @@
+import os
+from pathlib import Path
+
+import typer
+import uvicorn
+
+from hibiapi import __file__ as root_file
+from hibiapi import __version__
+from hibiapi.utils.config import CONFIG_DIR, DEFAULT_DIR, Config
+from hibiapi.utils.log import LOG_LEVEL, logger
+
+COPYRIGHT = r"""
+
+ _ _ _ _ _ _____ _____
+ | | | (_) | (_) /\ | __ \_ _|
+ | |__| |_| |__ _ / \ | |__) || |
+ | __ | | '_ \| | / /\ \ | ___/ | |
+ | | | | | |_) | |/ ____ \| | _| |_
+ |_| |_|_|_.__/|_/_/ \_\_| |_____|
+
+A program that implements easy-to-use APIs for a variety of commonly used sites
+Repository: https://github.com/mixmoe/HibiAPI
+""".strip() # noqa:W291
+
+
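+# Route uvicorn's own loggers through the project's Loguru handler so all server
+# output shares one log format and level (LOG_LEVEL).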
+LOG_CONFIG = {
+ "version": 1,
+ "disable_existing_loggers": False,
+ "handlers": {
+ "default": {
+ "class": "hibiapi.utils.log.LoguruHandler",
+ },
+ },
+ "loggers": {
+ "uvicorn.error": {
+ "handlers": ["default"],
+ "level": LOG_LEVEL,
+ },
+ "uvicorn.access": {
+ "handlers": ["default"],
+ "level": LOG_LEVEL,
+ },
+ },
+}
+
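+# Extra uvicorn options used only with --reload: watch both the package source
+# tree and the configuration directory for *.py / *.yml changes.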
+RELOAD_CONFIG = {
+ "reload": True,
+ "reload_dirs": [
+ *map(str, [Path(root_file).parent.absolute(), CONFIG_DIR.absolute()])
+ ],
+ "reload_includes": ["*.py", "*.yml"],
+}
+
+
+cli = typer.Typer()
+
+
+@cli.callback(invoke_without_command=True)
+@cli.command()
+def run(
+ ctx: typer.Context,
+ host: str = Config["server"]["host"].as_str(),
+ port: int = Config["server"]["port"].as_number(),
+ workers: int = 1,
+ reload: bool = False,
+):
+ if ctx.invoked_subcommand is not None:
+ return
+
+ if ctx.info_name != (func_name := run.__name__):
+ logger.warning(
+ f"Directly usage of command {ctx.info_name} is deprecated, "
+ f"please use {ctx.info_name} {func_name} instead."
+ )
+
+ try:
+ terminal_width, _ = os.get_terminal_size()
+ except OSError:
+ terminal_width = 0
+ logger.warning(
+ "\n".join(i.center(terminal_width) for i in COPYRIGHT.splitlines()),
+ )
+ logger.info(f"HibiAPI version: {__version__}")
+
+ uvicorn.run(
+ "hibiapi.app:app",
+ host=host,
+ port=port,
+ access_log=False,
+ log_config=LOG_CONFIG,
+ workers=workers,
+ forwarded_allow_ips=Config["server"]["allowed-forward"].get_optional(str),
+ **(RELOAD_CONFIG if reload else {}),
+ )
+
+
+@cli.command()
+def config(force: bool = False):
+ total_written = 0
+ CONFIG_DIR.mkdir(parents=True, exist_ok=True)
+ for file in os.listdir(DEFAULT_DIR):
+ default_path = DEFAULT_DIR / file
+ config_path = CONFIG_DIR / file
+ if not (existed := config_path.is_file()) or force:
+ total_written += config_path.write_text(
+ default_path.read_text(encoding="utf-8"),
+ encoding="utf-8",
+ )
+ typer.echo(
+ typer.style(("Overwritten" if existed else "Created") + ": ", fg="blue")
+ + typer.style(str(config_path), fg="yellow")
+ )
+ if total_written > 0:
+ typer.echo(f"Config folder generated, {total_written=}")
+
+
+if __name__ == "__main__":
+ cli()
diff --git a/hibiapi/api/__init__.py b/hibiapi/api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/hibiapi/api/bika/__init__.py b/hibiapi/api/bika/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..1db879614c202b816a657c8440a87cdf3bb65901
--- /dev/null
+++ b/hibiapi/api/bika/__init__.py
@@ -0,0 +1,3 @@
+from .api import BikaEndpoints, ImageQuality, ResultSort # noqa: F401
+from .constants import BikaConstants # noqa: F401
+from .net import BikaLogin, NetRequest # noqa: F401
diff --git a/hibiapi/api/bika/api.py b/hibiapi/api/bika/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..3438a89b91b56bd6f168fe864ff92b1f75057cac
--- /dev/null
+++ b/hibiapi/api/bika/api.py
@@ -0,0 +1,206 @@
+import hashlib
+import hmac
+from datetime import timedelta
+from enum import Enum
+from time import time
+from typing import Any, Optional, cast
+
+from httpx import URL
+
+from hibiapi.api.bika.constants import BikaConstants
+from hibiapi.api.bika.net import NetRequest
+from hibiapi.utils.cache import cache_config
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route, request_headers
+
+
+@enum_auto_doc
+class ImageQuality(str, Enum):
+ """哔咔API返回的图片质量"""
+
+ low = "low"
+ """低质量"""
+ medium = "medium"
+ """中等质量"""
+ high = "high"
+ """高质量"""
+ original = "original"
+ """原图"""
+
+
+@enum_auto_doc
+class ResultSort(str, Enum):
+ """哔咔API返回的搜索结果排序方式"""
+
+ date_descending = "dd"
+ """最新发布"""
+ date_ascending = "da"
+ """最早发布"""
+ like_descending = "ld"
+ """最多喜欢"""
+ views_descending = "vd"
+ """最多浏览"""
+
+
+class BikaEndpoints(BaseEndpoint):
+ @staticmethod
+ def _sign(url: URL, timestamp_bytes: bytes, nonce: bytes, method: bytes):
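+        # The Bika API expects an HMAC-SHA256 signature (keyed with DIGEST_KEY) over the
+        # lowercased concatenation of request path, timestamp, nonce, HTTP method and API key.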
+ return hmac.new(
+ BikaConstants.DIGEST_KEY,
+ (
+ url.raw_path.lstrip(b"/")
+ + timestamp_bytes
+ + nonce
+ + method
+ + BikaConstants.API_KEY
+ ).lower(),
+ hashlib.sha256,
+ ).hexdigest()
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self,
+ endpoint: str,
+ *,
+ params: Optional[dict[str, Any]] = None,
+ body: Optional[dict[str, Any]] = None,
+ no_token: bool = False,
+ ):
+ net_client = cast(NetRequest, self.client.net_client)
+ if not no_token:
+ async with net_client.auth_lock:
+ if net_client.token is None:
+ await net_client.login(self)
+
+ headers = {
+ "Authorization": net_client.token or "",
+ "Time": (current_time := f"{time():.0f}".encode()),
+ "Image-Quality": request_headers.get().get(
+ "X-Image-Quality", ImageQuality.medium
+ ),
+ "Nonce": (nonce := hashlib.md5(current_time).hexdigest().encode()),
+ "Signature": self._sign(
+ request_url := self._join(
+ base=BikaConstants.API_HOST,
+ endpoint=endpoint,
+ params=params or {},
+ ),
+ current_time,
+ nonce,
+ b"GET" if body is None else b"POST",
+ ),
+ }
+
+ response = await (
+ self.client.get(request_url, headers=headers)
+ if body is None
+ else self.client.post(request_url, headers=headers, json=body)
+ )
+ return response.json()
+
+ @cache_config(ttl=timedelta(days=1))
+ async def collections(self):
+ return await self.request("collections")
+
+ @cache_config(ttl=timedelta(days=3))
+ async def categories(self):
+ return await self.request("categories")
+
+ @cache_config(ttl=timedelta(days=3))
+ async def keywords(self):
+ return await self.request("keywords")
+
+ async def advanced_search(
+ self,
+ *,
+ keyword: str,
+ page: int = 1,
+ sort: ResultSort = ResultSort.date_descending,
+ ):
+ return await self.request(
+ "comics/advanced-search",
+ body={
+ "keyword": keyword,
+ "sort": sort,
+ },
+ params={
+ "page": page,
+ "s": sort,
+ },
+ )
+
+ async def category_list(
+ self,
+ *,
+ category: str,
+ page: int = 1,
+ sort: ResultSort = ResultSort.date_descending,
+ ):
+ return await self.request(
+ "comics",
+ params={
+ "page": page,
+ "c": category,
+ "s": sort,
+ },
+ )
+
+ async def author_list(
+ self,
+ *,
+ author: str,
+ page: int = 1,
+ sort: ResultSort = ResultSort.date_descending,
+ ):
+ return await self.request(
+ "comics",
+ params={
+ "page": page,
+ "a": author,
+ "s": sort,
+ },
+ )
+
+ @cache_config(ttl=timedelta(days=3))
+ async def comic_detail(self, *, id: str):
+ return await self.request("comics/{id}", params={"id": id})
+
+ async def comic_recommendation(self, *, id: str):
+ return await self.request("comics/{id}/recommendation", params={"id": id})
+
+ async def comic_episodes(self, *, id: str, page: int = 1):
+ return await self.request(
+ "comics/{id}/eps",
+ params={
+ "id": id,
+ "page": page,
+ },
+ )
+
+ async def comic_page(self, *, id: str, order: int = 1, page: int = 1):
+ return await self.request(
+ "comics/{id}/order/{order}/pages",
+ params={
+ "id": id,
+ "order": order,
+ "page": page,
+ },
+ )
+
+ async def comic_comments(self, *, id: str, page: int = 1):
+ return await self.request(
+ "comics/{id}/comments",
+ params={
+ "id": id,
+ "page": page,
+ },
+ )
+
+ async def games(self, *, page: int = 1):
+ return await self.request("games", params={"page": page})
+
+ @cache_config(ttl=timedelta(days=3))
+ async def game_detail(self, *, id: str):
+ return await self.request("games/{id}", params={"id": id})
diff --git a/hibiapi/api/bika/constants.py b/hibiapi/api/bika/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf5df703f84b0b59051cf161a8757c4aa6ac600c
--- /dev/null
+++ b/hibiapi/api/bika/constants.py
@@ -0,0 +1,19 @@
+from hibiapi.utils.config import APIConfig
+
+
+class BikaConstants:
+ DIGEST_KEY = b"~d}$Q7$eIni=V)9\\RK/P.RM4;9[7|@/CA}b~OW!3?EV`:<>M7pddUBL5n|0/*Cn"
+ API_KEY = b"C69BAF41DA5ABD1FFEDC6D2FEA56B"
+ DEFAULT_HEADERS = {
+ "API-Key": API_KEY,
+ "App-Channel": "2",
+ "App-Version": "2.2.1.2.3.3",
+ "App-Build-Version": "44",
+ "App-UUID": "defaultUuid",
+ "Accept": "application/vnd.picacomic.com.v1+json",
+ "App-Platform": "android",
+ "User-Agent": "okhttp/3.8.1",
+ "Content-Type": "application/json; charset=UTF-8",
+ }
+ API_HOST = "https://picaapi.picacomic.com/"
+ CONFIG = APIConfig("bika")
diff --git a/hibiapi/api/bika/net.py b/hibiapi/api/bika/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d985f1aa3a8bdb7cca996dbe47b29e1f07ccb30
--- /dev/null
+++ b/hibiapi/api/bika/net.py
@@ -0,0 +1,73 @@
+import asyncio
+from base64 import urlsafe_b64decode
+from datetime import datetime, timezone
+from functools import lru_cache
+from typing import TYPE_CHECKING, Any, Literal, Optional
+
+from pydantic import BaseModel, Field
+
+from hibiapi.api.bika.constants import BikaConstants
+from hibiapi.utils.net import BaseNetClient
+
+if TYPE_CHECKING:
+ from .api import BikaEndpoints
+
+
+class BikaLogin(BaseModel):
+ email: str
+ password: str
+
+
+class JWTHeader(BaseModel):
+ alg: str
+ typ: Literal["JWT"]
+
+
+class JWTBody(BaseModel):
+ id: str = Field(alias="_id")
+ iat: datetime
+ exp: datetime
+
+
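+# Decode the header and payload of a Bika JWT without verifying its signature;
+# this is only used locally to read the expiry ("exp") claim.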
+@lru_cache(maxsize=4)
+def load_jwt(token: str):
+ def b64pad(data: str):
+ return data + "=" * (-len(data) % 4)
+
+ head, body, _ = token.split(".")
+ head_data = JWTHeader.parse_raw(urlsafe_b64decode(b64pad(head)))
+ body_data = JWTBody.parse_raw(urlsafe_b64decode(b64pad(body)))
+ return head_data, body_data
+
+
+class NetRequest(BaseNetClient):
+ _token: Optional[str] = None
+
+ def __init__(self):
+ super().__init__(
+ headers=BikaConstants.DEFAULT_HEADERS.copy(),
+ proxies=BikaConstants.CONFIG["proxy"].as_dict(),
+ )
+ self.auth_lock = asyncio.Lock()
+
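+    # The cached token is only returned while its JWT "exp" claim is still in the
+    # future; otherwise request() re-authenticates under auth_lock.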
+ @property
+ def token(self) -> Optional[str]:
+ if self._token is None:
+ return None
+ _, body = load_jwt(self._token)
+ return None if body.exp < datetime.now(timezone.utc) else self._token
+
+ async def login(self, endpoint: "BikaEndpoints"):
+ login_data = BikaConstants.CONFIG["account"].get(BikaLogin)
+ login_result: dict[str, Any] = await endpoint.request(
+ "auth/sign-in",
+ body=login_data.dict(),
+ no_token=True,
+ )
+ assert login_result["code"] == 200, login_result["message"]
+ if not (
+ isinstance(login_data := login_result.get("data"), dict)
+ and "token" in login_data
+ ):
+ raise ValueError("failed to read Bika account token.")
+ self._token = login_data["token"]
diff --git a/hibiapi/api/bilibili/__init__.py b/hibiapi/api/bilibili/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b67abd1fc4aed24f699bb5174d03bf641fb693a7
--- /dev/null
+++ b/hibiapi/api/bilibili/__init__.py
@@ -0,0 +1,4 @@
+# flake8:noqa:F401
+from .api import * # noqa: F401, F403
+from .constants import BilibiliConstants
+from .net import NetRequest
diff --git a/hibiapi/api/bilibili/api/__init__.py b/hibiapi/api/bilibili/api/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..618bc1355dfc49234add44acfbaca8898aa873ce
--- /dev/null
+++ b/hibiapi/api/bilibili/api/__init__.py
@@ -0,0 +1,4 @@
+# flake8:noqa:F401
+from .base import BaseBilibiliEndpoint, TimelineType, VideoFormatType, VideoQualityType
+from .v2 import BilibiliEndpointV2, SearchType
+from .v3 import BilibiliEndpointV3
diff --git a/hibiapi/api/bilibili/api/base.py b/hibiapi/api/bilibili/api/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..b76c6dba7faa854f0bedef84b4ea5ef1b5c1e430
--- /dev/null
+++ b/hibiapi/api/bilibili/api/base.py
@@ -0,0 +1,278 @@
+import hashlib
+import json
+from enum import Enum, IntEnum
+from time import time
+from typing import Any, Optional, overload
+
+from httpx import URL
+
+from hibiapi.api.bilibili.constants import BilibiliConstants
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route
+
+
+@enum_auto_doc
+class TimelineType(str, Enum):
+ """番剧时间线类型"""
+
+ CN = "cn"
+ """国产动画"""
+ GLOBAL = "global"
+ """番剧"""
+
+
+@enum_auto_doc
+class VideoQualityType(IntEnum):
+ """视频质量类型"""
+
+ VIDEO_240P = 6
+ VIDEO_360P = 16
+ VIDEO_480P = 32
+ VIDEO_720P = 64
+ VIDEO_720P_60FPS = 74
+ VIDEO_1080P = 80
+ VIDEO_1080P_PLUS = 112
+ VIDEO_1080P_60FPS = 116
+ VIDEO_4K = 120
+
+
+@enum_auto_doc
+class VideoFormatType(IntEnum):
+ """视频格式类型"""
+
+ FLV = 0
+ MP4 = 2
+ DASH = 16
+
+
+class BaseBilibiliEndpoint(BaseEndpoint):
+ def _sign(self, base: str, endpoint: str, params: dict[str, Any]) -> URL:
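+        # App-API request signing: merge the default client params and keys, sort the
+        # query alphabetically, then append sign = md5(query string + app secret).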
+ params.update(
+ {
+ **BilibiliConstants.DEFAULT_PARAMS,
+ "access_key": BilibiliConstants.ACCESS_KEY,
+ "appkey": BilibiliConstants.APP_KEY,
+ "ts": int(time()),
+ }
+ )
+ params = {k: params[k] for k in sorted(params.keys())}
+ url = self._join(base=base, endpoint=endpoint, params=params)
+ params["sign"] = hashlib.md5(url.query + BilibiliConstants.SECRET).hexdigest()
+ return URL(url, params=params)
+
+ @staticmethod
+ def _parse_json(content: str) -> dict[str, Any]:
+ try:
+ return json.loads(content)
+ except json.JSONDecodeError:
+ # NOTE: this is used to parse jsonp response
+            start, end = content.find("("), content.rfind(")")
+            return json.loads(content[start + 1 : end].strip())
+
+ @overload
+ async def request(
+ self,
+ endpoint: str,
+ *,
+ sign: bool = True,
+ params: Optional[dict[str, Any]] = None,
+ ) -> dict[str, Any]: ...
+
+ @overload
+ async def request(
+ self,
+ endpoint: str,
+ source: str,
+ *,
+ sign: bool = True,
+ params: Optional[dict[str, Any]] = None,
+ ) -> dict[str, Any]: ...
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self,
+ endpoint: str,
+ source: Optional[str] = None,
+ *,
+ sign: bool = True,
+ params: Optional[dict[str, Any]] = None,
+ ) -> dict[str, Any]:
+ host = BilibiliConstants.SERVER_HOST[source or "app"]
+ url = (self._sign if sign else self._join)(
+ base=host, endpoint=endpoint, params=params or {}
+ )
+ response = await self.client.get(url)
+ response.raise_for_status()
+ return self._parse_json(response.text)
+
+ async def playurl(
+ self,
+ *,
+ aid: int,
+ cid: int,
+ quality: VideoQualityType = VideoQualityType.VIDEO_480P,
+ type: VideoFormatType = VideoFormatType.FLV,
+ ):
+ return await self.request(
+ "x/player/playurl",
+ "api",
+ sign=False,
+ params={
+ "avid": aid,
+ "cid": cid,
+ "qn": quality,
+ "fnval": type,
+ "fnver": 0,
+ "fourk": 0 if quality >= VideoQualityType.VIDEO_4K else 1,
+ },
+ )
+
+ async def view(self, *, aid: int):
+ return await self.request(
+ "x/v2/view",
+ params={
+ "aid": aid,
+ },
+ )
+
+ async def search(self, *, keyword: str, page: int = 1, pagesize: int = 20):
+ return await self.request(
+ "x/v2/search",
+ params={
+ "duration": 0,
+ "keyword": keyword,
+ "pn": page,
+ "ps": pagesize,
+ },
+ )
+
+ async def search_hot(self, *, limit: int = 50):
+ return await self.request(
+ "x/v2/search/hot",
+ params={
+ "limit": limit,
+ },
+ )
+
+ async def search_suggest(self, *, keyword: str, type: str = "accurate"):
+ return await self.request(
+ "x/v2/search/suggest",
+ params={
+ "keyword": keyword,
+ "type": type,
+ },
+ )
+
+ async def space(self, *, vmid: int, page: int = 1, pagesize: int = 10):
+ return await self.request(
+ "x/v2/space",
+ params={
+ "vmid": vmid,
+ "ps": pagesize,
+ "pn": page,
+ },
+ )
+
+ async def space_archive(self, *, vmid: int, page: int = 1, pagesize: int = 10):
+ return await self.request(
+ "x/v2/space/archive",
+ params={
+ "vmid": vmid,
+ "ps": pagesize,
+ "pn": page,
+ },
+ )
+
+ async def favorite_video(
+ self,
+ *,
+ fid: int,
+ vmid: int,
+ page: int = 1,
+ pagesize: int = 20,
+ ):
+ return await self.request(
+ "x/v2/fav/video",
+ "api",
+ params={
+ "fid": fid,
+ "pn": page,
+ "ps": pagesize,
+ "vmid": vmid,
+ "order": "ftime",
+ },
+ )
+
+ async def event_list(
+ self,
+ *,
+ fid: int,
+ vmid: int,
+ page: int = 1,
+ pagesize: int = 20,
+ ): # NOTE: this endpoint is not used
+ return await self.request(
+ "event/getlist",
+ "api",
+ params={
+ "fid": fid,
+ "pn": page,
+ "ps": pagesize,
+ "vmid": vmid,
+ "order": "ftime",
+ },
+ )
+
+ async def season_info(self, *, season_id: int):
+ return await self.request(
+ "pgc/view/web/season",
+ "api",
+ params={
+ "season_id": season_id,
+ },
+ )
+
+ async def bangumi_source(self, *, episode_id: int):
+ return await self.request(
+ "api/get_source",
+ "bgm",
+ params={
+ "episode_id": episode_id,
+ },
+ )
+
+ async def season_recommend(self, *, season_id: int):
+ return await self.request(
+ "pgc/season/web/related/recommend",
+ "api",
+ sign=False,
+ params={
+ "season_id": season_id,
+ },
+ )
+
+ async def timeline(self, *, type: TimelineType = TimelineType.GLOBAL):
+ return await self.request(
+ "web_api/timeline_{type}",
+ "bgm",
+ sign=False,
+ params={
+ "type": type,
+ },
+ )
+
+ async def suggest(self, *, keyword: str): # NOTE: this endpoint is not used
+ return await self.request(
+ "main/suggest",
+ "search",
+ sign=False,
+ params={
+ "func": "suggest",
+ "suggest_type": "accurate",
+ "sug_type": "tag",
+ "main_ver": "v1",
+ "keyword": keyword,
+ },
+ )
diff --git a/hibiapi/api/bilibili/api/v2.py b/hibiapi/api/bilibili/api/v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..898fcae6b2d88d3c04aad273b2b14fdb09cd52cb
--- /dev/null
+++ b/hibiapi/api/bilibili/api/v2.py
@@ -0,0 +1,124 @@
+from collections.abc import Coroutine
+from enum import Enum
+from functools import wraps
+from typing import Callable, Optional, TypeVar
+
+from hibiapi.api.bilibili.api.base import (
+ BaseBilibiliEndpoint,
+ TimelineType,
+ VideoFormatType,
+ VideoQualityType,
+)
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.exceptions import ClientSideException
+from hibiapi.utils.net import AsyncHTTPClient
+from hibiapi.utils.routing import BaseEndpoint
+
+_AnyCallable = TypeVar("_AnyCallable", bound=Callable[..., Coroutine])
+
+
+def process_keyerror(function: _AnyCallable) -> _AnyCallable:
+ @wraps(function)
+ async def wrapper(*args, **kwargs):
+ try:
+ return await function(*args, **kwargs)
+ except (KeyError, IndexError) as e:
+ raise ClientSideException(detail=str(e)) from None
+
+ return wrapper # type:ignore
+
+
+@enum_auto_doc
+class SearchType(str, Enum):
+ """搜索类型"""
+
+ search = "search"
+ """综合搜索"""
+
+ suggest = "suggest"
+ """搜索建议"""
+
+ hot = "hot"
+ """热门"""
+
+
+class BilibiliEndpointV2(BaseEndpoint, cache_endpoints=False):
+ def __init__(self, client: AsyncHTTPClient):
+ super().__init__(client)
+ self.base = BaseBilibiliEndpoint(client)
+
+ @process_keyerror
+ async def playurl(
+ self,
+ *,
+ aid: int,
+ page: Optional[int] = None,
+ quality: VideoQualityType = VideoQualityType.VIDEO_480P,
+ type: VideoFormatType = VideoFormatType.MP4,
+    ):  # NOTE: not fully identical to the original API
+ video_view = await self.base.view(aid=aid)
+ if page is None:
+ return video_view
+ cid: int = video_view["data"]["pages"][page - 1]["cid"]
+ return await self.base.playurl(
+ aid=aid,
+ cid=cid,
+ quality=quality,
+ type=type,
+ )
+
+    async def seasoninfo(self, *, season_id: int):  # NOTE: not identical to the original API
+ return await self.base.season_info(season_id=season_id)
+
+ async def source(self, *, episode_id: int):
+ return await self.base.bangumi_source(episode_id=episode_id)
+
+    async def seasonrecommend(self, *, season_id: int):  # NOTE: not identical to the original API
+ return await self.base.season_recommend(season_id=season_id)
+
+ async def search(
+ self,
+ *,
+ keyword: str = "",
+ type: SearchType = SearchType.search,
+ page: int = 1,
+ pagesize: int = 20,
+ limit: int = 50,
+ ):
+ if type == SearchType.suggest:
+ return await self.base.search_suggest(keyword=keyword)
+ elif type == SearchType.hot:
+ return await self.base.search_hot(limit=limit)
+ else:
+ return await self.base.search(
+ keyword=keyword,
+ page=page,
+ pagesize=pagesize,
+ )
+
+ async def timeline(
+ self, *, type: TimelineType = TimelineType.GLOBAL
+    ):  # NOTE: not identical to the original API
+ return await self.base.timeline(type=type)
+
+ async def space(self, *, vmid: int, page: int = 1, pagesize: int = 10):
+ return await self.base.space(
+ vmid=vmid,
+ page=page,
+ pagesize=pagesize,
+ )
+
+ async def archive(self, *, vmid: int, page: int = 1, pagesize: int = 10):
+ return await self.base.space_archive(
+ vmid=vmid,
+ page=page,
+ pagesize=pagesize,
+ )
+
+ async def favlist(self, *, fid: int, vmid: int, page: int = 1, pagesize: int = 20):
+ return await self.base.favorite_video(
+ fid=fid,
+ vmid=vmid,
+ page=page,
+ pagesize=pagesize,
+ )
diff --git a/hibiapi/api/bilibili/api/v3.py b/hibiapi/api/bilibili/api/v3.py
new file mode 100644
index 0000000000000000000000000000000000000000..ece57bf28840555b382e8f5e535e08a36efeed4e
--- /dev/null
+++ b/hibiapi/api/bilibili/api/v3.py
@@ -0,0 +1,79 @@
+from hibiapi.api.bilibili.api.base import (
+ BaseBilibiliEndpoint,
+ TimelineType,
+ VideoFormatType,
+ VideoQualityType,
+)
+from hibiapi.utils.net import AsyncHTTPClient
+from hibiapi.utils.routing import BaseEndpoint
+
+
+class BilibiliEndpointV3(BaseEndpoint, cache_endpoints=False):
+ def __init__(self, client: AsyncHTTPClient):
+ super().__init__(client)
+ self.base = BaseBilibiliEndpoint(client)
+
+ async def video_info(self, *, aid: int):
+ return await self.base.view(aid=aid)
+
+ async def video_address(
+ self,
+ *,
+ aid: int,
+ cid: int,
+ quality: VideoQualityType = VideoQualityType.VIDEO_480P,
+ type: VideoFormatType = VideoFormatType.FLV,
+ ):
+ return await self.base.playurl(
+ aid=aid,
+ cid=cid,
+ quality=quality,
+ type=type,
+ )
+
+ async def user_info(self, *, uid: int, page: int = 1, size: int = 10):
+ return await self.base.space(
+ vmid=uid,
+ page=page,
+ pagesize=size,
+ )
+
+ async def user_uploaded(self, *, uid: int, page: int = 1, size: int = 10):
+ return await self.base.space_archive(
+ vmid=uid,
+ page=page,
+ pagesize=size,
+ )
+
+ async def user_favorite(self, *, uid: int, fid: int, page: int = 1, size: int = 10):
+ return await self.base.favorite_video(
+ fid=fid,
+ vmid=uid,
+ page=page,
+ pagesize=size,
+ )
+
+ async def season_info(self, *, season_id: int):
+ return await self.base.season_info(season_id=season_id)
+
+ async def season_recommend(self, *, season_id: int):
+ return await self.base.season_recommend(season_id=season_id)
+
+ async def season_episode(self, *, episode_id: int):
+ return await self.base.bangumi_source(episode_id=episode_id)
+
+ async def season_timeline(self, *, type: TimelineType = TimelineType.GLOBAL):
+ return await self.base.timeline(type=type)
+
+ async def search(self, *, keyword: str, page: int = 1, size: int = 20):
+ return await self.base.search(
+ keyword=keyword,
+ page=page,
+ pagesize=size,
+ )
+
+ async def search_recommend(self, *, limit: int = 50):
+ return await self.base.search_hot(limit=limit)
+
+ async def search_suggestion(self, *, keyword: str):
+ return await self.base.search_suggest(keyword=keyword)
diff --git a/hibiapi/api/bilibili/constants.py b/hibiapi/api/bilibili/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a7d7f698bd0ed8cb93ec1ee1aebff70f5894da1
--- /dev/null
+++ b/hibiapi/api/bilibili/constants.py
@@ -0,0 +1,32 @@
+from http.cookies import SimpleCookie
+from typing import Any
+
+from hibiapi.utils.config import APIConfig
+
+_CONFIG = APIConfig("bilibili")
+
+
+class BilibiliConstants:
+ SERVER_HOST: dict[str, str] = {
+ "app": "https://app.bilibili.com",
+ "api": "https://api.bilibili.com",
+ "interface": "https://interface.bilibili.com",
+ "main": "https://www.bilibili.com",
+ "bgm": "https://bangumi.bilibili.com",
+ "comment": "https://comment.bilibili.com",
+ "search": "https://s.search.bilibili.com",
+ "mobile": "https://m.bilibili.com",
+ }
+ APP_HOST: str = "http://app.bilibili.com"
+ DEFAULT_PARAMS: dict[str, Any] = {
+ "build": 507000,
+ "device": "android",
+ "platform": "android",
+ "mobi_app": "android",
+ }
+ APP_KEY: str = "1d8b6e7d45233436"
+ SECRET: bytes = b"560c52ccd288fed045859ed18bffd973"
+ ACCESS_KEY: str = "5271b2f0eb92f5f89af4dc39197d8e41"
+ COOKIES: SimpleCookie = SimpleCookie(_CONFIG["net"]["cookie"].as_str())
+ USER_AGENT: str = _CONFIG["net"]["user-agent"].as_str()
+ CONFIG: APIConfig = _CONFIG
diff --git a/hibiapi/api/bilibili/net.py b/hibiapi/api/bilibili/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e0ffb79479cd02cb8e84af7fa0002bfaaa1303e
--- /dev/null
+++ b/hibiapi/api/bilibili/net.py
@@ -0,0 +1,13 @@
+from httpx import Cookies
+
+from hibiapi.utils.net import BaseNetClient
+
+from .constants import BilibiliConstants
+
+
+class NetRequest(BaseNetClient):
+ def __init__(self):
+ super().__init__(
+ headers={"user-agent": BilibiliConstants.USER_AGENT},
+ cookies=Cookies({k: v.value for k, v in BilibiliConstants.COOKIES.items()}),
+ )
diff --git a/hibiapi/api/netease/__init__.py b/hibiapi/api/netease/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3d39c47353ead272948bb8574607b723805d3cf9
--- /dev/null
+++ b/hibiapi/api/netease/__init__.py
@@ -0,0 +1,4 @@
+# flake8:noqa:F401
+from .api import BitRateType, NeteaseEndpoint, RecordPeriodType, SearchType
+from .constants import NeteaseConstants
+from .net import NetRequest
diff --git a/hibiapi/api/netease/api.py b/hibiapi/api/netease/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1fe5870182f16891db910eddf1984b303f5edb1
--- /dev/null
+++ b/hibiapi/api/netease/api.py
@@ -0,0 +1,326 @@
+import base64
+import json
+import secrets
+import string
+from datetime import timedelta
+from enum import IntEnum
+from ipaddress import IPv4Address
+from random import randint
+from typing import Annotated, Any, Optional
+
+from Cryptodome.Cipher import AES
+from Cryptodome.Util.Padding import pad
+from fastapi import Query
+
+from hibiapi.api.netease.constants import NeteaseConstants
+from hibiapi.utils.cache import cache_config
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.exceptions import UpstreamAPIException
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route
+
+
+@enum_auto_doc
+class SearchType(IntEnum):
+ """搜索内容类型"""
+
+ SONG = 1
+ """单曲"""
+ ALBUM = 10
+ """专辑"""
+ ARTIST = 100
+ """歌手"""
+ PLAYLIST = 1000
+ """歌单"""
+ USER = 1002
+ """用户"""
+ MV = 1004
+ """MV"""
+ LYRICS = 1006
+ """歌词"""
+ DJ = 1009
+ """主播电台"""
+ VIDEO = 1014
+ """视频"""
+
+
+@enum_auto_doc
+class BitRateType(IntEnum):
+ """歌曲码率"""
+
+ LOW = 64000
+ MEDIUM = 128000
+ STANDARD = 198000
+ HIGH = 320000
+
+
+@enum_auto_doc
+class MVResolutionType(IntEnum):
+ """MV分辨率"""
+
+ QVGA = 240
+ VGA = 480
+ HD = 720
+ FHD = 1080
+
+
+@enum_auto_doc
+class RecordPeriodType(IntEnum):
+ """听歌记录时段类型"""
+
+ WEEKLY = 1
+ """本周"""
+ ALL = 0
+ """所有时段"""
+
+
+class _EncryptUtil:
+ alphabets = bytearray(ord(char) for char in string.ascii_letters + string.digits)
+
+ @staticmethod
+ def _aes(data: bytes, key: bytes) -> bytes:
+ data = pad(data, 16) if len(data) % 16 else data
+ return base64.encodebytes(
+ AES.new(
+ key=key,
+ mode=AES.MODE_CBC,
+ iv=NeteaseConstants.AES_IV,
+ ).encrypt(data)
+ )
+
+ @staticmethod
+ def _rsa(data: bytes):
+ result = pow(
+ base=int(data.hex(), 16),
+ exp=NeteaseConstants.RSA_PUBKEY,
+ mod=NeteaseConstants.RSA_MODULUS,
+ )
+ return f"{result:0>256x}"
+
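+    # "weapi" parameter encryption: the JSON payload is AES-CBC encrypted twice
+    # (first with the fixed AES_KEY, then with a random 16-character key), and the
+    # random key is reversed and RSA-encrypted into encSecKey.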
+ @classmethod
+ def encrypt(cls, data: dict[str, Any]) -> dict[str, str]:
+ secret = bytes(secrets.choice(cls.alphabets) for _ in range(16))
+ secure_key = cls._rsa(bytes(reversed(secret)))
+ return {
+ "params": cls._aes(
+ data=cls._aes(
+ data=json.dumps(data).encode(),
+ key=NeteaseConstants.AES_KEY,
+ ),
+ key=secret,
+ ).decode("ascii"),
+ "encSecKey": secure_key,
+ }
+
+
+class NeteaseEndpoint(BaseEndpoint):
+ def _construct_headers(self):
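+        # Attach a random X-Real-IP drawn from the configured source IP segment,
+        # so upstream sees requests as originating from within that network.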
+ headers = self.client.headers.copy()
+ headers["X-Real-IP"] = str(
+ IPv4Address(
+ randint(
+ int(NeteaseConstants.SOURCE_IP_SEGMENT.network_address),
+ int(NeteaseConstants.SOURCE_IP_SEGMENT.broadcast_address),
+ )
+ )
+ )
+ return headers
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self, endpoint: str, *, params: Optional[dict[str, Any]] = None
+ ) -> dict[str, Any]:
+ params = {
+ **(params or {}),
+ "csrf_token": self.client.cookies.get("__csrf", ""),
+ }
+ response = await self.client.post(
+ self._join(
+ NeteaseConstants.HOST,
+ endpoint=endpoint,
+ params=params,
+ ),
+ headers=self._construct_headers(),
+ data=_EncryptUtil.encrypt(params),
+ )
+ response.raise_for_status()
+ if not response.text.strip():
+ raise UpstreamAPIException(
+ f"Upstream API {endpoint=} returns blank content"
+ )
+ return response.json()
+
+ async def search(
+ self,
+ *,
+ s: str,
+ search_type: SearchType = SearchType.SONG,
+ limit: int = 20,
+ offset: int = 0,
+ ):
+ return await self.request(
+ "api/cloudsearch/pc",
+ params={
+ "s": s,
+ "type": search_type,
+ "limit": limit,
+ "offset": offset,
+ "total": True,
+ },
+ )
+
+ async def artist(self, *, id: int):
+ return await self.request(
+ "weapi/v1/artist/{artist_id}",
+ params={
+ "artist_id": id,
+ },
+ )
+
+ async def album(self, *, id: int):
+ return await self.request(
+ "weapi/v1/album/{album_id}",
+ params={
+ "album_id": id,
+ },
+ )
+
+ async def detail(
+ self,
+ *,
+ id: Annotated[list[int], Query()],
+ ):
+ return await self.request(
+ "api/v3/song/detail",
+ params={
+ "c": json.dumps(
+ [{"id": str(i)} for i in id],
+ ),
+ },
+ )
+
+ @cache_config(ttl=timedelta(minutes=20))
+ async def song(
+ self,
+ *,
+ id: Annotated[list[int], Query()],
+ br: BitRateType = BitRateType.STANDARD,
+ ):
+ return await self.request(
+ "weapi/song/enhance/player/url",
+ params={
+ "ids": [str(i) for i in id],
+ "br": br,
+ },
+ )
+
+ async def playlist(self, *, id: int):
+ return await self.request(
+ "weapi/v6/playlist/detail",
+ params={
+ "id": id,
+ "total": True,
+ "offset": 0,
+ "limit": 1000,
+ "n": 1000,
+ },
+ )
+
+ async def lyric(self, *, id: int):
+ return await self.request(
+ "weapi/song/lyric",
+ params={
+ "id": id,
+ "os": "pc",
+ "lv": -1,
+ "kv": -1,
+ "tv": -1,
+ },
+ )
+
+ async def mv(self, *, id: int):
+ return await self.request(
+ "api/v1/mv/detail",
+ params={
+ "id": id,
+ },
+ )
+
+ async def mv_url(
+ self,
+ *,
+ id: int,
+ res: MVResolutionType = MVResolutionType.FHD,
+ ):
+ return await self.request(
+ "weapi/song/enhance/play/mv/url",
+ params={
+ "id": id,
+ "r": res,
+ },
+ )
+
+ async def comments(self, *, id: int, offset: int = 0, limit: int = 1):
+ return await self.request(
+ "weapi/v1/resource/comments/R_SO_4_{song_id}",
+ params={
+ "song_id": id,
+ "offset": offset,
+ "total": True,
+ "limit": limit,
+ },
+ )
+
+ async def record(self, *, id: int, period: RecordPeriodType = RecordPeriodType.ALL):
+ return await self.request(
+ "weapi/v1/play/record",
+ params={
+ "uid": id,
+ "type": period,
+ },
+ )
+
+ async def djradio(self, *, id: int):
+ return await self.request(
+ "api/djradio/v2/get",
+ params={
+ "id": id,
+ },
+ )
+
+ async def dj(self, *, id: int, offset: int = 0, limit: int = 20, asc: bool = False):
+        # NOTE: possibly not identical to the original API
+ return await self.request(
+ "weapi/dj/program/byradio",
+ params={
+ "radioId": id,
+ "offset": offset,
+ "limit": limit,
+ "asc": asc,
+ },
+ )
+
+ async def detail_dj(self, *, id: int):
+ return await self.request(
+ "api/dj/program/detail",
+ params={
+ "id": id,
+ },
+ )
+
+ async def user(self, *, id: int):
+ return await self.request(
+ "weapi/v1/user/detail/{id}",
+ params={"id": id},
+ )
+
+ async def user_playlist(self, *, id: int, limit: int = 50, offset: int = 0):
+ return await self.request(
+ "weapi/user/playlist",
+ params={
+ "uid": id,
+ "limit": limit,
+ "offset": offset,
+ },
+ )
diff --git a/hibiapi/api/netease/constants.py b/hibiapi/api/netease/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..ec0c668627d0dcefcab089382f8b6968e479b686
--- /dev/null
+++ b/hibiapi/api/netease/constants.py
@@ -0,0 +1,33 @@
+from http.cookies import SimpleCookie
+from ipaddress import IPv4Network
+
+from hibiapi.utils.config import APIConfig
+
+_Config = APIConfig("netease")
+
+
+class NeteaseConstants:
+ AES_KEY: bytes = b"0CoJUm6Qyw8W8jud"
+ AES_IV: bytes = b"0102030405060708"
+ RSA_PUBKEY: int = int("010001", 16)
+ RSA_MODULUS: int = int(
+ "00e0b509f6259df8642dbc3566290147"
+ "7df22677ec152b5ff68ace615bb7b725"
+ "152b3ab17a876aea8a5aa76d2e417629"
+ "ec4ee341f56135fccf695280104e0312"
+ "ecbda92557c93870114af6c9d05c4f7f"
+ "0c3685b7a46bee255932575cce10b424"
+ "d813cfe4875d3e82047b97ddef52741d"
+ "546b8e289dc6935b3ece0462db0a22b8e7",
+ 16,
+ )
+
+ HOST: str = "http://music.163.com"
+ COOKIES: SimpleCookie = SimpleCookie(_Config["net"]["cookie"].as_str())
+ SOURCE_IP_SEGMENT: IPv4Network = _Config["net"]["source"].get(IPv4Network)
+ DEFAULT_HEADERS: dict[str, str] = {
+ "user-agent": _Config["net"]["user-agent"].as_str(),
+ "referer": "http://music.163.com",
+ }
+
+ CONFIG: APIConfig = _Config
diff --git a/hibiapi/api/netease/net.py b/hibiapi/api/netease/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..4727e2bfda18295632a5de088f849db37595ea8f
--- /dev/null
+++ b/hibiapi/api/netease/net.py
@@ -0,0 +1,13 @@
+from httpx import Cookies
+
+from hibiapi.utils.net import BaseNetClient
+
+from .constants import NeteaseConstants
+
+
+class NetRequest(BaseNetClient):
+ def __init__(self):
+ super().__init__(
+ headers=NeteaseConstants.DEFAULT_HEADERS,
+ cookies=Cookies({k: v.value for k, v in NeteaseConstants.COOKIES.items()}),
+ )
diff --git a/hibiapi/api/pixiv/__init__.py b/hibiapi/api/pixiv/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc69e460035f75f924c4c537d8d62456b8ee644a
--- /dev/null
+++ b/hibiapi/api/pixiv/__init__.py
@@ -0,0 +1,13 @@
+# flake8:noqa:F401
+from .api import (
+ IllustType,
+ PixivEndpoints,
+ RankingDate,
+ RankingType,
+ SearchDurationType,
+ SearchModeType,
+ SearchNovelModeType,
+ SearchSortType,
+)
+from .constants import PixivConstants
+from .net import NetRequest, PixivAuthData
diff --git a/hibiapi/api/pixiv/api.py b/hibiapi/api/pixiv/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ed06c630b244a9096ca33361857c2c4237e442c
--- /dev/null
+++ b/hibiapi/api/pixiv/api.py
@@ -0,0 +1,613 @@
+import json
+import re
+from datetime import date, timedelta
+from enum import Enum
+from typing import Any, Literal, Optional, Union, cast, overload
+
+from hibiapi.api.pixiv.constants import PixivConstants
+from hibiapi.api.pixiv.net import NetRequest as PixivNetClient
+from hibiapi.utils.cache import cache_config
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route, request_headers
+
+
+@enum_auto_doc
+class IllustType(str, Enum):
+ """画作类型"""
+
+ illust = "illust"
+ """插画"""
+ manga = "manga"
+ """漫画"""
+
+
+@enum_auto_doc
+class RankingType(str, Enum):
+ """排行榜内容类型"""
+
+ day = "day"
+ """日榜"""
+ week = "week"
+ """周榜"""
+ month = "month"
+ """月榜"""
+ day_male = "day_male"
+ """男性向"""
+ day_female = "day_female"
+ """女性向"""
+ week_original = "week_original"
+ """原创周榜"""
+ week_rookie = "week_rookie"
+ """新人周榜"""
+ day_ai = "day_ai"
+ """AI日榜"""
+ day_manga = "day_manga"
+ """漫画日榜"""
+ week_manga = "week_manga"
+ """漫画周榜"""
+ month_manga = "month_manga"
+ """漫画月榜"""
+ week_rookie_manga = "week_rookie_manga"
+ """漫画新人周榜"""
+ day_r18 = "day_r18"
+ day_male_r18 = "day_male_r18"
+ day_female_r18 = "day_female_r18"
+ week_r18 = "week_r18"
+ week_r18g = "week_r18g"
+ day_r18_ai = "day_r18_ai"
+ day_r18_manga = "day_r18_manga"
+ week_r18_manga = "week_r18_manga"
+
+
+@enum_auto_doc
+class SearchModeType(str, Enum):
+ """搜索匹配类型"""
+
+ partial_match_for_tags = "partial_match_for_tags"
+ """标签部分一致"""
+ exact_match_for_tags = "exact_match_for_tags"
+ """标签完全一致"""
+ title_and_caption = "title_and_caption"
+ """标题说明文"""
+
+
+@enum_auto_doc
+class SearchNovelModeType(str, Enum):
+ """搜索匹配类型"""
+
+ partial_match_for_tags = "partial_match_for_tags"
+ """标签部分一致"""
+ exact_match_for_tags = "exact_match_for_tags"
+ """标签完全一致"""
+ text = "text"
+ """正文"""
+ keyword = "keyword"
+ """关键词"""
+
+
+@enum_auto_doc
+class SearchSortType(str, Enum):
+ """搜索排序类型"""
+
+ date_desc = "date_desc"
+ """按日期倒序"""
+ date_asc = "date_asc"
+ """按日期正序"""
+ popular_desc = "popular_desc"
+ """受欢迎降序(Premium功能)"""
+
+
+@enum_auto_doc
+class SearchDurationType(str, Enum):
+ """搜索时段类型"""
+
+ within_last_day = "within_last_day"
+ """一天内"""
+ within_last_week = "within_last_week"
+ """一周内"""
+ within_last_month = "within_last_month"
+ """一个月内"""
+
+
+class RankingDate(date):
+ @classmethod
+ def yesterday(cls) -> "RankingDate":
+ yesterday = cls.today() - timedelta(days=1)
+ return cls(yesterday.year, yesterday.month, yesterday.day)
+
+ def toString(self) -> str:
+ return self.strftime(r"%Y-%m-%d")
+
+ @classmethod
+ def new(cls, date: date) -> "RankingDate":
+ return cls(date.year, date.month, date.day)
+
+
+class PixivEndpoints(BaseEndpoint):
+ @staticmethod
+ def _parse_accept_language(accept_language: str) -> str:
+ first_language, *_ = accept_language.partition(",")
+ language_code, *_ = first_language.partition(";")
+ return language_code.lower().strip()
+
+ @overload
+ async def request(
+ self,
+ endpoint: str,
+ *,
+ params: Optional[dict[str, Any]] = None,
+ return_text: Literal[False] = False,
+ ) -> dict[str, Any]: ...
+
+ @overload
+ async def request(
+ self,
+ endpoint: str,
+ *,
+ params: Optional[dict[str, Any]] = None,
+ return_text: Literal[True],
+ ) -> str: ...
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self,
+ endpoint: str,
+ *,
+ params: Optional[dict[str, Any]] = None,
+ return_text: bool = False,
+ ) -> Union[dict[str, Any], str]:
+ headers = self.client.headers.copy()
+
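+        # Pick an available Pixiv account under the auth lock and refresh its
+        # access token on demand before attaching the Bearer header.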
+ net_client = cast(PixivNetClient, self.client.net_client)
+ async with net_client.auth_lock:
+ auth, token = net_client.get_available_user()
+ if auth is None:
+ auth = await net_client.auth(token)
+ headers["Authorization"] = f"Bearer {auth.access_token}"
+
+ if language := request_headers.get().get("Accept-Language"):
+ language = self._parse_accept_language(language)
+ headers["Accept-Language"] = language
+
+ response = await self.client.get(
+ self._join(
+ base=PixivConstants.APP_HOST,
+ endpoint=endpoint,
+ params=params or {},
+ ),
+ headers=headers,
+ )
+ if return_text:
+ return response.text
+ return response.json()
+
+ @cache_config(ttl=timedelta(days=3))
+ async def illust(self, *, id: int):
+ return await self.request("v1/illust/detail", params={"illust_id": id})
+
+ @cache_config(ttl=timedelta(days=1))
+ async def member(self, *, id: int):
+ return await self.request("v1/user/detail", params={"user_id": id})
+
+ async def member_illust(
+ self,
+ *,
+ id: int,
+ illust_type: IllustType = IllustType.illust,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v1/user/illusts",
+ params={
+ "user_id": id,
+ "type": illust_type,
+ "offset": (page - 1) * size,
+ },
+ )
+
+ async def favorite(
+ self,
+ *,
+ id: int,
+ tag: Optional[str] = None,
+ max_bookmark_id: Optional[int] = None,
+ ):
+ return await self.request(
+ "v1/user/bookmarks/illust",
+ params={
+ "user_id": id,
+ "tag": tag,
+ "restrict": "public",
+ "max_bookmark_id": max_bookmark_id or None,
+ },
+ )
+
+    # Novels bookmarked by the user
+ async def favorite_novel(
+ self,
+ *,
+ id: int,
+ tag: Optional[str] = None,
+ ):
+ return await self.request(
+ "v1/user/bookmarks/novel",
+ params={
+ "user_id": id,
+ "tag": tag,
+ "restrict": "public",
+ },
+ )
+
+ async def following(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/user/following",
+ params={
+ "user_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+ async def follower(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/user/follower",
+ params={
+ "user_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+ @cache_config(ttl=timedelta(hours=12))
+ async def rank(
+ self,
+ *,
+ mode: RankingType = RankingType.week,
+ date: Optional[RankingDate] = None,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v1/illust/ranking",
+ params={
+ "mode": mode,
+ "date": RankingDate.new(date or RankingDate.yesterday()).toString(),
+ "offset": (page - 1) * size,
+ },
+ )
+
+ async def search(
+ self,
+ *,
+ word: str,
+ mode: SearchModeType = SearchModeType.partial_match_for_tags,
+ order: SearchSortType = SearchSortType.date_desc,
+ duration: Optional[SearchDurationType] = None,
+ page: int = 1,
+ size: int = 30,
+ include_translated_tag_results: bool = True,
+        search_ai_type: bool = True,  # whether results may include AI-generated works
+ ):
+ return await self.request(
+ "v1/search/illust",
+ params={
+ "word": word,
+ "search_target": mode,
+ "sort": order,
+ "duration": duration,
+ "offset": (page - 1) * size,
+ "include_translated_tag_results": include_translated_tag_results,
+ "search_ai_type": 1 if search_ai_type else 0,
+ },
+ )
+
+    # Preview of popular illustrations for a search
+ async def popular_preview(
+ self,
+ *,
+ word: str,
+ mode: SearchModeType = SearchModeType.partial_match_for_tags,
+ merge_plain_keyword_results: bool = True,
+ include_translated_tag_results: bool = True,
+ filter: str = "for_ios",
+ ):
+ return await self.request(
+ "v1/search/popular-preview/illust",
+ params={
+ "word": word,
+ "search_target": mode,
+ "merge_plain_keyword_results": merge_plain_keyword_results,
+ "include_translated_tag_results": include_translated_tag_results,
+ "filter": filter,
+ },
+ )
+
+ async def search_user(
+ self,
+ *,
+ word: str,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v1/search/user",
+ params={"word": word, "offset": (page - 1) * size},
+ )
+
+ async def tags_autocomplete(
+ self,
+ *,
+ word: str,
+ merge_plain_keyword_results: bool = True,
+ ):
+ return await self.request(
+ "/v2/search/autocomplete",
+ params={
+ "word": word,
+ "merge_plain_keyword_results": merge_plain_keyword_results,
+ },
+ )
+
+ @cache_config(ttl=timedelta(hours=12))
+ async def tags(self):
+ return await self.request("v1/trending-tags/illust")
+
+ @cache_config(ttl=timedelta(minutes=15))
+ async def related(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v2/illust/related",
+ params={
+ "illust_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+ @cache_config(ttl=timedelta(days=3))
+ async def ugoira_metadata(self, *, id: int):
+ return await self.request(
+ "v1/ugoira/metadata",
+ params={
+ "illust_id": id,
+ },
+ )
+
+    # Latest works from all users (illustrations)
+ async def illust_new(
+ self,
+ *,
+ content_type: str = "illust",
+ ):
+ return await self.request(
+ "v1/illust/new",
+ params={
+ "content_type": content_type,
+ "filter": "for_ios",
+ },
+ )
+
+    # List of pixivision (spotlight/featured) articles
+ async def spotlights(
+ self,
+ *,
+ category: str = "all",
+ page: int = 1,
+ size: int = 10,
+ ):
+ return await self.request(
+ "v1/spotlight/articles",
+ params={
+ "filter": "for_ios",
+ "category": category,
+ "offset": (page - 1) * size,
+ },
+ )
+
+    # Illustration comments
+ async def illust_comments(
+ self,
+ *,
+ id: int,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v3/illust/comments",
+ params={
+ "illust_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+    # Replies to an illustration comment
+ async def illust_comment_replies(
+ self,
+ *,
+ id: int,
+ ):
+ return await self.request(
+ "v2/illust/comment/replies",
+ params={
+ "comment_id": id,
+ },
+ )
+
+    # Novel comments
+ async def novel_comments(
+ self,
+ *,
+ id: int,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v3/novel/comments",
+ params={
+ "novel_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+    # Replies to a novel comment
+ async def novel_comment_replies(
+ self,
+ *,
+ id: int,
+ ):
+ return await self.request(
+ "v2/novel/comment/replies",
+ params={
+ "comment_id": id,
+ },
+ )
+
+    # Novel ranking
+ async def rank_novel(
+ self,
+ *,
+ mode: str = "day",
+ date: Optional[RankingDate] = None,
+ page: int = 1,
+ size: int = 30,
+ ):
+ return await self.request(
+ "v1/novel/ranking",
+ params={
+ "mode": mode,
+ "date": RankingDate.new(date or RankingDate.yesterday()).toString(),
+ "offset": (page - 1) * size,
+ },
+ )
+
+ async def member_novel(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "/v1/user/novels",
+ params={
+ "user_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+ async def novel_series(self, *, id: int):
+ return await self.request("/v2/novel/series", params={"series_id": id})
+
+ async def novel_detail(self, *, id: int):
+ return await self.request("/v2/novel/detail", params={"novel_id": id})
+
+    # Removed from the official API; calls webview/v2/novel as a compatibility fallback
+ async def novel_text(self, *, id: int):
+ # return await self.request("/v1/novel/text", params={"novel_id": id})
+ response = await self.webview_novel(id=id)
+ return {"novel_text": response["text"] or ""}
+
+    # Fetch the novel HTML and parse the embedded JSON
+ async def webview_novel(self, *, id: int):
+ response = await self.request(
+ "webview/v2/novel",
+ params={
+ "id": id,
+ "viewer_version": "20221031_ai",
+ },
+ return_text=True,
+ )
+
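+        # The webview page embeds the novel payload as a JS object; extract the
+        # JSON between "novel:" and ", isOwnWork", falling back to the raw body.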
+        novel_match = re.search(r"novel:\s+(?P<data>{.+?}),\s+isOwnWork", response)
+ return json.loads(novel_match["data"] if novel_match else response)
+
+ @cache_config(ttl=timedelta(hours=12))
+ async def tags_novel(self):
+ return await self.request("v1/trending-tags/novel")
+
+ async def search_novel(
+ self,
+ *,
+ word: str,
+ mode: SearchNovelModeType = SearchNovelModeType.partial_match_for_tags,
+ sort: SearchSortType = SearchSortType.date_desc,
+ merge_plain_keyword_results: bool = True,
+ include_translated_tag_results: bool = True,
+ duration: Optional[SearchDurationType] = None,
+ page: int = 1,
+ size: int = 30,
+        search_ai_type: bool = True, # whether search results include AI-generated works
+ ):
+ return await self.request(
+ "/v1/search/novel",
+ params={
+ "word": word,
+ "search_target": mode,
+ "sort": sort,
+ "merge_plain_keyword_results": merge_plain_keyword_results,
+ "include_translated_tag_results": include_translated_tag_results,
+ "duration": duration,
+ "offset": (page - 1) * size,
+ "search_ai_type": 1 if search_ai_type else 0,
+ },
+ )
+
+    # Popular novel preview
+ async def popular_preview_novel(
+ self,
+ *,
+ word: str,
+ mode: SearchNovelModeType = SearchNovelModeType.partial_match_for_tags,
+ merge_plain_keyword_results: bool = True,
+ include_translated_tag_results: bool = True,
+ filter: str = "for_ios",
+ ):
+ return await self.request(
+ "v1/search/popular-preview/novel",
+ params={
+ "word": word,
+ "search_target": mode,
+ "merge_plain_keyword_results": merge_plain_keyword_results,
+ "include_translated_tag_results": include_translated_tag_results,
+ "filter": filter,
+ },
+ )
+
+ async def novel_new(self, *, max_novel_id: Optional[int] = None):
+ return await self.request(
+ "/v1/novel/new", params={"max_novel_id": max_novel_id}
+ )
+
+    # Popular live streams
+ async def live_list(self, *, page: int = 1, size: int = 30):
+ params = {"list_type": "popular", "offset": (page - 1) * size}
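+        # NOTE: an explicit offset of 0 appears to break this endpoint, so drop it on the first page.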
+ if not params["offset"]:
+ del params["offset"]
+ return await self.request("v1/live/list", params=params)
+
+    # Related novels
+ async def related_novel(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/novel/related",
+ params={
+ "novel_id": id,
+ "offset": (page - 1) * size,
+ },
+ )
+
+    # Related users
+ async def related_member(self, *, id: int):
+ return await self.request("v1/user/related", params={"seed_user_id": id})
+
+    # Manga series
+ async def illust_series(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/illust/series",
+ params={"illust_series_id": id, "offset": (page - 1) * size},
+ )
+
+    # User's manga series
+ async def member_illust_series(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/user/illust-series",
+ params={"user_id": id, "offset": (page - 1) * size},
+ )
+
+    # User's novel series
+ async def member_novel_series(self, *, id: int, page: int = 1, size: int = 30):
+ return await self.request(
+ "v1/user/novel-series", params={"user_id": id, "offset": (page - 1) * size}
+ )
diff --git a/hibiapi/api/pixiv/constants.py b/hibiapi/api/pixiv/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..d43835579223caf6f55781e708f6a22d07f2569f
--- /dev/null
+++ b/hibiapi/api/pixiv/constants.py
@@ -0,0 +1,19 @@
+from typing import Any
+
+from hibiapi.utils.config import APIConfig
+
+
+class PixivConstants:
+ DEFAULT_HEADERS: dict[str, Any] = {
+ "App-OS": "ios",
+ "App-OS-Version": "14.6",
+ "User-Agent": "PixivIOSApp/7.13.3 (iOS 14.6; iPhone13,2)",
+ }
+ CLIENT_ID: str = "MOBrBDS8blbauoSck0ZfDbtuzpyT"
+ CLIENT_SECRET: str = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"
+ HASH_SECRET: bytes = (
+ b"28c1fdd170a5204386cb1313c7077b34f83e4aaf4aa829ce78c231e05b0bae2c"
+ )
+ CONFIG: APIConfig = APIConfig("pixiv")
+ APP_HOST: str = "https://app-api.pixiv.net"
+ AUTH_HOST: str = "https://oauth.secure.pixiv.net"
diff --git a/hibiapi/api/pixiv/net.py b/hibiapi/api/pixiv/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..f73ae782436280fb95ffb8c46f0a0ff76aa15f13
--- /dev/null
+++ b/hibiapi/api/pixiv/net.py
@@ -0,0 +1,85 @@
+import asyncio
+import hashlib
+from datetime import datetime, timedelta, timezone
+from itertools import cycle
+
+from httpx import URL
+from pydantic import BaseModel, Extra, Field
+
+from hibiapi.utils.log import logger
+from hibiapi.utils.net import BaseNetClient
+
+from .constants import PixivConstants
+
+
+class AccountDataModel(BaseModel):
+ class Config:
+ extra = Extra.allow
+
+
+class PixivUserData(AccountDataModel):
+ account: str
+ id: int
+ is_premium: bool
+ mail_address: str
+ name: str
+
+
+class PixivAuthData(AccountDataModel):
+ time: datetime = Field(default_factory=datetime.now)
+ expires_in: int
+ access_token: str
+ refresh_token: str
+ user: PixivUserData
+
+
+class NetRequest(BaseNetClient):
+ def __init__(self, tokens: list[str]):
+ super().__init__(
+ headers=PixivConstants.DEFAULT_HEADERS.copy(),
+ proxies=PixivConstants.CONFIG["proxy"].as_dict(),
+ )
+ self.user_tokens = cycle(tokens)
+ self.auth_lock = asyncio.Lock()
+ self.user_tokens_dict: dict[str, PixivAuthData] = {}
+ self.headers["accept-language"] = PixivConstants.CONFIG["language"].as_str()
+
+ def get_available_user(self):
+ token = next(self.user_tokens)
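+        # Round-robin over the configured refresh tokens; reuse cached credentials
+        # while they are still considered valid, otherwise signal that a re-auth is needed.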
+ if (auth_data := self.user_tokens_dict.get(token)) and (
+ auth_data.time + timedelta(minutes=1, seconds=auth_data.expires_in)
+ > datetime.now()
+ ):
+ return auth_data, token
+ return None, token
+
+ async def auth(self, refresh_token: str):
+ url = URL(PixivConstants.AUTH_HOST).join("/auth/token")
+ time = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S+00:00")
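+        # Pixiv's auth endpoint expects X-Client-Hash = md5(client time string + shared hash secret).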
+ headers = {
+ **self.headers,
+ "X-Client-Time": time,
+ "X-Client-Hash": hashlib.md5(
+ time.encode() + PixivConstants.HASH_SECRET
+ ).hexdigest(),
+ }
+ payload = {
+ "get_secure_url": 1,
+ "client_id": PixivConstants.CLIENT_ID,
+ "client_secret": PixivConstants.CLIENT_SECRET,
+ "grant_type": "refresh_token",
+ "refresh_token": refresh_token,
+ }
+
+ async with self as client:
+ response = await client.post(url, data=payload, headers=headers)
+ response.raise_for_status()
+
+ self.user_tokens_dict[refresh_token] = PixivAuthData.parse_obj(response.json())
+ user_data = self.user_tokens_dict[refresh_token].user
+ logger.opt(colors=True).info(
+ f"Pixiv account {user_data.id} info Updated: "
+ f"{user_data.name}({user_data.account})."
+ )
+
+ return self.user_tokens_dict[refresh_token]
diff --git a/hibiapi/api/qrcode.py b/hibiapi/api/qrcode.py
new file mode 100644
index 0000000000000000000000000000000000000000..4005a495dadcc67e7f50c57d6677edfca92dd783
--- /dev/null
+++ b/hibiapi/api/qrcode.py
@@ -0,0 +1,160 @@
+from datetime import datetime
+from enum import Enum
+from io import BytesIO
+from os import fdopen
+from pathlib import Path
+from typing import Literal, Optional, cast
+
+from PIL import Image
+from pydantic import AnyHttpUrl, BaseModel, Field, validate_arguments
+from pydantic.color import Color
+from qrcode import constants
+from qrcode.image.pil import PilImage
+from qrcode.main import QRCode
+
+from hibiapi.utils.config import APIConfig
+from hibiapi.utils.decorators import ToAsync, enum_auto_doc
+from hibiapi.utils.exceptions import ClientSideException
+from hibiapi.utils.net import BaseNetClient
+from hibiapi.utils.routing import BaseHostUrl
+from hibiapi.utils.temp import TempFile
+
+Config = APIConfig("qrcode")
+
+
+class HostUrl(BaseHostUrl):
+ allowed_hosts = Config["qrcode"]["icon-site"].get(list[str])
+
+
+@enum_auto_doc
+class QRCodeLevel(str, Enum):
+    """QR code error correction level"""
+
+    LOW = "L"
+    """lowest error correction"""
+    MEDIUM = "M"
+    """medium error correction"""
+    QUARTILE = "Q"
+    """high error correction"""
+    HIGH = "H"
+    """highest error correction"""
+
+
+@enum_auto_doc
+class ReturnEncode(str, Enum):
+    """Encoding of the QR code response"""
+
+    raw = "raw"
+    """redirect directly to the QR code image"""
+    json = "json"
+    """return the QR code info as JSON"""
+ js = "js"
+ jsc = "jsc"
+
+
+COLOR_WHITE = Color("FFFFFF")
+COLOR_BLACK = Color("000000")
+
+
+class QRInfo(BaseModel):
+ url: Optional[AnyHttpUrl] = None
+ path: Path
+ time: datetime = Field(default_factory=datetime.now)
+ data: str
+ logo: Optional[HostUrl] = None
+ level: QRCodeLevel = QRCodeLevel.MEDIUM
+ size: int = 200
+ code: Literal[0] = 0
+ status: Literal["success"] = "success"
+
+ @classmethod
+ @validate_arguments
+ async def new(
+ cls,
+ text: str,
+ *,
+ size: int = Field(
+ 200,
+ gt=Config["qrcode"]["min-size"].as_number(),
+ lt=Config["qrcode"]["max-size"].as_number(),
+ ),
+ logo: Optional[HostUrl] = None,
+ level: QRCodeLevel = QRCodeLevel.MEDIUM,
+ bgcolor: Color = COLOR_WHITE,
+ fgcolor: Color = COLOR_BLACK,
+ ):
+ icon_stream = None
+ if logo is not None:
+ async with BaseNetClient() as client:
+ response = await client.get(
+ logo, headers={"user-agent": "HibiAPI@GitHub"}, timeout=6
+ )
+ response.raise_for_status()
+ icon_stream = BytesIO(response.content)
+ return cls(
+ data=text,
+ logo=logo,
+ level=level,
+ size=size,
+ path=await cls._generate(
+ text,
+ size=size,
+ level=level,
+ icon_stream=icon_stream,
+ bgcolor=bgcolor.as_hex(),
+ fgcolor=fgcolor.as_hex(),
+ ),
+ )
+
+ @classmethod
+ @ToAsync
+ def _generate(
+ cls,
+ text: str,
+ *,
+ size: int = 200,
+ level: QRCodeLevel = QRCodeLevel.MEDIUM,
+ icon_stream: Optional[BytesIO] = None,
+ bgcolor: str = "#FFFFFF",
+ fgcolor: str = "#000000",
+ ) -> Path:
+ qr = QRCode(
+ error_correction={
+ QRCodeLevel.LOW: constants.ERROR_CORRECT_L,
+ QRCodeLevel.MEDIUM: constants.ERROR_CORRECT_M,
+ QRCodeLevel.QUARTILE: constants.ERROR_CORRECT_Q,
+ QRCodeLevel.HIGH: constants.ERROR_CORRECT_H,
+ }[level],
+ border=2,
+ box_size=8,
+ )
+ qr.add_data(text)
+ image = cast(
+ Image.Image,
+ qr.make_image(
+ PilImage,
+ back_color=bgcolor,
+ fill_color=fgcolor,
+ ).get_image(),
+ )
+ image = image.resize((size, size))
+ if icon_stream is not None:
+ try:
+ icon = Image.open(icon_stream)
+ except ValueError as e:
+ raise ClientSideException("Invalid image format.") from e
+ icon_width, icon_height = icon.size
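+            # Paste the logo centered on the QR image, using its alpha channel as a mask when present.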
+ image.paste(
+ icon,
+ box=(
+ int(size / 2 - icon_width / 2),
+ int(size / 2 - icon_height / 2),
+ int(size / 2 + icon_width / 2),
+ int(size / 2 + icon_height / 2),
+ ),
+ mask=icon if icon.mode == "RGBA" else None,
+ )
+ descriptor, path = TempFile.create(".png")
+ with fdopen(descriptor, "wb") as f:
+ image.save(f, format="PNG")
+ return path
diff --git a/hibiapi/api/sauce/__init__.py b/hibiapi/api/sauce/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e73486056abc9f8df84a569e282b64912161faa9
--- /dev/null
+++ b/hibiapi/api/sauce/__init__.py
@@ -0,0 +1,4 @@
+# flake8:noqa:F401
+from .api import DeduplicateType, HostUrl, SauceEndpoint, UploadFileIO
+from .constants import SauceConstants
+from .net import NetRequest
diff --git a/hibiapi/api/sauce/api.py b/hibiapi/api/sauce/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..89cecf5f4e7cdcf196ffa64163031c0294a9ae60
--- /dev/null
+++ b/hibiapi/api/sauce/api.py
@@ -0,0 +1,140 @@
+import random
+from enum import IntEnum
+from io import BytesIO
+from typing import Any, Optional, overload
+
+from httpx import HTTPError
+
+from hibiapi.api.sauce.constants import SauceConstants
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.exceptions import ClientSideException
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, BaseHostUrl
+
+
+class UnavailableSourceException(ClientSideException):
+ code = 422
+    detail = "given image is not available to fetch"
+
+
+class ImageSourceOversizedException(UnavailableSourceException):
+ code = 413
+ detail = (
+        "given image size is larger than the maximum limit of "
+ f"{SauceConstants.IMAGE_MAXIMUM_SIZE} bytes"
+ )
+
+
+class HostUrl(BaseHostUrl):
+ allowed_hosts = SauceConstants.IMAGE_ALLOWED_HOST
+
+
+class UploadFileIO(BytesIO):
+ @classmethod
+ def __get_validators__(cls):
+ yield cls.validate
+
+ @classmethod
+ def validate(cls, v: Any) -> BytesIO:
+ if not isinstance(v, BytesIO):
+ raise ValueError(f"Expected UploadFile, received: {type(v)}")
+ return v
+
+
+@enum_auto_doc
+class DeduplicateType(IntEnum):
+ DISABLED = 0
+ """no result deduplicating"""
+ IDENTIFIER = 1
+ """consolidate search results and deduplicate by item identifier"""
+ ALL = 2
+ """all implemented deduplicate methods such as by series name"""
+
+
+class SauceEndpoint(BaseEndpoint, cache_endpoints=False):
+ base = "https://saucenao.com"
+
+ async def fetch(self, host: HostUrl) -> UploadFileIO:
+ try:
+ response = await self.client.get(
+ url=host,
+ headers=SauceConstants.IMAGE_HEADERS,
+ timeout=SauceConstants.IMAGE_TIMEOUT,
+ )
+ response.raise_for_status()
+ if len(response.content) > SauceConstants.IMAGE_MAXIMUM_SIZE:
+ raise ImageSourceOversizedException
+ return UploadFileIO(response.content)
+ except HTTPError as e:
+ raise UnavailableSourceException(detail=str(e)) from e
+
+ @catch_network_error
+ async def request(
+ self, *, file: UploadFileIO, params: dict[str, Any]
+ ) -> dict[str, Any]:
+ response = await self.client.post(
+ url=self._join(
+ self.base,
+ "search.php",
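+                # output_type=2 requests a JSON response; a random API key spreads load across accounts.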
+ params={
+ **params,
+ "api_key": random.choice(SauceConstants.API_KEY),
+ "output_type": 2,
+ },
+ ),
+ files={"file": file},
+ )
+ if response.status_code >= 500:
+ response.raise_for_status()
+ return response.json()
+
+ @overload
+ async def search(
+ self,
+ *,
+ url: HostUrl,
+ size: int = 30,
+ deduplicate: DeduplicateType = DeduplicateType.ALL,
+ database: Optional[int] = None,
+ enabled_mask: Optional[int] = None,
+ disabled_mask: Optional[int] = None,
+ ) -> dict[str, Any]:
+ ...
+
+ @overload
+ async def search(
+ self,
+ *,
+ file: UploadFileIO,
+ size: int = 30,
+ deduplicate: DeduplicateType = DeduplicateType.ALL,
+ database: Optional[int] = None,
+ enabled_mask: Optional[int] = None,
+ disabled_mask: Optional[int] = None,
+ ) -> dict[str, Any]:
+ ...
+
+ async def search(
+ self,
+ *,
+ url: Optional[HostUrl] = None,
+ file: Optional[UploadFileIO] = None,
+ size: int = 30,
+ deduplicate: DeduplicateType = DeduplicateType.ALL,
+ database: Optional[int] = None,
+ enabled_mask: Optional[int] = None,
+ disabled_mask: Optional[int] = None,
+ ):
+ if url is not None:
+ file = await self.fetch(url)
+ assert file is not None
+ return await self.request(
+ file=file,
+ params={
+ "dbmask": enabled_mask,
+ "dbmaski": disabled_mask,
+ "db": database,
+ "numres": size,
+ "dedupe": deduplicate,
+ },
+ )
diff --git a/hibiapi/api/sauce/constants.py b/hibiapi/api/sauce/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..545cdf8f02826092a30001a24c7b514e274d3491
--- /dev/null
+++ b/hibiapi/api/sauce/constants.py
@@ -0,0 +1,16 @@
+from typing import Any
+
+from hibiapi.utils.config import APIConfig
+
+_Config = APIConfig("sauce")
+
+
+class SauceConstants:
+ CONFIG: APIConfig = _Config
+ API_KEY: list[str] = _Config["net"]["api-key"].as_str_seq()
+ USER_AGENT: str = _Config["net"]["user-agent"].as_str()
+ PROXIES: dict[str, str] = _Config["proxy"].as_dict()
+ IMAGE_HEADERS: dict[str, Any] = _Config["image"]["headers"].as_dict()
+ IMAGE_ALLOWED_HOST: list[str] = _Config["image"]["allowed"].get(list[str])
+ IMAGE_MAXIMUM_SIZE: int = _Config["image"]["max-size"].as_number() * 1024
+ IMAGE_TIMEOUT: int = _Config["image"]["timeout"].as_number()
diff --git a/hibiapi/api/sauce/net.py b/hibiapi/api/sauce/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..099cf549a47d926e977130b18bb51eba80874632
--- /dev/null
+++ b/hibiapi/api/sauce/net.py
@@ -0,0 +1,11 @@
+from hibiapi.utils.net import BaseNetClient
+
+from .constants import SauceConstants
+
+
+class NetRequest(BaseNetClient):
+ def __init__(self):
+ super().__init__(
+ headers={"user-agent": SauceConstants.USER_AGENT},
+ proxies=SauceConstants.PROXIES,
+ )
diff --git a/hibiapi/api/tieba/__init__.py b/hibiapi/api/tieba/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..198bef311504b7b897279eed34f7beb7c57dec12
--- /dev/null
+++ b/hibiapi/api/tieba/__init__.py
@@ -0,0 +1,3 @@
+# flake8:noqa:F401
+from .api import Config, TiebaEndpoint
+from .net import NetRequest
diff --git a/hibiapi/api/tieba/api.py b/hibiapi/api/tieba/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..45608244992747d8e7a6fca610f9b0a8ce3a9a2a
--- /dev/null
+++ b/hibiapi/api/tieba/api.py
@@ -0,0 +1,142 @@
+import hashlib
+from enum import Enum
+from random import randint
+from typing import Any, Optional
+
+from hibiapi.utils.config import APIConfig
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route
+
+Config = APIConfig("tieba")
+
+
+class TiebaSignUtils:
+ salt = b"tiebaclient!!!"
+
+ @staticmethod
+ def random_digit(length: int) -> str:
+ return "".join(map(str, [randint(0, 9) for _ in range(length)]))
+
+ @staticmethod
+ def construct_content(params: dict[str, Any]) -> bytes:
+        # NOTE: this function constructs the form content WITHOUT urlencoding it
+ # Don't ask me why this is necessary, ask Tieba's programmers instead
+ return b"&".join(
+ map(
+ lambda k, v: (
+ k.encode()
+ + b"="
+ + str(v.value if isinstance(v, Enum) else v).encode()
+ ),
+ params.keys(),
+ params.values(),
+ )
+ )
+
+ @classmethod
+ def sign(cls, params: dict[str, Any]) -> bytes:
+ params.update(
+ {
+ "_client_id": (
+ "wappc_" + cls.random_digit(13) + "_" + cls.random_digit(3)
+ ),
+ "_client_type": 2,
+ "_client_version": "9.9.8.32",
+ **{
+ k.upper(): str(v).strip()
+ for k, v in Config["net"]["params"].as_dict().items()
+ if v
+ },
+ }
+ )
+ params = {k: params[k] for k in sorted(params.keys())}
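+        # The signature is the uppercase MD5 of the sorted "k=v" pairs concatenated
+        # without separators, followed by the static client salt.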
+ params["sign"] = (
+ hashlib.md5(cls.construct_content(params).replace(b"&", b"") + cls.salt)
+ .hexdigest()
+ .upper()
+ )
+ return cls.construct_content(params)
+
+
+class TiebaEndpoint(BaseEndpoint):
+ base = "http://c.tieba.baidu.com"
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self, endpoint: str, *, params: Optional[dict[str, Any]] = None
+ ) -> dict[str, Any]:
+ response = await self.client.post(
+ url=self._join(self.base, endpoint, {}),
+ content=TiebaSignUtils.sign(params or {}),
+ )
+ response.raise_for_status()
+ return response.json()
+
+ async def post_list(self, *, name: str, page: int = 1, size: int = 50):
+ return await self.request(
+ "c/f/frs/page",
+ params={
+ "kw": name,
+ "pn": page,
+ "rn": size,
+ },
+ )
+
+ async def post_detail(
+ self,
+ *,
+ tid: int,
+ page: int = 1,
+ size: int = 50,
+ reversed: bool = False,
+ ):
+ return await self.request(
+ "c/f/pb/page",
+ params={
+ **({"last": 1, "r": 1} if reversed else {}),
+ "kz": tid,
+ "pn": page,
+ "rn": size,
+ },
+ )
+
+ async def subpost_detail(
+ self,
+ *,
+ tid: int,
+ pid: int,
+ page: int = 1,
+ size: int = 50,
+ ):
+ return await self.request(
+ "c/f/pb/floor",
+ params={
+ "kz": tid,
+ "pid": pid,
+ "pn": page,
+ "rn": size,
+ },
+ )
+
+ async def user_profile(self, *, uid: int):
+ return await self.request(
+ "c/u/user/profile",
+ params={
+ "uid": uid,
+ "need_post_count": 1,
+ "has_plist": 1,
+ },
+ )
+
+ async def user_subscribed(
+ self, *, uid: int, page: int = 1
+    ): # XXX This API requires user login!
+ return await self.request(
+ "c/f/forum/like",
+ params={
+ "is_guest": 0,
+ "uid": uid,
+ "page_no": page,
+ },
+ )
diff --git a/hibiapi/api/tieba/net.py b/hibiapi/api/tieba/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c468a5f559d58aac8a33d2176a44891f8e19041
--- /dev/null
+++ b/hibiapi/api/tieba/net.py
@@ -0,0 +1,5 @@
+from hibiapi.utils.net import BaseNetClient
+
+
+class NetRequest(BaseNetClient):
+ pass
diff --git a/hibiapi/api/wallpaper/__init__.py b/hibiapi/api/wallpaper/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f77753d0fd6cfda72e149c0b858d960c37e7a98
--- /dev/null
+++ b/hibiapi/api/wallpaper/__init__.py
@@ -0,0 +1,3 @@
+# flake8:noqa:F401
+from .api import Config, WallpaperCategoryType, WallpaperEndpoint, WallpaperOrderType
+from .net import NetRequest
diff --git a/hibiapi/api/wallpaper/api.py b/hibiapi/api/wallpaper/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..2561dc413fd28b0e13f702e04c24c3f6a3e1d2cf
--- /dev/null
+++ b/hibiapi/api/wallpaper/api.py
@@ -0,0 +1,125 @@
+from datetime import timedelta
+from enum import Enum
+from typing import Any, Optional
+
+from hibiapi.utils.cache import cache_config
+from hibiapi.utils.config import APIConfig
+from hibiapi.utils.decorators import enum_auto_doc
+from hibiapi.utils.net import catch_network_error
+from hibiapi.utils.routing import BaseEndpoint, dont_route
+
+Config = APIConfig("wallpaper")
+
+
+@enum_auto_doc
+class WallpaperCategoryType(str, Enum):
+    """Wallpaper categories"""
+
+    girl = "girl"
+    """girls"""
+    animal = "animal"
+    """animals"""
+    landscape = "landscape"
+    """nature"""
+    anime = "anime"
+    """anime"""
+    drawn = "drawn"
+    """hand-drawn"""
+    mechanics = "mechanics"
+    """mechanical"""
+    boy = "boy"
+    """boys"""
+    game = "game"
+    """games"""
+    text = "text"
+    """text"""
+
+
+CATEGORY: dict[WallpaperCategoryType, str] = {
+ WallpaperCategoryType.girl: "4e4d610cdf714d2966000000",
+ WallpaperCategoryType.animal: "4e4d610cdf714d2966000001",
+ WallpaperCategoryType.landscape: "4e4d610cdf714d2966000002",
+ WallpaperCategoryType.anime: "4e4d610cdf714d2966000003",
+ WallpaperCategoryType.drawn: "4e4d610cdf714d2966000004",
+ WallpaperCategoryType.mechanics: "4e4d610cdf714d2966000005",
+ WallpaperCategoryType.boy: "4e4d610cdf714d2966000006",
+ WallpaperCategoryType.game: "4e4d610cdf714d2966000007",
+ WallpaperCategoryType.text: "5109e04e48d5b9364ae9ac45",
+}
+
+
+@enum_auto_doc
+class WallpaperOrderType(str, Enum):
+    """Wallpaper sort order"""
+
+    hot = "hot"
+    """most popular"""
+    new = "new"
+    """newest"""
+
+
+class WallpaperEndpoint(BaseEndpoint):
+ base = "http://service.aibizhi.adesk.com"
+
+ @dont_route
+ @catch_network_error
+ async def request(
+ self, endpoint: str, *, params: Optional[dict[str, Any]] = None
+ ) -> dict[str, Any]:
+
+ response = await self.client.get(
+ self._join(
+ base=WallpaperEndpoint.base,
+ endpoint=endpoint,
+ params=params or {},
+ )
+ )
+ return response.json()
+
+    # Wallpaper URLs carry an anti-hotlinking token, so avoid caching them for long
+ @cache_config(ttl=timedelta(hours=2))
+ async def wallpaper(
+ self,
+ *,
+ category: WallpaperCategoryType,
+ limit: int = 20,
+ skip: int = 0,
+ adult: bool = True,
+ order: WallpaperOrderType = WallpaperOrderType.hot,
+ ):
+
+ return await self.request(
+ "v1/wallpaper/category/{category}/wallpaper",
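+            # NOTE: the "{category}" placeholder is expected to be filled from params when the URL is built.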
+ params={
+ "limit": limit,
+ "skip": skip,
+ "adult": adult,
+ "order": order,
+ "first": 0,
+ "category": CATEGORY[category],
+ },
+ )
+
+    # Wallpaper URLs carry an anti-hotlinking token, so avoid caching them for long
+ @cache_config(ttl=timedelta(hours=2))
+ async def vertical(
+ self,
+ *,
+ category: WallpaperCategoryType,
+ limit: int = 20,
+ skip: int = 0,
+ adult: bool = True,
+ order: WallpaperOrderType = WallpaperOrderType.hot,
+ ):
+
+ return await self.request(
+ "v1/vertical/category/{category}/vertical",
+ params={
+ "limit": limit,
+ "skip": skip,
+ "adult": adult,
+ "order": order,
+ "first": 0,
+ "category": CATEGORY[category],
+ },
+ )
diff --git a/hibiapi/api/wallpaper/constants.py b/hibiapi/api/wallpaper/constants.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea9bec40ec728f3008f57a79ce2b501cc2f74384
--- /dev/null
+++ b/hibiapi/api/wallpaper/constants.py
@@ -0,0 +1,8 @@
+from hibiapi.utils.config import APIConfig
+
+_CONFIG = APIConfig("wallpaper")
+
+
+class WallpaperConstants:
+ CONFIG: APIConfig = _CONFIG
+ USER_AGENT: str = _CONFIG["net"]["user-agent"].as_str()
diff --git a/hibiapi/api/wallpaper/net.py b/hibiapi/api/wallpaper/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7923e8f77f7558cea090ec3cb6ac0bf67137e46
--- /dev/null
+++ b/hibiapi/api/wallpaper/net.py
@@ -0,0 +1,8 @@
+from hibiapi.utils.net import BaseNetClient
+
+from .constants import WallpaperConstants
+
+
+class NetRequest(BaseNetClient):
+ def __init__(self):
+ super().__init__(headers={"user-agent": WallpaperConstants.USER_AGENT})
diff --git a/hibiapi/app/__init__.py b/hibiapi/app/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4b9a44d56d0b91fd0dd40249a2b0a00b0306449
--- /dev/null
+++ b/hibiapi/app/__init__.py
@@ -0,0 +1,4 @@
+# flake8:noqa:F401
+from . import application, handlers, middlewares
+
+app = application.app
diff --git a/hibiapi/app/application.py b/hibiapi/app/application.py
new file mode 100644
index 0000000000000000000000000000000000000000..c8da8fb4620ed189a0491eb56a003cf1894c67ac
--- /dev/null
+++ b/hibiapi/app/application.py
@@ -0,0 +1,170 @@
+import asyncio
+import re
+from contextlib import asynccontextmanager
+from ipaddress import ip_address
+from secrets import compare_digest
+from typing import Annotated
+
+import sentry_sdk
+from fastapi import Depends, FastAPI, Request, Response
+from fastapi.responses import RedirectResponse
+from fastapi.security import HTTPBasic, HTTPBasicCredentials
+from fastapi.staticfiles import StaticFiles
+from pydantic import BaseModel
+from sentry_sdk.integrations.logging import LoggingIntegration
+
+from hibiapi import __version__
+from hibiapi.app.routes import router as ImplRouter
+from hibiapi.utils.cache import cache
+from hibiapi.utils.config import Config
+from hibiapi.utils.exceptions import ClientSideException, RateLimitReachedException
+from hibiapi.utils.log import logger
+from hibiapi.utils.net import BaseNetClient
+from hibiapi.utils.temp import TempFile
+
+DESCRIPTION = (
+ """
+**A program that implements easy-to-use APIs for a variety of commonly used sites**
+
+- *Documents*:
+ - [Redoc](/docs) (Easier to read and more beautiful)
+ - [Swagger UI](/docs/test) (Integrated interactive testing function)
+
+Project: [mixmoe/HibiAPI](https://github.com/mixmoe/HibiAPI)
+
+"""
+ + Config["content"]["slogan"].as_str().strip()
+).strip()
+
+
+if Config["log"]["sentry"]["enabled"].as_bool():
+ sentry_sdk.init(
+ dsn=Config["log"]["sentry"]["dsn"].as_str(),
+ send_default_pii=Config["log"]["sentry"]["pii"].as_bool(),
+ integrations=[LoggingIntegration(level=None, event_level=None)],
+ traces_sample_rate=Config["log"]["sentry"]["sample"].get(float),
+ )
+else:
+ sentry_sdk.init()
+
+
+class AuthorizationModel(BaseModel):
+ username: str
+ password: str
+
+
+AUTHORIZATION_ENABLED = Config["authorization"]["enabled"].as_bool()
+AUTHORIZATION_ALLOWED = Config["authorization"]["allowed"].get(list[AuthorizationModel])
+
+security = HTTPBasic()
+
+
+async def basic_authorization_depend(
+ credentials: Annotated[HTTPBasicCredentials, Depends(security)],
+):
+ # NOTE: We use `compare_digest` to avoid timing attacks.
+ # Ref: https://fastapi.tiangolo.com/advanced/security/http-basic-auth/
+ for allowed in AUTHORIZATION_ALLOWED:
+ if compare_digest(credentials.username, allowed.username) and compare_digest(
+ credentials.password, allowed.password
+ ):
+ return credentials.username, credentials.password
+ raise ClientSideException(
+ f"Invalid credentials for user {credentials.username!r}",
+ status_code=401,
+ headers={"WWW-Authenticate": "Basic"},
+ )
+
+
+RATE_LIMIT_ENABLED = Config["limit"]["enabled"].as_bool()
+RATE_LIMIT_MAX = Config["limit"]["max"].as_number()
+RATE_LIMIT_INTERVAL = Config["limit"]["interval"].as_number()
+
+
+async def rate_limit_depend(request: Request):
+ if not request.client:
+ return
+
+ try:
+ client_ip = ip_address(request.client.host)
+ client_ip_hex = client_ip.packed.hex()
+        limit_key = f"rate_limit:IPv{client_ip.version}-{client_ip_hex}"
+ except ValueError:
+ limit_key = f"rate_limit:fallback-{request.client.host}"
+
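+    # Fixed-window rate limiting: the first request in a window starts the TTL,
+    # and requests beyond the limit are rejected with a Retry-After hint.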
+ request_count = await cache.incr(limit_key)
+ if request_count <= 1:
+ await cache.expire(limit_key, timeout=RATE_LIMIT_INTERVAL)
+ elif request_count > RATE_LIMIT_MAX:
+ limit_remain: int = await cache.get_expire(limit_key)
+ raise RateLimitReachedException(headers={"Retry-After": limit_remain})
+
+ return
+
+
+async def flush_sentry():
+ client = sentry_sdk.Hub.current.client
+ if client is not None:
+ client.close()
+ sentry_sdk.flush()
+ logger.debug("Sentry client has been closed")
+
+
+async def cleanup_clients():
+ opened_clients = [
+ client for client in BaseNetClient.clients if not client.is_closed
+ ]
+ if opened_clients:
+ await asyncio.gather(
+ *map(lambda client: client.aclose(), opened_clients),
+ return_exceptions=True,
+ )
+ logger.debug(f"Cleaned {len(opened_clients)} unclosed HTTP clients")
+
+
+@asynccontextmanager
+async def fastapi_lifespan(app: FastAPI):
+ yield
+ await asyncio.gather(cleanup_clients(), flush_sentry())
+
+
+app = FastAPI(
+ title="HibiAPI",
+ version=__version__,
+ description=DESCRIPTION,
+ docs_url="/docs/test",
+ redoc_url="/docs",
+ lifespan=fastapi_lifespan,
+)
+app.include_router(
+ ImplRouter,
+ prefix="/api",
+ dependencies=(
+ ([Depends(basic_authorization_depend)] if AUTHORIZATION_ENABLED else [])
+ + ([Depends(rate_limit_depend)] if RATE_LIMIT_ENABLED else [])
+ ),
+)
+app.mount("/temp", StaticFiles(directory=TempFile.path, check_dir=False))
+
+
+@app.get("/", include_in_schema=False)
+async def redirect():
+ return Response(status_code=302, headers={"Location": "/docs"})
+
+
+@app.get("/robots.txt", include_in_schema=False)
+async def robots():
+ content = Config["content"]["robots"].as_str().strip()
+ return Response(content, status_code=200)
+
+
+@app.middleware("http")
+async def redirect_workaround_middleware(request: Request, call_next):
+ """Temporary redirection workaround for #12"""
+ if matched := re.match(
+ r"^/(qrcode|pixiv|netease|bilibili)/(\w*)$", request.url.path
+ ):
+ service, path = matched.groups()
+ redirect_url = request.url.replace(path=f"/api/{service}/{path}")
+ return RedirectResponse(redirect_url, status_code=301)
+ return await call_next(request)
diff --git a/hibiapi/app/handlers.py b/hibiapi/app/handlers.py
new file mode 100644
index 0000000000000000000000000000000000000000..8766edc5872ac2c6f29468a3eb405552db3b368b
--- /dev/null
+++ b/hibiapi/app/handlers.py
@@ -0,0 +1,59 @@
+from fastapi import Request, Response
+from fastapi.exceptions import HTTPException as FastAPIHTTPException
+from fastapi.exceptions import RequestValidationError as FastAPIValidationError
+from pydantic.error_wrappers import ValidationError as PydanticValidationError
+from starlette.exceptions import HTTPException as StarletteHTTPException
+
+from hibiapi.utils import exceptions
+from hibiapi.utils.log import logger
+
+from .application import app
+
+
+@app.exception_handler(exceptions.BaseServerException)
+async def exception_handler(
+ request: Request,
+ exc: exceptions.BaseServerException,
+) -> Response:
+ if isinstance(exc, exceptions.UncaughtException):
+ logger.opt(exception=exc).exception(f"Uncaught exception raised {exc.data=}:")
+
+ exc.data.url = str(request.url) # type:ignore
+ return Response(
+ content=exc.data.json(),
+ status_code=exc.data.code,
+ headers=exc.data.headers,
+ media_type="application/json",
+ )
+
+
+@app.exception_handler(StarletteHTTPException)
+async def override_handler(
+ request: Request,
+ exc: StarletteHTTPException,
+):
+ return await exception_handler(
+ request,
+ exceptions.BaseHTTPException(
+ exc.detail,
+ code=exc.status_code,
+ headers={} if not isinstance(exc, FastAPIHTTPException) else exc.headers,
+ ),
+ )
+
+
+@app.exception_handler(AssertionError)
+async def assertion_handler(request: Request, exc: AssertionError):
+ return await exception_handler(
+ request,
+ exceptions.ClientSideException(detail=f"Assertion: {exc}"),
+ )
+
+
+@app.exception_handler(FastAPIValidationError)
+@app.exception_handler(PydanticValidationError)
+async def validation_handler(request: Request, exc: PydanticValidationError):
+ return await exception_handler(
+ request,
+ exceptions.ValidationException(detail=str(exc), validation=exc.errors()),
+ )
diff --git a/hibiapi/app/middlewares.py b/hibiapi/app/middlewares.py
new file mode 100644
index 0000000000000000000000000000000000000000..3bc79ac1c5d9902c65a2c9f02dfa282e9028e2e5
--- /dev/null
+++ b/hibiapi/app/middlewares.py
@@ -0,0 +1,97 @@
+from collections.abc import Awaitable
+from datetime import datetime
+from typing import Callable
+
+from fastapi import Request, Response
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.middleware.gzip import GZipMiddleware
+from fastapi.middleware.trustedhost import TrustedHostMiddleware
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.integrations.httpx import HttpxIntegration
+from starlette.datastructures import MutableHeaders
+
+from hibiapi.utils.config import Config
+from hibiapi.utils.exceptions import BaseServerException, UncaughtException
+from hibiapi.utils.log import LoguruHandler, logger
+from hibiapi.utils.routing import request_headers, response_headers
+
+from .application import app
+from .handlers import exception_handler
+
+RequestHandler = Callable[[Request], Awaitable[Response]]
+
+
+if Config["server"]["gzip"].as_bool():
+ app.add_middleware(GZipMiddleware)
+app.add_middleware(
+ CORSMiddleware,
+ allow_origins=Config["server"]["cors"]["origins"].get(list[str]),
+ allow_credentials=Config["server"]["cors"]["credentials"].as_bool(),
+ allow_methods=Config["server"]["cors"]["methods"].get(list[str]),
+ allow_headers=Config["server"]["cors"]["headers"].get(list[str]),
+)
+app.add_middleware(
+ TrustedHostMiddleware,
+ allowed_hosts=Config["server"]["allowed"].get(list[str]),
+)
+app.add_middleware(SentryAsgiMiddleware)
+
+HttpxIntegration.setup_once()
+
+
+@app.middleware("http")
+async def request_logger(request: Request, call_next: RequestHandler) -> Response:
+ start_time = datetime.now()
+ host, port = request.client or (None, None)
+ response = await call_next(request)
+ process_time = (datetime.now() - start_time).total_seconds() * 1000
+ response_headers.get().setdefault("X-Process-Time", f"{process_time:.3f}")
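+    # Pick loguru color tags based on the response status class (success / client error / server error).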
+ bg, fg = (
+ ("green", "red")
+ if response.status_code < 400
+ else ("yellow", "blue")
+ if response.status_code < 500
+ else ("red", "green")
+ )
+ status_code, method = response.status_code, request.method.upper()
+ user_agent = (
+ LoguruHandler.escape_tag(request.headers["user-agent"])
+ if "user-agent" in request.headers
+ else "Unknown"
+ )
+ logger.info(
+ f"{host}:{port}"
+        f" | <{bg.upper()}><{fg}>{method}</{fg}></{bg.upper()}>"
+ f" | {str(request.url)!r}"
+ f" | {process_time:.3f}ms"
+ f" | {user_agent}"
+        f" | <{bg}>{status_code}</{bg}>"
+ )
+ return response
+
+
+@app.middleware("http")
+async def contextvar_setter(request: Request, call_next: RequestHandler):
+ request_headers.set(request.headers)
+ response_headers.set(MutableHeaders())
+ response = await call_next(request)
+ response.headers.update({**response_headers.get()})
+ return response
+
+
+@app.middleware("http")
+async def uncaught_exception_handler(
+ request: Request, call_next: RequestHandler
+) -> Response:
+ try:
+ response = await call_next(request)
+ except Exception as error:
+ response = await exception_handler(
+ request,
+ exc=(
+ error
+ if isinstance(error, BaseServerException)
+ else UncaughtException.with_exception(error)
+ ),
+ )
+ return response
diff --git a/hibiapi/app/routes/__init__.py b/hibiapi/app/routes/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..77ba36dcbca3d56d64a37392c25bb48c22636f0c
--- /dev/null
+++ b/hibiapi/app/routes/__init__.py
@@ -0,0 +1,52 @@
+from typing import Protocol, cast
+
+from hibiapi.app.routes import (
+ bika,
+ bilibili,
+ netease,
+ pixiv,
+ qrcode,
+ sauce,
+ tieba,
+ wallpaper,
+)
+from hibiapi.utils.config import APIConfig
+from hibiapi.utils.exceptions import ExceptionReturn
+from hibiapi.utils.log import logger
+from hibiapi.utils.routing import SlashRouter
+
+router = SlashRouter(
+ responses={
+ code: {
+ "model": ExceptionReturn,
+ }
+ for code in (400, 422, 500, 502)
+ }
+)
+
+
+class RouteInterface(Protocol):
+ router: SlashRouter
+ __mount__: str
+ __config__: APIConfig
+
+
+modules = cast(
+ list[RouteInterface],
+ [bilibili, netease, pixiv, qrcode, sauce, tieba, wallpaper, bika],
+)
+
+for module in modules:
+ mount = (
+ mount_point
+ if (mount_point := module.__mount__).startswith("/")
+ else f"/{mount_point}"
+ )
+
+ if not module.__config__["enabled"].as_bool():
+        logger.warning(f"API Route {mount} has been disabled in config.")
+ continue
+ router.include_router(module.router, prefix=mount)
diff --git a/hibiapi/app/routes/bika.py b/hibiapi/app/routes/bika.py
new file mode 100644
index 0000000000000000000000000000000000000000..c04ae48357de0361070db79fedcb07ca510e69d4
--- /dev/null
+++ b/hibiapi/app/routes/bika.py
@@ -0,0 +1,36 @@
+from typing import Annotated
+
+from fastapi import Depends, Header
+
+from hibiapi.api.bika import (
+ BikaConstants,
+ BikaEndpoints,
+ BikaLogin,
+ ImageQuality,
+ NetRequest,
+)
+from hibiapi.utils.log import logger
+from hibiapi.utils.routing import EndpointRouter
+
+try:
+ BikaConstants.CONFIG["account"].get(BikaLogin)
+except Exception as e:
+ logger.warning(f"Bika account misconfigured: {e}")
+ BikaConstants.CONFIG["enabled"].set(False)
+
+
+async def x_image_quality(
+ x_image_quality: Annotated[ImageQuality, Header()] = ImageQuality.medium,
+):
+ if x_image_quality is None:
+ return BikaConstants.CONFIG["image_quality"].get(ImageQuality)
+ return x_image_quality
+
+
+__mount__, __config__ = "bika", BikaConstants.CONFIG
+router = EndpointRouter(tags=["Bika"], dependencies=[Depends(x_image_quality)])
+
+BikaAPIRoot = NetRequest()
+
+
+router.include_endpoint(BikaEndpoints, BikaAPIRoot)
diff --git a/hibiapi/app/routes/bilibili/__init__.py b/hibiapi/app/routes/bilibili/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..cba52328b75e229a046117954e75aa2f6ff9de2b
--- /dev/null
+++ b/hibiapi/app/routes/bilibili/__init__.py
@@ -0,0 +1,10 @@
+from hibiapi.api.bilibili import BilibiliConstants
+from hibiapi.app.routes.bilibili.v2 import router as RouterV2
+from hibiapi.app.routes.bilibili.v3 import router as RouterV3
+from hibiapi.utils.routing import SlashRouter
+
+__mount__, __config__ = "bilibili", BilibiliConstants.CONFIG
+
+router = SlashRouter()
+router.include_router(RouterV2, prefix="/v2")
+router.include_router(RouterV3, prefix="/v3")
diff --git a/hibiapi/app/routes/bilibili/v2.py b/hibiapi/app/routes/bilibili/v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..7130669ccc52b65671af0ad3c850dad808a4abaf
--- /dev/null
+++ b/hibiapi/app/routes/bilibili/v2.py
@@ -0,0 +1,6 @@
+from hibiapi.api.bilibili.api import BilibiliEndpointV2
+from hibiapi.api.bilibili.net import NetRequest
+from hibiapi.utils.routing import EndpointRouter
+
+router = EndpointRouter(tags=["Bilibili V2"])
+router.include_endpoint(BilibiliEndpointV2, NetRequest())
diff --git a/hibiapi/app/routes/bilibili/v3.py b/hibiapi/app/routes/bilibili/v3.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6726847467104bacc8ac4e8a57713d737d9f3d0
--- /dev/null
+++ b/hibiapi/app/routes/bilibili/v3.py
@@ -0,0 +1,5 @@
+from hibiapi.api.bilibili import BilibiliEndpointV3, NetRequest
+from hibiapi.utils.routing import EndpointRouter
+
+router = EndpointRouter(tags=["Bilibili V3"])
+router.include_endpoint(BilibiliEndpointV3, NetRequest())
diff --git a/hibiapi/app/routes/netease.py b/hibiapi/app/routes/netease.py
new file mode 100644
index 0000000000000000000000000000000000000000..c51b3bcfb08aac30aaebdc60b9fd5359a67e886d
--- /dev/null
+++ b/hibiapi/app/routes/netease.py
@@ -0,0 +1,7 @@
+from hibiapi.api.netease import NeteaseConstants, NeteaseEndpoint, NetRequest
+from hibiapi.utils.routing import EndpointRouter
+
+__mount__, __config__ = "netease", NeteaseConstants.CONFIG
+
+router = EndpointRouter(tags=["Netease"])
+router.include_endpoint(NeteaseEndpoint, NetRequest())
diff --git a/hibiapi/app/routes/pixiv.py b/hibiapi/app/routes/pixiv.py
new file mode 100644
index 0000000000000000000000000000000000000000..0209ff49dc6de46dd9984d0ea115e03c1ec3a38b
--- /dev/null
+++ b/hibiapi/app/routes/pixiv.py
@@ -0,0 +1,26 @@
+from typing import Optional
+
+from fastapi import Depends, Header
+
+from hibiapi.api.pixiv import NetRequest, PixivConstants, PixivEndpoints
+from hibiapi.utils.log import logger
+from hibiapi.utils.routing import EndpointRouter
+
+if not (refresh_tokens := PixivConstants.CONFIG["account"]["token"].as_str_seq()):
+ logger.warning("Pixiv API token is not set, pixiv endpoint will be unavailable.")
+ PixivConstants.CONFIG["enabled"].set(False)
+
+
+async def accept_language(
+ accept_language: Optional[str] = Header(
+ None,
+ description="Accepted tag translation language",
+ )
+):
+ return accept_language
+
+
+__mount__, __config__ = "pixiv", PixivConstants.CONFIG
+
+router = EndpointRouter(tags=["Pixiv"], dependencies=[Depends(accept_language)])
+router.include_endpoint(PixivEndpoints, api_root := NetRequest(refresh_tokens))
diff --git a/hibiapi/app/routes/qrcode.py b/hibiapi/app/routes/qrcode.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d67faf9f374793506ab1ad81c902a2d0b7b2c6d
--- /dev/null
+++ b/hibiapi/app/routes/qrcode.py
@@ -0,0 +1,76 @@
+from typing import Optional
+
+from fastapi import Request, Response
+from pydantic.color import Color
+
+from hibiapi.api.qrcode import (
+ COLOR_BLACK,
+ COLOR_WHITE,
+ Config,
+ HostUrl,
+ QRCodeLevel,
+ QRInfo,
+ ReturnEncode,
+)
+from hibiapi.utils.routing import SlashRouter
+from hibiapi.utils.temp import TempFile
+
+QR_CALLBACK_TEMPLATE = (
+    r"""function {fun}(){document.write('<img src="{url}"/>');}"""
+)
+
+__mount__, __config__ = "qrcode", Config
+router = SlashRouter(tags=["QRCode"])
+
+
+@router.get(
+ "/",
+ responses={
+ 200: {
+ "content": {"image/png": {}, "text/javascript": {}, "application/json": {}},
+            "description": "Available to return javascript, an image, or JSON.",
+ }
+ },
+ response_model=QRInfo,
+)
+async def qrcode_api(
+ request: Request,
+ *,
+ text: str,
+ size: int = 200,
+ logo: Optional[HostUrl] = None,
+ encode: ReturnEncode = ReturnEncode.raw,
+ level: QRCodeLevel = QRCodeLevel.MEDIUM,
+    bgcolor: Color = COLOR_WHITE,
+    fgcolor: Color = COLOR_BLACK,
+ fun: str = "qrcode",
+):
+ qr = await QRInfo.new(
+ text, size=size, logo=logo, level=level, bgcolor=bgcolor, fgcolor=fgcolor
+ )
+ qr.url = TempFile.to_url(request, qr.path) # type:ignore
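+    # raw: redirect to the generated PNG; json: return the QRInfo model;
+    # jsc: wrap the JSON in a "{fun}(...)" callback; js: emit a document.write() snippet.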
+ return (
+ qr
+ if encode == ReturnEncode.json
+ else Response(
+ content=qr.json(),
+ media_type="application/json",
+ headers={"Location": qr.url},
+ status_code=302,
+ )
+ if encode == ReturnEncode.raw
+ else Response(
+ content=f"{fun}({qr.json()})",
+ media_type="text/javascript",
+ )
+ if encode == ReturnEncode.jsc
+ else Response(
+            content=QR_CALLBACK_TEMPLATE.replace("{fun}", fun).replace(
+                "{url}", str(qr.url)
+            ),
+ media_type="text/javascript",
+ )
+ )
diff --git a/hibiapi/app/routes/sauce.py b/hibiapi/app/routes/sauce.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5d0d124c6b0ec3d578fce0551cb56c95f35b8f2
--- /dev/null
+++ b/hibiapi/app/routes/sauce.py
@@ -0,0 +1,120 @@
+from typing import Annotated, Optional
+
+from fastapi import Depends, File, Form
+from loguru import logger
+
+from hibiapi.api.sauce import (
+ DeduplicateType,
+ HostUrl,
+ NetRequest,
+ SauceConstants,
+ SauceEndpoint,
+ UploadFileIO,
+)
+from hibiapi.utils.routing import SlashRouter
+
+if (not SauceConstants.API_KEY) or (not all(map(str.strip, SauceConstants.API_KEY))):
+ logger.warning("Sauce API key not set, SauceNAO endpoint will be unavailable")
+ SauceConstants.CONFIG["enabled"].set(False)
+
+__mount__, __config__ = "sauce", SauceConstants.CONFIG
+router = SlashRouter(tags=["SauceNAO"])
+
+SauceAPIRoot = NetRequest()
+
+
+async def request_client():
+ async with SauceAPIRoot as client:
+ yield SauceEndpoint(client)
+
+
+@router.get("/")
+async def sauce_url(
+ endpoint: Annotated[SauceEndpoint, Depends(request_client)],
+ url: HostUrl,
+ size: int = 30,
+ deduplicate: DeduplicateType = DeduplicateType.ALL,
+ database: Optional[int] = None,
+ enabled_mask: Optional[int] = None,
+ disabled_mask: Optional[int] = None,
+):
+ """
+ ## Name: `sauce_url`
+
+    > Reverse-search an online image with SauceNAO
+
+    ---
+
+    ### Required:
+
+    - ***HostUrl*** **`url`**
+        - Description: image URL
+
+    ---
+
+    ### Optional:
+    - ***int*** `size` = `30`
+        - Description: number of search results
+    - ***DeduplicateType*** `deduplicate` = `DeduplicateType.ALL`
+        - Description: result deduplication mode
+    - ***Optional[int]*** `database` = `None`
+        - Description: ID of the database to search; 999 searches all databases
+    - ***Optional[int]*** `enabled_mask` = `None`
+        - Description: mask of databases to enable
+    - ***Optional[int]*** `disabled_mask` = `None`
+        - Description: mask of databases to disable
+ """
+ return await endpoint.search(
+ url=url,
+ size=size,
+ deduplicate=deduplicate,
+ database=database,
+ enabled_mask=enabled_mask,
+ disabled_mask=disabled_mask,
+ )
+
+
+@router.post("/")
+async def sauce_form(
+ endpoint: Annotated[SauceEndpoint, Depends(request_client)],
+ file: bytes = File(..., max_length=SauceConstants.IMAGE_MAXIMUM_SIZE),
+ size: int = Form(30),
+ deduplicate: Annotated[DeduplicateType, Form()] = DeduplicateType.ALL,
+ database: Optional[int] = Form(None),
+ enabled_mask: Optional[int] = Form(None),
+ disabled_mask: Optional[int] = Form(None),
+):
+ """
+ ## Name: `sauce_form`
+
+    > Reverse-search an uploaded image with SauceNAO
+
+    ---
+
+    ### Required:
+    - ***bytes*** `file`
+        - Description: the uploaded image
+
+    ---
+
+    ### Optional:
+    - ***int*** `size` = `30`
+        - Description: number of search results
+    - ***DeduplicateType*** `deduplicate` = `DeduplicateType.ALL`
+        - Description: result deduplication mode
+    - ***Optional[int]*** `database` = `None`
+        - Description: ID of the database to search; 999 searches all databases
+    - ***Optional[int]*** `enabled_mask` = `None`
+        - Description: mask of databases to enable
+    - ***Optional[int]*** `disabled_mask` = `None`
+        - Description: mask of databases to disable
+
+ """
+ return await endpoint.search(
+ file=UploadFileIO(file),
+ size=size,
+ deduplicate=deduplicate,
+ database=database,
+ disabled_mask=disabled_mask,
+ enabled_mask=enabled_mask,
+ )
diff --git a/hibiapi/app/routes/tieba.py b/hibiapi/app/routes/tieba.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae3ef21444715c1bb0e3979a97898da652aea607
--- /dev/null
+++ b/hibiapi/app/routes/tieba.py
@@ -0,0 +1,7 @@
+from hibiapi.api.tieba import Config, NetRequest, TiebaEndpoint
+from hibiapi.utils.routing import EndpointRouter
+
+__mount__, __config__ = "tieba", Config
+
+router = EndpointRouter(tags=["Tieba"])
+router.include_endpoint(TiebaEndpoint, NetRequest())
diff --git a/hibiapi/app/routes/wallpaper.py b/hibiapi/app/routes/wallpaper.py
new file mode 100644
index 0000000000000000000000000000000000000000..a55783423a47ee5a795f0bb6c718c399501bb81d
--- /dev/null
+++ b/hibiapi/app/routes/wallpaper.py
@@ -0,0 +1,7 @@
+from hibiapi.api.wallpaper import Config, NetRequest, WallpaperEndpoint
+from hibiapi.utils.routing import EndpointRouter
+
+__mount__, __config__ = "wallpaper", Config
+
+router = EndpointRouter(tags=["Wallpaper"])
+router.include_endpoint(WallpaperEndpoint, NetRequest())
diff --git a/hibiapi/configs/bika.yml b/hibiapi/configs/bika.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b4283126856f126d3170b22e1d7fb474cc9812c2
--- /dev/null
+++ b/hibiapi/configs/bika.yml
@@ -0,0 +1,8 @@
+enabled: true
+
+proxy: {}
+
+account:
+  # Fill in your Bika (Picacomic) account email and password here
+ email:
+ password:
diff --git a/hibiapi/configs/bilibili.yml b/hibiapi/configs/bilibili.yml
new file mode 100644
index 0000000000000000000000000000000000000000..d71df3d0e60890844b5d3fe5771e4ea87a398621
--- /dev/null
+++ b/hibiapi/configs/bilibili.yml
@@ -0,0 +1,8 @@
+enabled: true
+
+net:
+  cookie: > # Bilibili cookies, needed by some endpoints that require a logged-in user
+ DedeUserID=;
+ DedeUserID__ckMd5=;
+ SESSDATA=;
+  user-agent: "Mozilla/5.0 (mixmoe@GitHub.com/HibiAPI) Chrome/114.514.1919810" # User-Agent header, usually no need to change
diff --git a/hibiapi/configs/general.yml b/hibiapi/configs/general.yml
new file mode 100644
index 0000000000000000000000000000000000000000..c77a04b23e2c56fe941f17a4520401807a8d000e
--- /dev/null
+++ b/hibiapi/configs/general.yml
@@ -0,0 +1,74 @@
+# _ _ _ _ _ _____ _____
+# | | | (_) | (_) /\ | __ \_ _|
+# | |__| |_| |__ _ / \ | |__) || |
+# | __ | | '_ \| | / /\ \ | ___/ | |
+# | | | | | |_) | |/ ____ \| | _| |_
+# |_| |_|_|_.__/|_/_/ \_\_| |_____|
+#
+# An alternative implementation of the Imjad API
+
+data:
+  temp-expiry: 7 # expiry time of files in the temp directory, in days
+  path: ./data # location of the data directory
+
+server:
+  host: 127.0.0.1 # listen host
+  port: 8080 # listen port
+ gzip: true
+
+  # Allowed origin hostnames, wildcards supported, reference:
+ # https://fastapi.tiangolo.com/advanced/middleware/#trustedhostmiddleware
+ allowed: ["*"]
+
+ cors:
+ origins:
+ - "http://localhost.tiangolo.com"
+ - "https://localhost.tiangolo.com"
+ - "http://localhost"
+ - "http://localhost:8080"
+ credentials: true
+ methods: ["*"]
+ headers: ["*"]
+
+ allowed-forward: null # Reference: https://stackoverflow.com/questions/63511413
+
+limit: # per-IP rate limiting policy
+  enabled: true
+  max: 60 # maximum number of requests per interval
+  interval: 60 # length of the interval, in seconds
+
+cache:
+  enabled: true # whether caching is enabled
+  ttl: 3600 # default cache time-to-live, in seconds
+  uri: "mem://" # cache backend URI
+  controllable: true # whether the cache can be refreshed via the Cache-Control request header
+
+log:
+  level: INFO # log level, one of [TRACE,DEBUG,INFO,WARNING,ERROR]
+  format: > # log output format, do not modify unless necessary
+
+ {level:<8}
+ [{time:YYYY/MM/DD} {time:HH:mm:ss.SSS} {module}:{name}:{line}]
+ {message}
+
+  # file: logs/{time}.log
+  file: null # log file location, relative to the data directory; leave empty to disable file logging
+
+ sentry:
+ enabled: false
+ sample: 1
+ dsn: ""
+ pii: false
+
+content:
+  slogan: | # extra slogan appended to the documentation, can be used for custom content
+ 
+  robots: | # robots.txt content to serve, controls search engine crawling
+ User-agent: *
+ Disallow: /api/
+
+authorization:
+  enabled: false # whether HTTP Basic authentication is enabled
+  allowed:
+    - username: admin # username
+      password: admin # password
diff --git a/hibiapi/configs/netease.yml b/hibiapi/configs/netease.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a582ad58f14a29394c5a2021999b6cc167caac8f
--- /dev/null
+++ b/hibiapi/configs/netease.yml
@@ -0,0 +1,11 @@
+enabled: true
+
+net:
+  cookie: > # Netease Cloud Music cookies, some APIs may require them
+ os=pc;
+ osver=Microsoft-Windows-10-Professional-build-10586-64bit;
+ appver=2.0.3.131777;
+ channel=netease;
+ __remember_me=true
+  user-agent: "Mozilla/5.0 (mixmoe@GitHub.com/HibiAPI) Chrome/114.514.1919810" # User-Agent header, usually no need to change
+  source: 118.88.64.0/18 # spoof the request source IP to bypass region restrictions, see #68
diff --git a/hibiapi/configs/pixiv.yml b/hibiapi/configs/pixiv.yml
new file mode 100644
index 0000000000000000000000000000000000000000..536ff8d76df4ab709d219a1bd0bac7fa803f86f8
--- /dev/null
+++ b/hibiapi/configs/pixiv.yml
@@ -0,0 +1,14 @@
+enabled: true
+
+# HTTP proxy address
+# Example format
+# proxy: { "all://": "http://127.0.0.1:1081" }
+proxy: {}
+
+account:
+  # Pixiv login refresh token
+  # See https://github.com/mixmoe/HibiAPI/issues/53 for how to obtain one
+  # Multiple accounts are supported for load balancing, one token per line
+ token: ""
+
+language: zh-cn # response language, affects tag translations
diff --git a/hibiapi/configs/qrcode.yml b/hibiapi/configs/qrcode.yml
new file mode 100644
index 0000000000000000000000000000000000000000..0bf71c93fc901ff9ac39699f7fe7407cf2d2037a
--- /dev/null
+++ b/hibiapi/configs/qrcode.yml
@@ -0,0 +1,9 @@
+enabled: true
+
+qrcode:
+  max-size: 1000 # maximum allowed QR code size, in pixels
+  min-size: 50 # minimum allowed QR code size, in pixels
+  icon-site: # allowed icon hosts, prevents leaking the server IP, wildcards supported
+ - localhost
+ - i.loli.net
+ # - "*"
diff --git a/hibiapi/configs/sauce.yml b/hibiapi/configs/sauce.yml
new file mode 100644
index 0000000000000000000000000000000000000000..71a6ea3c31d74489d6795584a3fa8cc5fbd4638e
--- /dev/null
+++ b/hibiapi/configs/sauce.yml
@@ -0,0 +1,28 @@
+enabled: true
+
+# HTTP proxy address
+# Example format
+# proxy:
+# http_proxy: http://127.0.0.1:1081
+# https_proxy: https://127.0.0.1:1081
+proxy: {}
+
+net:
+  # SauceNAO API keys, multiple keys supported for load balancing, one key per line
+ # api-key: |
+ # aaaaaaa
+ # bbbbbbb
+ api-key: ""
+
+  user-agent: &ua "Mozilla/5.0 (mixmoe@GitHub.com/HibiAPI) Chrome/114.514.1919810" # User-Agent header, usually no need to change
+
+image:
+  max-size: 4096 # maximum size of images to fetch, in KBytes
+  timeout: 6 # image fetch timeout, in seconds
+  headers: { "user-agent": *ua } # request headers used when fetching images
+  allowed: # whitelist of image hosts, prevents leaking the server IP, wildcards supported
+ - localhost
+ - i.loli.net
+ # - "*"
diff --git a/hibiapi/configs/tieba.yml b/hibiapi/configs/tieba.yml
new file mode 100644
index 0000000000000000000000000000000000000000..b939c8e2cd91c859bf0a0ebf439f89ea068e0575
--- /dev/null
+++ b/hibiapi/configs/tieba.yml
@@ -0,0 +1,6 @@
+enabled: true
+
+net:
+  user-agent: "Mozilla/5.0 (mixmoe@GitHub.com/HibiAPI) Chrome/114.514.1919810" # User-Agent header, usually no need to change
+  params:
+    BDUSS: "" # Baidu BDUSS login credential, required by some APIs
diff --git a/hibiapi/configs/wallpaper.yml b/hibiapi/configs/wallpaper.yml
new file mode 100644
index 0000000000000000000000000000000000000000..99c154cdaf72867002d6783bf2ad5394205ffd13
--- /dev/null
+++ b/hibiapi/configs/wallpaper.yml
@@ -0,0 +1,4 @@
+enabled: true
+
+net:
+  user-agent: "Mozilla/5.0 (mixmoe@GitHub.com/HibiAPI) Chrome/114.514.1919810" # User-Agent header, usually no need to change
diff --git a/hibiapi/utils/__init__.py b/hibiapi/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/hibiapi/utils/cache.py b/hibiapi/utils/cache.py
new file mode 100644
index 0000000000000000000000000000000000000000..269dd5eb52bd13333d2b938437b794f8aa3ef5ac
--- /dev/null
+++ b/hibiapi/utils/cache.py
@@ -0,0 +1,130 @@
+import hashlib
+from collections.abc import Awaitable
+from datetime import timedelta
+from functools import wraps
+from typing import Any, Callable, Optional, TypeVar, cast
+
+from cashews import Cache
+from pydantic import BaseModel
+from pydantic.decorator import ValidatedFunction
+
+from .config import Config
+from .log import logger
+
+CACHE_CONFIG_KEY = "_cache_config"
+
+AsyncFunc = Callable[..., Awaitable[Any]]
+T_AsyncFunc = TypeVar("T_AsyncFunc", bound=AsyncFunc)
+
+
+CACHE_ENABLED = Config["cache"]["enabled"].as_bool()
+CACHE_DELTA = timedelta(seconds=Config["cache"]["ttl"].as_number())
+CACHE_URI = Config["cache"]["uri"].as_str()
+CACHE_CONTROLLABLE = Config["cache"]["controllable"].as_bool()
+
+cache = Cache(name="hibiapi")
+try:
+ cache.setup(CACHE_URI)
+except Exception as e:
+    logger.warning(
+        f"Cache URI {CACHE_URI!r} setup failed: "
+        f"{e!r}, falling back to the memory backend."
+    )
+    cache.setup("mem://")
+
+
+class CacheConfig(BaseModel):
+ endpoint: AsyncFunc
+ namespace: str
+ enabled: bool = True
+ ttl: timedelta = CACHE_DELTA
+
+ @staticmethod
+ def new(
+ function: AsyncFunc,
+ *,
+ enabled: bool = True,
+ ttl: timedelta = CACHE_DELTA,
+ namespace: Optional[str] = None,
+ ):
+ return CacheConfig(
+ endpoint=function,
+ enabled=enabled,
+ ttl=ttl,
+ namespace=namespace or function.__qualname__,
+ )
+
+
+def cache_config(
+ enabled: bool = True,
+ ttl: timedelta = CACHE_DELTA,
+ namespace: Optional[str] = None,
+):
+ def decorator(function: T_AsyncFunc) -> T_AsyncFunc:
+ setattr(
+ function,
+ CACHE_CONFIG_KEY,
+ CacheConfig.new(function, enabled=enabled, ttl=ttl, namespace=namespace),
+ )
+ return function
+
+ return decorator
+
+
+disable_cache = cache_config(enabled=False)
+
+
+class CachedValidatedFunction(ValidatedFunction):
+ def serialize(self, args: tuple[Any, ...], kwargs: dict[str, Any]) -> BaseModel:
+ values = self.build_values(args=args, kwargs=kwargs)
+ return self.model(**values)
+
+
+def endpoint_cache(function: T_AsyncFunc) -> T_AsyncFunc:
+ from .routing import request_headers, response_headers
+
+ vf = CachedValidatedFunction(function, config={})
+ config = cast(
+ CacheConfig,
+ getattr(function, CACHE_CONFIG_KEY, None) or CacheConfig.new(function),
+ )
+
+ config.enabled = CACHE_ENABLED and config.enabled
+
+ @wraps(function)
+ async def wrapper(*args, **kwargs):
+ cache_policy = "public"
+
+ if CACHE_CONTROLLABLE:
+ cache_policy = request_headers.get().get("cache-control", cache_policy)
+
+ if not config.enabled or cache_policy.casefold() == "no-store":
+ return await vf.call(*args, **kwargs)
+
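+ # Cache key: the endpoint namespace plus an MD5 digest of the validated call arguments, serialized as sorted JSON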
+ key = (
+ f"{config.namespace}:"
+ + hashlib.md5(
+ (model := vf.serialize(args=args, kwargs=kwargs))
+ .json(exclude={"self"}, sort_keys=True, ensure_ascii=False)
+ .encode()
+ ).hexdigest()
+ )
+
+ response_header = response_headers.get()
+ result: Optional[Any] = None
+
+ if cache_policy.casefold() == "no-cache":
+ await cache.delete(key)
+ elif result := await cache.get(key):
+ logger.debug(f"Request hit cache {key}")
+ response_header.setdefault("X-Cache-Hit", key)
+
+ if result is None:
+ result = await vf.execute(model)
+ await cache.set(key, result, expire=config.ttl)
+
+ if (cache_remain := await cache.get_expire(key)) > 0:
+ response_header.setdefault("Cache-Control", f"max-age={cache_remain}")
+
+ return result
+
+ return wrapper # type:ignore
diff --git a/hibiapi/utils/config.py b/hibiapi/utils/config.py
new file mode 100644
index 0000000000000000000000000000000000000000..0618ac2ab127fb1a41447bf6701125e17ad40cf0
--- /dev/null
+++ b/hibiapi/utils/config.py
@@ -0,0 +1,121 @@
+import json
+import os
+from pathlib import Path
+from typing import Any, Optional, TypeVar, overload
+
+import confuse
+import dotenv
+from pydantic import parse_obj_as
+
+from hibiapi import __file__ as root_file
+
+CONFIG_DIR = Path(".") / "configs"
+DEFAULT_DIR = Path(root_file).parent / "configs"
+
+_T = TypeVar("_T")
+
+
+class ConfigSubView(confuse.Subview):
+ @overload
+ def get(self) -> Any: ...
+
+ @overload
+ def get(self, template: type[_T]) -> _T: ...
+
+ def get(self, template: Optional[type[_T]] = None): # type: ignore
+ object_ = super().get()
+ if template is not None:
+ return parse_obj_as(template, object_)
+ return object_
+
+ def get_optional(self, template: type[_T]) -> Optional[_T]:
+ try:
+ return self.get(template)
+ except Exception:
+ return None
+
+ def as_str(self) -> str:
+ return self.get(str)
+
+ def as_str_seq(self, split: str = "\n") -> list[str]: # type: ignore
+ return [
+ stripped
+ for line in self.as_str().strip().split(split)
+ if (stripped := line.strip())
+ ]
+
+ def as_number(self) -> int:
+ return self.get(int)
+
+ def as_bool(self) -> bool:
+ return self.get(bool)
+
+ def as_path(self) -> Path:
+ return self.get(Path)
+
+ def as_dict(self) -> dict[str, Any]:
+ return self.get(dict[str, Any])
+
+ def __getitem__(self, key: str) -> "ConfigSubView":
+ return self.__class__(self, key)
+
+
+class AppConfig(confuse.Configuration):
+ def __init__(self, name: str):
+ self._config_name = name
+ self._config = CONFIG_DIR / (filename := f"{name}.yml")
+ self._default = DEFAULT_DIR / filename
+ super().__init__(name)
+ self._add_env_source()
+
+ def config_dir(self) -> str:
+ return str(CONFIG_DIR)
+
+ def user_config_path(self) -> str:
+ return str(self._config)
+
+ def _add_env_source(self):
+ if dotenv.find_dotenv():
+ dotenv.load_dotenv()
+ config_name = f"{self._config_name.lower()}_"
+ env_configs = {
+ k[len(config_name) :].lower(): str(v)
+ for k, v in os.environ.items()
+ if k.lower().startswith(config_name)
+ }
+ # Convert `AAA_BBB_CCC=DDD` to `{'aaa':{'bbb':{'ccc':'ddd'}}}`
+ source_tree: dict[str, Any] = {}
+ for key, value in env_configs.items():
+ _tmp = source_tree
+ *nodes, name = key.split("_")
+ for node in nodes:
+ _tmp = _tmp.setdefault(node, {})
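+ # Empty values are skipped so blank environment variables do not override file-based config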
+ if value == "":
+ continue
+ try:
+ _tmp[name] = json.loads(value)
+ except json.JSONDecodeError:
+ _tmp[name] = value
+
+ self.sources.insert(0, confuse.ConfigSource.of(source_tree))
+
+ def _add_default_source(self):
+ self.add(confuse.YamlSource(self._default, default=True))
+
+ def _add_user_source(self):
+ self.add(confuse.YamlSource(self._config, optional=True))
+
+ def __getitem__(self, key: str) -> ConfigSubView:
+ return ConfigSubView(self, key)
+
+
+class GeneralConfig(AppConfig):
+ def __init__(self, name: str):
+ super().__init__(name)
+
+
+class APIConfig(GeneralConfig):
+ pass
+
+
+Config = GeneralConfig("general")
diff --git a/hibiapi/utils/decorators/__init__.py b/hibiapi/utils/decorators/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c7f8fd320a847c4817d10012e3a7ef142d60b93
--- /dev/null
+++ b/hibiapi/utils/decorators/__init__.py
@@ -0,0 +1,125 @@
+from __future__ import annotations
+
+import asyncio
+from asyncio import sleep as async_sleep
+from collections.abc import Awaitable, Iterable
+from functools import partial, wraps
+from inspect import iscoroutinefunction
+from time import sleep as sync_sleep
+from typing import Callable, Protocol, TypeVar, overload
+
+from typing_extensions import ParamSpec
+
+from hibiapi.utils.decorators.enum import enum_auto_doc as enum_auto_doc
+from hibiapi.utils.decorators.timer import Callable_T, TimeIt
+from hibiapi.utils.log import logger
+
+Argument_T = ParamSpec("Argument_T")
+Return_T = TypeVar("Return_T")
+
+
+class RetryT(Protocol):
+ @overload
+ def __call__(self, function: Callable_T) -> Callable_T: ...
+
+ @overload
+ def __call__(
+ self,
+ *,
+ retries: int = ...,
+ delay: float = ...,
+ exceptions: Iterable[type[Exception]] | None = ...,
+ ) -> RetryT: ...
+
+ def __call__(
+ self,
+ function: Callable | None = ...,
+ *,
+ retries: int = ...,
+ delay: float = ...,
+ exceptions: Iterable[type[Exception]] | None = ...,
+ ) -> Callable | RetryT: ...
+
+
+@overload
+def Retry(function: Callable_T) -> Callable_T: ...
+
+
+@overload
+def Retry(
+ *,
+ retries: int = ...,
+ delay: float = ...,
+ exceptions: Iterable[type[Exception]] | None = ...,
+) -> RetryT: ...
+
+
+def Retry(
+ function: Callable | None = None,
+ *,
+ retries: int = 3,
+ delay: float = 0.1,
+ exceptions: Iterable[type[Exception]] | None = None,
+) -> Callable | RetryT:
+ if function is None:
+ return partial(
+ Retry,
+ retries=retries,
+ delay=delay,
+ exceptions=exceptions,
+ )
+
+ timed_func = TimeIt(function)
+ allowed_exceptions: tuple[type[Exception], ...] = tuple(exceptions or [Exception])
+ assert (retries >= 1) and (delay >= 0)
+
+ @wraps(timed_func)
+ def sync_wrapper(*args, **kwargs):
+ error: Exception | None = None
+ for retried in range(retries):
+ try:
+ return timed_func(*args, **kwargs)
+ except Exception as exception:
+ error = exception
+ if not isinstance(exception, allowed_exceptions):
+ raise
+ logger.opt().debug(
+ f"Retry of {timed_func=} trigged "
+ f"due to {exception=} raised ({retried=}/{retries=})"
+ )
+ sync_sleep(delay)
+ assert isinstance(error, Exception)
+ raise error
+
+ @wraps(timed_func)
+ async def async_wrapper(*args, **kwargs):
+ error: Exception | None = None
+ for retried in range(retries):
+ try:
+ return await timed_func(*args, **kwargs)
+ except Exception as exception:
+ error = exception
+ if not isinstance(exception, allowed_exceptions):
+ raise
+ logger.opt().debug(
+ f"Retry of {timed_func=} trigged "
+ f"due to {exception=} raised ({retried=}/{retries})"
+ )
+ await async_sleep(delay)
+ assert isinstance(error, Exception)
+ raise error
+
+ return async_wrapper if iscoroutinefunction(function) else sync_wrapper
+
+
+def ToAsync(
+ function: Callable[Argument_T, Return_T],
+) -> Callable[Argument_T, Awaitable[Return_T]]:
+ @TimeIt
+ @wraps(function)
+ async def wrapper(*args: Argument_T.args, **kwargs: Argument_T.kwargs) -> Return_T:
+ return await asyncio.get_running_loop().run_in_executor(
+ None, lambda: function(*args, **kwargs)
+ )
+
+ return wrapper
diff --git a/hibiapi/utils/decorators/enum.py b/hibiapi/utils/decorators/enum.py
new file mode 100644
index 0000000000000000000000000000000000000000..53f467af05faa8c258a6e357824ccc238348527d
--- /dev/null
+++ b/hibiapi/utils/decorators/enum.py
@@ -0,0 +1,41 @@
+import ast
+import inspect
+from enum import Enum
+from typing import TypeVar
+
+_ET = TypeVar("_ET", bound=type[Enum])
+
+
+def enum_auto_doc(enum: _ET) -> _ET:
+ enum_class_ast, *_ = ast.parse(inspect.getsource(enum)).body
+ assert isinstance(enum_class_ast, ast.ClassDef)
+
+ enum_value_comments: dict[str, str] = {}
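+ # Pair each member assignment with the string literal that immediately follows it; that literal becomes the member's docstring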
+ for index, body in enumerate(body_list := enum_class_ast.body):
+ if (
+ isinstance(body, ast.Assign)
+ and (next_index := index + 1) < len(body_list)
+ and isinstance(next_body := body_list[next_index], ast.Expr)
+ ):
+ target, *_ = body.targets
+ assert isinstance(target, ast.Name)
+ assert isinstance(next_body.value, ast.Constant)
+ assert isinstance(member_doc := next_body.value.value, str)
+ enum[target.id].__doc__ = member_doc
+ enum_value_comments[target.id] = inspect.cleandoc(member_doc)
+
+ if not enum_value_comments and all(member.name == member.value for member in enum):
+ return enum
+
+ members_doc = ""
+ for member in enum:
+ value_document = "-"
+ if member.name != member.value:
+ value_document += f" `{member.name}` ="
+ value_document += f" *`{member.value}`*"
+ if doc := enum_value_comments.get(member.name):
+ value_document += f" : {doc}"
+ members_doc += value_document + "\n"
+
+ enum.__doc__ = f"{enum.__doc__}\n{members_doc}"
+ return enum
diff --git a/hibiapi/utils/decorators/timer.py b/hibiapi/utils/decorators/timer.py
new file mode 100644
index 0000000000000000000000000000000000000000..b3b75fc75d4f6d80b139b24acd086b24b2384c14
--- /dev/null
+++ b/hibiapi/utils/decorators/timer.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+import time
+from dataclasses import dataclass, field
+from functools import wraps
+from inspect import iscoroutinefunction
+from typing import Any, Callable, ClassVar, TypeVar
+
+from hibiapi.utils.log import logger
+
+Callable_T = TypeVar("Callable_T", bound=Callable)
+
+
+class TimerError(Exception):
+ """A custom exception used to report errors in use of Timer class"""
+
+
+@dataclass
+class Timer:
+ """Time your code using a class, context manager, or decorator"""
+
+ timers: ClassVar[dict[str, float]] = dict()
+ name: str | None = None
+ text: str = "Elapsed time: {:0.3f} seconds"
+ logger_func: Callable[[str], None] | None = print
+ _start_time: float | None = field(default=None, init=False, repr=False)
+
+ def __post_init__(self) -> None:
+ """Initialization: add timer to dict of timers"""
+ if self.name:
+ self.timers.setdefault(self.name, 0)
+
+ def start(self) -> None:
+ """Start a new timer"""
+ if self._start_time is not None:
+ raise TimerError("Timer is running. Use .stop() to stop it")
+
+ self._start_time = time.perf_counter()
+
+ def stop(self) -> float:
+ """Stop the timer, and report the elapsed time"""
+ if self._start_time is None:
+ raise TimerError("Timer is not running. Use .start() to start it")
+
+ # Calculate elapsed time
+ elapsed_time = time.perf_counter() - self._start_time
+ self._start_time = None
+
+ # Report elapsed time
+ if self.logger_func:
+ self.logger_func(self.text.format(elapsed_time * 1000))
+ if self.name:
+ self.timers[self.name] += elapsed_time
+
+ return elapsed_time
+
+ def __enter__(self) -> Timer:
+ """Start a new timer as a context manager"""
+ self.start()
+ return self
+
+ def __exit__(self, *exc_info: Any) -> None:
+ """Stop the context manager timer"""
+ self.stop()
+
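+ # Return a fresh Timer for each decorated call so concurrent calls do not share _start_time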
+ def _recreate_cm(self) -> Timer:
+ return self.__class__(self.name, self.text, self.logger_func)
+
+ def __call__(self, function: Callable_T) -> Callable_T:
+ @wraps(function)
+ async def async_wrapper(*args: Any, **kwargs: Any):
+ self.text = (
+ f"Async function {function.__qualname__} "
+ "cost {:.3f}ms"
+ )
+
+ with self._recreate_cm():
+ return await function(*args, **kwargs)
+
+ @wraps(function)
+ def sync_wrapper(*args: Any, **kwargs: Any):
+ self.text = (
+ f"sync function {function.__qualname__} "
+ "cost {:.3f}ms"
+ )
+
+ with self._recreate_cm():
+ return function(*args, **kwargs)
+
+ return (
+ async_wrapper if iscoroutinefunction(function) else sync_wrapper
+ ) # type:ignore
+
+
+TimeIt = Timer(logger_func=logger.trace)
diff --git a/hibiapi/utils/exceptions.py b/hibiapi/utils/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3b819cfd0fcd7af3a2be76aee9330ba4a6b3ba6
--- /dev/null
+++ b/hibiapi/utils/exceptions.py
@@ -0,0 +1,77 @@
+from datetime import datetime
+from typing import Any, Optional
+
+from pydantic import AnyHttpUrl, BaseModel, Extra, Field
+
+
+class ExceptionReturn(BaseModel):
+ url: Optional[AnyHttpUrl] = None
+ time: datetime = Field(default_factory=datetime.now)
+ code: int = Field(ge=400, le=599)
+ detail: str
+ headers: dict[str, str] = {}
+
+ class Config:
+ extra = Extra.allow
+
+
+class BaseServerException(Exception):
+ code: int = 500
+ detail: str = "Server Fault"
+ headers: dict[str, Any] = {}
+
+ def __init__(
+ self,
+ detail: Optional[str] = None,
+ *,
+ code: Optional[int] = None,
+ headers: Optional[dict[str, Any]] = None,
+ **params
+ ) -> None:
+ self.data = ExceptionReturn(
+ detail=detail or self.__class__.detail,
+ code=code or self.__class__.code,
+ headers=headers or self.__class__.headers,
+ **params
+ )
+ super().__init__(detail)
+
+
+class BaseHTTPException(BaseServerException):
+ pass
+
+
+class ServerSideException(BaseServerException):
+ code = 500
+ detail = "Internal Server Error"
+
+
+class UpstreamAPIException(ServerSideException):
+ code = 502
+ detail = "Upstram API request failed"
+
+
+class UncaughtException(ServerSideException):
+ code = 500
+ detail = "Uncaught exception raised during processing"
+ exc: Exception
+
+ @classmethod
+ def with_exception(cls, e: Exception):
+ c = cls(e.__class__.__qualname__)
+ c.exc = e
+ return c
+
+
+class ClientSideException(BaseServerException):
+ code = 400
+ detail = "Bad Request"
+
+
+class ValidationException(ClientSideException):
+ code = 422
+
+
+class RateLimitReachedException(ClientSideException):
+ code = 429
+ detail = "Rate limit reached"
diff --git a/hibiapi/utils/log.py b/hibiapi/utils/log.py
new file mode 100644
index 0000000000000000000000000000000000000000..370f48bb757d088c8b3f36db9dde1dea1f17e754
--- /dev/null
+++ b/hibiapi/utils/log.py
@@ -0,0 +1,67 @@
+import logging
+import re
+import sys
+from datetime import timedelta
+from pathlib import Path
+
+import sentry_sdk.integrations.logging as sentry
+from loguru import logger as _logger
+
+from hibiapi.utils.config import Config
+
+LOG_FILE = Config["log"]["file"].get_optional(Path)
+LOG_LEVEL = Config["log"]["level"].as_str().strip().upper()
+LOG_FORMAT = Config["log"]["format"].as_str().strip()
+
+
+class LoguruHandler(logging.Handler):
+ _tag_escape_re = re.compile(r"</?((?:[fb]g\s)?[^<>\s]*)>")
+
+ @classmethod
+ def escape_tag(cls, string: str) -> str:
+ return cls._tag_escape_re.sub(r"\\\g<0>", string)
+
+ def emit(self, record: logging.LogRecord):
+ try:
+ level = logger.level(record.levelname).name
+ except ValueError:
+ level = record.levelno
+
+ frame, depth, message = logging.currentframe(), 2, record.getMessage()
+ while frame.f_code.co_filename == logging.__file__: # type: ignore
+ frame = frame.f_back # type: ignore
+ depth += 1
+
+ logger.opt(depth=depth, exception=record.exc_info, colors=True).log(
+ level, f"{self.escape_tag(message)}"
+ )
+
+
+logger = _logger.opt(colors=True)
+logger.remove()
+logger.add(
+ sys.stdout,
+ level=LOG_LEVEL,
+ format=LOG_FORMAT,
+ filter=lambda record: record["level"].no < logging.WARNING,
+)
+logger.add(
+ sys.stderr,
+ level=LOG_LEVEL,
+ filter=lambda record: record["level"].no >= logging.WARNING,
+ format=LOG_FORMAT,
+)
+logger.add(sentry.BreadcrumbHandler(), level=LOG_LEVEL)
+logger.add(sentry.EventHandler(), level="ERROR")
+
+if LOG_FILE is not None:
+ LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
+
+ logger.add(
+ str(LOG_FILE),
+ level=LOG_LEVEL,
+ encoding="utf-8",
+ rotation=timedelta(days=1),
+ )
+
+logger.level(LOG_LEVEL)
diff --git a/hibiapi/utils/net.py b/hibiapi/utils/net.py
new file mode 100644
index 0000000000000000000000000000000000000000..9fe66df017d0b37d695145d6c2e31391815d00fa
--- /dev/null
+++ b/hibiapi/utils/net.py
@@ -0,0 +1,130 @@
+import functools
+from collections.abc import Coroutine
+from types import TracebackType
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Optional,
+ TypeVar,
+ Union,
+)
+
+from httpx import (
+ URL,
+ AsyncClient,
+ Cookies,
+ HTTPError,
+ HTTPStatusError,
+ Request,
+ Response,
+ ResponseNotRead,
+ TransportError,
+)
+
+from .decorators import Retry, TimeIt
+from .exceptions import UpstreamAPIException
+from .log import logger
+
+AsyncCallable_T = TypeVar("AsyncCallable_T", bound=Callable[..., Coroutine])
+
+
+class AsyncHTTPClient(AsyncClient):
+ net_client: "BaseNetClient"
+
+ @staticmethod
+ async def _log_request(request: Request):
+ method, url = request.method, request.url
+ logger.debug(
+ f"Network request sent: {method} {url}"
+ )
+
+ @staticmethod
+ async def _log_response(response: Response):
+ method, url = response.request.method, response.url
+ try:
+ length, code = len(response.content), response.status_code
+ except ResponseNotRead:
+ length, code = -1, response.status_code
+ logger.debug(
+ f"Network request finished: {method} "
+ f"{url} {code} {length}"
+ )
+
+ @Retry(exceptions=[TransportError])
+ async def request(self, method: str, url: Union[URL, str], **kwargs):
+ self.event_hooks = {
+ "request": [self._log_request],
+ "response": [self._log_response],
+ }
+ return await super().request(method, url, **kwargs)
+
+
+class BaseNetClient:
+ connections: ClassVar[int] = 0
+ clients: ClassVar[list[AsyncHTTPClient]] = []
+
+ client: Optional[AsyncHTTPClient] = None
+
+ def __init__(
+ self,
+ headers: Optional[dict[str, Any]] = None,
+ cookies: Optional[Cookies] = None,
+ proxies: Optional[dict[str, str]] = None,
+ client_class: type[AsyncHTTPClient] = AsyncHTTPClient,
+ ):
+ self.cookies, self.client_class = cookies or Cookies(), client_class
+ self.headers: dict[str, Any] = headers or {}
+ self.proxies: Any = proxies or {} # Bypass type checker
+
+ self.create_client()
+
+ def create_client(self):
+ self.client = self.client_class(
+ headers=self.headers,
+ proxies=self.proxies,
+ cookies=self.cookies,
+ http2=True,
+ follow_redirects=True,
+ )
+ self.client.net_client = self
+ BaseNetClient.clients.append(self.client)
+ return self.client
+
+ async def __aenter__(self):
+ if not self.client or self.client.is_closed:
+ self.client = await self.create_client().__aenter__()
+
+ self.__class__.connections += 1
+ return self.client
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[type[BaseException]] = None,
+ exc_value: Optional[BaseException] = None,
+ traceback: Optional[TracebackType] = None,
+ ):
+ self.__class__.connections -= 1
+
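+ # Only tear down the client when the context exited with an exception; otherwise keep it open for connection reuse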
+ if not (exc_type and exc_value and traceback):
+ return
+ if self.client and not self.client.is_closed:
+ client = self.client
+ self.client = None
+ await client.__aexit__(exc_type, exc_value, traceback)
+ return
+
+
+def catch_network_error(function: AsyncCallable_T) -> AsyncCallable_T:
+ timed_func = TimeIt(function)
+
+ @functools.wraps(timed_func)
+ async def wrapper(*args, **kwargs):
+ try:
+ return await timed_func(*args, **kwargs)
+ except HTTPStatusError as e:
+ raise UpstreamAPIException(detail=e.response.text) from e
+ except HTTPError as e:
+ raise UpstreamAPIException from e
+
+ return wrapper # type:ignore
diff --git a/hibiapi/utils/routing.py b/hibiapi/utils/routing.py
new file mode 100644
index 0000000000000000000000000000000000000000..3667439e596a47e92a4495dc61ad45db370e6ce5
--- /dev/null
+++ b/hibiapi/utils/routing.py
@@ -0,0 +1,187 @@
+import inspect
+from collections.abc import Mapping
+from contextvars import ContextVar
+from enum import Enum
+from fnmatch import fnmatch
+from functools import wraps
+from typing import Annotated, Any, Callable, Literal, Optional
+from urllib.parse import ParseResult, urlparse
+
+from fastapi import Depends, Request
+from fastapi.routing import APIRouter
+from httpx import URL
+from pydantic import AnyHttpUrl
+from pydantic.errors import UrlHostError
+from starlette.datastructures import Headers, MutableHeaders
+
+from hibiapi.utils.cache import endpoint_cache
+from hibiapi.utils.net import AsyncCallable_T, AsyncHTTPClient, BaseNetClient
+
+DONT_ROUTE_KEY = "_dont_route"
+
+
+def dont_route(func: AsyncCallable_T) -> AsyncCallable_T:
+ setattr(func, DONT_ROUTE_KEY, True)
+ return func
+
+
+class EndpointMeta(type):
+ @staticmethod
+ def _list_router_function(members: dict[str, Any]):
+ return {
+ name: object
+ for name, object in members.items()
+ if (
+ inspect.iscoroutinefunction(object)
+ and not name.startswith("_")
+ and not getattr(object, DONT_ROUTE_KEY, False)
+ )
+ }
+
+ def __new__(
+ cls,
+ name: str,
+ bases: tuple[type, ...],
+ namespace: dict[str, Any],
+ *,
+ cache_endpoints: bool = True,
+ **kwargs,
+ ):
+ for object_name, object in cls._list_router_function(namespace).items():
+ namespace[object_name] = (
+ endpoint_cache(object) if cache_endpoints else object
+ )
+ return super().__new__(cls, name, bases, namespace, **kwargs)
+
+ @property
+ def router_functions(self):
+ return self._list_router_function(dict(inspect.getmembers(self)))
+
+
+class BaseEndpoint(metaclass=EndpointMeta, cache_endpoints=False):
+ def __init__(self, client: AsyncHTTPClient):
+ self.client = client
+
+ @staticmethod
+ def _join(base: str, endpoint: str, params: dict[str, Any]) -> URL:
+ host: ParseResult = urlparse(base)
+ params = {
+ k: (v.value if isinstance(v, Enum) else v)
+ for k, v in params.items()
+ if v is not None
+ }
+ return URL(
+ url=ParseResult(
+ scheme=host.scheme,
+ netloc=host.netloc,
+ path=endpoint.format(**params),
+ params="",
+ query="",
+ fragment="",
+ ).geturl(),
+ params=params,
+ )
+
+
+class SlashRouter(APIRouter):
+ def api_route(self, path: str, **kwargs):
+ path = path if path.startswith("/") else f"/{path}"
+ return super().api_route(path, **kwargs)
+
+
+class EndpointRouter(SlashRouter):
+ @staticmethod
+ def _exclude_params(func: Callable, params: Mapping[str, Any]) -> dict[str, Any]:
+ func_params = inspect.signature(func).parameters
+ return {k: v for k, v in params.items() if k in func_params}
+
+ @staticmethod
+ def _router_signature_convert(
+ func,
+ endpoint_class: type["BaseEndpoint"],
+ request_client: Callable,
+ method_name: Optional[str] = None,
+ ):
+ @wraps(func)
+ async def route_func(endpoint: endpoint_class, **kwargs):
+ endpoint_method = getattr(endpoint, method_name or func.__name__)
+ return await endpoint_method(**kwargs)
+
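+ # Replace the visible signature so FastAPI injects the endpoint instance via Depends and exposes only the original function's keyword-only parameters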
+ route_func.__signature__ = inspect.signature(route_func).replace( # type:ignore
+ parameters=[
+ inspect.Parameter(
+ name="endpoint",
+ kind=inspect.Parameter.POSITIONAL_OR_KEYWORD,
+ annotation=endpoint_class,
+ default=Depends(request_client),
+ ),
+ *(
+ param
+ for param in inspect.signature(func).parameters.values()
+ if param.kind == inspect.Parameter.KEYWORD_ONLY
+ ),
+ ]
+ )
+ return route_func
+
+ def include_endpoint(
+ self,
+ endpoint_class: type[BaseEndpoint],
+ net_client: BaseNetClient,
+ add_match_all: bool = True,
+ ):
+ router_functions = endpoint_class.router_functions
+
+ async def request_client():
+ async with net_client as client:
+ yield endpoint_class(client)
+
+ for func_name, func in router_functions.items():
+ self.add_api_route(
+ path=f"/{func_name}",
+ endpoint=self._router_signature_convert(
+ func,
+ endpoint_class=endpoint_class,
+ request_client=request_client,
+ method_name=func_name,
+ ),
+ methods=["GET"],
+ )
+
+ if not add_match_all:
+ return
+
+ @self.get("/", description="JournalAD style API routing", deprecated=True)
+ async def match_all(
+ endpoint: Annotated[endpoint_class, Depends(request_client)],
+ request: Request,
+ type: Literal[tuple(router_functions.keys())], # type: ignore
+ ):
+ func = router_functions[type]
+ return await func(
+ endpoint, **self._exclude_params(func, request.query_params)
+ )
+
+
+class BaseHostUrl(AnyHttpUrl):
+ allowed_hosts: list[str] = []
+
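+ # allowed_hosts entries are matched against the request host with fnmatch-style wildcards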
+ @classmethod
+ def validate_host(cls, parts) -> tuple[str, Optional[str], str, bool]:
+ host, tld, host_type, rebuild = super().validate_host(parts)
+ if not cls._check_domain(host):
+ raise UrlHostError(allowed=cls.allowed_hosts)
+ return host, tld, host_type, rebuild
+
+ @classmethod
+ def _check_domain(cls, host: str) -> bool:
+ return any(
+ filter(
+ lambda x: fnmatch(host, x), # type:ignore
+ cls.allowed_hosts,
+ )
+ )
+
+
+request_headers = ContextVar[Headers]("request_headers")
+response_headers = ContextVar[MutableHeaders]("response_headers")
diff --git a/hibiapi/utils/temp.py b/hibiapi/utils/temp.py
new file mode 100644
index 0000000000000000000000000000000000000000..71633748615c99de51b887084078e677ba970d07
--- /dev/null
+++ b/hibiapi/utils/temp.py
@@ -0,0 +1,31 @@
+from pathlib import Path
+from tempfile import mkdtemp, mkstemp
+from threading import Lock
+from urllib.parse import ParseResult
+
+from fastapi import Request
+
+
+class TempFile:
+ path = Path(mkdtemp())
+ path_depth = 3
+ name_length = 16
+
+ _lock = Lock()
+
+ @classmethod
+ def create(cls, ext: str = ".tmp"):
+ descriptor, str_path = mkstemp(suffix=ext, dir=str(cls.path))
+ return descriptor, Path(str_path)
+
+ @classmethod
+ def to_url(cls, request: Request, path: Path) -> str:
+ assert cls.path
+ return ParseResult(
+ scheme=request.url.scheme,
+ netloc=request.url.netloc,
+ path=f"/temp/{path.relative_to(cls.path)}",
+ params="",
+ query="",
+ fragment="",
+ ).geturl()
diff --git a/pdm.lock b/pdm.lock
new file mode 100644
index 0000000000000000000000000000000000000000..94973b56dbd0e4b52a54ef813ef4a3f281bd84c8
--- /dev/null
+++ b/pdm.lock
@@ -0,0 +1,1361 @@
+# This file is @generated by PDM.
+# It is not intended for manual editing.
+
+[metadata]
+groups = ["default", "dev", "scripts"]
+strategy = ["cross_platform"]
+lock_version = "4.4.1"
+content_hash = "sha256:95cd422773e267866d29e8eee010107d7ecf028cfae1cf715c714eaeafd17711"
+
+[[package]]
+name = "anyio"
+version = "3.7.1"
+requires_python = ">=3.7"
+summary = "High level compatibility layer for multiple asynchronous event loop implementations"
+dependencies = [
+ "exceptiongroup; python_version < \"3.11\"",
+ "idna>=2.8",
+ "sniffio>=1.1",
+]
+files = [
+ {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
+ {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
+]
+
+[[package]]
+name = "async-timeout"
+version = "4.0.2"
+requires_python = ">=3.6"
+summary = "Timeout context manager for asyncio programs"
+files = [
+ {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"},
+ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"},
+]
+
+[[package]]
+name = "cashews"
+version = "7.0.2"
+requires_python = ">=3.8"
+summary = "cache tools with async power"
+files = [
+ {file = "cashews-7.0.2-py3-none-any.whl", hash = "sha256:fa984d918461fab97cd232e2ccdd027c8b87b2a60062799b16b9b04c61307c1f"},
+ {file = "cashews-7.0.2.tar.gz", hash = "sha256:27ea1e2b74b95fe094d99197c60160a791f837c603b93120f39430c8a2767ca8"},
+]
+
+[[package]]
+name = "cashews"
+version = "7.0.2"
+extras = ["diskcache", "redis"]
+requires_python = ">=3.8"
+summary = "cache tools with async power"
+dependencies = [
+ "cashews==7.0.2",
+ "diskcache>=5.0.0",
+ "redis!=5.0.1,>=4.3.1",
+]
+files = [
+ {file = "cashews-7.0.2-py3-none-any.whl", hash = "sha256:fa984d918461fab97cd232e2ccdd027c8b87b2a60062799b16b9b04c61307c1f"},
+ {file = "cashews-7.0.2.tar.gz", hash = "sha256:27ea1e2b74b95fe094d99197c60160a791f837c603b93120f39430c8a2767ca8"},
+]
+
+[[package]]
+name = "certifi"
+version = "2023.5.7"
+requires_python = ">=3.6"
+summary = "Python package for providing Mozilla's CA Bundle."
+files = [
+ {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"},
+ {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"},
+]
+
+[[package]]
+name = "charset-normalizer"
+version = "3.2.0"
+requires_python = ">=3.7.0"
+summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+files = [
+ {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
+ {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
+ {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
+ {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
+ {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
+ {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.5"
+requires_python = ">=3.7"
+summary = "Composable command line interface toolkit"
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+]
+files = [
+ {file = "click-8.1.5-py3-none-any.whl", hash = "sha256:e576aa487d679441d7d30abb87e1b43d24fc53bffb8758443b1a9e1cee504548"},
+ {file = "click-8.1.5.tar.gz", hash = "sha256:4be4b1af8d665c6d942909916d31a213a106800c47d0eeba73d34da3cbc11367"},
+]
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+summary = "Cross-platform colored terminal text."
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "confuse"
+version = "2.0.1"
+requires_python = ">=3.6"
+summary = "Painless YAML configuration."
+dependencies = [
+ "pyyaml",
+]
+files = [
+ {file = "confuse-2.0.1-py3-none-any.whl", hash = "sha256:9b9e5bbc70e2cb9b318bcab14d917ec88e21bf1b724365e3815eb16e37aabd2a"},
+ {file = "confuse-2.0.1.tar.gz", hash = "sha256:7379a2ad49aaa862b79600cc070260c1b7974d349f4fa5e01f9afa6c4dd0611f"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.2.7"
+requires_python = ">=3.7"
+summary = "Code coverage measurement for Python"
+files = [
+ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
+ {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
+ {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
+ {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
+ {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
+ {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
+ {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
+ {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
+ {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
+ {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
+ {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
+ {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
+ {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
+ {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
+ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+]
+
+[[package]]
+name = "coverage"
+version = "7.2.7"
+extras = ["toml"]
+requires_python = ">=3.7"
+summary = "Code coverage measurement for Python"
+dependencies = [
+ "coverage==7.2.7",
+ "tomli; python_full_version <= \"3.11.0a6\"",
+]
+files = [
+ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
+ {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
+ {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
+ {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
+ {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
+ {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
+ {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
+ {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
+ {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
+ {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
+ {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
+ {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
+ {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
+ {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
+ {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
+ {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
+ {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
+ {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
+ {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
+ {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
+ {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
+ {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
+ {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
+ {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
+ {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
+ {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
+ {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
+ {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
+]
+
+[[package]]
+name = "diskcache"
+version = "5.6.1"
+requires_python = ">=3"
+summary = "Disk Cache -- Disk and file backed persistent cache."
+files = [
+ {file = "diskcache-5.6.1-py3-none-any.whl", hash = "sha256:558c6a2d5d7c721bb00e40711803d6804850c9f76c426ed81ecc627fe9d2ce2d"},
+ {file = "diskcache-5.6.1.tar.gz", hash = "sha256:e4c978532feff5814c4cc00fe1e11e40501985946643d73220d41ee7737c72c3"},
+]
+
+[[package]]
+name = "exceptiongroup"
+version = "1.1.2"
+requires_python = ">=3.7"
+summary = "Backport of PEP 654 (exception groups)"
+files = [
+ {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"},
+ {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"},
+]
+
+[[package]]
+name = "fastapi"
+version = "0.110.2"
+requires_python = ">=3.8"
+summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
+dependencies = [
+ "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4",
+ "starlette<0.38.0,>=0.37.2",
+ "typing-extensions>=4.8.0",
+]
+files = [
+ {file = "fastapi-0.110.2-py3-none-any.whl", hash = "sha256:239403f2c0a3dda07a9420f95157a7f014ddb2b770acdbc984f9bdf3ead7afdb"},
+ {file = "fastapi-0.110.2.tar.gz", hash = "sha256:b53d673652da3b65e8cd787ad214ec0fe303cad00d2b529b86ce7db13f17518d"},
+]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+requires_python = ">=3.7"
+summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+files = [
+ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "h2"
+version = "4.1.0"
+requires_python = ">=3.6.1"
+summary = "HTTP/2 State-Machine based protocol implementation"
+dependencies = [
+ "hpack<5,>=4.0",
+ "hyperframe<7,>=6.0",
+]
+files = [
+ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"},
+ {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"},
+]
+
+[[package]]
+name = "hpack"
+version = "4.0.0"
+requires_python = ">=3.6.1"
+summary = "Pure-Python HPACK header compression"
+files = [
+ {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
+ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.5"
+requires_python = ">=3.8"
+summary = "A minimal low-level HTTP client."
+dependencies = [
+ "certifi",
+ "h11<0.15,>=0.13",
+]
+files = [
+ {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"},
+ {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"},
+]
+
+[[package]]
+name = "httptools"
+version = "0.6.0"
+requires_python = ">=3.5.0"
+summary = "A collection of framework independent HTTP protocol utils."
+files = [
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:818325afee467d483bfab1647a72054246d29f9053fd17cc4b86cda09cc60339"},
+ {file = "httptools-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72205730bf1be875003692ca54a4a7c35fac77b4746008966061d9d41a61b0f5"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33eb1d4e609c835966e969a31b1dedf5ba16b38cab356c2ce4f3e33ffa94cad3"},
+ {file = "httptools-0.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdc6675ec6cb79d27e0575750ac6e2b47032742e24eed011b8db73f2da9ed40"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:463c3bc5ef64b9cf091be9ac0e0556199503f6e80456b790a917774a616aff6e"},
+ {file = "httptools-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:82f228b88b0e8c6099a9c4757ce9fdbb8b45548074f8d0b1f0fc071e35655d1c"},
+ {file = "httptools-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:0781fedc610293a2716bc7fa142d4c85e6776bc59d617a807ff91246a95dea35"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:721e503245d591527cddd0f6fd771d156c509e831caa7a57929b55ac91ee2b51"},
+ {file = "httptools-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:274bf20eeb41b0956e34f6a81f84d26ed57c84dd9253f13dcb7174b27ccd8aaf"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:259920bbae18740a40236807915def554132ad70af5067e562f4660b62c59b90"},
+ {file = "httptools-0.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03bfd2ae8a2d532952ac54445a2fb2504c804135ed28b53fefaf03d3a93eb1fd"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f959e4770b3fc8ee4dbc3578fd910fab9003e093f20ac8c621452c4d62e517cb"},
+ {file = "httptools-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e22896b42b95b3237eccc42278cd72c0df6f23247d886b7ded3163452481e38"},
+ {file = "httptools-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:38f3cafedd6aa20ae05f81f2e616ea6f92116c8a0f8dcb79dc798df3356836e2"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cf8169e839a0d740f3d3c9c4fa630ac1a5aaf81641a34575ca6773ed7ce041a1"},
+ {file = "httptools-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dcc14c090ab57b35908d4a4585ec5c0715439df07be2913405991dbb37e049d"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d0b0571806a5168013b8c3d180d9f9d6997365a4212cb18ea20df18b938aa0b"},
+ {file = "httptools-0.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb4a608c631f7dcbdf986f40af7a030521a10ba6bc3d36b28c1dc9e9035a3c0"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:93f89975465133619aea8b1952bc6fa0e6bad22a447c6d982fc338fbb4c89649"},
+ {file = "httptools-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:73e9d66a5a28b2d5d9fbd9e197a31edd02be310186db423b28e6052472dc8201"},
+ {file = "httptools-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:22c01fcd53648162730a71c42842f73b50f989daae36534c818b3f5050b54589"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f96d2a351b5625a9fd9133c95744e8ca06f7a4f8f0b8231e4bbaae2c485046a"},
+ {file = "httptools-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72ec7c70bd9f95ef1083d14a755f321d181f046ca685b6358676737a5fecd26a"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b703d15dbe082cc23266bf5d9448e764c7cb3fcfe7cb358d79d3fd8248673ef9"},
+ {file = "httptools-0.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c723ed5982f8ead00f8e7605c53e55ffe47c47465d878305ebe0082b6a1755"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b0a816bb425c116a160fbc6f34cece097fd22ece15059d68932af686520966bd"},
+ {file = "httptools-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dea66d94e5a3f68c5e9d86e0894653b87d952e624845e0b0e3ad1c733c6cc75d"},
+ {file = "httptools-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:23b09537086a5a611fad5696fc8963d67c7e7f98cb329d38ee114d588b0b74cd"},
+ {file = "httptools-0.6.0.tar.gz", hash = "sha256:9fc6e409ad38cbd68b177cd5158fc4042c796b82ca88d99ec78f07bed6c6b796"},
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+requires_python = ">=3.8"
+summary = "The next generation HTTP client."
+dependencies = [
+ "anyio",
+ "certifi",
+ "httpcore==1.*",
+ "idna",
+ "sniffio",
+]
+files = [
+ {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+ {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+]
+
+[[package]]
+name = "httpx"
+version = "0.27.0"
+extras = ["http2"]
+requires_python = ">=3.8"
+summary = "The next generation HTTP client."
+dependencies = [
+ "h2<5,>=3",
+ "httpx==0.27.0",
+]
+files = [
+ {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"},
+ {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"},
+]
+
+[[package]]
+name = "hyperframe"
+version = "6.0.1"
+requires_python = ">=3.6.1"
+summary = "HTTP/2 framing layer for Python"
+files = [
+ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
+ {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
+]
+
+[[package]]
+name = "idna"
+version = "3.4"
+requires_python = ">=3.5"
+summary = "Internationalized Domain Names in Applications (IDNA)"
+files = [
+ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "iniconfig"
+version = "2.0.0"
+requires_python = ">=3.7"
+summary = "brain-dead simple config-ini parsing"
+files = [
+ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
+ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
+]
+
+[[package]]
+name = "loguru"
+version = "0.7.2"
+requires_python = ">=3.5"
+summary = "Python logging made (stupidly) simple"
+dependencies = [
+ "colorama>=0.3.4; sys_platform == \"win32\"",
+ "win32-setctime>=1.0.0; sys_platform == \"win32\"",
+]
+files = [
+ {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"},
+ {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"},
+]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+requires_python = ">=3.8"
+summary = "Python port of markdown-it. Markdown parsing, done right!"
+dependencies = [
+ "mdurl~=0.1",
+]
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[[package]]
+name = "markupsafe"
+version = "2.1.3"
+requires_python = ">=3.7"
+summary = "Safely add untrusted strings to HTML/XML markup."
+files = [
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
+ {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
+ {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
+ {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
+ {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
+ {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+]
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+requires_python = ">=3.7"
+summary = "Markdown URL utilities"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "packaging"
+version = "23.1"
+requires_python = ">=3.7"
+summary = "Core utilities for Python packages"
+files = [
+ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
+ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+]
+
+[[package]]
+name = "pillow"
+version = "10.0.0"
+requires_python = ">=3.8"
+summary = "Python Imaging Library (Fork)"
+files = [
+ {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"},
+ {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"},
+ {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"},
+ {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"},
+ {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"},
+ {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"},
+ {file = "Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"},
+ {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"},
+ {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"},
+ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"},
+ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"},
+ {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"},
+ {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"},
+ {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"},
+ {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"},
+ {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"},
+ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"},
+ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"},
+ {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"},
+ {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"},
+ {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"},
+ {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"},
+ {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"},
+ {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"},
+ {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"},
+ {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"},
+ {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"},
+ {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"},
+ {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"},
+ {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"},
+ {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"},
+ {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"},
+ {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"},
+ {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"},
+ {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"},
+]
+
+[[package]]
+name = "pluggy"
+version = "1.4.0"
+requires_python = ">=3.8"
+summary = "plugin and hook calling mechanisms for python"
+files = [
+ {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
+ {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+]
+
+[[package]]
+name = "py-cpuinfo"
+version = "9.0.0"
+summary = "Get CPU info with pure Python"
+files = [
+ {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
+ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
+]
+
+[[package]]
+name = "pycryptodomex"
+version = "3.20.0"
+requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+summary = "Cryptographic library for Python"
+files = [
+ {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"},
+ {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"},
+ {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"},
+ {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"},
+ {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"},
+ {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"},
+ {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"},
+]
+
+[[package]]
+name = "pydantic"
+version = "1.10.15"
+requires_python = ">=3.7"
+summary = "Data validation and settings management using python type hints"
+dependencies = [
+ "typing-extensions>=4.2.0",
+]
+files = [
+ {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"},
+ {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"},
+ {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"},
+ {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"},
+ {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"},
+ {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"},
+ {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"},
+ {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"},
+ {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"},
+ {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"},
+ {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"},
+ {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"},
+ {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"},
+ {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"},
+ {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"},
+ {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"},
+ {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"},
+ {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"},
+ {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"},
+ {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"},
+ {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"},
+ {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"},
+ {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.15.1"
+requires_python = ">=3.7"
+summary = "Pygments is a syntax highlighting package written in Python."
+files = [
+ {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
+ {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
+]
+
+[[package]]
+name = "pypng"
+version = "0.20220715.0"
+summary = "Pure Python library for saving and loading PNG images"
+files = [
+ {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"},
+ {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"},
+]
+
+[[package]]
+name = "pyqt6"
+version = "6.6.1"
+requires_python = ">=3.6.1"
+summary = "Python bindings for the Qt cross platform application toolkit"
+dependencies = [
+ "PyQt6-Qt6>=6.6.0",
+ "PyQt6-sip<14,>=13.6",
+]
+files = [
+ {file = "PyQt6-6.6.1-cp38-abi3-macosx_10_14_universal2.whl", hash = "sha256:6b43878d0bbbcf8b7de165d305ec0cb87113c8930c92de748a11c473a6db5085"},
+ {file = "PyQt6-6.6.1-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5aa0e833cb5a79b93813f8181d9f145517dd5a46f4374544bcd1e93a8beec537"},
+ {file = "PyQt6-6.6.1-cp38-abi3-win_amd64.whl", hash = "sha256:03a656d5dc5ac31b6a9ad200f7f4f7ef49fa00ad7ce7a991b9bb691617141d12"},
+ {file = "PyQt6-6.6.1.tar.gz", hash = "sha256:9f158aa29d205142c56f0f35d07784b8df0be28378d20a97bcda8bd64ffd0379"},
+]
+
+[[package]]
+name = "pyqt6-qt6"
+version = "6.6.3"
+summary = "The subset of a Qt installation needed by PyQt6."
+files = [
+ {file = "PyQt6_Qt6-6.6.3-1-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:9e4defa75f50e749f03184f10a49e8830e3096a1157ae821741625e26e516169"},
+ {file = "PyQt6_Qt6-6.6.3-1-py3-none-win_amd64.whl", hash = "sha256:3bcd70529fe97eccd05b3b89867c461e441ed1d8db7bc238e28478aa9bec002d"},
+ {file = "PyQt6_Qt6-6.6.3-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:1674d161ea49a36e9146fd652e789d413a246cc2455ac8bf9c76902b4bd3b986"},
+ {file = "PyQt6_Qt6-6.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:18fe1fbbc709dcff5c513e3cac7b1d7b630fb189e6d32a1601f193d73d326f42"},
+ {file = "PyQt6_Qt6-6.6.3-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:6ae465dfcbb819dae5e18e8c96abba735b5bb2f16c066497dda4b7ca17c066ce"},
+ {file = "PyQt6_Qt6-6.6.3-py3-none-win_amd64.whl", hash = "sha256:dbe509eccc579f8818b2b2e8ba93e27986facdd1d4d83ef1c7d9bd47cdf32651"},
+]
+
+[[package]]
+name = "pyqt6-sip"
+version = "13.6.0"
+requires_python = ">=3.7"
+summary = "The sip module support for PyQt6"
+files = [
+ {file = "PyQt6_sip-13.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d6b5f699aaed0ac1fcd23e8fbca70d8a77965831b7c1ce474b81b1678817a49d"},
+ {file = "PyQt6_sip-13.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8c282062125eea5baf830c6998587d98c50be7c3a817a057fb95fef647184012"},
+ {file = "PyQt6_sip-13.6.0-cp310-cp310-win32.whl", hash = "sha256:fa759b6339ff7e25f9afe2a6b651b775f0a36bcb3f5fa85e81a90d3b033c83f4"},
+ {file = "PyQt6_sip-13.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:8f9df9f7ccd8a9f0f1d36948c686f03ce1a1281543a3e636b7b7d5e086e1a436"},
+ {file = "PyQt6_sip-13.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b9c6b6f9cfccb48cbb78a59603145a698fb4ffd176764d7083e5bf47631d8df"},
+ {file = "PyQt6_sip-13.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:86a7b67c64436e32bffa9c28c9f21bf14a9faa54991520b12c3f6f435f24df7f"},
+ {file = "PyQt6_sip-13.6.0-cp311-cp311-win32.whl", hash = "sha256:58f68a48400e0b3d1ccb18090090299bad26e3aed7ccb7057c65887b79b8aeea"},
+ {file = "PyQt6_sip-13.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0dfd22cfedd87e96f9d51e0778ca2ba3dc0be83e424e9e0f98f6994d8d9c90f0"},
+ {file = "PyQt6_sip-13.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3bf03e130fbfd75c9c06e687b86ba375410c7a9e835e4e03285889e61dd4b0c4"},
+ {file = "PyQt6_sip-13.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:43fb8551796030aae3d66d6e35e277494071ec6172cd182c9569ab7db268a2f5"},
+ {file = "PyQt6_sip-13.6.0-cp312-cp312-win32.whl", hash = "sha256:13885361ca2cb2f5085d50359ba61b3fabd41b139fb58f37332acbe631ef2357"},
+ {file = "PyQt6_sip-13.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:24441032a29791e82beb7dfd76878339058def0e97fdb7c1cea517f3a0e6e96b"},
+ {file = "PyQt6_sip-13.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98bf954103b087162fa63b3a78f30b0b63da22fd6450b610ec1b851dbb798228"},
+ {file = "PyQt6_sip-13.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:39854dba35f8e5a4288da26ecb5f40b4c5ec1932efffb3f49d5ea435a7f37fb3"},
+ {file = "PyQt6_sip-13.6.0-cp39-cp39-win32.whl", hash = "sha256:747f6ca44af81777a2c696bd501bc4815a53ec6fc94d4e25830e10bc1391f8ab"},
+ {file = "PyQt6_sip-13.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:33ea771fe777eb0d1a2c3ef35bcc3f7a286eb3ff09cd5b2fdd3d87d1f392d7e8"},
+ {file = "PyQt6_sip-13.6.0.tar.gz", hash = "sha256:2486e1588071943d4f6657ba09096dc9fffd2322ad2c30041e78ea3f037b5778"},
+]
+
+[[package]]
+name = "pyqt6-webengine"
+version = "6.6.0"
+requires_python = ">=3.7"
+summary = "Python bindings for the Qt WebEngine framework"
+dependencies = [
+ "PyQt6-WebEngine-Qt6>=6.6.0",
+ "PyQt6-sip<14,>=13.4",
+ "PyQt6>=6.2.0",
+]
+files = [
+ {file = "PyQt6_WebEngine-6.6.0-cp37-abi3-macosx_10_14_universal2.whl", hash = "sha256:cb7793f06525ca054fcc6039afd93e23b82228b880d0b1301ce635f7f3ed2edf"},
+ {file = "PyQt6_WebEngine-6.6.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:fded35fba636c4916fec84aa7c6840ad2e75d211462feb3e966f9545a59d56e6"},
+ {file = "PyQt6_WebEngine-6.6.0-cp37-abi3-win_amd64.whl", hash = "sha256:9d542738ed6e11c1978ce59035c07627def7c63eef0f59581d327f01209141bc"},
+ {file = "PyQt6_WebEngine-6.6.0.tar.gz", hash = "sha256:d50b984c3f85e409e692b156132721522d4e8cf9b6c25e0cf927eea2dfb39487"},
+]
+
+[[package]]
+name = "pyqt6-webengine-qt6"
+version = "6.6.3"
+summary = "The subset of a Qt installation needed by PyQt6-WebEngine."
+files = [
+ {file = "PyQt6_WebEngine_Qt6-6.6.3-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:4ce545accc5a58d62bde7ce18253a70b3970c28a24c94642ec89537352c23974"},
+ {file = "PyQt6_WebEngine_Qt6-6.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a82308115193a6f220d6310453d1edbe30f1a8ac32c01fc813865319a2199959"},
+ {file = "PyQt6_WebEngine_Qt6-6.6.3-py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:87f636e23e9c1a1326bf91d273da6bdfed2f42fcc243e527e7b0dbc4f39e70dd"},
+ {file = "PyQt6_WebEngine_Qt6-6.6.3-py3-none-win_amd64.whl", hash = "sha256:3d3e81db62f166f5fbc24b28660fe81c1be4390282bfb9bb48111f32a6bd0f51"},
+]
+
+[[package]]
+name = "pytest"
+version = "8.1.1"
+requires_python = ">=3.8"
+summary = "pytest: simple powerful testing with Python"
+dependencies = [
+ "colorama; sys_platform == \"win32\"",
+ "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
+ "iniconfig",
+ "packaging",
+ "pluggy<2.0,>=1.4",
+ "tomli>=1; python_version < \"3.11\"",
+]
+files = [
+ {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
+ {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
+]
+
+[[package]]
+name = "pytest-benchmark"
+version = "4.0.0"
+requires_python = ">=3.7"
+summary = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer."
+dependencies = [
+ "py-cpuinfo",
+ "pytest>=3.8",
+]
+files = [
+ {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"},
+ {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"},
+]
+
+[[package]]
+name = "pytest-cov"
+version = "5.0.0"
+requires_python = ">=3.8"
+summary = "Pytest plugin for measuring coverage."
+dependencies = [
+ "coverage[toml]>=5.2.1",
+ "pytest>=4.6",
+]
+files = [
+ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
+ {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
+]
+
+[[package]]
+name = "pytest-httpserver"
+version = "1.0.10"
+requires_python = ">=3.8"
+summary = "pytest-httpserver is a httpserver for pytest"
+dependencies = [
+ "Werkzeug>=2.0.0",
+]
+files = [
+ {file = "pytest_httpserver-1.0.10-py3-none-any.whl", hash = "sha256:d40e0cc3d61ed6e4d80f52a796926d557a7db62b17e43b3e258a78a3c34becb9"},
+ {file = "pytest_httpserver-1.0.10.tar.gz", hash = "sha256:77b9fbc2eb0a129cfbbacc8fe57e8cafe071d506489f31fe31e62f1b332d9905"},
+]
+
+[[package]]
+name = "pytest-pretty"
+version = "1.2.0"
+requires_python = ">=3.7"
+summary = "pytest plugin for printing summary data as I want it"
+dependencies = [
+ "pytest>=7",
+ "rich>=12",
+]
+files = [
+ {file = "pytest_pretty-1.2.0-py3-none-any.whl", hash = "sha256:6f79122bf53864ae2951b6c9e94d7a06a87ef753476acd4588aeac018f062036"},
+ {file = "pytest_pretty-1.2.0.tar.gz", hash = "sha256:105a355f128e392860ad2c478ae173ff96d2f03044692f9818ff3d49205d3a60"},
+]
+
+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+requires_python = ">=3.8"
+summary = "Read key-value pairs from a .env file and set them as environment variables"
+files = [
+ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},
+ {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},
+]
+
+[[package]]
+name = "python-multipart"
+version = "0.0.9"
+requires_python = ">=3.8"
+summary = "A streaming multipart parser for Python"
+files = [
+ {file = "python_multipart-0.0.9-py3-none-any.whl", hash = "sha256:97ca7b8ea7b05f977dc3849c3ba99d51689822fab725c3703af7c866a0c2b215"},
+ {file = "python_multipart-0.0.9.tar.gz", hash = "sha256:03f54688c663f1b7977105f021043b0793151e4cb1c1a9d4a11fc13d622c4026"},
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0"
+requires_python = ">=3.6"
+summary = "YAML parser and emitter for Python"
+files = [
+ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+ {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+ {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+ {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+ {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+ {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
+ {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+ {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+ {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+ {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+
+[[package]]
+name = "qrcode"
+version = "7.4.2"
+requires_python = ">=3.7"
+summary = "QR Code image generator"
+dependencies = [
+ "colorama; platform_system == \"Windows\"",
+ "pypng",
+ "typing-extensions",
+]
+files = [
+ {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"},
+ {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"},
+]
+
+[[package]]
+name = "qrcode"
+version = "7.4.2"
+extras = ["pil"]
+requires_python = ">=3.7"
+summary = "QR Code image generator"
+dependencies = [
+ "pillow>=9.1.0",
+ "qrcode==7.4.2",
+]
+files = [
+ {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"},
+ {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"},
+]
+
+[[package]]
+name = "redis"
+version = "4.6.0"
+requires_python = ">=3.7"
+summary = "Python client for Redis database and key-value store"
+dependencies = [
+ "async-timeout>=4.0.2; python_full_version <= \"3.11.2\"",
+]
+files = [
+ {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"},
+ {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"},
+]
+
+[[package]]
+name = "requests"
+version = "2.31.0"
+requires_python = ">=3.7"
+summary = "Python HTTP for Humans."
+dependencies = [
+ "certifi>=2017.4.17",
+ "charset-normalizer<4,>=2",
+ "idna<4,>=2.5",
+ "urllib3<3,>=1.21.1",
+]
+files = [
+ {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
+ {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+]
+
+[[package]]
+name = "rich"
+version = "13.4.2"
+requires_python = ">=3.7.0"
+summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+dependencies = [
+ "markdown-it-py>=2.2.0",
+ "pygments<3.0.0,>=2.13.0",
+]
+files = [
+ {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"},
+ {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"},
+]
+
+[[package]]
+name = "ruff"
+version = "0.4.1"
+requires_python = ">=3.7"
+summary = "An extremely fast Python linter and code formatter, written in Rust."
+files = [
+ {file = "ruff-0.4.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2d9ef6231e3fbdc0b8c72404a1a0c46fd0dcea84efca83beb4681c318ea6a953"},
+ {file = "ruff-0.4.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9485f54a7189e6f7433e0058cf8581bee45c31a25cd69009d2a040d1bd4bfaef"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2921ac03ce1383e360e8a95442ffb0d757a6a7ddd9a5be68561a671e0e5807e"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eec8d185fe193ad053eda3a6be23069e0c8ba8c5d20bc5ace6e3b9e37d246d3f"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa27d9d72a94574d250f42b7640b3bd2edc4c58ac8ac2778a8c82374bb27984"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f1ee41580bff1a651339eb3337c20c12f4037f6110a36ae4a2d864c52e5ef954"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0926cefb57fc5fced629603fbd1a23d458b25418681d96823992ba975f050c2b"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c6e37f2e3cd74496a74af9a4fa67b547ab3ca137688c484749189bf3a686ceb"},
+ {file = "ruff-0.4.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd703a5975ac1998c2cc5e9494e13b28f31e66c616b0a76e206de2562e0843c"},
+ {file = "ruff-0.4.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b92f03b4aa9fa23e1799b40f15f8b95cdc418782a567d6c43def65e1bbb7f1cf"},
+ {file = "ruff-0.4.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c859f294f8633889e7d77de228b203eb0e9a03071b72b5989d89a0cf98ee262"},
+ {file = "ruff-0.4.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b34510141e393519a47f2d7b8216fec747ea1f2c81e85f076e9f2910588d4b64"},
+ {file = "ruff-0.4.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6e68d248ed688b9d69fd4d18737edcbb79c98b251bba5a2b031ce2470224bdf9"},
+ {file = "ruff-0.4.1-py3-none-win32.whl", hash = "sha256:b90506f3d6d1f41f43f9b7b5ff845aeefabed6d2494307bc7b178360a8805252"},
+ {file = "ruff-0.4.1-py3-none-win_amd64.whl", hash = "sha256:c7d391e5936af5c9e252743d767c564670dc3889aff460d35c518ee76e4b26d7"},
+ {file = "ruff-0.4.1-py3-none-win_arm64.whl", hash = "sha256:a1eaf03d87e6a7cd5e661d36d8c6e874693cb9bc3049d110bc9a97b350680c43"},
+ {file = "ruff-0.4.1.tar.gz", hash = "sha256:d592116cdbb65f8b1b7e2a2b48297eb865f6bdc20641879aa9d7b9c11d86db79"},
+]
+
+[[package]]
+name = "sentry-sdk"
+version = "1.45.0"
+summary = "Python client for Sentry (https://sentry.io)"
+dependencies = [
+ "certifi",
+ "urllib3>=1.26.11; python_version >= \"3.6\"",
+]
+files = [
+ {file = "sentry-sdk-1.45.0.tar.gz", hash = "sha256:509aa9678c0512344ca886281766c2e538682f8acfa50fd8d405f8c417ad0625"},
+ {file = "sentry_sdk-1.45.0-py2.py3-none-any.whl", hash = "sha256:1ce29e30240cc289a027011103a8c83885b15ef2f316a60bcc7c5300afa144f1"},
+]
+
+[[package]]
+name = "shellingham"
+version = "1.5.0.post1"
+requires_python = ">=3.7"
+summary = "Tool to Detect Surrounding Shell"
+files = [
+ {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"},
+ {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"},
+]
+
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+requires_python = ">=3.7"
+summary = "Sniff out which async library your code is running under"
+files = [
+ {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+ {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+]
+
+[[package]]
+name = "starlette"
+version = "0.37.2"
+requires_python = ">=3.8"
+summary = "The little ASGI library that shines."
+dependencies = [
+ "anyio<5,>=3.4.0",
+ "typing-extensions>=3.10.0; python_version < \"3.10\"",
+]
+files = [
+ {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
+ {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
+]
+
+[[package]]
+name = "tomli"
+version = "2.0.1"
+requires_python = ">=3.7"
+summary = "A lil' TOML parser"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
+
+[[package]]
+name = "typer"
+version = "0.12.3"
+requires_python = ">=3.7"
+summary = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+dependencies = [
+ "click>=8.0.0",
+ "rich>=10.11.0",
+ "shellingham>=1.3.0",
+ "typing-extensions>=3.7.4.3",
+]
+files = [
+ {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"},
+ {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"},
+]
+
+[[package]]
+name = "typer"
+version = "0.12.3"
+extras = ["all"]
+requires_python = ">=3.7"
+summary = "Typer, build great CLIs. Easy to code. Based on Python type hints."
+dependencies = [
+ "typer==0.12.3",
+]
+files = [
+ {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"},
+ {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.11.0"
+requires_python = ">=3.8"
+summary = "Backported and Experimental Type Hints for Python 3.8+"
+files = [
+ {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
+ {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
+]
+
+[[package]]
+name = "urllib3"
+version = "2.0.3"
+requires_python = ">=3.7"
+summary = "HTTP library with thread-safe connection pooling, file post, and more."
+files = [
+ {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
+ {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.29.0"
+requires_python = ">=3.8"
+summary = "The lightning-fast ASGI server."
+dependencies = [
+ "click>=7.0",
+ "h11>=0.8",
+ "typing-extensions>=4.0; python_version < \"3.11\"",
+]
+files = [
+ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
+ {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
+]
+
+[[package]]
+name = "uvicorn"
+version = "0.29.0"
+extras = ["standard"]
+requires_python = ">=3.8"
+summary = "The lightning-fast ASGI server."
+dependencies = [
+ "colorama>=0.4; sys_platform == \"win32\"",
+ "httptools>=0.5.0",
+ "python-dotenv>=0.13",
+ "pyyaml>=5.1",
+ "uvicorn==0.29.0",
+ "uvloop!=0.15.0,!=0.15.1,>=0.14.0; (sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"",
+ "watchfiles>=0.13",
+ "websockets>=10.4",
+]
+files = [
+ {file = "uvicorn-0.29.0-py3-none-any.whl", hash = "sha256:2c2aac7ff4f4365c206fd773a39bf4ebd1047c238f8b8268ad996829323473de"},
+ {file = "uvicorn-0.29.0.tar.gz", hash = "sha256:6a69214c0b6a087462412670b3ef21224fa48cae0e452b5883e8e8bdfdd11dd0"},
+]
+
+[[package]]
+name = "uvloop"
+version = "0.17.0"
+requires_python = ">=3.7"
+summary = "Fast implementation of asyncio event loop on top of libuv"
+files = [
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce9f61938d7155f79d3cb2ffa663147d4a76d16e08f65e2c66b77bd41b356718"},
+ {file = "uvloop-0.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:68532f4349fd3900b839f588972b3392ee56042e440dd5873dfbbcd2cc67617c"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0949caf774b9fcefc7c5756bacbbbd3fc4c05a6b7eebc7c7ad6f825b23998d6d"},
+ {file = "uvloop-0.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff3d00b70ce95adce264462c930fbaecb29718ba6563db354608f37e49e09024"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a5abddb3558d3f0a78949c750644a67be31e47936042d4f6c888dd6f3c95f4aa"},
+ {file = "uvloop-0.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8efcadc5a0003d3a6e887ccc1fb44dec25594f117a94e3127954c05cf144d811"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3378eb62c63bf336ae2070599e49089005771cc651c8769aaad72d1bd9385a7c"},
+ {file = "uvloop-0.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6aafa5a78b9e62493539456f8b646f85abc7093dd997f4976bb105537cf2635e"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c686a47d57ca910a2572fddfe9912819880b8765e2f01dc0dd12a9bf8573e539"},
+ {file = "uvloop-0.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:864e1197139d651a76c81757db5eb199db8866e13acb0dfe96e6fc5d1cf45fc4"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2a6149e1defac0faf505406259561bc14b034cdf1d4711a3ddcdfbaa8d825a05"},
+ {file = "uvloop-0.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6708f30db9117f115eadc4f125c2a10c1a50d711461699a0cbfaa45b9a78e376"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a4aee22ece20958888eedbad20e4dbb03c37533e010fb824161b4f05e641f738"},
+ {file = "uvloop-0.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:307958f9fc5c8bb01fad752d1345168c0abc5d62c1b72a4a8c6c06f042b45b20"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ebeeec6a6641d0adb2ea71dcfb76017602ee2bfd8213e3fcc18d8f699c5104f"},
+ {file = "uvloop-0.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1436c8673c1563422213ac6907789ecb2b070f5939b9cbff9ef7113f2b531595"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8887d675a64cfc59f4ecd34382e5b4f0ef4ae1da37ed665adba0c2badf0d6578"},
+ {file = "uvloop-0.17.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3db8de10ed684995a7f34a001f15b374c230f7655ae840964d51496e2f8a8474"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7d37dccc7ae63e61f7b96ee2e19c40f153ba6ce730d8ba4d3b4e9738c1dccc1b"},
+ {file = "uvloop-0.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cbbe908fda687e39afd6ea2a2f14c2c3e43f2ca88e3a11964b297822358d0e6c"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d97672dc709fa4447ab83276f344a165075fd9f366a97b712bdd3fee05efae8"},
+ {file = "uvloop-0.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e507c9ee39c61bfddd79714e4f85900656db1aec4d40c6de55648e85c2799c"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c092a2c1e736086d59ac8e41f9c98f26bbf9b9222a76f21af9dfe949b99b2eb9"},
+ {file = "uvloop-0.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:30babd84706115626ea78ea5dbc7dd8d0d01a2e9f9b306d24ca4ed5796c66ded"},
+ {file = "uvloop-0.17.0.tar.gz", hash = "sha256:0ddf6baf9cf11a1a22c71487f39f15b2cf78eb5bde7e5b45fbb99e8a9d91b9e1"},
+]
+
+[[package]]
+name = "watchfiles"
+version = "0.19.0"
+requires_python = ">=3.7"
+summary = "Simple, modern and high performance file watching and code reload in python."
+dependencies = [
+ "anyio>=3.0.0",
+]
+files = [
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:91633e64712df3051ca454ca7d1b976baf842d7a3640b87622b323c55f3345e7"},
+ {file = "watchfiles-0.19.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:b6577b8c6c8701ba8642ea9335a129836347894b666dd1ec2226830e263909d3"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:18b28f6ad871b82df9542ff958d0c86bb0d8310bb09eb8e87d97318a3b5273af"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fac19dc9cbc34052394dbe81e149411a62e71999c0a19e1e09ce537867f95ae0"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:09ea3397aecbc81c19ed7f025e051a7387feefdb789cf768ff994c1228182fda"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0376deac92377817e4fb8f347bf559b7d44ff556d9bc6f6208dd3f79f104aaf"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c75eff897786ee262c9f17a48886f4e98e6cfd335e011c591c305e5d083c056"},
+ {file = "watchfiles-0.19.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb5d45c4143c1dd60f98a16187fd123eda7248f84ef22244818c18d531a249d1"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:79c533ff593db861ae23436541f481ec896ee3da4e5db8962429b441bbaae16e"},
+ {file = "watchfiles-0.19.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3d7d267d27aceeeaa3de0dd161a0d64f0a282264d592e335fff7958cc0cbae7c"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win32.whl", hash = "sha256:176a9a7641ec2c97b24455135d58012a5be5c6217fc4d5fef0b2b9f75dbf5154"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_amd64.whl", hash = "sha256:945be0baa3e2440151eb3718fd8846751e8b51d8de7b884c90b17d271d34cae8"},
+ {file = "watchfiles-0.19.0-cp37-abi3-win_arm64.whl", hash = "sha256:0089c6dc24d436b373c3c57657bf4f9a453b13767150d17284fc6162b2791911"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cae3dde0b4b2078f31527acff6f486e23abed307ba4d3932466ba7cdd5ecec79"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f3920b1285a7d3ce898e303d84791b7bf40d57b7695ad549dc04e6a44c9f120"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9afd0d69429172c796164fd7fe8e821ade9be983f51c659a38da3faaaaac44dc"},
+ {file = "watchfiles-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68dce92b29575dda0f8d30c11742a8e2b9b8ec768ae414b54f7453f27bdf9545"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:5569fc7f967429d4bc87e355cdfdcee6aabe4b620801e2cf5805ea245c06097c"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5471582658ea56fca122c0f0d0116a36807c63fefd6fdc92c71ca9a4491b6b48"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b538014a87f94d92f98f34d3e6d2635478e6be6423a9ea53e4dd96210065e193"},
+ {file = "watchfiles-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20b44221764955b1e703f012c74015306fb7e79a00c15370785f309b1ed9aa8d"},
+ {file = "watchfiles-0.19.0.tar.gz", hash = "sha256:d9b073073e048081e502b6c6b0b88714c026a1a4c890569238d04aca5f9ca74b"},
+]
+
+[[package]]
+name = "websockets"
+version = "11.0.3"
+requires_python = ">=3.7"
+summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+files = [
+ {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac"},
+ {file = "websockets-11.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d"},
+ {file = "websockets-11.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11"},
+ {file = "websockets-11.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4"},
+ {file = "websockets-11.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526"},
+ {file = "websockets-11.0.3-cp310-cp310-win32.whl", hash = "sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69"},
+ {file = "websockets-11.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288"},
+ {file = "websockets-11.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b"},
+ {file = "websockets-11.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf"},
+ {file = "websockets-11.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd"},
+ {file = "websockets-11.0.3-cp311-cp311-win32.whl", hash = "sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c"},
+ {file = "websockets-11.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae"},
+ {file = "websockets-11.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86"},
+ {file = "websockets-11.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e"},
+ {file = "websockets-11.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788"},
+ {file = "websockets-11.0.3-cp38-cp38-win32.whl", hash = "sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74"},
+ {file = "websockets-11.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd"},
+ {file = "websockets-11.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b"},
+ {file = "websockets-11.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1"},
+ {file = "websockets-11.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311"},
+ {file = "websockets-11.0.3-cp39-cp39-win32.whl", hash = "sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128"},
+ {file = "websockets-11.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b"},
+ {file = "websockets-11.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280"},
+ {file = "websockets-11.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4"},
+ {file = "websockets-11.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602"},
+ {file = "websockets-11.0.3-py3-none-any.whl", hash = "sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6"},
+ {file = "websockets-11.0.3.tar.gz", hash = "sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016"},
+]
+
+[[package]]
+name = "werkzeug"
+version = "2.3.6"
+requires_python = ">=3.8"
+summary = "The comprehensive WSGI web application library."
+dependencies = [
+ "MarkupSafe>=2.1.1",
+]
+files = [
+ {file = "Werkzeug-2.3.6-py3-none-any.whl", hash = "sha256:935539fa1413afbb9195b24880778422ed620c0fc09670945185cce4d91a8890"},
+ {file = "Werkzeug-2.3.6.tar.gz", hash = "sha256:98c774df2f91b05550078891dee5f0eb0cb797a522c757a2452b9cee5b202330"},
+]
+
+[[package]]
+name = "win32-setctime"
+version = "1.1.0"
+requires_python = ">=3.5"
+summary = "A small Python utility to set file creation time on Windows"
+files = [
+ {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
+ {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
+]
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..7a805a331aa37104073e92b58007659db9b3690d
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,81 @@
+[project]
+name = "HibiAPI"
+version = "0.8.0"
+description = "A program that implements easy-to-use APIs for a variety of commonly used sites"
+readme = "README.md"
+license = { text = "Apache-2.0" }
+authors = [{ name = "mixmoe", email = "admin@obfs.dev" }]
+requires-python = ">=3.9,<4.0"
+dependencies = [
+ "fastapi>=0.110.2",
+ "httpx[http2]>=0.27.0",
+ "uvicorn[standard]>=0.29.0",
+ "confuse>=2.0.1",
+ "loguru>=0.7.2",
+ "python-dotenv>=1.0.1",
+ "qrcode[pil]>=7.4.2",
+ "pycryptodomex>=3.20.0",
+ "sentry-sdk>=1.45.0",
+ "pydantic<2.0.0,>=1.9.0",
+ "python-multipart>=0.0.9",
+ "cashews[diskcache,redis]>=7.0.2",
+ "typing-extensions>=4.11.0",
+ "typer[all]>=0.12.3",
+]
+
+[project.urls]
+homepage = "https://api.obfs.dev"
+repository = "https://github.com/mixmoe/HibiAPI"
+documentation = "https://github.com/mixmoe/HibiAPI/wiki"
+
+[project.optional-dependencies]
+scripts = ["pyqt6>=6.6.1", "pyqt6-webengine>=6.6.0", "requests>=2.31.0"]
+
+[project.scripts]
+hibiapi = "hibiapi.__main__:cli"
+
+[build-system]
+requires = ["pdm-backend"]
+build-backend = "pdm.backend"
+
+[tool.pdm.dev-dependencies]
+dev = [
+ "pytest>=8.1.1",
+ "pytest-httpserver>=1.0.10",
+ "pytest-cov>=5.0.0",
+ "pytest-benchmark>=4.0.0",
+ "pytest-pretty>=1.2.0",
+ "ruff>=0.4.1",
+]
+
+[tool.pdm.build]
+includes = []
+
+[tool.pdm.scripts]
+test = """pytest \
+ --cov ./hibiapi/ \
+ --cov-report xml \
+ --cov-report term-missing \
+ ./test"""
+start = "hibiapi run"
+lint = "ruff check"
+
+[tool.pyright]
+typeCheckingMode = "standard"
+
+[tool.ruff]
+lint.select = [
+ # pycodestyle
+ "E",
+ # Pyflakes
+ "F",
+ # pyupgrade
+ "UP",
+ # flake8-bugbear
+ "B",
+ # flake8-simplify
+ "SIM",
+ # isort
+ "I",
+]
+target-version = "py39"
diff --git a/scripts/pixiv_login.py b/scripts/pixiv_login.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ec33213df2ed06351cab5bcecaa147ddd8c54ab
--- /dev/null
+++ b/scripts/pixiv_login.py
@@ -0,0 +1,212 @@
+import hashlib
+import sys
+from base64 import urlsafe_b64encode
+from secrets import token_urlsafe
+from typing import Any, Callable, Optional, TypeVar
+from urllib.parse import parse_qs, urlencode
+
+import requests
+from loguru import logger as _logger
+from PyQt6.QtCore import QUrl
+from PyQt6.QtNetwork import QNetworkCookie
+from PyQt6.QtWebEngineCore import (
+ QWebEngineUrlRequestInfo,
+ QWebEngineUrlRequestInterceptor,
+)
+from PyQt6.QtWebEngineWidgets import QWebEngineView
+from PyQt6.QtWidgets import (
+ QApplication,
+ QHBoxLayout,
+ QMainWindow,
+ QPlainTextEdit,
+ QPushButton,
+ QVBoxLayout,
+ QWidget,
+)
+
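+# Identity and OAuth constants for the Pixiv mobile login flow: the user agent
+# and client credentials presented as the Pixiv Android app, plus the callback
+# and token endpoints used to exchange the authorization code.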
+USER_AGENT = "PixivAndroidApp/5.0.234 (Android 11; Pixel 5)"
+REDIRECT_URI = "https://app-api.pixiv.net/web/v1/users/auth/pixiv/callback"
+LOGIN_URL = "https://app-api.pixiv.net/web/v1/login"
+AUTH_TOKEN_URL = "https://oauth.secure.pixiv.net/auth/token"
+CLIENT_ID = "MOBrBDS8blbauoSck0ZfDbtuzpyT"
+CLIENT_SECRET = "lsACyCD94FhDUtGTXi3QzcFE2uU1hqtDaKeqrdwj"
+
+
+app = QApplication(sys.argv)
+logger = _logger.opt(colors=True)
+
+
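+# Logs every request issued by the embedded browser; once the Pixiv OAuth
+# callback URL is requested, the authorization code is pulled from its query
+# string and forwarded to the registered listener.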
+class RequestInterceptor(QWebEngineUrlRequestInterceptor):
+ code_listener: Optional[Callable[[str], None]] = None
+
+ def __init__(self):
+ super().__init__()
+
+ def interceptRequest(self, info: QWebEngineUrlRequestInfo) -> None:
+ method = info.requestMethod().data().decode()
+ url = info.requestUrl().url()
+
+ if (
+ self.code_listener
+ and "app-api.pixiv.net" in info.requestUrl().host()
+ and info.requestUrl().path().endswith("callback")
+ ):
+ query = parse_qs(info.requestUrl().query())
+ code, *_ = query["code"]
+ self.code_listener(code)
+
+ logger.debug(f"{method} {url}")
+
+
+class WebView(QWebEngineView):
+ def __init__(self):
+ super().__init__()
+
+ self.cookies: dict[str, str] = {}
+
+ page = self.page()
+ assert page is not None
+ profile = page.profile()
+ assert profile is not None
+ profile.setHttpUserAgent(USER_AGENT)
+ page.contentsSize().setHeight(768)
+ page.contentsSize().setWidth(432)
+
+ self.interceptor = RequestInterceptor()
+ profile.setUrlRequestInterceptor(self.interceptor)
+ cookie_store = profile.cookieStore()
+ assert cookie_store is not None
+ cookie_store.cookieAdded.connect(self._on_cookie_added)
+
+ self.setFixedHeight(896)
+ self.setFixedWidth(414)
+
+ self.start("about:blank")
+
+ def start(self, goto: str):
+ self.page().profile().cookieStore().deleteAllCookies() # type: ignore
+ self.cookies.clear()
+ self.load(QUrl(goto))
+
+ def _on_cookie_added(self, cookie: QNetworkCookie):
+ domain = cookie.domain()
+ name = cookie.name().data().decode()
+ value = cookie.value().data().decode()
+ self.cookies[name] = value
+ logger.debug(f"Set-Cookie {domain} {name} -> {value!r}")
+
+
+class ResponseDataWidget(QWidget):
+ def __init__(self, webview: WebView):
+ super().__init__()
+ self.webview = webview
+
+ layout = QVBoxLayout()
+
+ self.cookie_paste = QPlainTextEdit()
+ self.cookie_paste.setDisabled(True)
+ self.cookie_paste.setPlaceholderText("得到的登录数据将会展示在这里")
+
+ layout.addWidget(self.cookie_paste)
+
+ copy_button = QPushButton()
+ copy_button.clicked.connect(self._on_clipboard_copy)
+ copy_button.setText("复制上述登录数据到剪贴板")
+
+ layout.addWidget(copy_button)
+
+ self.setLayout(layout)
+
+ def _on_clipboard_copy(self, checked: bool):
+ if paste_string := self.cookie_paste.toPlainText().strip():
+ app.clipboard().setText(paste_string) # type: ignore
+
+
+_T = TypeVar("_T", bound="LoginPhrase")
+
+
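+# Drives the OAuth 2.0 PKCE handshake: generate a code_verifier/code_challenge
+# pair, open the Pixiv login page with the challenge attached, then trade the
+# received authorization code (plus the verifier) for access/refresh tokens.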
+class LoginPhrase:
+ @staticmethod
+ def s256(data: bytes):
+ return urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
+
+ @classmethod
+ def oauth_pkce(cls) -> tuple[str, str]:
+ code_verifier = token_urlsafe(32)
+ code_challenge = cls.s256(code_verifier.encode())
+ return code_verifier, code_challenge
+
+ def __init__(self: _T, url_open_callback: Callable[[str, _T], None]):
+ self.code_verifier, self.code_challenge = self.oauth_pkce()
+
+ login_params = {
+ "code_challenge": self.code_challenge,
+ "code_challenge_method": "S256",
+ "client": "pixiv-android",
+ }
+ login_url = f"{LOGIN_URL}?{urlencode(login_params)}"
+ url_open_callback(login_url, self)
+
+ def code_received(self, code: str):
+ response = requests.post(
+ AUTH_TOKEN_URL,
+ data={
+ "client_id": CLIENT_ID,
+ "client_secret": CLIENT_SECRET,
+ "code": code,
+ "code_verifier": self.code_verifier,
+ "grant_type": "authorization_code",
+ "include_policy": "true",
+ "redirect_uri": REDIRECT_URI,
+ },
+ headers={"User-Agent": USER_AGENT},
+ )
+ response.raise_for_status()
+ data: dict[str, Any] = response.json()
+
+ access_token = data["access_token"]
+ refresh_token = data["refresh_token"]
+ expires_in = data.get("expires_in", 0)
+
+ return_text = ""
+ return_text += f"access_token: {access_token}\n"
+ return_text += f"refresh_token: {refresh_token}\n"
+ return_text += f"expires_in: {expires_in}\n"
+
+ return return_text
+
+
+class MainWindow(QMainWindow):
+ def __init__(self):
+ super().__init__()
+ self.setWindowTitle("Pixiv login helper")
+
+ layout = QHBoxLayout()
+
+ self.webview = WebView()
+ layout.addWidget(self.webview)
+
+ self.form = ResponseDataWidget(self.webview)
+ layout.addWidget(self.form)
+
+ widget = QWidget()
+ widget.setLayout(layout)
+
+ self.setCentralWidget(widget)
+
+
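+# Wire everything together: show the login window, navigate the embedded
+# browser to the generated login URL, and render the token response into the
+# copyable text box once the interceptor captures the authorization code.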
+if __name__ == "__main__":
+ window = MainWindow()
+ window.show()
+
+ def url_open_callback(url: str, login_phrase: LoginPhrase):
+ def code_listener(code: str):
+ response = login_phrase.code_received(code)
+ window.form.cookie_paste.setPlainText(response)
+
+ window.webview.interceptor.code_listener = code_listener
+ window.webview.start(url)
+
+ LoginPhrase(url_open_callback)
+
+    sys.exit(app.exec())
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1 @@
+
diff --git a/test/test_base.py b/test/test_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a50cdf490ebaa9329eab44a7e0d621d9fbfb201
--- /dev/null
+++ b/test/test_base.py
@@ -0,0 +1,124 @@
+from typing import Annotated, Any
+
+import pytest
+from fastapi import Depends
+from fastapi.testclient import TestClient
+from pytest_benchmark.fixture import BenchmarkFixture
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app
+
+ with TestClient(app, base_url="http://testserver/") as client:
+ yield client
+
+
+def test_openapi(client: TestClient, in_stress: bool = False):
+ response = client.get("/openapi.json")
+ assert response.status_code == 200
+ assert response.json()
+
+ if in_stress:
+ return True
+
+
+def test_doc_page(client: TestClient, in_stress: bool = False):
+ response = client.get("/docs")
+ assert response.status_code == 200
+ assert response.text
+
+ response = client.get("/docs/test")
+ assert response.status_code == 200
+ assert response.text
+
+ if in_stress:
+ return True
+
+
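+# The plain endpoint tests above double as benchmark targets: passing
+# in_stress=True makes them return True, so the assert on the value returned
+# by benchmark.pedantic holds.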
+def test_openapi_stress(client: TestClient, benchmark: BenchmarkFixture):
+ assert benchmark.pedantic(
+ test_openapi,
+ args=(client, True),
+ rounds=200,
+ warmup_rounds=10,
+ iterations=3,
+ )
+
+
+def test_doc_page_stress(client: TestClient, benchmark: BenchmarkFixture):
+ assert benchmark.pedantic(
+ test_doc_page, args=(client, True), rounds=200, iterations=3
+ )
+
+
+def test_notfound(client: TestClient):
+ from hibiapi.utils.exceptions import ExceptionReturn
+
+ response = client.get("/notexistpath")
+ assert response.status_code == 404
+ assert ExceptionReturn.parse_obj(response.json())
+
+
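+# Exercises the BaseNetClient/BaseEndpoint/SlashRouter plumbing end to end
+# against httpbin.org: a dynamically registered /test route must echo form
+# data (including injected headers) and map upstream errors to ExceptionReturn.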
+@pytest.mark.xfail(reason="not implemented yet")
+def test_net_request():
+ from hibiapi.utils.net import BaseNetClient
+ from hibiapi.utils.routing import BaseEndpoint, SlashRouter
+
+ test_headers = {"x-test-header": "random-string"}
+ test_data = {"test": "test"}
+
+ class TestEndpoint(BaseEndpoint):
+ base = "https://httpbin.org"
+
+ async def request(self, path: str, params: dict[str, Any]):
+ url = self._join(self.base, path, params)
+ response = await self.client.post(url, data=params)
+ response.raise_for_status()
+ return response.json()
+
+ async def form(self, *, data: dict[str, Any]):
+ return await self.request("/post", data)
+
+ async def teapot(self):
+ return await self.request("/status/{codes}", {"codes": 418})
+
+ class TestNetClient(BaseNetClient):
+ pass
+
+ async def net_client():
+ async with TestNetClient(headers=test_headers) as client:
+ yield TestEndpoint(client)
+
+ router = SlashRouter()
+
+ @router.post("form")
+ async def form(
+ *,
+ endpoint: Annotated[TestEndpoint, Depends(net_client)],
+ data: dict[str, Any],
+ ):
+ return await endpoint.form(data=data)
+
+ @router.post("teapot")
+ async def teapot(endpoint: Annotated[TestEndpoint, Depends(net_client)]):
+ return await endpoint.teapot()
+
+ from hibiapi.app.routes import router as api_router
+
+ api_router.include_router(router, prefix="/test")
+
+ from hibiapi.app import app
+ from hibiapi.utils.exceptions import ExceptionReturn
+
+ with TestClient(app, base_url="http://testserver/api/test/") as client:
+ response = client.post("form", json=test_data)
+ assert response.status_code == 200
+ response_data = response.json()
+ assert response_data["form"] == test_data
+ request_headers = {k.lower(): v for k, v in response_data["headers"].items()}
+ assert test_headers.items() <= request_headers.items()
+
+ response = client.post("teapot", json=test_data)
+ exception_return = ExceptionReturn.parse_obj(response.json())
+ assert exception_return.code == response.status_code
diff --git a/test/test_bika.py b/test/test_bika.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c3008ddb9bda73ef84c07244f3285011c0c8a92
--- /dev/null
+++ b/test/test_bika.py
@@ -0,0 +1,101 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
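+    # Disable request throttling so the test suite is not rate limited.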
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/bika/") as client:
+ client.headers["Cache-Control"] = "no-cache"
+ yield client
+
+
+def test_collections(client: TestClient):
+ response = client.get("collections")
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_categories(client: TestClient):
+ response = client.get("categories")
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_keywords(client: TestClient):
+ response = client.get("keywords")
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_advanced_search(client: TestClient):
+ response = client.get(
+ "advanced_search", params={"keyword": "blend", "page": 1, "sort": "vd"}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_category_list(client: TestClient):
+ response = client.get(
+ "category_list", params={"category": "全彩", "page": 1, "sort": "vd"}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_author_list(client: TestClient):
+ response = client.get(
+ "author_list", params={"author": "ゆうき", "page": 1, "sort": "vd"}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_comic_detail(client: TestClient):
+ response = client.get("comic_detail", params={"id": "5873aa128fe1fa02b156863a"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_comic_recommendation(client: TestClient):
+ response = client.get(
+ "comic_recommendation", params={"id": "5873aa128fe1fa02b156863a"}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_comic_episodes(client: TestClient):
+ response = client.get("comic_episodes", params={"id": "5873aa128fe1fa02b156863a"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_comic_page(client: TestClient):
+ response = client.get("comic_page", params={"id": "5873aa128fe1fa02b156863a"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_comic_comments(client: TestClient):
+ response = client.get("comic_comments", params={"id": "5873aa128fe1fa02b156863a"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
+
+
+def test_games(client: TestClient):
+ response = client.get("games")
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]["games"]
+
+
+def test_game_detail(client: TestClient):
+ response = client.get("game_detail", params={"id": "6298dc83fee4a055417cdd98"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200 and response.json()["data"]
diff --git a/test/test_bilibili_v2.py b/test/test_bilibili_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..cfae96168a2d0115d9bbb9b9973455b9a22b0092
--- /dev/null
+++ b/test/test_bilibili_v2.py
@@ -0,0 +1,92 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/bilibili/v2/") as client:
+ yield client
+
+
+def test_playurl(client: TestClient):
+ response = client.get("playurl", params={"aid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_paged_playurl(client: TestClient):
+ response = client.get("playurl", params={"aid": 2, "page": 1})
+ assert response.status_code == 200
+
+ if response.json()["code"] != 0:
+ pytest.xfail(reason=response.text)
+
+
+def test_seasoninfo(client: TestClient):
+ response = client.get("seasoninfo", params={"season_id": 425})
+ assert response.status_code == 200
+ assert response.json()["code"] in (0, -404)
+
+
+def test_seasonrecommend(client: TestClient):
+ response = client.get("seasonrecommend", params={"season_id": 425})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search(client: TestClient):
+ response = client.get("search", params={"keyword": "railgun"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search_suggest(client: TestClient):
+ from hibiapi.api.bilibili import SearchType
+
+ response = client.get(
+ "search", params={"keyword": "paperclip", "type": SearchType.suggest.value}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search_hot(client: TestClient):
+ from hibiapi.api.bilibili import SearchType
+
+ response = client.get(
+ "search", params={"limit": "10", "type": SearchType.hot.value}
+ )
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_timeline(client: TestClient):
+ from hibiapi.api.bilibili import TimelineType
+
+ response = client.get("timeline", params={"type": TimelineType.CN.value})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_space(client: TestClient):
+ response = client.get("space", params={"vmid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_archive(client: TestClient):
+ response = client.get("archive", params={"vmid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+@pytest.mark.skip(reason="not implemented yet")
+def test_favlist(client: TestClient):
+    # TODO: add test case
+ pass
diff --git a/test/test_bilibili_v3.py b/test/test_bilibili_v3.py
new file mode 100644
index 0000000000000000000000000000000000000000..54bf52e2eeb14192df9e85c0cea4682b2864d750
--- /dev/null
+++ b/test/test_bilibili_v3.py
@@ -0,0 +1,91 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/bilibili/v3/") as client:
+ yield client
+
+
+def test_video_info(client: TestClient):
+ response = client.get("video_info", params={"aid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_video_address(client: TestClient):
+ response = client.get(
+ "video_address",
+ params={"aid": 2, "cid": 62131},
+ )
+ assert response.status_code == 200
+
+ if response.json()["code"] != 0:
+ pytest.xfail(reason=response.text)
+
+
+def test_user_info(client: TestClient):
+ response = client.get("user_info", params={"uid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_user_uploaded(client: TestClient):
+ response = client.get("user_uploaded", params={"uid": 2})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+@pytest.mark.skip(reason="not implemented yet")
+def test_user_favorite(client: TestClient):
+    # TODO: add test case
+ pass
+
+
+def test_season_info(client: TestClient):
+ response = client.get("season_info", params={"season_id": 425})
+ assert response.status_code == 200
+ assert response.json()["code"] in (0, -404)
+
+
+def test_season_recommend(client: TestClient):
+ response = client.get("season_recommend", params={"season_id": 425})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_season_episode(client: TestClient):
+ response = client.get("season_episode", params={"episode_id": 84340})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_season_timeline(client: TestClient):
+ response = client.get("season_timeline")
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search(client: TestClient):
+ response = client.get("search", params={"keyword": "railgun"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search_recommend(client: TestClient):
+ response = client.get("search_recommend")
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
+
+
+def test_search_suggestion(client: TestClient):
+ response = client.get("search_suggestion", params={"keyword": "paperclip"})
+ assert response.status_code == 200
+ assert response.json()["code"] == 0
diff --git a/test/test_netease.py b/test/test_netease.py
new file mode 100644
index 0000000000000000000000000000000000000000..211509b7b3751e203be006ffdd758d3331e87f92
--- /dev/null
+++ b/test/test_netease.py
@@ -0,0 +1,142 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/netease/") as client:
+ yield client
+
+
+def test_search(client: TestClient):
+ response = client.get("search", params={"s": "test"})
+ assert response.status_code == 200
+
+ data = response.json()
+ assert data["code"] == 200
+ assert data["result"]["songs"]
+
+
+def test_artist(client: TestClient):
+ response = client.get("artist", params={"id": 1024317})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_album(client: TestClient):
+ response = client.get("album", params={"id": 63263})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_detail(client: TestClient):
+ response = client.get("detail", params={"id": 657666})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_detail_multiple(client: TestClient):
+ response = client.get("detail", params={"id": [657666, 657667, 77185]})
+ assert response.status_code == 200
+ data = response.json()
+
+ assert data["code"] == 200
+ assert len(data["songs"]) == 3
+
+
+def test_song(client: TestClient):
+ response = client.get("song", params={"id": 657666})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_song_multiple(client: TestClient):
+ response = client.get(
+ "song", params={"id": (input_ids := [657666, 657667, 77185, 86369])}
+ )
+ assert response.status_code == 200
+ data = response.json()
+
+ assert data["code"] == 200
+ assert len(data["data"]) == len(input_ids)
+
+
+def test_playlist(client: TestClient):
+ response = client.get("playlist", params={"id": 39983375})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_lyric(client: TestClient):
+ response = client.get("lyric", params={"id": 657666})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_mv(client: TestClient):
+ response = client.get("mv", params={"id": 425588})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_mv_url(client: TestClient):
+ response = client.get("mv_url", params={"id": 425588})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_comments(client: TestClient):
+ response = client.get("comments", params={"id": 657666})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_record(client: TestClient):
+ response = client.get("record", params={"id": 286609438})
+ assert response.status_code == 200
+ # TODO: test case is no longer valid
+ # assert response.json()["code"] == 200
+
+
+def test_djradio(client: TestClient):
+ response = client.get("djradio", params={"id": 350596191})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_dj(client: TestClient):
+ response = client.get("dj", params={"id": 10785929})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_detail_dj(client: TestClient):
+ response = client.get("detail_dj", params={"id": 1370045285})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_user(client: TestClient):
+ response = client.get("user", params={"id": 1887530069})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_user_playlist(client: TestClient):
+ response = client.get("user_playlist", params={"id": 1887530069})
+ assert response.status_code == 200
+ assert response.json()["code"] == 200
+
+
+def test_search_redirect(client: TestClient):
+ response = client.get("http://testserver/netease/search", params={"s": "test"})
+
+ assert response.status_code == 200
+ assert response.history
+ assert response.history[0].status_code == 301
diff --git a/test/test_pixiv.py b/test/test_pixiv.py
new file mode 100644
index 0000000000000000000000000000000000000000..13376930712bf726eb9d0c178a5741d59cc706a3
--- /dev/null
+++ b/test/test_pixiv.py
@@ -0,0 +1,289 @@
+from datetime import date, timedelta
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+from pytest_benchmark.fixture import BenchmarkFixture
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/pixiv/") as client:
+ client.headers["Cache-Control"] = "no-cache"
+ client.headers["Accept-Language"] = "en-US,en;q=0.9"
+ yield client
+
+
+def test_illust(client: TestClient):
+ # https://www.pixiv.net/artworks/109862531
+ response = client.get("illust", params={"id": 109862531})
+ assert response.status_code == 200
+ assert response.json().get("illust")
+
+
+def test_member(client: TestClient):
+ response = client.get("member", params={"id": 3036679})
+ assert response.status_code == 200
+ assert response.json().get("user")
+
+
+def test_member_illust(client: TestClient):
+ response = client.get("member_illust", params={"id": 3036679})
+ assert response.status_code == 200
+ assert response.json().get("illusts") is not None
+
+
+def test_favorite(client: TestClient):
+ response = client.get("favorite", params={"id": 3036679})
+ assert response.status_code == 200
+
+
+def test_favorite_novel(client: TestClient):
+ response = client.get("favorite_novel", params={"id": 55170615})
+ assert response.status_code == 200
+
+
+def test_following(client: TestClient):
+ response = client.get("following", params={"id": 3036679})
+ assert response.status_code == 200
+ assert response.json().get("user_previews") is not None
+
+
+def test_follower(client: TestClient):
+ response = client.get("follower", params={"id": 3036679})
+ assert response.status_code == 200
+ assert response.json().get("user_previews") is not None
+
+
+def test_rank(client: TestClient):
+ for i in range(2, 5):
+ response = client.get(
+ "rank", params={"date": str(date.today() - timedelta(days=i))}
+ )
+ assert response.status_code == 200
+ assert response.json().get("illusts")
+
+
+def test_search(client: TestClient):
+ response = client.get("search", params={"word": "東方Project"})
+ assert response.status_code == 200
+ assert response.json().get("illusts")
+
+
+def test_popular_preview(client: TestClient):
+ response = client.get("popular_preview", params={"word": "東方Project"})
+ assert response.status_code == 200
+ assert response.json().get("illusts")
+
+
+def test_search_user(client: TestClient):
+ response = client.get("search_user", params={"word": "鬼针草"})
+ assert response.status_code == 200
+ assert response.json().get("user_previews")
+
+
+def test_tags(client: TestClient):
+ response = client.get("tags")
+ assert response.status_code == 200
+ assert response.json().get("trend_tags")
+
+
+def test_tags_autocomplete(client: TestClient):
+ response = client.get("tags_autocomplete", params={"word": "甘雨"})
+ assert response.status_code == 200
+ assert response.json().get("tags")
+
+
+def test_related(client: TestClient):
+ response = client.get("related", params={"id": 85162550})
+ assert response.status_code == 200
+ assert response.json().get("illusts")
+
+
+def test_ugoira_metadata(client: TestClient):
+ response = client.get("ugoira_metadata", params={"id": 85162550})
+ assert response.status_code == 200
+ assert response.json().get("ugoira_metadata")
+
+
+def test_spotlights(client: TestClient):
+ response = client.get("spotlights")
+ assert response.status_code == 200
+ assert response.json().get("spotlight_articles")
+
+
+def test_illust_new(client: TestClient):
+ response = client.get("illust_new")
+ assert response.status_code == 200
+ assert response.json().get("illusts")
+
+
+def test_illust_comments(client: TestClient):
+ response = client.get("illust_comments", params={"id": 99973718})
+ assert response.status_code == 200
+ assert response.json().get("comments")
+
+
+def test_illust_comment_replies(client: TestClient):
+ response = client.get("illust_comment_replies", params={"id": 151400579})
+ assert response.status_code == 200
+ assert response.json().get("comments")
+
+
+def test_novel_comments(client: TestClient):
+ response = client.get("novel_comments", params={"id": 12656898})
+ assert response.status_code == 200
+ assert response.json().get("comments")
+
+
+def test_novel_comment_replies(client: TestClient):
+ response = client.get("novel_comment_replies", params={"id": 42372000})
+ assert response.status_code == 200
+ assert response.json().get("comments")
+
+
+def test_rank_novel(client: TestClient):
+ for i in range(2, 5):
+ response = client.get(
+ "rank_novel", params={"date": str(date.today() - timedelta(days=i))}
+ )
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_member_novel(client: TestClient):
+ response = client.get("member_novel", params={"id": 14883165})
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_novel_series(client: TestClient):
+ response = client.get("novel_series", params={"id": 1496457})
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_novel_detail(client: TestClient):
+ response = client.get("novel_detail", params={"id": 14617902})
+ assert response.status_code == 200
+ assert response.json().get("novel")
+
+
+def test_novel_text(client: TestClient):
+ response = client.get("novel_text", params={"id": 14617902})
+ assert response.status_code == 200
+ assert response.json().get("novel_text")
+
+
+def test_webview_novel(client: TestClient):
+ response = client.get("webview_novel", params={"id": 19791013})
+ assert response.status_code == 200
+ assert response.json().get("text")
+
+
+def test_live_list(client: TestClient):
+ response = client.get("live_list")
+ assert response.status_code == 200
+ assert response.json().get("lives")
+
+
+def test_related_novel(client: TestClient):
+ response = client.get("related_novel", params={"id": 19791013})
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_related_member(client: TestClient):
+ response = client.get("related_member", params={"id": 10109777})
+ assert response.status_code == 200
+ assert response.json().get("user_previews")
+
+
+def test_illust_series(client: TestClient):
+ response = client.get("illust_series", params={"id": 218893})
+ assert response.status_code == 200
+ assert response.json().get("illust_series_detail")
+
+
+def test_member_illust_series(client: TestClient):
+ response = client.get("member_illust_series", params={"id": 4087934})
+ assert response.status_code == 200
+ assert response.json().get("illust_series_details")
+
+
+def test_member_novel_series(client: TestClient):
+ response = client.get("member_novel_series", params={"id": 86832559})
+ assert response.status_code == 200
+ assert response.json().get("novel_series_details")
+
+
+def test_tags_novel(client: TestClient):
+ response = client.get("tags_novel")
+ assert response.status_code == 200
+ assert response.json().get("trend_tags")
+
+
+def test_search_novel(client: TestClient):
+ response = client.get("search_novel", params={"word": "碧蓝航线"})
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_popular_preview_novel(client: TestClient):
+ response = client.get("popular_preview_novel", params={"word": "東方Project"})
+ assert response.status_code == 200
+ assert response.json().get("novels")
+
+
+def test_novel_new(client: TestClient):
+ response = client.get("novel_new", params={"max_novel_id": 16002726})
+ assert response.status_code == 200
+ assert response.json().get("next_url")
+
+
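+# The second identical request must be answered from the response cache
+# (signalled by the x-cache-hit header) with the same body as the first;
+# repeated cache hits are then benchmarked.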
+def test_request_cache(client: TestClient, benchmark: BenchmarkFixture):
+ client.headers["Cache-Control"] = "public"
+
+ first_response = client.get("rank")
+ assert first_response.status_code == 200
+
+ second_response = client.get("rank")
+ assert second_response.status_code == 200
+
+ assert "x-cache-hit" in second_response.headers
+ assert "cache-control" in second_response.headers
+ assert second_response.json() == first_response.json()
+
+ def cache_benchmark():
+ response = client.get("rank")
+ assert response.status_code == 200
+
+ assert "x-cache-hit" in response.headers
+ assert "cache-control" in response.headers
+
+ benchmark.pedantic(cache_benchmark, rounds=200, iterations=3)
+
+
+def test_rank_redirect(client: TestClient):
+ response = client.get("http://testserver/pixiv/rank")
+
+ assert response.status_code == 200
+ assert response.history
+ assert response.history[0].status_code == 301
+
+
+def test_rate_limit(client: TestClient):
+ from hibiapi.app import application
+
+ application.RATE_LIMIT_MAX = 1
+
+ first_response = client.get("rank")
+ assert first_response.status_code in (200, 429)
+
+ second_response = client.get("rank")
+ assert second_response.status_code == 429
+ assert "retry-after" in second_response.headers
diff --git a/test/test_qrcode.py b/test/test_qrcode.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbc09bdf9f6228f239f5ede163db476ae47a80fb
--- /dev/null
+++ b/test/test_qrcode.py
@@ -0,0 +1,68 @@
+from math import inf
+from secrets import token_urlsafe
+
+import pytest
+from fastapi.testclient import TestClient
+from httpx import Response
+from pytest_benchmark.fixture import BenchmarkFixture
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/") as client:
+ yield client
+
+
+def test_qrcode_generate(client: TestClient, in_stress: bool = False):
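+ # also reused as the benchmark target in test_qrcode_stress: `in_stress`
+ # makes it return True so benchmark.pedantic has a truthy result to assert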
+ response = client.get(
+ "qrcode/",
+ params={
+ "text": token_urlsafe(32),
+ "encode": "raw",
+ },
+ )
+ assert response.status_code == 200
+ assert "image/png" in response.headers["content-type"]
+
+ if in_stress:
+ return True
+
+
+def test_qrcode_all(client: TestClient):
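+ # every combination of return encoding and error-correction level should succeed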
+ from hibiapi.api.qrcode import QRCodeLevel, ReturnEncode
+
+ encodes = [i.value for i in ReturnEncode.__members__.values()]
+ levels = [i.value for i in QRCodeLevel.__members__.values()]
+ responses: list[Response] = []
+ for encode in encodes:
+ for level in levels:
+ response = client.get(
+ "qrcode/",
+ params={"text": "Hello, World!", "encode": encode, "level": level},
+ )
+ responses.append(response)
+ assert all(r.status_code == 200 for r in responses)
+
+
+def test_qrcode_stress(client: TestClient, benchmark: BenchmarkFixture):
+ assert benchmark.pedantic(
+ test_qrcode_generate,
+ args=(client, True),
+ rounds=50,
+ iterations=3,
+ )
+
+
+def test_qrcode_redirect(client: TestClient):
+ response = client.get("http://testserver/qrcode/", params={"text": "Hello, World!"})
+
+ assert response.status_code == 200
+
+ redirect1, redirect2 = response.history
+
+ assert redirect1.status_code == 301
+ assert redirect2.status_code == 302
diff --git a/test/test_sauce.jpg b/test/test_sauce.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..d150d49574671c9ef2d14e9985f9900cf69300a1
Binary files /dev/null and b/test/test_sauce.jpg differ
diff --git a/test/test_sauce.py b/test/test_sauce.py
new file mode 100644
index 0000000000000000000000000000000000000000..912355115b84e684e5beab2fa72eb61af510a320
--- /dev/null
+++ b/test/test_sauce.py
@@ -0,0 +1,36 @@
+from math import inf
+from pathlib import Path
+
+import pytest
+from fastapi.testclient import TestClient
+from pytest_httpserver import HTTPServer
+
+LOCAL_SAUCE_PATH = Path(__file__).parent / "test_sauce.jpg"
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/") as client:
+ yield client
+
+
+@pytest.mark.xfail(reason="rate limit possibly reached")
+def test_sauce_url(client: TestClient, httpserver: HTTPServer):
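+ # serve the local sample image over HTTP so the endpoint can fetch it by URL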
+ httpserver.expect_request("/sauce").respond_with_data(LOCAL_SAUCE_PATH.read_bytes())
+ response = client.get("sauce/", params={"url": httpserver.url_for("/sauce")})
+ assert response.status_code == 200
+ data = response.json()
+ assert data["header"]["status"] == 0, data["header"]["message"]
+
+
+@pytest.mark.xfail(reason="rate limit possibly reached")
+def test_sauce_file(client: TestClient):
+ with open(LOCAL_SAUCE_PATH, "rb") as file:
+ response = client.post("sauce/", files={"file": file})
+ assert response.status_code == 200
+ data = response.json()
+ assert data["header"]["status"] == 0, data["header"]["message"]
diff --git a/test/test_tieba.py b/test/test_tieba.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d5b828b6e57839d1f0547f76a52772da77ccd91
--- /dev/null
+++ b/test/test_tieba.py
@@ -0,0 +1,50 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/tieba/") as client:
+ yield client
+
+
+def test_post_list(client: TestClient):
+ response = client.get("post_list", params={"name": "minecraft"})
+ assert response.status_code == 200
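+ # upstream errors are reported via error_code; treat them as expected failures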
+ if response.json()["error_code"] != "0":
+ pytest.xfail(reason=response.text)
+
+
+def test_post_list_chinese(client: TestClient):
+ # NOTE: reference https://github.com/mixmoe/HibiAPI/issues/117
+ response = client.get("post_list", params={"name": "图拉丁"})
+ assert response.status_code == 200
+ if response.json()["error_code"] != "0":
+ pytest.xfail(reason=response.text)
+
+
+def test_post_detail(client: TestClient):
+ response = client.get("post_detail", params={"tid": 1766018024})
+ assert response.status_code == 200
+ if response.json()["error_code"] != "0":
+ pytest.xfail(reason=response.text)
+
+
+def test_subpost_detail(client: TestClient):
+ response = client.get(
+ "subpost_detail", params={"tid": 1766018024, "pid": 22616319749}
+ )
+ assert response.status_code == 200
+ assert int(response.json()["error_code"]) == 0
+
+
+def test_user_profile(client: TestClient):
+ response = client.get("user_profile", params={"uid": 105525655})
+ assert response.status_code == 200
+ assert int(response.json()["error_code"]) == 0
diff --git a/test/test_wallpaper.py b/test/test_wallpaper.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8ac9ef06dcf388fa4f4c98a85277488b6dc0a14
--- /dev/null
+++ b/test/test_wallpaper.py
@@ -0,0 +1,72 @@
+from math import inf
+
+import pytest
+from fastapi.testclient import TestClient
+
+
+@pytest.fixture(scope="package")
+def client():
+ from hibiapi.app import app, application
+
+ application.RATE_LIMIT_MAX = inf
+
+ with TestClient(app, base_url="http://testserver/api/wallpaper/") as client:
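+ # bypass the response cache so every request reaches the upstream API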
+ client.headers["Cache-Control"] = "no-cache"
+ yield client
+
+
+def test_wallpaper(client: TestClient):
+ response = client.get("wallpaper", params={"category": "girl"})
+ assert response.status_code == 200
+ assert response.json().get("msg") == "success"
+
+
+def test_wallpaper_limit(client: TestClient):
+ response = client.get("wallpaper", params={"category": "girl", "limit": "21"})
+
+ assert response.status_code == 200
+ assert response.json()["msg"] == "success"
+ assert len(response.json()["res"]["wallpaper"]) == 21
+
+
+def test_wallpaper_skip(client: TestClient):
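+ # skipping 20 items should line up with item index 20 of an unskipped 40-item page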
+ response_1 = client.get(
+ "wallpaper", params={"category": "girl", "limit": "20", "skip": "20"}
+ )
+ response_2 = client.get(
+ "wallpaper", params={"category": "girl", "limit": "40", "skip": "0"}
+ )
+
+ assert response_1.status_code == 200 and response_2.status_code == 200
+ assert (
+ response_1.json()["res"]["wallpaper"][0]["id"]
+ == response_2.json()["res"]["wallpaper"][20]["id"]
+ )
+
+
+def test_vertical(client: TestClient):
+ response = client.get("vertical", params={"category": "girl"})
+ assert response.status_code == 200
+ assert response.json().get("msg") == "success"
+
+
+def test_vertical_limit(client: TestClient):
+ response = client.get("vertical", params={"category": "girl", "limit": "21"})
+ assert response.status_code == 200
+ assert response.json().get("msg") == "success"
+ assert len(response.json()["res"]["vertical"]) == 21
+
+
+def test_vertical_skip(client: TestClient):
+ response_1 = client.get(
+ "vertical", params={"category": "girl", "limit": "20", "skip": "20"}
+ )
+ response_2 = client.get(
+ "vertical", params={"category": "girl", "limit": "40", "skip": "0"}
+ )
+
+ assert response_1.status_code == 200 and response_2.status_code == 200
+ assert (
+ response_1.json()["res"]["vertical"][0]["id"]
+ == response_2.json()["res"]["vertical"][20]["id"]
+ )