index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
def _set_diff(self, a, b):
    # Diff two sets by greedily pairing removed/added elements with the
    # highest pairwise similarity, accumulating an overall score in [0, 1].
    removed = a.difference(b)
    added = b.difference(a)
    if not removed and not added:
        # Identical sets: empty diff, perfect similarity.
        return {}, 1.0
    # Rank every (removed, added) pair by similarity, best matches first.
    ranking = sorted(
        (
            (self._obj_diff(x, y)[1], x, y)
            for x in removed
            for y in added
        ),
        reverse=True,
        key=lambda x: x[0]
    )
    r2 = set(removed)
    a2 = set(added)
    n_common = len(a) - len(removed)  # elements present in both sets
    s_common = float(n_common)        # each shared element contributes 1.0
    for s, x, y in ranking:
        # Pair each element at most once, consuming best matches first.
        if x in r2 and y in a2:
            r2.discard(x)
            a2.discard(y)
            s_common += s
            n_common += 1
            if not r2 or not a2:
                break
    n_tot = len(a) + len(added)
    s = s_common / n_tot if n_tot != 0 else 1.0
    return self.options.syntax.emit_set_diff(a, b, s, added, removed), s
def _unescape(self, x):
    """Reverse ``_escape``: map serialized markers back to their values.

    A string matching a known "$symbol" marker returns the interned
    Symbol; a string starting with the escape prefix has the prefix
    stripped; everything else passes through unchanged.
    """
    if isinstance(x, string_types):
        sym = self._symbol_map.get(x, None)
        if sym is not None:
            return sym
        if x.startswith(self.options.escape_str):
            # Strip the full escape prefix. The original sliced exactly one
            # character (x[1:]), which silently corrupts data whenever
            # escape_str is longer than one character.
            return x[len(self.options.escape_str):]
    return x
def diff(self, a, b, fp=None):
    """Compute the diff of *a* and *b*, honoring load/marshal/dump options."""
    opts = self.options
    if opts.load:
        a, b = opts.loader(a), opts.loader(b)
    d, _ = self._obj_diff(a, b)
    if opts.marshal or opts.dump:
        d = self.marshal(d)
    return opts.dumper(d, fp) if opts.dump else d
def marshal(self, d):
    """Recursively escape a diff structure so it can be serialized."""
    if isinstance(d, dict):
        return {self._escape(key): self.marshal(value) for key, value in d.items()}
    if isinstance(d, (list, tuple)):
        # Preserve the concrete sequence type of the input.
        return type(d)(self.marshal(item) for item in d)
    return self._escape(d)
def patch(self, a, d, fp=None):
    """Apply diff *d* to *a* and return the patched document."""
    opts = self.options
    if opts.load:
        a, d = opts.loader(a), opts.loader(d)
    if opts.marshal or opts.load:
        # Serialized diffs carry escaped symbol keys; undo that first.
        d = self.unmarshal(d)
    patched = opts.syntax.patch(a, d)
    return opts.dumper(patched, fp) if opts.dump else patched
def similarity(self, a, b):
    """Return the similarity score of *a* and *b* in [0.0, 1.0]."""
    if self.options.load:
        a = self.options.loader(a)
        b = self.options.loader(b)
    # _obj_diff yields (diff, score); only the score is wanted here.
    return self._obj_diff(a, b)[1]
def unmarshal(self, d):
    """Recursively unescape a deserialized diff structure."""
    if isinstance(d, dict):
        return {self._unescape(key): self.unmarshal(value) for key, value in d.items()}
    if isinstance(d, (list, tuple)):
        # Preserve the concrete sequence type of the input.
        return type(d)(self.unmarshal(item) for item in d)
    return self._unescape(d)
def unpatch(self, b, d, fp=None):
    """Reverse diff *d* on *b*, recovering the original document."""
    opts = self.options
    if opts.load:
        b, d = opts.loader(b), opts.loader(d)
    if opts.marshal or opts.load:
        # Serialized diffs carry escaped symbol keys; undo that first.
        d = self.unmarshal(d)
    original = opts.syntax.unpatch(b, d)
    return opts.dumper(original, fp) if opts.dump else original
class JsonDumper(object):
    """Callable that serializes an object to JSON text or into a file."""

    def __init__(self, **kwargs):
        # Extra keyword arguments forwarded verbatim to json.dumps/json.dump.
        self.kwargs = kwargs

    def __call__(self, obj, dest=None):
        """Serialize *obj*: return a string when *dest* is None, else write to *dest*."""
        if dest is None:
            return json.dumps(obj, **self.kwargs)
        return json.dump(obj, dest, **self.kwargs)
def __call__(self, obj, dest=None):
    """Serialize *obj* to JSON: return a string when *dest* is None, else write into *dest*."""
    if dest is None:
        return json.dumps(obj, **self.kwargs)
    else:
        # json.dump returns None; the payload goes to the file-like dest.
        return json.dump(obj, dest, **self.kwargs)
def __init__(self, **kwargs):
    # Keyword arguments stored for later forwarding to the json call.
    self.kwargs = kwargs
class JsonLoader(object):
    """Callable that parses JSON from a string or from a readable file object."""

    def __init__(self, **kwargs):
        # Extra keyword arguments forwarded verbatim to json.loads/json.load.
        self.kwargs = kwargs

    def __call__(self, src):
        """Parse *src*: strings go through json.loads, file-likes through json.load."""
        if isinstance(src, string_types):
            return json.loads(src, **self.kwargs)
        return json.load(src, **self.kwargs)
def __call__(self, src):
    """Parse JSON from *src* (a string or a readable file-like object)."""
    if isinstance(src, string_types):
        return json.loads(src, **self.kwargs)
    else:
        return json.load(src, **self.kwargs)
class Symbol(object):
    """Sentinel marker used as a special key inside diff dictionaries.

    Symbols compare by identity. ``str`` renders the serialized
    "$label" form; ``repr`` is the bare label.
    """

    def __init__(self, label):
        self.label = label

    def __repr__(self):
        return self.label

    def __str__(self):
        return "$" + self.label
def __init__(self, label):
    # The bare marker name, e.g. "insert" or "delete".
    self.label = label
def __repr__(self):
    # Debug form is the bare label without the "$" serialization prefix.
    return self.label
def __str__(self):
    # Serialized form used when a diff is marshaled to plain JSON keys.
    return "$" + self.label
class SymmetricJsonDiffSyntax(object):
    """Diff syntax whose output contains enough data to patch AND unpatch.

    A diff is either a two-element list ``[a, b]`` (complete replacement)
    or a dict keyed by Symbol markers (insert/delete for dicts and lists,
    add/discard for sets) plus nested per-key diffs.
    """

    def emit_set_diff(self, a, b, s, added, removed):
        # No overlap at all: record both whole sets.
        if s == 0.0 or len(removed) == len(a):
            return [a, b]
        else:
            d = {}
            if added:
                d[add] = added
            if removed:
                d[discard] = removed
            return d

    def emit_list_diff(self, a, b, s, inserted, changed, deleted):
        if s == 0.0:
            return [a, b]
        elif s == 1.0:
            return {}
        else:
            # `changed` already maps index -> nested diff; extend it in place.
            d = changed
            if inserted:
                d[insert] = inserted
            if deleted:
                d[delete] = deleted
            return d

    def emit_dict_diff(self, a, b, s, added, changed, removed):
        if s == 0.0:
            return [a, b]
        elif s == 1.0:
            return {}
        else:
            # `changed` already maps key -> nested diff; extend it in place.
            d = changed
            if added:
                d[insert] = added
            if removed:
                d[delete] = removed
            return d

    def emit_value_diff(self, a, b, s):
        if s == 1.0:
            return {}
        else:
            return [a, b]

    def patch(self, a, d):
        """Apply symmetric diff *d* to *a*, returning the new value."""
        if isinstance(d, list):
            # Complete replacement pair [old, new].
            _, b = d
            return b
        elif isinstance(d, dict):
            if not d:
                return a
            if isinstance(a, dict):
                # Copy so the caller's container is never mutated.
                a = dict(a)
                for k, v in d.items():
                    if k is delete:
                        for kdel, _ in v.items():
                            del a[kdel]
                    elif k is insert:
                        for kk, vv in v.items():
                            a[kk] = vv
                    else:
                        a[k] = self.patch(a[k], v)
                return a
            elif isinstance(a, (list, tuple)):
                original_type = type(a)
                a = list(a)
                # Order matters: deletions, then insertions, then nested edits.
                if delete in d:
                    for pos, value in d[delete]:
                        a.pop(pos)
                if insert in d:
                    for pos, value in d[insert]:
                        a.insert(pos, value)
                for k, v in d.items():
                    if k is not delete and k is not insert:
                        k = int(k)
                        a[k] = self.patch(a[k], v)
                if original_type is not list:
                    a = original_type(a)
                return a
            elif isinstance(a, set):
                a = set(a)
                if discard in d:
                    for x in d[discard]:
                        a.discard(x)
                if add in d:
                    for x in d[add]:
                        a.add(x)
                return a
        raise Exception("Invalid symmetric diff")

    def unpatch(self, b, d):
        """Reverse symmetric diff *d* on *b*, recovering the original value."""
        if isinstance(d, list):
            a, _ = d
            return a
        elif isinstance(d, dict):
            if not d:
                return b
            if isinstance(b, dict):
                b = dict(b)
                for k, v in d.items():
                    if k is delete:
                        # Deleted entries are restored with their old values.
                        for kk, vv in v.items():
                            b[kk] = vv
                    elif k is insert:
                        for kk, vv in v.items():
                            del b[kk]
                    else:
                        b[k] = self.unpatch(b[k], v)
                return b
            elif isinstance(b, (list, tuple)):
                original_type = type(b)
                b = list(b)
                # Inverse order of patch(): nested edits, undo inserts, re-add deletes.
                for k, v in d.items():
                    if k is not delete and k is not insert:
                        k = int(k)
                        b[k] = self.unpatch(b[k], v)
                if insert in d:
                    for pos, value in reversed(d[insert]):
                        b.pop(pos)
                if delete in d:
                    for pos, value in reversed(d[delete]):
                        b.insert(pos, value)
                if original_type is not list:
                    b = original_type(b)
                return b
            elif isinstance(b, set):
                b = set(b)
                if discard in d:
                    for x in d[discard]:
                        b.add(x)
                if add in d:
                    for x in d[add]:
                        b.discard(x)
                return b
        raise Exception("Invalid symmetric diff")
def emit_dict_diff(self, a, b, s, added, changed, removed):
    """Render a dict diff: {} when identical, [a, b] when unrelated, else a delta dict."""
    if s == 1.0:
        return {}
    if s == 0.0:
        return [a, b]
    delta = changed
    if added:
        delta[insert] = added
    if removed:
        delta[delete] = removed
    return delta
def emit_list_diff(self, a, b, s, inserted, changed, deleted):
    """Render a list diff: {} when identical, [a, b] when unrelated, else a delta dict."""
    if s == 1.0:
        return {}
    if s == 0.0:
        return [a, b]
    delta = changed
    if inserted:
        delta[insert] = inserted
    if deleted:
        delta[delete] = deleted
    return delta
def emit_set_diff(self, a, b, s, added, removed):
    """Render a set diff; fall back to [a, b] when nothing of *a* survives."""
    if s == 0.0 or len(removed) == len(a):
        return [a, b]
    delta = {}
    if added:
        delta[add] = added
    if removed:
        delta[discard] = removed
    return delta
def emit_value_diff(self, a, b, s):
    """Identical scalars produce an empty diff; otherwise record both sides."""
    return {} if s == 1.0 else [a, b]
def patch(self, a, d):
    """Apply symmetric diff *d* to *a*, returning a new patched value."""
    if isinstance(d, list):
        # Complete replacement pair [old, new].
        _, b = d
        return b
    elif isinstance(d, dict):
        if not d:
            return a
        if isinstance(a, dict):
            # Copy so the caller's container is never mutated.
            a = dict(a)
            for k, v in d.items():
                if k is delete:
                    for kdel, _ in v.items():
                        del a[kdel]
                elif k is insert:
                    for kk, vv in v.items():
                        a[kk] = vv
                else:
                    a[k] = self.patch(a[k], v)
            return a
        elif isinstance(a, (list, tuple)):
            original_type = type(a)
            a = list(a)
            # Order matters: deletions, then insertions, then nested edits.
            if delete in d:
                for pos, value in d[delete]:
                    a.pop(pos)
            if insert in d:
                for pos, value in d[insert]:
                    a.insert(pos, value)
            for k, v in d.items():
                if k is not delete and k is not insert:
                    k = int(k)
                    a[k] = self.patch(a[k], v)
            if original_type is not list:
                a = original_type(a)
            return a
        elif isinstance(a, set):
            a = set(a)
            if discard in d:
                for x in d[discard]:
                    a.discard(x)
            if add in d:
                for x in d[add]:
                    a.add(x)
            return a
    raise Exception("Invalid symmetric diff")
def unpatch(self, b, d):
    """Reverse symmetric diff *d* on *b*, recovering the original value."""
    if isinstance(d, list):
        a, _ = d
        return a
    elif isinstance(d, dict):
        if not d:
            return b
        if isinstance(b, dict):
            # Copy so the caller's container is never mutated.
            b = dict(b)
            for k, v in d.items():
                if k is delete:
                    # Deleted entries are restored with their old values.
                    for kk, vv in v.items():
                        b[kk] = vv
                elif k is insert:
                    for kk, vv in v.items():
                        del b[kk]
                else:
                    b[k] = self.unpatch(b[k], v)
            return b
        elif isinstance(b, (list, tuple)):
            original_type = type(b)
            b = list(b)
            # Inverse order of patch(): nested edits, undo inserts, re-add deletes.
            for k, v in d.items():
                if k is not delete and k is not insert:
                    k = int(k)
                    b[k] = self.unpatch(b[k], v)
            if insert in d:
                for pos, value in reversed(d[insert]):
                    b.pop(pos)
            if delete in d:
                for pos, value in reversed(d[delete]):
                    b.insert(pos, value)
            if original_type is not list:
                b = original_type(b)
            return b
        elif isinstance(b, set):
            b = set(b)
            if discard in d:
                for x in d[discard]:
                    b.add(x)
            if add in d:
                for x in d[add]:
                    b.discard(x)
            return b
    raise Exception("Invalid symmetric diff")
def diff(a, b, fp=None, cls=JsonDiffer, **kwargs):
    """Module-level convenience wrapper: diff *a* and *b* with a fresh *cls* differ."""
    return cls(**kwargs).diff(a, b, fp)
def patch(a, d, fp=None, cls=JsonDiffer, **kwargs):
    """Module-level convenience wrapper: apply diff *d* to *a* with a fresh *cls* differ."""
    return cls(**kwargs).patch(a, d, fp)
def similarity(a, b, cls=JsonDiffer, **kwargs):
    """Module-level convenience wrapper: similarity of *a* and *b* in [0.0, 1.0]."""
    return cls(**kwargs).similarity(a, b)
class ClientResponse(HeadersMixin):
    """HTTP response object produced by a client request."""

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase
    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _raw_headers: RawHeaders = None  # type: ignore[assignment]
    _connection = None  # current connection
    _source_traceback: Optional[traceback.StackSummary] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    __writer = None
def __init__(
    self,
    method: str,
    url: URL,
    *,
    writer: "asyncio.Task[None]",
    continue100: Optional["asyncio.Future[bool]"],
    timer: BaseTimerContext,
    request_info: RequestInfo,
    traces: List["Trace"],
    loop: asyncio.AbstractEventLoop,
    session: "ClientSession",
) -> None:
    """Initialize response state; status/header/body fields are filled by start()."""
    assert isinstance(url, URL)
    self.method = method
    self.cookies = SimpleCookie()
    self._real_url = url
    # Fragments are client-side only; keep the fragment-free URL separately.
    self._url = url.with_fragment(None)
    self._body: Any = None
    self._writer: Optional[asyncio.Task[None]] = writer
    self._continue = continue100  # None by default
    self._closed = True
    self._history: Tuple[ClientResponse, ...] = ()
    self._request_info = request_info
    self._timer = timer if timer is not None else TimerNoop()
    self._cache: Dict[str, Any] = {}
    self._traces = traces
    self._loop = loop
    # store a reference to session #1985
    self._session: Optional[ClientSession] = session
    # Save reference to _resolve_charset, so that get_encoding() will still
    # work after the response has finished reading the body.
    if session is None:
        # TODO: Fix session=None in tests (see ClientRequest.__init__).
        self._resolve_charset: Callable[
            ["ClientResponse", bytes], str
        ] = lambda *_: "utf-8"
    else:
        self._resolve_charset = session._resolve_charset
    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __reset_writer(self, _: object = None) -> None:
    # Task done-callback: drop the reference so the finished task can be GC'd.
    self.__writer = None

@property
def _writer(self) -> Optional["asyncio.Task[None]"]:
    # The in-flight request-body writer task, if any is still attached.
    return self.__writer

@_writer.setter
def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
    # Swap the writer task, moving the reset done-callback along with it.
    if self.__writer is not None:
        self.__writer.remove_done_callback(self.__reset_writer)
    self.__writer = writer
    if writer is not None:
        writer.add_done_callback(self.__reset_writer)
@reify
def url(self) -> URL:
    # Request URL with the fragment stripped.
    return self._url

@reify
def url_obj(self) -> URL:
    # Deprecated alias of .url, kept for backwards compatibility.
    warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
    return self._url

@reify
def real_url(self) -> URL:
    # Request URL as given, including any fragment.
    return self._real_url

@reify
def host(self) -> str:
    assert self._url.host is not None
    return self._url.host

@reify
def headers(self) -> "CIMultiDictProxy[str]":
    return self._headers

@reify
def raw_headers(self) -> RawHeaders:
    return self._raw_headers

@reify
def request_info(self) -> RequestInfo:
    return self._request_info

@reify
def content_disposition(self) -> Optional[ContentDisposition]:
    # Lazily parse the Content-Disposition header; None when absent.
    raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
    if raw is None:
        return None
    disposition_type, params_dct = multipart.parse_content_disposition(raw)
    params = MappingProxyType(params_dct)
    filename = multipart.content_disposition_filename(params)
    return ContentDisposition(disposition_type, params, filename)
def __del__(self, _warnings: Any = warnings) -> None:
    # Finalizer: release a still-held connection and, in debug mode,
    # warn about responses that were never closed.
    if self._closed:
        return
    if self._connection is not None:
        self._connection.release()
        self._cleanup_writer()
        if self._loop.get_debug():
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
            context = {"client_response": self, "message": "Unclosed response"}
            if self._source_traceback:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)
def __repr__(self) -> str:
    """Multi-line textual summary: status line plus the response headers."""
    if self.reason:
        # Keep the repr ASCII-safe regardless of the server's reason phrase.
        reason = self.reason.encode("ascii", "backslashreplace").decode("ascii")
    else:
        reason = "None"
    buf = io.StringIO()
    summary = "<ClientResponse({}) [{} {}]>".format(str(self.url), self.status, reason)
    print(summary, file=buf)
    print(self.headers, file=buf)
    return buf.getvalue()
@property
def connection(self) -> Optional["Connection"]:
    # The underlying pooled connection, or None once released/closed.
    return self._connection

@reify
def history(self) -> Tuple["ClientResponse", ...]:
    """A sequence of responses, if redirects occurred."""
    return self._history
@reify
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
    """Parse ``Link`` headers into a multidict keyed by rel (or target URL)."""
    links_str = ", ".join(self.headers.getall("link", []))
    if not links_str:
        return MultiDictProxy(MultiDict())
    links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
    # Split on commas that begin a new "<url>; params" entry.
    for val in re.split(r",(?=\s*<)", links_str):
        match = re.match(r"\s*<(.*)>(.*)", val)
        if match is None:  # pragma: no cover
            # the check exists to suppress mypy error
            continue
        url, params_str = match.groups()
        params = params_str.split(";")[1:]
        link: MultiDict[Union[str, URL]] = MultiDict()
        for param in params:
            match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            key, _, value, _ = match.groups()
            link.add(key, value)
        key = link.get("rel", url)
        # Resolve the link target relative to this response's URL.
        link.add("url", self.url.join(URL(url)))
        links.add(str(key), MultiDictProxy(link))
    return MultiDictProxy(links)
async def start(self, connection: "Connection") -> "ClientResponse":
    """Start response processing."""
    self._closed = False
    self._protocol = connection.protocol
    self._connection = connection
    with self._timer:
        while True:
            # read response
            try:
                protocol = self._protocol
                message, payload = await protocol.read()  # type: ignore[union-attr]
            except http.HttpProcessingError as exc:
                raise ClientResponseError(
                    self.request_info,
                    self.history,
                    status=exc.code,
                    message=exc.message,
                    headers=exc.headers,
                ) from exc
            # Loop past 1xx informational responses (except 101 upgrades).
            if message.code < 100 or message.code > 199 or message.code == 101:
                break
            if self._continue is not None:
                # 100-continue received: unblock the body writer.
                set_result(self._continue, True)
                self._continue = None
    # payload eof handler
    payload.on_eof(self._response_eof)
    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason
    # headers
    self._headers = message.headers  # type is CIMultiDictProxy
    self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]
    # payload
    self.content = payload
    # cookies
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            client_logger.warning("Can not load response cookies: %s", exc)
    return self
def _response_eof(self) -> None:
    # Payload-EOF callback: mark the response closed and release resources.
    if self._closed:
        return
    # protocol could be None because connection could be detached
    protocol = self._connection and self._connection.protocol
    if protocol is not None and protocol.upgraded:
        # Upgraded connections (e.g. WebSocket) outlive the response body.
        return
    self._closed = True
    self._cleanup_writer()
    self._release_connection()

@property
def closed(self) -> bool:
    # True once the response has been fully consumed or explicitly closed.
    return self._closed
def close(self) -> None:
    """Hard-close the response and drop the underlying connection."""
    if not self._released:
        self._notify_content()
    self._closed = True
    if self._loop is None or self._loop.is_closed():
        return
    self._cleanup_writer()
    if self._connection is not None:
        self._connection.close()
        self._connection = None

def release(self) -> Any:
    """Release the connection back to the pool for reuse."""
    if not self._released:
        self._notify_content()
    self._closed = True
    self._cleanup_writer()
    self._release_connection()
    # Returned awaitable kept for backwards compatibility.
    return noop()
@property
def ok(self) -> bool:
    """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

    This is **not** a check for ``200 OK`` but a check that the response
    status is under 400.
    """
    return 400 > self.status

def raise_for_status(self) -> None:
    """Raise ClientResponseError for 4xx/5xx statuses; no-op otherwise."""
    if not self.ok:
        # reason should always be not None for a started response
        assert self.reason is not None
        # Release first so the connection stays reusable despite the error.
        self.release()
        raise ClientResponseError(
            self.request_info,
            self.history,
            status=self.status,
            message=self.reason,
            headers=self.headers,
        )
def _release_connection(self) -> None:
    # Release now if the body writer is done, otherwise defer until it is.
    if self._connection is not None:
        if self._writer is None:
            self._connection.release()
            self._connection = None
        else:
            self._writer.add_done_callback(lambda f: self._release_connection())

async def _wait_released(self) -> None:
    # Await writer completion, then release the connection.
    if self._writer is not None:
        await self._writer
    self._release_connection()

def _cleanup_writer(self) -> None:
    # Cancel an in-flight body writer and drop the session reference.
    if self._writer is not None:
        self._writer.cancel()
    self._session = None

def _notify_content(self) -> None:
    # Fail any pending content reads: the connection is going away.
    content = self.content
    if content and content.exception() is None:
        set_exception(content, ClientConnectionError("Connection closed"))
    self._released = True

async def wait_for_close(self) -> None:
    # Wait for the body writer to finish, then release the response.
    if self._writer is not None:
        await self._writer
    self.release()
async def read(self) -> bytes:
    """Read response payload."""
    if self._body is None:
        try:
            self._body = await self.content.read()
            for trace in self._traces:
                await trace.send_response_chunk_received(
                    self.method, self.url, self._body
                )
        except BaseException:
            # A partially-read body poisons the connection; close it.
            self.close()
            raise
    elif self._released:  # Response explicitly released
        raise ClientConnectionError("Connection closed")
    protocol = self._connection and self._connection.protocol
    if protocol is None or not protocol.upgraded:
        await self._wait_released()  # Underlying connection released
    return self._body  # type: ignore[no-any-return]

def get_encoding(self) -> str:
    """Body encoding: charset parameter, JSON default, else the session resolver."""
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)
    encoding = mimetype.parameters.get("charset")
    if encoding:
        # Validate the declared charset; unknown names fall through.
        with contextlib.suppress(LookupError):
            return codecs.lookup(encoding).name
    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"
    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )
    return self._resolve_charset(self, self._body)
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
    """Read response payload and decode."""
    if self._body is None:
        await self.read()
    if encoding is None:
        encoding = self.get_encoding()
    return self._body.decode(  # type: ignore[no-any-return,union-attr]
        encoding, errors=errors
    )

async def json(
    self,
    *,
    encoding: Optional[str] = None,
    loads: JSONDecoder = DEFAULT_JSON_DECODER,
    content_type: Optional[str] = "application/json",
) -> Any:
    """Read and decodes JSON response."""
    if self._body is None:
        await self.read()
    if content_type:
        # Guard against decoding non-JSON payloads unless explicitly disabled.
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                message=(
                    "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                ),
                headers=self.headers,
            )
    stripped = self._body.strip()  # type: ignore[union-attr]
    if not stripped:
        return None
    if encoding is None:
        encoding = self.get_encoding()
    return loads(stripped.decode(encoding))

async def __aenter__(self) -> "ClientResponse":
    return self

async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    # similar to _RequestContextManager, we do not need to check
    # for exceptions, response object can close connection
    # if state is broken
    self.release()
    await self.wait_for_close()
def __reset_writer(self, _: object = None) -> None:
    # Task done-callback: drop the writer reference so it can be GC'd.
    self.__writer = None
def get_encoding(self) -> str:
    """Body encoding: charset parameter, JSON default, else the session resolver."""
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)
    encoding = mimetype.parameters.get("charset")
    if encoding:
        # Validate the declared charset; unknown names fall through.
        with contextlib.suppress(LookupError):
            return codecs.lookup(encoding).name
    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"
    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )
    return self._resolve_charset(self, self._body)
def __del__(self, _warnings: Any = warnings) -> None:
    # Finalizer: release a still-held connection; warn when debugging.
    if self._closed:
        return
    if self._connection is not None:
        self._connection.release()
        self._cleanup_writer()
        if self._loop.get_debug():
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
            context = {"client_response": self, "message": "Unclosed response"}
            if self._source_traceback:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)
def __init__(
    self,
    method: str,
    url: URL,
    *,
    writer: "asyncio.Task[None]",
    continue100: Optional["asyncio.Future[bool]"],
    timer: BaseTimerContext,
    request_info: RequestInfo,
    traces: List["Trace"],
    loop: asyncio.AbstractEventLoop,
    session: "ClientSession",
) -> None:
    """Initialize response state; status/header/body fields are filled by start()."""
    assert isinstance(url, URL)
    self.method = method
    self.cookies = SimpleCookie()
    self._real_url = url
    self._url = url.with_fragment(None)
    self._body: Any = None
    self._writer: Optional[asyncio.Task[None]] = writer
    self._continue = continue100  # None by default
    self._closed = True
    self._history: Tuple[ClientResponse, ...] = ()
    self._request_info = request_info
    self._timer = timer if timer is not None else TimerNoop()
    self._cache: Dict[str, Any] = {}
    self._traces = traces
    self._loop = loop
    # store a reference to session #1985
    self._session: Optional[ClientSession] = session
    # Save reference to _resolve_charset, so that get_encoding() will still
    # work after the response has finished reading the body.
    if session is None:
        # TODO: Fix session=None in tests (see ClientRequest.__init__).
        self._resolve_charset: Callable[
            ["ClientResponse", bytes], str
        ] = lambda *_: "utf-8"
    else:
        self._resolve_charset = session._resolve_charset
    if loop.get_debug():
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __repr__(self) -> str:
    """Multi-line summary: status line plus the response headers."""
    out = io.StringIO()
    ascii_encodable_url = str(self.url)
    if self.reason:
        # Keep the repr ASCII-safe regardless of the server's reason phrase.
        ascii_encodable_reason = self.reason.encode(
            "ascii", "backslashreplace"
        ).decode("ascii")
    else:
        ascii_encodable_reason = "None"
    print(
        "<ClientResponse({}) [{} {}]>".format(
            ascii_encodable_url, self.status, ascii_encodable_reason
        ),
        file=out,
    )
    print(self.headers, file=out)
    return out.getvalue()
def _cleanup_writer(self) -> None:
    # Cancel an in-flight body writer and drop the session reference.
    if self._writer is not None:
        self._writer.cancel()
    self._session = None
def _notify_content(self) -> None:
    # Fail any pending content reads: the connection is going away.
    content = self.content
    if content and content.exception() is None:
        set_exception(content, ClientConnectionError("Connection closed"))
    self._released = True
def _parse_content_type(self, raw: Optional[str]) -> None:
    """Cache the parsed Content-Type value and its parameter dict."""
    self._stored_content_type = raw
    if raw is None:
        # default value according to RFC 2616
        self._content_type = "application/octet-stream"
        self._content_dict = {}
        return
    parsed = HeaderParser().parsestr("Content-Type: " + raw)
    self._content_type = parsed.get_content_type()
    params = parsed.get_params(())
    # First element repeats the content type itself; keep only the parameters.
    self._content_dict = dict(params[1:])
def _release_connection(self) -> None:
    # Release now if the body writer is done, otherwise defer until it is.
    if self._connection is not None:
        if self._writer is None:
            self._connection.release()
            self._connection = None
        else:
            self._writer.add_done_callback(lambda f: self._release_connection())
def _response_eof(self) -> None:
    # Payload-EOF callback: mark the response closed and release resources.
    if self._closed:
        return
    # protocol could be None because connection could be detached
    protocol = self._connection and self._connection.protocol
    if protocol is not None and protocol.upgraded:
        # Upgraded connections (e.g. WebSocket) outlive the response body.
        return
    self._closed = True
    self._cleanup_writer()
    self._release_connection()
def close(self) -> None:
    """Hard-close the response and drop the underlying connection."""
    if not self._released:
        self._notify_content()
    self._closed = True
    if self._loop is None or self._loop.is_closed():
        return
    self._cleanup_writer()
    if self._connection is not None:
        self._connection.close()
        self._connection = None
def get_encoding(self) -> str:
    """Body encoding: charset parameter, JSON default, else the session resolver."""
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)
    encoding = mimetype.parameters.get("charset")
    if encoding:
        with contextlib.suppress(LookupError):
            return codecs.lookup(encoding).name
    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"
    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )
    return self._resolve_charset(self, self._body)
def raise_for_status(self) -> None:
    """Raise ClientResponseError for 4xx/5xx statuses; no-op otherwise."""
    if not self.ok:
        # reason should always be not None for a started response
        assert self.reason is not None
        # Release first so the connection stays reusable despite the error.
        self.release()
        raise ClientResponseError(
            self.request_info,
            self.history,
            status=self.status,
            message=self.reason,
            headers=self.headers,
        )
def _notify_content(self) -> None:
    # Fail any pending content reads: the connection is going away.
    content = self.content
    if content and content.exception() is None:
        set_exception(content, ClientConnectionError("Connection closed"))
    self._released = True
def release(self) -> Any:
    """Release the connection back to the pool for reuse."""
    if not self._released:
        self._notify_content()
    self._closed = True
    self._cleanup_writer()
    self._release_connection()
    # Returned awaitable kept for backwards compatibility.
    return noop()
@reify
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
    """Parse ``Link`` headers into a multidict keyed by rel (or target URL)."""
    links_str = ", ".join(self.headers.getall("link", []))
    if not links_str:
        return MultiDictProxy(MultiDict())
    links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
    # Split on commas that begin a new "<url>; params" entry.
    for val in re.split(r",(?=\s*<)", links_str):
        match = re.match(r"\s*<(.*)>(.*)", val)
        if match is None:  # pragma: no cover
            # the check exists to suppress mypy error
            continue
        url, params_str = match.groups()
        params = params_str.split(";")[1:]
        link: MultiDict[Union[str, URL]] = MultiDict()
        for param in params:
            match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            key, _, value, _ = match.groups()
            link.add(key, value)
        key = link.get("rel", url)
        # Resolve the link target relative to this response's URL.
        link.add("url", self.url.join(URL(url)))
        links.add(str(key), MultiDictProxy(link))
    return MultiDictProxy(links)
def get_encoding(self) -> str:
    """Body encoding: charset parameter, JSON default, else the session resolver."""
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)
    encoding = mimetype.parameters.get("charset")
    if encoding:
        with contextlib.suppress(LookupError):
            return codecs.lookup(encoding).name
    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"
    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )
    return self._resolve_charset(self, self._body)
18,205 | aiohttp.client | ClientSession | First-class interface for making HTTP requests. | class ClientSession:
"""First-class interface for making HTTP requests."""
ATTRS = frozenset(
[
"_base_url",
"_source_traceback",
"_connector",
"requote_redirect_url",
"_loop",
"_cookie_jar",
"_connector_owner",
"_default_auth",
"_version",
"_json_serialize",
"_requote_redirect_url",
"_timeout",
"_raise_for_status",
"_auto_decompress",
"_trust_env",
"_default_headers",
"_skip_auto_headers",
"_request_class",
"_response_class",
"_ws_response_class",
"_trace_configs",
"_read_bufsize",
"_max_line_size",
"_max_field_size",
"_resolve_charset",
]
)
_source_traceback: Optional[traceback.StackSummary] = None
_connector: Optional[BaseConnector] = None
    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: Union[
            bool, Callable[[ClientResponse], Awaitable[None]]
        ] = False,
        read_timeout: Union[float, _SENTINEL] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2**16,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
        fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
    ) -> None:
        """Initialise the session.

        Notable interactions handled below:
        * ``timeout`` (a ClientTimeout) supersedes the deprecated
          ``read_timeout``/``conn_timeout`` floats; passing both raises.
        * ``connector``, if supplied, must be bound to the same event loop
          as the session.
        * ``base_url``, if supplied, must be an origin-only absolute URL.
        """
        # We initialise _connector to None immediately, as it's referenced in __del__()
        # and could cause issues if an exception occurs during initialisation.
        self._connector: Optional[BaseConnector] = None
        if timeout is sentinel or timeout is None:
            self._timeout = DEFAULT_TIMEOUT
            # Legacy float timeouts are folded into the ClientTimeout with a
            # deprecation warning rather than rejected.
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            if not isinstance(timeout, ClientTimeout):
                raise ValueError(
                    f"timeout parameter cannot be of {type(timeout)} type, "
                    "please use 'timeout=ClientTimeout(...)'",
                )
            self._timeout = timeout
            # A modern timeout and a legacy one together are ambiguous.
            if read_timeout is not sentinel:
                raise ValueError(
                    "read_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.read"
                )
            if conn_timeout is not None:
                raise ValueError(
                    "conn_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.connect"
                )
        # Prefer the connector's loop when none was given explicitly.
        if loop is None:
            if connector is not None:
                loop = connector._loop
        loop = get_running_loop(loop)
        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
        else:
            self._base_url = URL(base_url)
            assert (
                self._base_url.origin() == self._base_url
            ), "Only absolute URLs without path part are supported"
        if connector is None:
            connector = TCPConnector(loop=loop)
        if connector._loop is not loop:
            raise RuntimeError("Session and connector has to use same event loop")
        self._loop = loop
        # In debug mode remember where the session was created so __del__
        # can point at the leak site.
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar
        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)
        self._connector = connector
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize
        self._max_line_size = max_line_size
        self._max_field_size = max_field_size
        # Convert to list of tuples
        if headers:
            real_headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            real_headers = CIMultiDict()
        self._default_headers: CIMultiDict[str] = real_headers
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()
        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class
        self._trace_configs = trace_configs or []
        # Trace configs are frozen so they cannot change mid-session.
        for trace_config in self._trace_configs:
            trace_config.freeze()
        self._resolve_charset = fallback_charset_resolver
def __init_subclass__(cls: Type["ClientSession"]) -> None:
warnings.warn(
"Inheritance class {} from ClientSession "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2,
)
    # Only defined in debug builds: flags assignment of attributes that are
    # not part of the session's known set (typically a typo or misuse).
    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            # Always perform the assignment; the warning is advisory only.
            super().__setattr__(name, val)
def __del__(self, _warnings: Any = warnings) -> None:
if not self.closed:
kwargs = {"source": self}
_warnings.warn(
f"Unclosed client session {self!r}", ResourceWarning, **kwargs
)
context = {"client_session": self, "message": "Unclosed client session"}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def request(
self, method: str, url: StrOrURL, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP request."""
return _RequestContextManager(self._request(method, url, **kwargs))
def _build_url(self, str_or_url: StrOrURL) -> URL:
url = URL(str_or_url)
if self._base_url is None:
return url
else:
assert not url.is_absolute() and url.path.startswith("/")
return self._base_url.join(url)
    async def _request(
        self,
        method: str,
        str_or_url: StrOrURL,
        *,
        params: Optional[Mapping[str, str]] = None,
        data: Any = None,
        json: Any = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        allow_redirects: bool = True,
        max_redirects: int = 10,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        raise_for_status: Union[
            None, bool, Callable[[ClientResponse], Awaitable[None]]
        ] = None,
        read_until_eof: bool = True,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        server_hostname: Optional[str] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        trace_request_ctx: Optional[SimpleNamespace] = None,
        read_bufsize: Optional[int] = None,
        auto_decompress: Optional[bool] = None,
        max_line_size: Optional[int] = None,
        max_field_size: Optional[int] = None,
    ) -> ClientResponse:
        """Core request implementation behind every verb helper.

        Validates arguments, merges session-level defaults, then loops:
        connect, send, read response headers, and either follow a redirect
        (up to ``max_redirects``) or return the response.  A single
        cumulative timeout covers the whole loop, including redirects.

        Raises ClientError subclasses for transport/protocol failures,
        InvalidURL for unparseable URLs, TooManyRedirects when the redirect
        budget is exhausted.
        """
        # NOTE: timeout clamps existing connect and read timeouts. We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.
        if self.closed:
            raise RuntimeError("Session is closed")
        # Fold the deprecated verify_ssl/fingerprint/ssl_context args into
        # the single ``ssl`` value.
        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if data is not None and json is not None:
            raise ValueError(
                "data and json parameters can not be used at the same time"
            )
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)
        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
        redirects = 0
        history = []
        version = self._version
        params = params or {}
        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)
        try:
            url = self._build_url(str_or_url)
        except ValueError as e:
            raise InvalidURL(str_or_url) from e
        # Per-request skip list extends (does not replace) the session one.
        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))
        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError as e:
                raise InvalidURL(proxy) from e
        if timeout is sentinel:
            real_timeout: ClientTimeout = self._timeout
        else:
            if not isinstance(timeout, ClientTimeout):
                # Bare number: treat it as the total timeout.
                real_timeout = ClientTimeout(total=timeout)
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(
            self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
        )
        handle = tm.start()
        # Fall back to session-level defaults for the remaining knobs.
        if read_bufsize is None:
            read_bufsize = self._read_bufsize
        if auto_decompress is None:
            auto_decompress = self._auto_decompress
        if max_line_size is None:
            max_line_size = self._max_line_size
        if max_field_size is None:
            max_field_size = self._max_field_size
        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
            )
            for trace_config in self._trace_configs
        ]
        for trace in traces:
            await trace.send_request_start(method, url.update_query(params), headers)
        timer = tm.timer()
        try:
            with timer:
                # Request/redirect loop: each iteration issues one request;
                # ``continue`` follows a redirect, ``break`` exits with resp.
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError(
                            "Cannot combine AUTH argument with "
                            "credentials encoded in URL"
                        )
                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (
                        headers is not None
                        and auth is not None
                        and hdrs.AUTHORIZATION in headers
                    ):
                        raise ValueError(
                            "Cannot combine AUTHORIZATION header "
                            "with AUTH argument or credentials "
                            "encoded in URL"
                        )
                    # Session jar cookies plus per-request cookies (the
                    # latter filtered through a throwaway jar).
                    all_cookies = self._cookie_jar.filter_cookies(url)
                    if cookies is not None:
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)
                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        with suppress(LookupError):
                            proxy, proxy_auth = get_env_proxy_for_url(url)
                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=all_cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl if ssl is not None else True,
                        server_hostname=server_hostname,
                        proxy_headers=proxy_headers,
                        traces=traces,
                        trust_env=self.trust_env,
                    )
                    # connection timeout
                    try:
                        async with ceil_timeout(
                            real_timeout.connect,
                            ceil_threshold=real_timeout.ceil_threshold,
                        ):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req, traces=traces, timeout=real_timeout
                            )
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            "Connection timeout " "to host {}".format(url)
                        ) from exc
                    assert conn.transport is not None
                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method_must_be_empty_body(method),
                        read_until_eof=read_until_eof,
                        auto_decompress=auto_decompress,
                        read_timeout=real_timeout.sock_read,
                        read_bufsize=read_bufsize,
                        timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
                        max_line_size=max_line_size,
                        max_field_size=max_field_size,
                    )
                    # Close the response/connection on any failure so the
                    # pooled connection is not leaked in a broken state.
                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                            raise
                        raise ClientOSError(*exc.args) from exc
                    self._cookie_jar.update_cookies(resp.cookies, resp.url)
                    # redirects
                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url.update_query(params), headers, resp
                            )
                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(
                                history[0].request_info, tuple(history)
                            )
                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
                        ):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)
                        r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
                            hdrs.URI
                        )
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()
                        try:
                            parsed_url = URL(
                                r_url, encoded=not self._requote_redirect_url
                            )
                        except ValueError as e:
                            raise InvalidURL(r_url) from e
                        scheme = parsed_url.scheme
                        if scheme not in ("http", "https", ""):
                            resp.close()
                            raise ValueError("Can redirect only to http or https")
                        elif not scheme:
                            parsed_url = url.join(parsed_url)
                        # Drop credentials when the redirect leaves the
                        # original origin, to avoid leaking them.
                        if url.origin() != parsed_url.origin():
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)
                        url = parsed_url
                        params = {}
                        resp.release()
                        continue
                    break
            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status
            if raise_for_status is None:
                pass
            elif callable(raise_for_status):
                await raise_for_status(resp)
            elif raise_for_status:
                resp.raise_for_status()
            # register connection
            if handle is not None:
                if resp.connection is not None:
                    # Cancel the cumulative timeout when the connection is
                    # released (body fully consumed).
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()
            resp._history = tuple(history)
            for trace in traces:
                await trace.send_request_end(
                    method, url.update_query(params), headers, resp
                )
            return resp
        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None
            for trace in traces:
                await trace.send_request_exception(
                    method, url.update_query(params), headers, e
                )
            raise
    def ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = True,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> "_WSRequestContextManager":
        """Initiate websocket connection."""
        # Thin synchronous wrapper: all arguments are forwarded verbatim to
        # the coroutine _ws_connect(), wrapped so it can be used both as
        # ``await session.ws_connect(...)`` and ``async with``.
        return _WSRequestContextManager(
            self._ws_connect(
                url,
                method=method,
                protocols=protocols,
                timeout=timeout,
                receive_timeout=receive_timeout,
                autoclose=autoclose,
                autoping=autoping,
                heartbeat=heartbeat,
                auth=auth,
                origin=origin,
                params=params,
                headers=headers,
                proxy=proxy,
                proxy_auth=proxy_auth,
                ssl=ssl,
                verify_ssl=verify_ssl,
                fingerprint=fingerprint,
                ssl_context=ssl_context,
                proxy_headers=proxy_headers,
                compress=compress,
                max_msg_size=max_msg_size,
            )
        )
    async def _ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> ClientWebSocketResponse:
        """Perform the RFC 6455 opening handshake and return a WS response.

        Sends an HTTP request with Upgrade headers, validates the 101
        response (status, Upgrade/Connection headers, Sec-WebSocket-Accept
        challenge), negotiates subprotocol and permessage-deflate, then
        takes over the underlying connection for websocket framing.

        Raises WSServerHandshakeError if any handshake check fails.
        """
        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        else:
            real_headers = CIMultiDict(headers)
        # Mandatory upgrade headers; user-supplied values take precedence.
        default_headers = {
            hdrs.UPGRADE: "websocket",
            hdrs.CONNECTION: "Upgrade",
            hdrs.SEC_WEBSOCKET_VERSION: "13",
        }
        for key, value in default_headers.items():
            real_headers.setdefault(key, value)
        # Random nonce the server must echo back (hashed) in the handshake.
        sec_key = base64.b64encode(os.urandom(16))
        real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
        if protocols:
            real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
        if origin is not None:
            real_headers[hdrs.ORIGIN] = origin
        if compress:
            extstr = ws_ext_gen(compress=compress)
            real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
        # For the sake of backward compatibility, if user passes in None, convert it to True
        if ssl is None:
            ssl = True
        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        # send request
        resp = await self.request(
            method,
            url,
            params=params,
            headers=real_headers,
            read_until_eof=False,
            auth=auth,
            proxy=proxy,
            proxy_auth=proxy_auth,
            ssl=ssl,
            proxy_headers=proxy_headers,
        )
        try:
            # check handshake
            if resp.status != 101:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid response status",
                    status=resp.status,
                    headers=resp.headers,
                )
            if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid upgrade header",
                    status=resp.status,
                    headers=resp.headers,
                )
            if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid connection header",
                    status=resp.status,
                    headers=resp.headers,
                )
            # key calculation
            r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
            match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
            if r_key != match:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid challenge response",
                    status=resp.status,
                    headers=resp.headers,
                )
            # websocket protocol
            protocol = None
            if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
                resp_protocols = [
                    proto.strip()
                    for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
                ]
                # First server-listed protocol we also offered wins.
                for proto in resp_protocols:
                    if proto in protocols:
                        protocol = proto
                        break
            # websocket compress
            notakeover = False
            if compress:
                compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
                if compress_hdrs:
                    try:
                        compress, notakeover = ws_ext_parse(compress_hdrs)
                    except WSHandshakeError as exc:
                        raise WSServerHandshakeError(
                            resp.request_info,
                            resp.history,
                            message=exc.args[0],
                            status=resp.status,
                            headers=resp.headers,
                        ) from exc
                else:
                    # Server declined compression.
                    compress = 0
                    notakeover = False
            # Take over the raw connection for websocket framing.
            conn = resp.connection
            assert conn is not None
            conn_proto = conn.protocol
            assert conn_proto is not None
            transport = conn.transport
            assert transport is not None
            reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
                conn_proto, 2**16, loop=self._loop
            )
            conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
            writer = WebSocketWriter(
                conn_proto,
                transport,
                use_mask=True,
                compress=compress,
                notakeover=notakeover,
            )
        except BaseException:
            resp.close()
            raise
        else:
            return self._ws_response_class(
                reader,
                writer,
                protocol,
                resp,
                timeout,
                autoclose,
                autoping,
                self._loop,
                receive_timeout=receive_timeout,
                heartbeat=heartbeat,
                compress=compress,
                client_notakeover=notakeover,
            )
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
"""Add default headers and transform it to CIMultiDict"""
# Convert headers to MultiDict
result = CIMultiDict(self._default_headers)
if headers:
if not isinstance(headers, (MultiDictProxy, MultiDict)):
headers = CIMultiDict(headers)
added_names: Set[str] = set()
for key, value in headers.items():
if key in added_names:
result.add(key, value)
else:
result[key] = value
added_names.add(key)
return result
    # Convenience verb helpers: each is a thin wrapper over _request() that
    # fixes the method and returns an (a)waitable/async-with context manager.
    def get(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP GET request."""
        return _RequestContextManager(
            self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
        )

    def options(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP OPTIONS request."""
        return _RequestContextManager(
            self._request(
                hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
            )
        )

    def head(
        self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP HEAD request."""
        # NOTE: unlike the other verbs, HEAD does not follow redirects by default.
        return _RequestContextManager(
            self._request(
                hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
            )
        )

    def post(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP POST request."""
        return _RequestContextManager(
            self._request(hdrs.METH_POST, url, data=data, **kwargs)
        )

    def put(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PUT request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PUT, url, data=data, **kwargs)
        )

    def patch(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PATCH request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
        )

    def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP DELETE request."""
        return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
async def close(self) -> None:
"""Close underlying connector.
Release all acquired resources.
"""
if not self.closed:
if self._connector is not None and self._connector_owner:
await self._connector.close()
self._connector = None
    # ---- read-only views of session configuration and state ----

    @property
    def closed(self) -> bool:
        """Is client session closed.

        A readonly property.
        """
        return self._connector is None or self._connector.closed

    @property
    def connector(self) -> Optional[BaseConnector]:
        """Connector instance used for the session."""
        return self._connector

    @property
    def cookie_jar(self) -> AbstractCookieJar:
        """The session cookies."""
        return self._cookie_jar

    @property
    def version(self) -> Tuple[int, int]:
        """The session HTTP protocol version."""
        return self._version

    @property
    def requote_redirect_url(self) -> bool:
        """Do URL requoting on redirection handling."""
        return self._requote_redirect_url

    @requote_redirect_url.setter
    def requote_redirect_url(self, val: bool) -> None:
        """Do URL requoting on redirection handling."""
        # Mutation still works but is deprecated; see issue #2778.
        warnings.warn(
            "session.requote_redirect_url modification " "is deprecated #2778",
            DeprecationWarning,
            stacklevel=2,
        )
        self._requote_redirect_url = val

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Session's loop."""
        warnings.warn(
            "client.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def timeout(self) -> ClientTimeout:
        """Timeout for the session."""
        return self._timeout

    @property
    def headers(self) -> "CIMultiDict[str]":
        """The default headers of the client session."""
        return self._default_headers

    @property
    def skip_auto_headers(self) -> FrozenSet[istr]:
        """Headers for which autogeneration should be skipped"""
        return self._skip_auto_headers

    @property
    def auth(self) -> Optional[BasicAuth]:
        """An object that represents HTTP Basic Authorization"""
        return self._default_auth

    @property
    def json_serialize(self) -> JSONEncoder:
        """Json serializer callable"""
        return self._json_serialize

    @property
    def connector_owner(self) -> bool:
        """Should connector be closed on session closing"""
        return self._connector_owner

    @property
    def raise_for_status(
        self,
    ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
        """Should `ClientResponse.raise_for_status()` be called for each response."""
        return self._raise_for_status

    @property
    def auto_decompress(self) -> bool:
        """Should the body response be automatically decompressed."""
        return self._auto_decompress

    @property
    def trust_env(self) -> bool:
        """
        Should proxies information from environment or netrc be trusted.

        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
        or ~/.netrc file if present.
        """
        return self._trust_env

    @property
    def trace_configs(self) -> List[TraceConfig]:
        """A list of TraceConfig instances used for client tracing"""
        return self._trace_configs
    def detach(self) -> None:
        """Detach connector from session without closing the former.

        Session is switched to closed state anyway.
        """
        # Dropping the reference is enough: ``closed`` is defined as
        # "_connector is None or connector closed".
        self._connector = None
    def __enter__(self) -> None:
        # Sessions must be used as ``async with``; the synchronous protocol
        # is deliberately blocked.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover
    async def __aenter__(self) -> "ClientSession":
        # ``async with ClientSession() as session:`` yields the session itself.
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Always close on context exit, regardless of exceptions.
        await self.close()
| (base_url: Union[str, yarl.URL, NoneType] = None, *, connector: Optional[aiohttp.connector.BaseConnector] = None, loop: Optional[asyncio.events.AbstractEventLoop] = None, cookies: Union[Mapping[str, Union[str, ForwardRef('BaseCookie[str]'), ForwardRef('Morsel[Any]')]], Iterable[Tuple[str, Union[str, ForwardRef('BaseCookie[str]'), ForwardRef('Morsel[Any]')]]], ForwardRef('BaseCookie[str]'), NoneType] = None, headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, skip_auto_headers: Optional[Iterable[str]] = None, auth: Optional[aiohttp.helpers.BasicAuth] = None, json_serialize: Callable[[Any], str] = <function dumps at 0x7f632ccb0b80>, request_class: Type[aiohttp.client_reqrep.ClientRequest] = <class 'aiohttp.client_reqrep.ClientRequest'>, response_class: Type[aiohttp.client_reqrep.ClientResponse] = <class 'aiohttp.client_reqrep.ClientResponse'>, ws_response_class: Type[aiohttp.client_ws.ClientWebSocketResponse] = <class 'aiohttp.client_ws.ClientWebSocketResponse'>, version: aiohttp.http_writer.HttpVersion = HttpVersion(major=1, minor=1), cookie_jar: Optional[aiohttp.abc.AbstractCookieJar] = None, connector_owner: bool = True, raise_for_status: Union[bool, Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[NoneType]]] = False, read_timeout: Union[float, aiohttp.helpers._SENTINEL] = <_SENTINEL.sentinel: 1>, conn_timeout: Optional[float] = None, timeout: Union[object, aiohttp.client.ClientTimeout] = <_SENTINEL.sentinel: 1>, auto_decompress: bool = True, trust_env: bool = False, requote_redirect_url: bool = True, trace_configs: Optional[List[aiohttp.tracing.TraceConfig]] = None, read_bufsize: int = 65536, max_line_size: int = 8190, max_field_size: int = 8190, fallback_charset_resolver: Callable[[aiohttp.client_reqrep.ClientResponse, bytes], str] = <function ClientSession.<lambda> at 0x7f632b6bf6d0>) -> None |
18,206 | aiohttp.client | __aenter__ | null | def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
| (self) -> aiohttp.client.ClientSession |
18,208 | aiohttp.client | __del__ | null | def __del__(self, _warnings: Any = warnings) -> None:
if not self.closed:
kwargs = {"source": self}
_warnings.warn(
f"Unclosed client session {self!r}", ResourceWarning, **kwargs
)
context = {"client_session": self, "message": "Unclosed client session"}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
| (self, _warnings: Any = <module 'warnings' from '/usr/local/lib/python3.10/warnings.py'>) -> NoneType |
18,209 | aiohttp.client | __enter__ | null | def __enter__(self) -> None:
raise TypeError("Use async with instead")
| (self) -> NoneType |
18,211 | aiohttp.client | __init__ | null | def __init__(
self,
base_url: Optional[StrOrURL] = None,
*,
connector: Optional[BaseConnector] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
cookies: Optional[LooseCookies] = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Optional[Iterable[str]] = None,
auth: Optional[BasicAuth] = None,
json_serialize: JSONEncoder = json.dumps,
request_class: Type[ClientRequest] = ClientRequest,
response_class: Type[ClientResponse] = ClientResponse,
ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
version: HttpVersion = http.HttpVersion11,
cookie_jar: Optional[AbstractCookieJar] = None,
connector_owner: bool = True,
raise_for_status: Union[
bool, Callable[[ClientResponse], Awaitable[None]]
] = False,
read_timeout: Union[float, _SENTINEL] = sentinel,
conn_timeout: Optional[float] = None,
timeout: Union[object, ClientTimeout] = sentinel,
auto_decompress: bool = True,
trust_env: bool = False,
requote_redirect_url: bool = True,
trace_configs: Optional[List[TraceConfig]] = None,
read_bufsize: int = 2**16,
max_line_size: int = 8190,
max_field_size: int = 8190,
fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
) -> None:
# We initialise _connector to None immediately, as it's referenced in __del__()
# and could cause issues if an exception occurs during initialisation.
self._connector: Optional[BaseConnector] = None
if timeout is sentinel or timeout is None:
self._timeout = DEFAULT_TIMEOUT
if read_timeout is not sentinel:
warnings.warn(
"read_timeout is deprecated, " "use timeout argument instead",
DeprecationWarning,
stacklevel=2,
)
self._timeout = attr.evolve(self._timeout, total=read_timeout)
if conn_timeout is not None:
self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
warnings.warn(
"conn_timeout is deprecated, " "use timeout argument instead",
DeprecationWarning,
stacklevel=2,
)
else:
if not isinstance(timeout, ClientTimeout):
raise ValueError(
f"timeout parameter cannot be of {type(timeout)} type, "
"please use 'timeout=ClientTimeout(...)'",
)
self._timeout = timeout
if read_timeout is not sentinel:
raise ValueError(
"read_timeout and timeout parameters "
"conflict, please setup "
"timeout.read"
)
if conn_timeout is not None:
raise ValueError(
"conn_timeout and timeout parameters "
"conflict, please setup "
"timeout.connect"
)
if loop is None:
if connector is not None:
loop = connector._loop
loop = get_running_loop(loop)
if base_url is None or isinstance(base_url, URL):
self._base_url: Optional[URL] = base_url
else:
self._base_url = URL(base_url)
assert (
self._base_url.origin() == self._base_url
), "Only absolute URLs without path part are supported"
if connector is None:
connector = TCPConnector(loop=loop)
if connector._loop is not loop:
raise RuntimeError("Session and connector has to use same event loop")
self._loop = loop
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
if cookie_jar is None:
cookie_jar = CookieJar(loop=loop)
self._cookie_jar = cookie_jar
if cookies is not None:
self._cookie_jar.update_cookies(cookies)
self._connector = connector
self._connector_owner = connector_owner
self._default_auth = auth
self._version = version
self._json_serialize = json_serialize
self._raise_for_status = raise_for_status
self._auto_decompress = auto_decompress
self._trust_env = trust_env
self._requote_redirect_url = requote_redirect_url
self._read_bufsize = read_bufsize
self._max_line_size = max_line_size
self._max_field_size = max_field_size
# Convert to list of tuples
if headers:
real_headers: CIMultiDict[str] = CIMultiDict(headers)
else:
real_headers = CIMultiDict()
self._default_headers: CIMultiDict[str] = real_headers
if skip_auto_headers is not None:
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
else:
self._skip_auto_headers = frozenset()
self._request_class = request_class
self._response_class = response_class
self._ws_response_class = ws_response_class
self._trace_configs = trace_configs or []
for trace_config in self._trace_configs:
trace_config.freeze()
self._resolve_charset = fallback_charset_resolver
| (self, base_url: Union[str, yarl.URL, NoneType] = None, *, connector: Optional[aiohttp.connector.BaseConnector] = None, loop: Optional[asyncio.events.AbstractEventLoop] = None, cookies: Union[Mapping[str, Union[str, ForwardRef('BaseCookie[str]'), ForwardRef('Morsel[Any]')]], Iterable[Tuple[str, Union[str, ForwardRef('BaseCookie[str]'), ForwardRef('Morsel[Any]')]]], ForwardRef('BaseCookie[str]'), NoneType] = None, headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, skip_auto_headers: Optional[Iterable[str]] = None, auth: Optional[aiohttp.helpers.BasicAuth] = None, json_serialize: Callable[[Any], str] = <function dumps at 0x7f632ccb0b80>, request_class: Type[aiohttp.client_reqrep.ClientRequest] = <class 'aiohttp.client_reqrep.ClientRequest'>, response_class: Type[aiohttp.client_reqrep.ClientResponse] = <class 'aiohttp.client_reqrep.ClientResponse'>, ws_response_class: Type[aiohttp.client_ws.ClientWebSocketResponse] = <class 'aiohttp.client_ws.ClientWebSocketResponse'>, version: aiohttp.http_writer.HttpVersion = HttpVersion(major=1, minor=1), cookie_jar: Optional[aiohttp.abc.AbstractCookieJar] = None, connector_owner: bool = True, raise_for_status: Union[bool, Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[NoneType]]] = False, read_timeout: Union[float, aiohttp.helpers._SENTINEL] = <_SENTINEL.sentinel: 1>, conn_timeout: Optional[float] = None, timeout: Union[object, aiohttp.client.ClientTimeout] = <_SENTINEL.sentinel: 1>, auto_decompress: bool = True, trust_env: bool = False, requote_redirect_url: bool = True, trace_configs: Optional[List[aiohttp.tracing.TraceConfig]] = None, read_bufsize: int = 65536, max_line_size: int = 8190, max_field_size: int = 8190, fallback_charset_resolver: Callable[[aiohttp.client_reqrep.ClientResponse, bytes], str] = <function ClientSession.<lambda> at 0x7f632b6bf6d0>) -> None |
18,212 | aiohttp.client | _build_url | null | def _build_url(self, str_or_url: StrOrURL) -> URL:
url = URL(str_or_url)
if self._base_url is None:
return url
else:
assert not url.is_absolute() and url.path.startswith("/")
return self._base_url.join(url)
| (self, str_or_url: Union[str, yarl.URL]) -> yarl.URL |
18,213 | aiohttp.client | _prepare_headers | Add default headers and transform it to CIMultiDict | def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
"""Add default headers and transform it to CIMultiDict"""
# Convert headers to MultiDict
result = CIMultiDict(self._default_headers)
if headers:
if not isinstance(headers, (MultiDictProxy, MultiDict)):
headers = CIMultiDict(headers)
added_names: Set[str] = set()
for key, value in headers.items():
if key in added_names:
result.add(key, value)
else:
result[key] = value
added_names.add(key)
return result
| (self, headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType]) -> multidict._multidict.CIMultiDict[str] |
18,215 | aiohttp.client | _ws_connect | null | def ws_connect(
self,
url: StrOrURL,
*,
method: str = hdrs.METH_GET,
protocols: Iterable[str] = (),
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
auth: Optional[BasicAuth] = None,
origin: Optional[str] = None,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
ssl: Union[SSLContext, bool, None, Fingerprint] = True,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
proxy_headers: Optional[LooseHeaders] = None,
compress: int = 0,
max_msg_size: int = 4 * 1024 * 1024,
) -> "_WSRequestContextManager":
"""Initiate websocket connection."""
return _WSRequestContextManager(
self._ws_connect(
url,
method=method,
protocols=protocols,
timeout=timeout,
receive_timeout=receive_timeout,
autoclose=autoclose,
autoping=autoping,
heartbeat=heartbeat,
auth=auth,
origin=origin,
params=params,
headers=headers,
proxy=proxy,
proxy_auth=proxy_auth,
ssl=ssl,
verify_ssl=verify_ssl,
fingerprint=fingerprint,
ssl_context=ssl_context,
proxy_headers=proxy_headers,
compress=compress,
max_msg_size=max_msg_size,
)
)
| (self, url: Union[str, yarl.URL], *, method: str = 'GET', protocols: Iterable[str] = (), timeout: float = 10.0, receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, heartbeat: Optional[float] = None, auth: Optional[aiohttp.helpers.BasicAuth] = None, origin: Optional[str] = None, params: Optional[Mapping[str, str]] = None, headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, proxy: Union[str, yarl.URL, NoneType] = None, proxy_auth: Optional[aiohttp.helpers.BasicAuth] = None, ssl: Union[NoneType, bool, aiohttp.client_reqrep.Fingerprint] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: NoneType = None, proxy_headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, compress: int = 0, max_msg_size: int = 4194304) -> aiohttp.client_ws.ClientWebSocketResponse |
18,216 | aiohttp.client | close | Close underlying connector.
Release all acquired resources.
| def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
"""Perform HTTP DELETE request."""
return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
| (self) -> NoneType |
18,217 | aiohttp.client | delete | Perform HTTP DELETE request. | def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
"""Perform HTTP DELETE request."""
return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
| (self, url: Union[str, yarl.URL], **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,218 | aiohttp.client | detach | Detach connector from session without closing the former.
Session is switched to closed state anyway.
| def detach(self) -> None:
"""Detach connector from session without closing the former.
Session is switched to closed state anyway.
"""
self._connector = None
| (self) -> NoneType |
18,219 | aiohttp.client | get | Perform HTTP GET request. | def get(
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP GET request."""
return _RequestContextManager(
self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
)
| (self, url: Union[str, yarl.URL], *, allow_redirects: bool = True, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,220 | aiohttp.client | head | Perform HTTP HEAD request. | def head(
self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP HEAD request."""
return _RequestContextManager(
self._request(
hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
)
)
| (self, url: Union[str, yarl.URL], *, allow_redirects: bool = False, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,221 | aiohttp.client | options | Perform HTTP OPTIONS request. | def options(
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP OPTIONS request."""
return _RequestContextManager(
self._request(
hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
)
)
| (self, url: Union[str, yarl.URL], *, allow_redirects: bool = True, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,222 | aiohttp.client | patch | Perform HTTP PATCH request. | def patch(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
)
| (self, url: Union[str, yarl.URL], *, data: Optional[Any] = None, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,223 | aiohttp.client | post | Perform HTTP POST request. | def post(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP POST request."""
return _RequestContextManager(
self._request(hdrs.METH_POST, url, data=data, **kwargs)
)
| (self, url: Union[str, yarl.URL], *, data: Optional[Any] = None, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,224 | aiohttp.client | put | Perform HTTP PUT request. | def put(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP PUT request."""
return _RequestContextManager(
self._request(hdrs.METH_PUT, url, data=data, **kwargs)
)
| (self, url: Union[str, yarl.URL], *, data: Optional[Any] = None, **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,225 | aiohttp.client | request | Perform HTTP request. | def request(
self, method: str, url: StrOrURL, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP request."""
return _RequestContextManager(self._request(method, url, **kwargs))
| (self, method: str, url: Union[str, yarl.URL], **kwargs: Any) -> aiohttp.client._RequestContextManager |
18,226 | aiohttp.client | ws_connect | Initiate websocket connection. | def ws_connect(
self,
url: StrOrURL,
*,
method: str = hdrs.METH_GET,
protocols: Iterable[str] = (),
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
auth: Optional[BasicAuth] = None,
origin: Optional[str] = None,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
ssl: Union[SSLContext, bool, None, Fingerprint] = True,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
proxy_headers: Optional[LooseHeaders] = None,
compress: int = 0,
max_msg_size: int = 4 * 1024 * 1024,
) -> "_WSRequestContextManager":
"""Initiate websocket connection."""
return _WSRequestContextManager(
self._ws_connect(
url,
method=method,
protocols=protocols,
timeout=timeout,
receive_timeout=receive_timeout,
autoclose=autoclose,
autoping=autoping,
heartbeat=heartbeat,
auth=auth,
origin=origin,
params=params,
headers=headers,
proxy=proxy,
proxy_auth=proxy_auth,
ssl=ssl,
verify_ssl=verify_ssl,
fingerprint=fingerprint,
ssl_context=ssl_context,
proxy_headers=proxy_headers,
compress=compress,
max_msg_size=max_msg_size,
)
)
| (self, url: Union[str, yarl.URL], *, method: str = 'GET', protocols: Iterable[str] = (), timeout: float = 10.0, receive_timeout: Optional[float] = None, autoclose: bool = True, autoping: bool = True, heartbeat: Optional[float] = None, auth: Optional[aiohttp.helpers.BasicAuth] = None, origin: Optional[str] = None, params: Optional[Mapping[str, str]] = None, headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, proxy: Union[str, yarl.URL, NoneType] = None, proxy_auth: Optional[aiohttp.helpers.BasicAuth] = None, ssl: Union[NoneType, bool, aiohttp.client_reqrep.Fingerprint] = True, verify_ssl: Optional[bool] = None, fingerprint: Optional[bytes] = None, ssl_context: NoneType = None, proxy_headers: Union[Mapping[Union[str, multidict._multidict.istr], str], multidict._multidict.CIMultiDict, multidict._multidict.CIMultiDictProxy, NoneType] = None, compress: int = 0, max_msg_size: int = 4194304) -> aiohttp.client._WSRequestContextManager |
18,227 | aiohttp_retry.retry_options | ExponentialRetry | null | class ExponentialRetry(RetryOptionsBase):
def __init__(
self,
attempts: int = 3, # How many times we should retry
start_timeout: float = 0.1, # Base timeout time, then it exponentially grow
max_timeout: float = 30.0, # Max possible timeout between tries
factor: float = 2.0, # How much we increase timeout each time
statuses: Optional[Set[int]] = None, # On which statuses we should retry
exceptions: Optional[Set[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self._start_timeout: float = start_timeout
self._max_timeout: float = max_timeout
self._factor: float = factor
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
"""Return timeout with exponential backoff."""
timeout = self._start_timeout * (self._factor ** attempt)
return min(timeout, self._max_timeout)
| (attempts: int = 3, start_timeout: float = 0.1, max_timeout: float = 30.0, factor: float = 2.0, statuses: Optional[Set[int]] = None, exceptions: Optional[Set[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,228 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
attempts: int = 3, # How many times we should retry
start_timeout: float = 0.1, # Base timeout time, then it exponentially grow
max_timeout: float = 30.0, # Max possible timeout between tries
factor: float = 2.0, # How much we increase timeout each time
statuses: Optional[Set[int]] = None, # On which statuses we should retry
exceptions: Optional[Set[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self._start_timeout: float = start_timeout
self._max_timeout: float = max_timeout
self._factor: float = factor
| (self, attempts: int = 3, start_timeout: float = 0.1, max_timeout: float = 30.0, factor: float = 2.0, statuses: Optional[Set[int]] = None, exceptions: Optional[Set[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,229 | aiohttp_retry.retry_options | get_timeout | Return timeout with exponential backoff. | def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
"""Return timeout with exponential backoff."""
timeout = self._start_timeout * (self._factor ** attempt)
return min(timeout, self._max_timeout)
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
18,230 | aiohttp_retry.retry_options | FibonacciRetry | null | class FibonacciRetry(RetryOptionsBase):
def __init__(
self,
attempts: int = 3,
multiplier: float = 1.0,
statuses: Optional[Iterable[int]] = None,
exceptions: Optional[Iterable[Type[Exception]]] = None,
max_timeout: float = 3.0, # Maximum possible timeout between tries
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self.max_timeout = max_timeout
self.multiplier = multiplier
self.prev_step = 1.0
self.current_step = 1.0
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
new_current_step = self.prev_step + self.current_step
self.prev_step = self.current_step
self.current_step = new_current_step
return min(self.multiplier * new_current_step, self.max_timeout)
| (attempts: int = 3, multiplier: float = 1.0, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, max_timeout: float = 3.0, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,231 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
attempts: int = 3,
multiplier: float = 1.0,
statuses: Optional[Iterable[int]] = None,
exceptions: Optional[Iterable[Type[Exception]]] = None,
max_timeout: float = 3.0, # Maximum possible timeout between tries
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self.max_timeout = max_timeout
self.multiplier = multiplier
self.prev_step = 1.0
self.current_step = 1.0
| (self, attempts: int = 3, multiplier: float = 1.0, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, max_timeout: float = 3.0, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,232 | aiohttp_retry.retry_options | get_timeout | null | def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
new_current_step = self.prev_step + self.current_step
self.prev_step = self.current_step
self.current_step = new_current_step
return min(self.multiplier * new_current_step, self.max_timeout)
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
18,233 | aiohttp_retry.retry_options | JitterRetry | https://github.com/inyutin/aiohttp_retry/issues/44 | class JitterRetry(ExponentialRetry):
"""https://github.com/inyutin/aiohttp_retry/issues/44"""
def __init__(
self,
attempts: int = 3, # How many times we should retry
start_timeout: float = 0.1, # Base timeout time, then it exponentially grow
max_timeout: float = 30.0, # Max possible timeout between tries
factor: float = 2.0, # How much we increase timeout each time
statuses: Optional[Set[int]] = None, # On which statuses we should retry
exceptions: Optional[Set[Type[Exception]]] = None, # On which exceptions we should retry
random_interval_size: float = 2.0, # size of interval for random component
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
start_timeout=start_timeout,
max_timeout=max_timeout,
factor=factor,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self._start_timeout: float = start_timeout
self._max_timeout: float = max_timeout
self._factor: float = factor
self._random_interval_size = random_interval_size
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
timeout: float = super().get_timeout(attempt) + random.uniform(0, self._random_interval_size) ** self._factor
return timeout
| (attempts: int = 3, start_timeout: float = 0.1, max_timeout: float = 30.0, factor: float = 2.0, statuses: Optional[Set[int]] = None, exceptions: Optional[Set[Type[Exception]]] = None, random_interval_size: float = 2.0, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,234 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
attempts: int = 3, # How many times we should retry
start_timeout: float = 0.1, # Base timeout time, then it exponentially grow
max_timeout: float = 30.0, # Max possible timeout between tries
factor: float = 2.0, # How much we increase timeout each time
statuses: Optional[Set[int]] = None, # On which statuses we should retry
exceptions: Optional[Set[Type[Exception]]] = None, # On which exceptions we should retry
random_interval_size: float = 2.0, # size of interval for random component
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
start_timeout=start_timeout,
max_timeout=max_timeout,
factor=factor,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self._start_timeout: float = start_timeout
self._max_timeout: float = max_timeout
self._factor: float = factor
self._random_interval_size = random_interval_size
| (self, attempts: int = 3, start_timeout: float = 0.1, max_timeout: float = 30.0, factor: float = 2.0, statuses: Optional[Set[int]] = None, exceptions: Optional[Set[Type[Exception]]] = None, random_interval_size: float = 2.0, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,235 | aiohttp_retry.retry_options | get_timeout | null | def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
timeout: float = super().get_timeout(attempt) + random.uniform(0, self._random_interval_size) ** self._factor
return timeout
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
18,236 | aiohttp_retry.retry_options | ListRetry | null | class ListRetry(RetryOptionsBase):
def __init__(
self,
timeouts: List[float],
statuses: Optional[Iterable[int]] = None, # On which statuses we should retry
exceptions: Optional[Iterable[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=len(timeouts),
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self.timeouts = timeouts
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
"""timeouts from a defined list."""
return self.timeouts[attempt]
| (timeouts: List[float], statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,237 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
timeouts: List[float],
statuses: Optional[Iterable[int]] = None, # On which statuses we should retry
exceptions: Optional[Iterable[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=len(timeouts),
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self.timeouts = timeouts
| (self, timeouts: List[float], statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,238 | aiohttp_retry.retry_options | get_timeout | timeouts from a defined list. | def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
"""timeouts from a defined list."""
return self.timeouts[attempt]
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
18,239 | typing | Protocol | Base class for protocol classes.
Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@typing.runtime_checkable act as simple-minded runtime protocols that check
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
# NOTE(review): verbatim copy of CPython's typing.Protocol embedded in this
# dump; logic intentionally left untouched (it mirrors the stdlib exactly).
class Protocol(Generic, metaclass=_ProtocolMeta):
    """Base class for protocol classes.
    Protocol classes are defined as::
    class Proto(Protocol):
    def meth(self) -> int:
    ...
    Such classes are primarily used with static type checkers that recognize
    structural subtyping (static duck-typing), for example::
    class C:
    def meth(self) -> int:
    return 0
    def func(x: Proto) -> int:
    return x.meth()
    func(C())  # Passes static type check
    See PEP 544 for details. Protocol classes decorated with
    @typing.runtime_checkable act as simple-minded runtime protocols that check
    only the presence of given attributes, ignoring their type signatures.
    Protocol classes can be generic, they are defined as::
    class GenProto(Protocol[T]):
    def meth(self) -> T:
    ...
    """
    __slots__ = ()
    _is_protocol = True
    _is_runtime_protocol = False

    def __init_subclass__(cls, *args, **kwargs):
        super().__init_subclass__(*args, **kwargs)
        # Determine if this is a protocol or a concrete subclass.
        if not cls.__dict__.get('_is_protocol', False):
            cls._is_protocol = any(b is Protocol for b in cls.__bases__)

        # Set (or override) the protocol subclass hook.
        def _proto_hook(other):
            if not cls.__dict__.get('_is_protocol', False):
                return NotImplemented
            # First, perform various sanity checks.
            if not getattr(cls, '_is_runtime_protocol', False):
                if _allow_reckless_class_checks():
                    return NotImplemented
                raise TypeError("Instance and class checks can only be used with"
                                " @runtime_checkable protocols")
            if not _is_callable_members_only(cls):
                if _allow_reckless_class_checks():
                    return NotImplemented
                raise TypeError("Protocols with non-method members"
                                " don't support issubclass()")
            if not isinstance(other, type):
                # Same error message as for issubclass(1, int).
                raise TypeError('issubclass() arg 1 must be a class')
            # Second, perform the actual structural compatibility check.
            for attr in _get_protocol_attrs(cls):
                for base in other.__mro__:
                    # Check if the members appears in the class dictionary...
                    if attr in base.__dict__:
                        if base.__dict__[attr] is None:
                            return NotImplemented
                        break
                    # ...or in annotations, if it is a sub-protocol.
                    annotations = getattr(base, '__annotations__', {})
                    if (isinstance(annotations, collections.abc.Mapping) and
                            attr in annotations and
                            issubclass(other, Generic) and other._is_protocol):
                        break
                else:
                    return NotImplemented
            return True

        if '__subclasshook__' not in cls.__dict__:
            cls.__subclasshook__ = _proto_hook
        # We have nothing more to do for non-protocols...
        if not cls._is_protocol:
            return
        # ... otherwise check consistency of bases, and prohibit instantiation.
        for base in cls.__bases__:
            if not (base in (object, Generic) or
                    base.__module__ in _PROTO_ALLOWLIST and
                    base.__name__ in _PROTO_ALLOWLIST[base.__module__] or
                    issubclass(base, Generic) and base._is_protocol):
                raise TypeError('Protocols can only inherit from other'
                                ' protocols, got %r' % base)
        cls.__init__ = _no_init_or_replace_init
| () |
class RandomRetry(RetryOptionsBase):
    """Retry policy whose delay is drawn uniformly from a fixed range."""

    def __init__(
        self,
        attempts: int = 3,  # How many times we should retry
        statuses: Optional[Iterable[int]] = None,  # On which statuses we should retry
        exceptions: Optional[Iterable[Type[Exception]]] = None,  # On which exceptions we should retry
        min_timeout: float = 0.1,  # Minimum possible timeout
        max_timeout: float = 3.0,  # Maximum possible timeout between tries
        random_func: Callable[[], float] = random.random,  # Random number generator
        retry_all_server_errors: bool = True,
        evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
    ):
        super().__init__(
            attempts=attempts,
            statuses=statuses,
            exceptions=exceptions,
            retry_all_server_errors=retry_all_server_errors,
            evaluate_response_callback=evaluate_response_callback,
        )
        self.attempts: int = attempts
        self.min_timeout: float = min_timeout
        self.max_timeout: float = max_timeout
        self.random = random_func

    def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
        """Draw a random delay between ``min_timeout`` and ``max_timeout``."""
        spread = self.max_timeout - self.min_timeout
        return self.min_timeout + self.random() * spread
| (attempts: int = 3, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, min_timeout: float = 0.1, max_timeout: float = 3.0, random_func: Callable[[], float] = <built-in method random of Random object at 0x55d6c9638390>, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,241 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
attempts: int = 3, # How many times we should retry
statuses: Optional[Iterable[int]] = None, # On which statuses we should retry
exceptions: Optional[Iterable[Type[Exception]]] = None, # On which exceptions we should retry
min_timeout: float = 0.1, # Minimum possible timeout
max_timeout: float = 3.0, # Maximum possible timeout between tries
random_func: Callable[[], float] = random.random, # Random number generator
retry_all_server_errors: bool = True,
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
super().__init__(
attempts=attempts,
statuses=statuses,
exceptions=exceptions,
retry_all_server_errors=retry_all_server_errors,
evaluate_response_callback=evaluate_response_callback,
)
self.attempts: int = attempts
self.min_timeout: float = min_timeout
self.max_timeout: float = max_timeout
self.random = random_func
| (self, attempts: int = 3, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, min_timeout: float = 0.1, max_timeout: float = 3.0, random_func: Callable[[], float] = <built-in method random of Random object at 0x55d6c9638390>, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
    """Generate random timeouts."""
    spread = self.max_timeout - self.min_timeout
    return self.min_timeout + self.random() * spread
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
class RequestParams:
    """Plain record describing a single HTTP request to attempt.

    NOTE(review): upstream declares this as a ``@dataclass``; the decorator is
    not visible in this dump — confirm before relying on a generated __init__.
    """

    method: str
    url: _RAW_URL_TYPE
    headers: Optional[Dict[str, Any]] = None
    trace_request_ctx: Optional[Dict[str, Any]] = None
    kwargs: Optional[Dict[str, Any]] = None
| (method: str, url: Union[str, yarl.URL], headers: Optional[Dict[str, Any]] = None, trace_request_ctx: Optional[Dict[str, Any]] = None, kwargs: Optional[Dict[str, Any]] = None) -> None |
18,244 | aiohttp_retry.client | __eq__ | null | import asyncio
import logging
import sys
from abc import abstractmethod
from dataclasses import dataclass
from types import TracebackType
from typing import (
Any,
Awaitable,
Callable,
Dict,
Generator,
List,
Optional,
Tuple,
Type,
Union,
)
from aiohttp import ClientResponse, ClientSession, hdrs
from aiohttp.typedefs import StrOrURL
from yarl import URL as YARL_URL
from .retry_options import ExponentialRetry, RetryOptionsBase
if sys.version_info >= (3, 8):
from typing import Protocol
else:
from typing_extensions import Protocol
class _Logger(Protocol):
    """Structural type for the logger accepted by RetryClient.

    Any object exposing ``debug``, ``warning`` and ``exception`` with the
    usual stdlib-logging signatures satisfies this protocol.
    """

    @abstractmethod
    def debug(self, msg: str, *args: Any, **kwargs: Any) -> None: ...

    @abstractmethod
    def warning(self, msg: str, *args: Any, **kwargs: Any) -> None: ...

    @abstractmethod
    def exception(self, msg: str, *args: Any, **kwargs: Any) -> None: ...
| (self, other) |
18,246 | aiohttp_retry.client | __repr__ | null | def request(
self,
method: str,
url: StrOrURL,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=method,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self) |
18,247 | aiohttp_retry.client | RetryClient | null | class RetryClient:
def __init__(
self,
client_session: Optional[ClientSession] = None,
logger: Optional[_LoggerType] = None,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: bool = False,
*args: Any,
**kwargs: Any,
) -> None:
if client_session is not None:
client = client_session
closed = None
else:
client = ClientSession(*args, **kwargs)
closed = False
self._client = client
self._closed = closed
self._logger: _LoggerType = logger or logging.getLogger("aiohttp_retry")
self._retry_options: RetryOptionsBase = retry_options or ExponentialRetry()
self._raise_for_status = raise_for_status
@property
def retry_options(self) -> RetryOptionsBase:
return self._retry_options
def requests(
self,
params_list: List[RequestParams],
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
) -> _RequestContext:
return self._make_requests(
params_list=params_list,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
def request(
self,
method: str,
url: StrOrURL,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=method,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def get(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_GET,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def options(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_OPTIONS,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def head(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None, **kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_HEAD,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def post(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_POST,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def put(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_PUT,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def patch(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_PATCH,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
def delete(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_DELETE,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
async def close(self) -> None:
await self._client.close()
self._closed = True
def _make_request(
self,
method: str,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
url_list = _url_to_urls(url)
params_list = [RequestParams(
method=method,
url=url,
headers=kwargs.pop('headers', {}),
trace_request_ctx=kwargs.pop('trace_request_ctx', None),
kwargs=kwargs,
) for url in url_list]
return self._make_requests(
params_list=params_list,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
def _make_requests(
self,
params_list: List[RequestParams],
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
) -> _RequestContext:
if retry_options is None:
retry_options = self._retry_options
if raise_for_status is None:
raise_for_status = self._raise_for_status
return _RequestContext(
request_func=self._client.request,
params_list=params_list,
logger=self._logger,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
async def __aenter__(self) -> 'RetryClient':
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
await self.close()
def __del__(self) -> None:
if getattr(self, '_closed', None) is None:
# in case object was not initialized (__init__ raised an exception)
return
if not self._closed:
self._logger.warning("Aiohttp retry client was not closed")
| (client_session: Optional[aiohttp.client.ClientSession] = None, logger: Union[aiohttp_retry.client._Logger, logging.Logger, NoneType] = None, retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: bool = False, *args: Any, **kwargs: Any) -> None |
18,248 | aiohttp_retry.client | __aenter__ | null | def _make_requests(
self,
params_list: List[RequestParams],
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
) -> _RequestContext:
if retry_options is None:
retry_options = self._retry_options
if raise_for_status is None:
raise_for_status = self._raise_for_status
return _RequestContext(
request_func=self._client.request,
params_list=params_list,
logger=self._logger,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
| (self) -> aiohttp_retry.client.RetryClient |
18,250 | aiohttp_retry.client | __del__ | null | def __del__(self) -> None:
if getattr(self, '_closed', None) is None:
# in case object was not initialized (__init__ raised an exception)
return
if not self._closed:
self._logger.warning("Aiohttp retry client was not closed")
| (self) -> NoneType |
18,251 | aiohttp_retry.client | __init__ | null | def __init__(
self,
client_session: Optional[ClientSession] = None,
logger: Optional[_LoggerType] = None,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: bool = False,
*args: Any,
**kwargs: Any,
) -> None:
if client_session is not None:
client = client_session
closed = None
else:
client = ClientSession(*args, **kwargs)
closed = False
self._client = client
self._closed = closed
self._logger: _LoggerType = logger or logging.getLogger("aiohttp_retry")
self._retry_options: RetryOptionsBase = retry_options or ExponentialRetry()
self._raise_for_status = raise_for_status
| (self, client_session: Optional[aiohttp.client.ClientSession] = None, logger: Union[aiohttp_retry.client._Logger, logging.Logger, NoneType] = None, retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: bool = False, *args: Any, **kwargs: Any) -> NoneType |
18,252 | aiohttp_retry.client | _make_request | null | def _make_request(
self,
method: str,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
url_list = _url_to_urls(url)
params_list = [RequestParams(
method=method,
url=url,
headers=kwargs.pop('headers', {}),
trace_request_ctx=kwargs.pop('trace_request_ctx', None),
kwargs=kwargs,
) for url in url_list]
return self._make_requests(
params_list=params_list,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
| (self, method: str, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,254 | aiohttp_retry.client | close | null | def delete(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_DELETE,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self) -> NoneType |
18,256 | aiohttp_retry.client | get | null | def get(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_GET,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,257 | aiohttp_retry.client | head | null | def head(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None, **kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_HEAD,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,258 | aiohttp_retry.client | options | null | def options(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_OPTIONS,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,259 | aiohttp_retry.client | patch | null | def patch(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_PATCH,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,260 | aiohttp_retry.client | post | null | def post(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_POST,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,261 | aiohttp_retry.client | put | null | def put(
self,
url: _URL_TYPE,
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
**kwargs: Any,
) -> _RequestContext:
return self._make_request(
method=hdrs.METH_PUT,
url=url,
retry_options=retry_options,
raise_for_status=raise_for_status,
**kwargs,
)
| (self, url: Union[str, yarl.URL, List[Union[str, yarl.URL]], Tuple[Union[str, yarl.URL], ...]], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None, **kwargs: Any) -> aiohttp_retry.client._RequestContext |
18,263 | aiohttp_retry.client | requests | null | def requests(
self,
params_list: List[RequestParams],
retry_options: Optional[RetryOptionsBase] = None,
raise_for_status: Optional[bool] = None,
) -> _RequestContext:
return self._make_requests(
params_list=params_list,
retry_options=retry_options,
raise_for_status=raise_for_status,
)
| (self, params_list: List[aiohttp_retry.client.RequestParams], retry_options: Optional[aiohttp_retry.retry_options.RetryOptionsBase] = None, raise_for_status: Optional[bool] = None) -> aiohttp_retry.client._RequestContext |
18,264 | aiohttp_retry.retry_options | RetryOptions | null | def RetryOptions(*args: Any, **kwargs: Any) -> ExponentialRetry:
warn("RetryOptions is deprecated, use ExponentialRetry")
return ExponentialRetry(*args, **kwargs)
| (*args: Any, **kwargs: Any) -> aiohttp_retry.retry_options.ExponentialRetry |
18,265 | aiohttp_retry.retry_options | RetryOptionsBase | null | class RetryOptionsBase:
def __init__(
self,
attempts: int = 3, # How many times we should retry
statuses: Optional[Iterable[int]] = None, # On which statuses we should retry
exceptions: Optional[Iterable[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True, # If should retry all 500 errors or not
# a callback that will run on response to decide if retry
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
self.attempts: int = attempts
if statuses is None:
statuses = set()
self.statuses: Iterable[int] = statuses
if exceptions is None:
exceptions = set()
self.exceptions: Iterable[Type[Exception]] = exceptions
self.retry_all_server_errors = retry_all_server_errors
self.evaluate_response_callback = evaluate_response_callback
@abc.abstractmethod
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
raise NotImplementedError
| (attempts: int = 3, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,266 | aiohttp_retry.retry_options | __init__ | null | def __init__(
self,
attempts: int = 3, # How many times we should retry
statuses: Optional[Iterable[int]] = None, # On which statuses we should retry
exceptions: Optional[Iterable[Type[Exception]]] = None, # On which exceptions we should retry
retry_all_server_errors: bool = True, # If should retry all 500 errors or not
# a callback that will run on response to decide if retry
evaluate_response_callback: Optional[EvaluateResponseCallbackType] = None,
):
self.attempts: int = attempts
if statuses is None:
statuses = set()
self.statuses: Iterable[int] = statuses
if exceptions is None:
exceptions = set()
self.exceptions: Iterable[Type[Exception]] = exceptions
self.retry_all_server_errors = retry_all_server_errors
self.evaluate_response_callback = evaluate_response_callback
| (self, attempts: int = 3, statuses: Optional[Iterable[int]] = None, exceptions: Optional[Iterable[Type[Exception]]] = None, retry_all_server_errors: bool = True, evaluate_response_callback: Optional[Callable[[aiohttp.client_reqrep.ClientResponse], Awaitable[bool]]] = None) |
18,267 | aiohttp_retry.retry_options | get_timeout | null | @abc.abstractmethod
def get_timeout(self, attempt: int, response: Optional[ClientResponse] = None) -> float:
raise NotImplementedError
| (self, attempt: int, response: Optional[aiohttp.client_reqrep.ClientResponse] = None) -> float |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.