index
int64 0
731k
| package
stringlengths 2
98
⌀ | name
stringlengths 1
76
| docstring
stringlengths 0
281k
⌀ | code
stringlengths 4
1.07M
⌀ | signature
stringlengths 2
42.8k
⌀ |
---|---|---|---|---|---|
21,759 | aiosocks.protocols | resume_writing | null | def resume_writing(self):
if self._negotiate_done and self._app_protocol is not self:
self._app_protocol.resume_writing()
else:
super().resume_writing()
| (self) |
21,762 | aiosocks.helpers | Socks5Addr | null | class Socks5Addr(SocksAddr):
pass
| (host, port=1080) |
21,768 | aiosocks.helpers | Socks5Auth | null | class Socks5Auth(namedtuple('Socks5Auth', ['login', 'password', 'encoding'])):
def __new__(cls, login, password, encoding='utf-8'):
if login is None:
raise ValueError('None is not allowed as login value')
if password is None:
raise ValueError('None is not allowed as password value')
return super().__new__(cls,
login.encode(encoding),
password.encode(encoding), encoding)
| (login, password, encoding='utf-8') |
21,770 | aiosocks.helpers | __new__ | null | def __new__(cls, login, password, encoding='utf-8'):
if login is None:
raise ValueError('None is not allowed as login value')
if password is None:
raise ValueError('None is not allowed as password value')
return super().__new__(cls,
login.encode(encoding),
password.encode(encoding), encoding)
| (cls, login, password, encoding='utf-8') |
21,773 | collections | _replace | Return a new Socks5Auth object replacing specified fields with new values | def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
"""Returns a new subclass of tuple with named fields.
>>> Point = namedtuple('Point', ['x', 'y'])
>>> Point.__doc__ # docstring for the new class
'Point(x, y)'
>>> p = Point(11, y=22) # instantiate with positional args or keywords
>>> p[0] + p[1] # indexable like a plain tuple
33
>>> x, y = p # unpack like a regular tuple
>>> x, y
(11, 22)
>>> p.x + p.y # fields also accessible by name
33
>>> d = p._asdict() # convert to a dictionary
>>> d['x']
11
>>> Point(**d) # convert from a dictionary
Point(x=11, y=22)
>>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
Point(x=100, y=22)
"""
# Validate the field names. At the user's option, either generate an error
# message or automatically replace the field name with a valid name.
if isinstance(field_names, str):
field_names = field_names.replace(',', ' ').split()
field_names = list(map(str, field_names))
typename = _sys.intern(str(typename))
if rename:
seen = set()
for index, name in enumerate(field_names):
if (not name.isidentifier()
or _iskeyword(name)
or name.startswith('_')
or name in seen):
field_names[index] = f'_{index}'
seen.add(name)
for name in [typename] + field_names:
if type(name) is not str:
raise TypeError('Type names and field names must be strings')
if not name.isidentifier():
raise ValueError('Type names and field names must be valid '
f'identifiers: {name!r}')
if _iskeyword(name):
raise ValueError('Type names and field names cannot be a '
f'keyword: {name!r}')
seen = set()
for name in field_names:
if name.startswith('_') and not rename:
raise ValueError('Field names cannot start with an underscore: '
f'{name!r}')
if name in seen:
raise ValueError(f'Encountered duplicate field name: {name!r}')
seen.add(name)
field_defaults = {}
if defaults is not None:
defaults = tuple(defaults)
if len(defaults) > len(field_names):
raise TypeError('Got more default values than field names')
field_defaults = dict(reversed(list(zip(reversed(field_names),
reversed(defaults)))))
# Variables used in the methods and docstrings
field_names = tuple(map(_sys.intern, field_names))
num_fields = len(field_names)
arg_list = ', '.join(field_names)
if num_fields == 1:
arg_list += ','
repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
tuple_new = tuple.__new__
_dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
# Create all the named tuple methods to be added to the class namespace
namespace = {
'_tuple_new': tuple_new,
'__builtins__': {},
'__name__': f'namedtuple_{typename}',
}
code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
__new__ = eval(code, namespace)
__new__.__name__ = '__new__'
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
if defaults is not None:
__new__.__defaults__ = defaults
@classmethod
def _make(cls, iterable):
result = tuple_new(cls, iterable)
if _len(result) != num_fields:
raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
return result
_make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
'or iterable')
def _replace(self, /, **kwds):
result = self._make(_map(kwds.pop, field_names, self))
if kwds:
raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
return result
_replace.__doc__ = (f'Return a new {typename} object replacing specified '
'fields with new values')
def __repr__(self):
'Return a nicely formatted representation string'
return self.__class__.__name__ + repr_fmt % self
def _asdict(self):
'Return a new dict which maps field names to their values.'
return _dict(_zip(self._fields, self))
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
return _tuple(self)
# Modify function metadata to help with introspection and debugging
for method in (
__new__,
_make.__func__,
_replace,
__repr__,
_asdict,
__getnewargs__,
):
method.__qualname__ = f'{typename}.{method.__name__}'
# Build-up the class namespace dictionary
# and use type() to build the result class
class_namespace = {
'__doc__': f'{typename}({arg_list})',
'__slots__': (),
'_fields': field_names,
'_field_defaults': field_defaults,
'__new__': __new__,
'_make': _make,
'_replace': _replace,
'__repr__': __repr__,
'_asdict': _asdict,
'__getnewargs__': __getnewargs__,
'__match_args__': field_names,
}
for index, name in enumerate(field_names):
doc = _sys.intern(f'Alias for field number {index}')
class_namespace[name] = _tuplegetter(index, doc)
result = type(typename, (tuple,), class_namespace)
# For pickling to work, the __module__ variable needs to be set to the frame
# where the named tuple is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython), or where the user has
# specified a particular module.
if module is None:
try:
module = _sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
if module is not None:
result.__module__ = module
return result
| (self, /, **kwds) |
21,774 | aiosocks.protocols | Socks5Protocol | null | class Socks5Protocol(BaseSocksProtocol):
def __init__(self, proxy, proxy_auth, dst, app_protocol_factory, waiter,
remote_resolve=True, loop=None, ssl=False,
server_hostname=None, negotiate_done_cb=None,
reader_limit=DEFAULT_LIMIT):
proxy_auth = proxy_auth or Socks5Auth('', '')
if not isinstance(proxy, Socks5Addr):
raise ValueError('Invalid proxy format')
if not isinstance(proxy_auth, Socks5Auth):
raise ValueError('Invalid proxy_auth format')
super().__init__(proxy, proxy_auth, dst, app_protocol_factory,
waiter, remote_resolve=remote_resolve, loop=loop,
ssl=ssl, server_hostname=server_hostname,
reader_limit=reader_limit,
negotiate_done_cb=negotiate_done_cb)
async def socks_request(self, cmd):
await self.authenticate()
# build and send command
dst_addr, resolved = await self.build_dst_address(
self._dst_host, self._dst_port)
self.write_request([c.SOCKS_VER5, cmd, c.RSV] + dst_addr)
# read/process command response
resp = await self.read_response(3)
if resp[0] != c.SOCKS_VER5:
raise InvalidServerVersion(
'SOCKS5 proxy server sent invalid version'
)
if resp[1] != c.SOCKS5_GRANTED:
error = c.SOCKS5_ERRORS.get(resp[1], 'Unknown error')
raise SocksError('[Errno {0:#04x}]: {1}'.format(resp[1], error))
binded = await self.read_address()
return resolved, binded
async def authenticate(self):
# send available auth methods
if self._auth.login and self._auth.password:
req = [c.SOCKS_VER5, 0x02,
c.SOCKS5_AUTH_ANONYMOUS, c.SOCKS5_AUTH_UNAME_PWD]
else:
req = [c.SOCKS_VER5, 0x01, c.SOCKS5_AUTH_ANONYMOUS]
self.write_request(req)
# read/process response and send auth data if necessary
chosen_auth = await self.read_response(2)
if chosen_auth[0] != c.SOCKS_VER5:
raise InvalidServerVersion(
'SOCKS5 proxy server sent invalid version'
)
if chosen_auth[1] == c.SOCKS5_AUTH_UNAME_PWD:
req = [0x01, chr(len(self._auth.login)).encode(), self._auth.login,
chr(len(self._auth.password)).encode(), self._auth.password]
self.write_request(req)
auth_status = await self.read_response(2)
if auth_status[0] != 0x01:
raise InvalidServerReply(
'SOCKS5 proxy server sent invalid data'
)
if auth_status[1] != c.SOCKS5_GRANTED:
raise LoginAuthenticationFailed(
"SOCKS5 authentication failed"
)
# offered auth methods rejected
elif chosen_auth[1] != c.SOCKS5_AUTH_ANONYMOUS:
if chosen_auth[1] == c.SOCKS5_AUTH_NO_ACCEPTABLE_METHODS:
raise NoAcceptableAuthMethods(
'All offered SOCKS5 authentication methods were rejected'
)
else:
raise InvalidServerReply(
'SOCKS5 proxy server sent invalid data'
)
async def build_dst_address(self, host, port):
family_to_byte = {socket.AF_INET: c.SOCKS5_ATYP_IPv4,
socket.AF_INET6: c.SOCKS5_ATYP_IPv6}
port_bytes = struct.pack('>H', port)
# if the given destination address is an IP address, we will
# use the IP address request even if remote resolving was specified.
for family in (socket.AF_INET, socket.AF_INET6):
try:
host_bytes = socket.inet_pton(family, host)
req = [family_to_byte[family], host_bytes, port_bytes]
return req, (host, port)
except socket.error:
pass
# it's not an IP number, so it's probably a DNS name.
if self._remote_resolve:
host_bytes = host.encode('idna')
req = [c.SOCKS5_ATYP_DOMAIN, chr(len(host_bytes)).encode(),
host_bytes, port_bytes]
else:
family, host_bytes = await self._get_dst_addr()
host_bytes = socket.inet_pton(family, host_bytes)
req = [family_to_byte[family], host_bytes, port_bytes]
host = socket.inet_ntop(family, host_bytes)
return req, (host, port)
async def read_address(self):
atype = await self.read_response(1)
if atype[0] == c.SOCKS5_ATYP_IPv4:
addr = socket.inet_ntoa((await self.read_response(4)))
elif atype[0] == c.SOCKS5_ATYP_DOMAIN:
length = await self.read_response(1)
addr = await self.read_response(ord(length))
elif atype[0] == c.SOCKS5_ATYP_IPv6:
addr = await self.read_response(16)
addr = socket.inet_ntop(socket.AF_INET6, addr)
else:
raise InvalidServerReply('SOCKS5 proxy server sent invalid data')
port = await self.read_response(2)
port = struct.unpack('>H', port)[0]
return addr, port
| (proxy, proxy_auth, dst, app_protocol_factory, waiter, remote_resolve=True, loop=None, ssl=False, server_hostname=None, negotiate_done_cb=None, reader_limit=65536) |
21,776 | aiosocks.protocols | __init__ | null | def __init__(self, proxy, proxy_auth, dst, app_protocol_factory, waiter,
remote_resolve=True, loop=None, ssl=False,
server_hostname=None, negotiate_done_cb=None,
reader_limit=DEFAULT_LIMIT):
proxy_auth = proxy_auth or Socks5Auth('', '')
if not isinstance(proxy, Socks5Addr):
raise ValueError('Invalid proxy format')
if not isinstance(proxy_auth, Socks5Auth):
raise ValueError('Invalid proxy_auth format')
super().__init__(proxy, proxy_auth, dst, app_protocol_factory,
waiter, remote_resolve=remote_resolve, loop=loop,
ssl=ssl, server_hostname=server_hostname,
reader_limit=reader_limit,
negotiate_done_cb=negotiate_done_cb)
| (self, proxy, proxy_auth, dst, app_protocol_factory, waiter, remote_resolve=True, loop=None, ssl=False, server_hostname=None, negotiate_done_cb=None, reader_limit=65536) |
21,793 | aiosocks.helpers | SocksAddr | null | class SocksAddr(namedtuple('SocksServer', ['host', 'port'])):
def __new__(cls, host, port=1080):
if host is None:
raise ValueError('None is not allowed as host value')
if port is None:
port = 1080 # default socks server port
return super().__new__(cls, host, port)
| (host, port=1080) |
21,799 | aiosocks.errors | SocksConnectionError | null | class SocksConnectionError(OSError):
pass
| null |
21,800 | aiosocks.errors | SocksError | null | class SocksError(Exception):
pass
| null |
21,803 | aiosocks | create_connection | null | import asyncio
from .errors import (
SocksError, NoAcceptableAuthMethods, LoginAuthenticationFailed,
SocksConnectionError, InvalidServerReply, InvalidServerVersion
)
from .helpers import (
SocksAddr, Socks4Addr, Socks5Addr, Socks4Auth, Socks5Auth
)
from .protocols import Socks4Protocol, Socks5Protocol, DEFAULT_LIMIT
__version__ = '0.2.6'
__all__ = ('Socks4Protocol', 'Socks5Protocol', 'Socks4Auth',
'Socks5Auth', 'Socks4Addr', 'Socks5Addr', 'SocksError',
'NoAcceptableAuthMethods', 'LoginAuthenticationFailed',
'SocksConnectionError', 'InvalidServerVersion',
'InvalidServerReply', 'create_connection', 'open_connection')
async def create_connection(protocol_factory, proxy, proxy_auth, dst, *,
remote_resolve=True, loop=None, ssl=None, family=0,
proto=0, flags=0, sock=None, local_addr=None,
server_hostname=None, reader_limit=DEFAULT_LIMIT):
assert isinstance(proxy, SocksAddr), (
'proxy must be Socks4Addr() or Socks5Addr() tuple'
)
assert proxy_auth is None or isinstance(proxy_auth,
(Socks4Auth, Socks5Auth)), (
'proxy_auth must be None or Socks4Auth() '
'or Socks5Auth() tuple', proxy_auth
)
assert isinstance(dst, (tuple, list)) and len(dst) == 2, (
'invalid dst format, tuple("dst_host", dst_port))'
)
if (isinstance(proxy, Socks4Addr) and not
(proxy_auth is None or isinstance(proxy_auth, Socks4Auth))):
raise ValueError(
"proxy is Socks4Addr but proxy_auth is not Socks4Auth"
)
if (isinstance(proxy, Socks5Addr) and not
(proxy_auth is None or isinstance(proxy_auth, Socks5Auth))):
raise ValueError(
"proxy is Socks5Addr but proxy_auth is not Socks5Auth"
)
if server_hostname is not None and not ssl:
raise ValueError('server_hostname is only meaningful with ssl')
if server_hostname is None and ssl:
# read details: asyncio.create_connection
server_hostname = dst[0]
loop = loop or asyncio.get_event_loop()
waiter = asyncio.Future(loop=loop)
def socks_factory():
if isinstance(proxy, Socks4Addr):
socks_proto = Socks4Protocol
else:
socks_proto = Socks5Protocol
return socks_proto(proxy=proxy, proxy_auth=proxy_auth, dst=dst,
app_protocol_factory=protocol_factory,
waiter=waiter, remote_resolve=remote_resolve,
loop=loop, ssl=ssl, server_hostname=server_hostname,
reader_limit=reader_limit)
try:
transport, protocol = await loop.create_connection(
socks_factory, proxy.host, proxy.port, family=family,
proto=proto, flags=flags, sock=sock, local_addr=local_addr)
except OSError as exc:
raise SocksConnectionError(
'[Errno %s] Can not connect to proxy %s:%d [%s]' %
(exc.errno, proxy.host, proxy.port, exc.strerror)) from exc
try:
await waiter
except: # noqa
transport.close()
raise
return protocol.app_transport, protocol.app_protocol
| (protocol_factory, proxy, proxy_auth, dst, *, remote_resolve=True, loop=None, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None, reader_limit=65536) |
21,806 | aiosocks | open_connection | null | def socks_factory():
if isinstance(proxy, Socks4Addr):
socks_proto = Socks4Protocol
else:
socks_proto = Socks5Protocol
return socks_proto(proxy=proxy, proxy_auth=proxy_auth, dst=dst,
app_protocol_factory=protocol_factory,
waiter=waiter, remote_resolve=remote_resolve,
loop=loop, ssl=ssl, server_hostname=server_hostname,
reader_limit=reader_limit)
| (proxy, proxy_auth, dst, *, remote_resolve=True, loop=None, limit=65536, **kwds) |
21,808 | datadog_api_client.api_client | ApiClient | Generic API client for OpenAPI client library builds.
OpenAPI generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the OpenAPI
templates.
:param configuration: Configuration object for this client
:param header_name: A header to pass when making calls to the API.
:param header_value: A header value to pass when making calls to
the API.
| class ApiClient:
"""Generic API client for OpenAPI client library builds.
OpenAPI generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the OpenAPI
templates.
:param configuration: Configuration object for this client
:param header_name: A header to pass when making calls to the API.
:param header_value: A header value to pass when making calls to
the API.
"""
def __init__(self, configuration: Configuration):
self.configuration = configuration
self.rest_client = self._build_rest_client()
self.default_headers = {}
if self.configuration.compress:
self.default_headers["Accept-Encoding"] = "gzip"
# Set default User-Agent.
self.user_agent = user_agent()
def __enter__(self) -> Self:
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
self.close()
def close(self) -> None:
self.rest_client.pool_manager.clear()
def _build_rest_client(self):
return rest.RESTClientObject(self.configuration)
@property
def user_agent(self) -> str:
"""User agent for this API client"""
return self.default_headers["User-Agent"]
@user_agent.setter
def user_agent(self, value: str) -> None:
self.default_headers["User-Agent"] = value
def set_default_header(self, header_name: str, header_value: str) -> None:
self.default_headers[header_name] = header_value
def _call_api(
self,
method: str,
url: str,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
check_type: Optional[bool] = None,
):
# perform request and return response
response = self.rest_client.request(
method,
url,
query_params=query_params,
headers=header_params,
post_params=post_params,
body=body,
preload_content=preload_content,
request_timeout=request_timeout,
)
if not preload_content:
return response
# deserialize response data
if response_type:
if response_type == (file_type,):
content_disposition = response.headers.get("Content-Disposition")
return_data = deserialize_file(
response.data, self.configuration.temp_folder_path, content_disposition=content_disposition
)
else:
encoding = "utf-8"
content_type = response.headers.get("Content-Type")
if content_type is not None:
match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type)
if match:
encoding = match.group(1)
response_data = response.data.decode(encoding)
return_data = self.deserialize(response_data, response_type, check_type)
else:
return_data = None
if return_http_data_only:
return return_data
return (return_data, response.status, dict(response.headers))
def parameters_to_multipart(self, params):
"""Get parameters as list of tuples, formatting as json if value is dict.
:param params: Parameters as list of two-tuples.
:return: Parameters as list of tuple or urllib3.fields.RequestField
"""
new_params = []
for k, v in params.items() if isinstance(params, dict) else params:
if isinstance(v, dict): # v is instance of collection_type, formatting as application/json
v = json.dumps(v, ensure_ascii=False).encode("utf-8")
field = RequestField(k, v)
field.make_multipart(content_type="application/json; charset=utf-8")
new_params.append(field)
else:
new_params.append((k, v))
return new_params
@classmethod
def sanitize_for_serialization(cls, obj):
"""Prepares data for transmission before it is sent with the rest client.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is OpenAPI model, return the properties dict.
If obj is io.IOBase, return the bytes.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if isinstance(obj, (ModelNormal, ModelComposed)):
return {key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj).items()}
elif isinstance(obj, io.IOBase):
return cls.get_file_data_and_close_file(obj)
elif isinstance(obj, (str, int, float, bool)) or obj is None:
return obj
elif isinstance(obj, (datetime, date)):
if getattr(obj, "tzinfo", None) is not None:
return obj.isoformat()
return "{}Z".format(obj.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3])
elif isinstance(obj, UUID):
return str(obj)
elif isinstance(obj, ModelSimple):
return cls.sanitize_for_serialization(obj.value)
elif isinstance(obj, (list, tuple)):
return [cls.sanitize_for_serialization(item) for item in obj]
if isinstance(obj, dict):
return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()}
raise ApiValueError("Unable to prepare type {} for serialization".format(obj.__class__.__name__))
def deserialize(self, response_data: str, response_type: Any, check_type: Optional[bool]):
"""Deserializes response into an object.
:param response_data: Response data to be deserialized.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param check_type: boolean, whether to check the types of the data
received from the server
:type check_type: bool
:return: deserialized object.
"""
# fetch data from response object
try:
received_data = json.loads(response_data)
except ValueError:
received_data = response_data
# store our data under the key of 'received_data' so users have some
# context if they are deserializing a string and the data type is wrong
deserialized_data = validate_and_convert_types(
received_data, response_type, ["received_data"], True, check_type, configuration=self.configuration
)
return deserialized_data
def call_api(
self,
resource_path: str,
method: str,
path_params: Optional[Dict[str, Any]] = None,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
files: Optional[Dict[str, List[io.FileIO]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
collection_formats: Optional[Dict[str, str]] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
host: Optional[str] = None,
check_type: Optional[bool] = None,
):
"""Makes the HTTP request (synchronous) and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param files: key -> field name, value -> a list of open file
objects for `multipart/form-data`.
:type files: dict
:param return_http_data_only: response data without head status code
and headers
:type return_http_data_only: bool, optional
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:type collection_formats: dict, optional
:param preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type preload_content: bool, optional
:param request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param check_type: boolean describing if the data back from the server
should have its type checked.
:type check_type: bool, optional
:return: the HTTP response.
"""
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params, collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
for k, v in self.parameters_to_tuples(path_params, collection_formats):
# specified safe chars, encode everything
resource_path = resource_path.replace(
f"{{{k}}}", quote(str(v), safe=self.configuration.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params, collection_formats)
# post parameters
if post_params or files:
post_params = post_params or []
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params, collection_formats)
post_params.extend(self.files_parameters(files))
if header_params["Content-Type"].startswith("multipart"):
post_params = self.parameters_to_multipart(post_params)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
if host is None:
url = self.configuration.host + resource_path
else:
# use server/host defined in path or operation instead
url = host + resource_path
return self._call_api(
method,
url,
query_params,
header_params,
body,
post_params,
response_type,
return_http_data_only,
preload_content,
request_timeout,
check_type,
)
def call_api_paginated(
self,
resource_path: str,
method: str,
pagination: dict,
response_type: Optional[Tuple[Any]] = None,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
host: Optional[str] = None,
check_type: Optional[bool] = None,
):
if "page_param" in pagination:
set_attribute_from_path(
pagination["kwargs"],
pagination["page_param"],
0,
pagination["endpoint"].params_map,
)
params = pagination["endpoint"].gather_params(pagination["kwargs"])
while True:
response = self.call_api(
resource_path,
method,
params["path"],
params["query"],
params["header"],
body=params["body"],
post_params=params["form"],
files=params["file"],
response_type=response_type,
check_type=check_type,
return_http_data_only=True,
preload_content=True,
request_timeout=request_timeout,
host=host,
collection_formats=params["collection_format"],
)
for item in get_attribute_from_path(response, pagination.get("results_path")):
yield item
if len(get_attribute_from_path(response, pagination.get("results_path"))) < pagination["limit_value"]:
break
params = self._update_paginated_params(pagination, response)
def _update_paginated_params(self, pagination, response):
if "page_offset_param" in pagination:
set_attribute_from_path(
pagination["kwargs"],
pagination["page_offset_param"],
get_attribute_from_path(pagination["kwargs"], pagination["page_offset_param"], 0)
+ pagination["limit_value"],
pagination["endpoint"].params_map,
)
elif "page_param" in pagination:
set_attribute_from_path(
pagination["kwargs"],
pagination["page_param"],
get_attribute_from_path(pagination["kwargs"], pagination["page_param"], 0) + 1,
pagination["endpoint"].params_map,
)
else:
set_attribute_from_path(
pagination["kwargs"],
pagination["cursor_param"],
get_attribute_from_path(response, pagination["cursor_path"]),
pagination["endpoint"].params_map,
)
return pagination["endpoint"].gather_params(pagination["kwargs"])
def parameters_to_tuples(self, params, collection_formats) -> List[Tuple[str, Any]]:
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params: List[Tuple[str, str]] = []
if collection_formats is None:
collection_formats = {}
for k, v in params.items() if isinstance(params, dict) else params:
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == "multi":
new_params.extend((k, value) for value in v)
else:
if collection_format == "ssv":
delimiter = " "
elif collection_format == "tsv":
delimiter = "\t"
elif collection_format == "pipes":
delimiter = "|"
else: # csv is the default
delimiter = ","
new_params.append((k, delimiter.join(str(value) for value in v)))
else:
if isinstance(v, bool):
v = json.dumps(v)
new_params.append((k, v))
return new_params
@staticmethod
def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes:
file_data = file_instance.read()
file_instance.close()
return file_data
def files_parameters(self, files: Optional[Dict[str, List[io.FileIO]]] = None):
"""Builds form parameters.
:param files: None or a dict with key=param_name and
value is a list of open file objects
:return: List of tuples of form parameters with file data
"""
if files is None:
return []
params = []
for param_name, file_instances in files.items():
if file_instances is None:
# if the file field is nullable, skip None values
continue
for file_instance in file_instances:
if file_instance is None:
# if the file field is nullable, skip None values
continue
if file_instance.closed is True:
raise ApiValueError(
"Cannot read a closed file. The passed in file_type " "for %s must be open." % param_name
)
filename = os.path.basename(str(file_instance.name))
filedata = self.get_file_data_and_close_file(file_instance)
mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
params.append(tuple([param_name, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts: List[str]) -> str:
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
return ", ".join(accepts)
def select_header_content_type(self, content_types: List[str]) -> str:
    """Pick the ``Content-Type`` to send from the declared choices.

    :param content_types: List of content-types.
    :return: Content-Type (e.g. application/json).
    """
    # Default to JSON when the endpoint declares no content types at all.
    if not content_types:
        return "application/json"
    # Media types are case-insensitive; compare in lowercase.
    normalized = [entry.lower() for entry in content_types]
    json_accepted = "application/json" in normalized or "*/*" in normalized
    # Prefer JSON whenever it is accepted; otherwise take the first choice.
    return "application/json" if json_accepted else normalized[0]
| (configuration: datadog_api_client.configuration.Configuration) |
21,809 | datadog_api_client.api_client | __enter__ | null | def __enter__(self) -> Self:
# Context-manager entry point: returns the client itself so it can be used
# as `with ApiClient(configuration) as api_client: ...`.
return self
| (self) -> typing_extensions.Self |
21,810 | datadog_api_client.api_client | __exit__ | null | def __exit__(self, exc_type, exc_value, traceback) -> None:
# Context-manager exit: release pooled connections via close(); returning
# None never suppresses an in-flight exception.
self.close()
| (self, exc_type, exc_value, traceback) -> NoneType |
21,811 | datadog_api_client.api_client | __init__ | null | def __init__(self, configuration: Configuration):
self.configuration = configuration
# The REST transport is built through a factory hook so subclasses can
# swap in a different client implementation.
self.rest_client = self._build_rest_client()
self.default_headers = {}
# Advertise gzip support when compression is enabled in the configuration.
if self.configuration.compress:
self.default_headers["Accept-Encoding"] = "gzip"
# Set default User-Agent.
self.user_agent = user_agent()
| (self, configuration: datadog_api_client.configuration.Configuration) |
21,812 | datadog_api_client.api_client | _build_rest_client | null | def _build_rest_client(self):
# Factory hook: overridden by the async subclass to return an async REST client.
return rest.RESTClientObject(self.configuration)
| (self) |
21,813 | datadog_api_client.api_client | _call_api | null | def _call_api(
self,
method: str,
url: str,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
check_type: Optional[bool] = None,
):
# perform request and return response
response = self.rest_client.request(
method,
url,
query_params=query_params,
headers=header_params,
post_params=post_params,
body=body,
preload_content=preload_content,
request_timeout=request_timeout,
)
if not preload_content:
return response
# deserialize response data
if response_type:
if response_type == (file_type,):
content_disposition = response.headers.get("Content-Disposition")
return_data = deserialize_file(
response.data, self.configuration.temp_folder_path, content_disposition=content_disposition
)
else:
encoding = "utf-8"
content_type = response.headers.get("Content-Type")
if content_type is not None:
match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type)
if match:
encoding = match.group(1)
response_data = response.data.decode(encoding)
return_data = self.deserialize(response_data, response_type, check_type)
else:
return_data = None
if return_http_data_only:
return return_data
return (return_data, response.status, dict(response.headers))
| (self, method: str, url: str, query_params: Optional[List[Tuple[str, Any]]] = None, header_params: Optional[Dict[str, Any]] = None, body: Optional[Any] = None, post_params: Optional[List[Tuple[str, Any]]] = None, response_type: Optional[Tuple[Any]] = None, return_http_data_only: Optional[bool] = None, preload_content: bool = True, request_timeout: Union[int, float, Tuple[Union[int, float], Union[int, float]], NoneType] = None, check_type: Optional[bool] = None) |
21,814 | datadog_api_client.api_client | _update_paginated_params | null | def _update_paginated_params(self, pagination, response):
# Advance the pagination state for the next page request. Three strategies
# are supported, checked in priority order: offset-based, page-number-based,
# and cursor-based.
if "page_offset_param" in pagination:
# Offset pagination: bump the stored offset by the page size.
set_attribute_from_path(
pagination["kwargs"],
pagination["page_offset_param"],
get_attribute_from_path(pagination["kwargs"], pagination["page_offset_param"], 0)
+ pagination["limit_value"],
pagination["endpoint"].params_map,
)
elif "page_param" in pagination:
# Page-number pagination: move to the next page.
set_attribute_from_path(
pagination["kwargs"],
pagination["page_param"],
get_attribute_from_path(pagination["kwargs"], pagination["page_param"], 0) + 1,
pagination["endpoint"].params_map,
)
else:
# Cursor pagination: copy the cursor returned by the server into the
# request parameters.
set_attribute_from_path(
pagination["kwargs"],
pagination["cursor_param"],
get_attribute_from_path(response, pagination["cursor_path"]),
pagination["endpoint"].params_map,
)
# Rebuild the full parameter set with the updated pagination values.
return pagination["endpoint"].gather_params(pagination["kwargs"])
| (self, pagination, response) |
21,815 | datadog_api_client.api_client | call_api | Makes the HTTP request (synchronous) and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param files: key -> field name, value -> a list of open file
objects for `multipart/form-data`.
:type files: dict
:param return_http_data_only: response data without head status code
and headers
:type return_http_data_only: bool, optional
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:type collection_formats: dict, optional
:param preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:type preload_content: bool, optional
:param request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:param check_type: boolean describing if the data back from the server
should have its type checked.
:type check_type: bool, optional
:return: the HTTP response.
def call_api(
    self,
    resource_path: str,
    method: str,
    path_params: Optional[Dict[str, Any]] = None,
    query_params: Optional[List[Tuple[str, Any]]] = None,
    header_params: Optional[Dict[str, Any]] = None,
    body: Optional[Any] = None,
    post_params: Optional[List[Tuple[str, Any]]] = None,
    files: Optional[Dict[str, List[io.FileIO]]] = None,
    response_type: Optional[Tuple[Any]] = None,
    return_http_data_only: Optional[bool] = None,
    collection_formats: Optional[Dict[str, str]] = None,
    preload_content: bool = True,
    request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
    host: Optional[str] = None,
    check_type: Optional[bool] = None,
):
    """Makes the HTTP request (synchronous) and returns deserialized data.
    :param resource_path: Path to method endpoint.
    :param method: Method to call.
    :param path_params: Path parameters in the url.
    :param query_params: Query parameters in the url.
    :param header_params: Header parameters to be
        placed in the request header.
    :param body: Request body.
    :param post_params: Request post form parameters,
        for `application/x-www-form-urlencoded`, `multipart/form-data`.
    :type post_params: dict, optional
    :param response_type: For the response, a tuple containing:
        valid classes
        a list containing valid classes (for list schemas)
        a dict containing a tuple of valid classes as the value
        Example values:
        (str,)
        (Pet,)
        (float, none_type)
        ([int, none_type],)
        ({str: (bool, str, int, float, date, datetime, str, none_type)},)
    :param files: key -> field name, value -> a list of open file
        objects for `multipart/form-data`.
    :type files: dict
    :param return_http_data_only: response data without head status code
        and headers
    :type return_http_data_only: bool, optional
    :param collection_formats: dict of collection formats for path, query,
        header, and post parameters.
    :type collection_formats: dict, optional
    :param preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response
        data. Default is True.
    :type preload_content: bool, optional
    :param request_timeout: timeout setting for this request. If one
        number provided, it will be total request
        timeout. It can also be a pair (tuple) of
        (connection, read) timeouts.
    :param check_type: boolean describing if the data back from the server
        should have its type checked.
    :type check_type: bool, optional
    :return: the HTTP response.
    """
    # header parameters: per-call headers merged with client-wide defaults
    header_params = header_params or {}
    header_params.update(self.default_headers)
    if header_params:
        header_params = self.sanitize_for_serialization(header_params)
        header_params = dict(self.parameters_to_tuples(header_params, collection_formats))

    # path parameters: substitute each {name} placeholder, URL-encoding values
    if path_params:
        path_params = self.sanitize_for_serialization(path_params)
        for k, v in self.parameters_to_tuples(path_params, collection_formats):
            # specified safe chars, encode everything
            resource_path = resource_path.replace(
                f"{{{k}}}", quote(str(v), safe=self.configuration.safe_chars_for_path_param)
            )

    # query parameters
    if query_params:
        query_params = self.sanitize_for_serialization(query_params)
        query_params = self.parameters_to_tuples(query_params, collection_formats)

    # post parameters (form fields and file uploads)
    if post_params or files:
        post_params = post_params or []
        post_params = self.sanitize_for_serialization(post_params)
        post_params = self.parameters_to_tuples(post_params, collection_formats)
        post_params.extend(self.files_parameters(files))
        # Bug fix: use .get() — a request carrying form/file parameters but no
        # explicit Content-Type header previously raised KeyError here.
        if header_params.get("Content-Type", "").startswith("multipart"):
            post_params = self.parameters_to_multipart(post_params)

    # body
    if body:
        body = self.sanitize_for_serialization(body)

    # request url
    if host is None:
        url = self.configuration.host + resource_path
    else:
        # use server/host defined in path or operation instead
        url = host + resource_path

    return self._call_api(
        method,
        url,
        query_params,
        header_params,
        body,
        post_params,
        response_type,
        return_http_data_only,
        preload_content,
        request_timeout,
        check_type,
    )
| (self, resource_path: str, method: str, path_params: Optional[Dict[str, Any]] = None, query_params: Optional[List[Tuple[str, Any]]] = None, header_params: Optional[Dict[str, Any]] = None, body: Optional[Any] = None, post_params: Optional[List[Tuple[str, Any]]] = None, files: Optional[Dict[str, List[_io.FileIO]]] = None, response_type: Optional[Tuple[Any]] = None, return_http_data_only: Optional[bool] = None, collection_formats: Optional[Dict[str, str]] = None, preload_content: bool = True, request_timeout: Union[int, float, Tuple[Union[int, float], Union[int, float]], NoneType] = None, host: Optional[str] = None, check_type: Optional[bool] = None) |
21,816 | datadog_api_client.api_client | call_api_paginated | null | def call_api_paginated(
self,
resource_path: str,
method: str,
pagination: dict,
response_type: Optional[Tuple[Any]] = None,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
host: Optional[str] = None,
check_type: Optional[bool] = None,
):
if "page_param" in pagination:
set_attribute_from_path(
pagination["kwargs"],
pagination["page_param"],
0,
pagination["endpoint"].params_map,
)
params = pagination["endpoint"].gather_params(pagination["kwargs"])
while True:
response = self.call_api(
resource_path,
method,
params["path"],
params["query"],
params["header"],
body=params["body"],
post_params=params["form"],
files=params["file"],
response_type=response_type,
check_type=check_type,
return_http_data_only=True,
preload_content=True,
request_timeout=request_timeout,
host=host,
collection_formats=params["collection_format"],
)
for item in get_attribute_from_path(response, pagination.get("results_path")):
yield item
if len(get_attribute_from_path(response, pagination.get("results_path"))) < pagination["limit_value"]:
break
params = self._update_paginated_params(pagination, response)
| (self, resource_path: str, method: str, pagination: dict, response_type: Optional[Tuple[Any]] = None, request_timeout: Union[int, float, Tuple[Union[int, float], Union[int, float]], NoneType] = None, host: Optional[str] = None, check_type: Optional[bool] = None) |
21,817 | datadog_api_client.api_client | close | null | def close(self) -> None:
# Release all pooled connections held by the underlying REST client.
self.rest_client.pool_manager.clear()
| (self) -> NoneType |
21,818 | datadog_api_client.api_client | deserialize | Deserializes response into an object.
:param response_data: Response data to be deserialized.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param check_type: boolean, whether to check the types of the data
received from the server
:type check_type: bool
:return: deserialized object.
| def deserialize(self, response_data: str, response_type: Any, check_type: Optional[bool]):
"""Deserializes response into an object.
:param response_data: Response data to be deserialized.
:param response_type: For the response, a tuple containing:
valid classes
a list containing valid classes (for list schemas)
a dict containing a tuple of valid classes as the value
Example values:
(str,)
(Pet,)
(float, none_type)
([int, none_type],)
({str: (bool, str, int, float, date, datetime, str, none_type)},)
:param check_type: boolean, whether to check the types of the data
received from the server
:type check_type: bool
:return: deserialized object.
"""
# fetch data from response object
try:
received_data = json.loads(response_data)
except ValueError:
# Non-JSON payloads (e.g. plain text bodies) are passed through verbatim.
received_data = response_data
# store our data under the key of 'received_data' so users have some
# context if they are deserializing a string and the data type is wrong
deserialized_data = validate_and_convert_types(
received_data, response_type, ["received_data"], True, check_type, configuration=self.configuration
)
return deserialized_data
| (self, response_data: str, response_type: Any, check_type: Optional[bool]) |
21,819 | datadog_api_client.api_client | files_parameters | Builds form parameters.
:param files: None or a dict with key=param_name and
value is a list of open file objects
:return: List of tuples of form parameters with file data
| def files_parameters(self, files: Optional[Dict[str, List[io.FileIO]]] = None):
"""Builds form parameters.
:param files: None or a dict with key=param_name and
value is a list of open file objects
:return: List of tuples of form parameters with file data
"""
if files is None:
return []
params = []
for param_name, file_instances in files.items():
if file_instances is None:
# if the file field is nullable, skip None values
continue
for file_instance in file_instances:
if file_instance is None:
# if the file field is nullable, skip None values
continue
# Reading from a closed handle would fail; surface a clear error instead.
if file_instance.closed is True:
raise ApiValueError(
"Cannot read a closed file. The passed in file_type " "for %s must be open." % param_name
)
filename = os.path.basename(str(file_instance.name))
filedata = self.get_file_data_and_close_file(file_instance)
# Fall back to a generic binary type when the extension is unknown.
mimetype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
params.append(tuple([param_name, tuple([filename, filedata, mimetype])]))
return params
| (self, files: Optional[Dict[str, List[_io.FileIO]]] = None) |
21,820 | datadog_api_client.api_client | get_file_data_and_close_file | null | @staticmethod
def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes:
    """Read everything from *file_instance*, close it, and return the data."""
    # Read up front so the handle can be released immediately afterwards.
    contents = file_instance.read()
    file_instance.close()
    return contents
| (file_instance: io.IOBase) -> bytes |
21,821 | datadog_api_client.api_client | parameters_to_multipart | Get parameters as list of tuples, formatting as json if value is dict.
:param params: Parameters as list of two-tuples.
:return: Parameters as list of tuple or urllib3.fields.RequestField
| def parameters_to_multipart(self, params):
"""Get parameters as list of tuples, formatting as json if value is dict.
:param params: Parameters as list of two-tuples.
:return: Parameters as list of tuple or urllib3.fields.RequestField
"""
new_params = []
# Accept either a mapping or an iterable of (key, value) pairs.
for k, v in params.items() if isinstance(params, dict) else params:
if isinstance(v, dict):  # v is instance of collection_type, formatting as application/json
v = json.dumps(v, ensure_ascii=False).encode("utf-8")
field = RequestField(k, v)
field.make_multipart(content_type="application/json; charset=utf-8")
new_params.append(field)
else:
# Scalars pass through untouched as a plain (key, value) tuple.
new_params.append((k, v))
return new_params
| (self, params) |
21,822 | datadog_api_client.api_client | parameters_to_tuples | Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
| def parameters_to_tuples(self, params, collection_formats) -> List[Tuple[str, Any]]:
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params: List[Tuple[str, str]] = []
if collection_formats is None:
collection_formats = {}
# Accept either a mapping or an iterable of (key, value) pairs.
for k, v in params.items() if isinstance(params, dict) else params:
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == "multi":
# "multi": repeat the parameter once per value.
new_params.extend((k, value) for value in v)
else:
# Otherwise join all values with the format's delimiter.
if collection_format == "ssv":
delimiter = " "
elif collection_format == "tsv":
delimiter = "\t"
elif collection_format == "pipes":
delimiter = "|"
else: # csv is the default
delimiter = ","
new_params.append((k, delimiter.join(str(value) for value in v)))
else:
# Booleans are serialized as JSON ("true"/"false"), not Python's str().
if isinstance(v, bool):
v = json.dumps(v)
new_params.append((k, v))
return new_params
| (self, params, collection_formats) -> List[Tuple[str, Any]] |
21,823 | datadog_api_client.api_client | select_header_accept | Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
| def select_header_accept(self, accepts: List[str]) -> str:
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
# Combine all acceptable media types into one comma-separated header value.
return ", ".join(accepts)
| (self, accepts: List[str]) -> str |
21,824 | datadog_api_client.api_client | select_header_content_type | Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
| def select_header_content_type(self, content_types: List[str]) -> str:
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
# Default to JSON when the endpoint declares no content types.
if not content_types:
return "application/json"
# Media types are case-insensitive; normalize before comparing.
content_types = [x.lower() for x in content_types]
# Prefer JSON whenever the server accepts it (directly or via */*).
if "application/json" in content_types or "*/*" in content_types:
return "application/json"
return content_types[0]
| (self, content_types: List[str]) -> str |
21,825 | datadog_api_client.api_client | set_default_header | null | def set_default_header(self, header_name: str, header_value: str) -> None:
# Headers registered here are merged into every request built by call_api.
self.default_headers[header_name] = header_value
| (self, header_name: str, header_value: str) -> NoneType |
21,826 | datadog_api_client.api_client | AsyncApiClient | null | class AsyncApiClient(ApiClient):
    def _build_rest_client(self):
        # Override of the factory hook: build the async REST transport.
        return rest.AsyncRESTClientObject(self.configuration)
    async def __aenter__(self) -> Self:
        # `async with` entry point: hand back the client itself.
        return self
async def __aexit__(self, _exc_type, exc, _tb):
if exc:
raise exc
await self.rest_client._client.shutdown()
async def _call_api(
self,
method: str,
url: str,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
check_type: Optional[bool] = None,
):
# perform request and return response
response = await self.rest_client.request(
method,
url,
query_params=query_params,
headers=header_params,
post_params=post_params,
body=body,
preload_content=preload_content,
request_timeout=request_timeout,
)
if not preload_content:
return response
# deserialize response data
if response_type:
if response_type == (file_type,):
content_disposition = response.headers.get("Content-Disposition")
response_data = await response.content()
return_data = deserialize_file(
response_data, self.configuration.temp_folder_path, content_disposition=content_disposition
)
else:
response_data = await response.text()
return_data = self.deserialize(response_data, response_type, check_type)
else:
return_data = None
if return_http_data_only:
return return_data
return (return_data, response.status_code, response.headers)
    async def call_api_paginated(
        self,
        resource_path: str,
        method: str,
        pagination: dict,
        response_type: Optional[Tuple[Any]] = None,
        request_timeout: Optional[Union[int, float, Tuple[Union[int, float], Union[int, float]]]] = None,
        host: Optional[str] = None,
        check_type: Optional[bool] = None,
    ):
        # Async generator yielding each result item across all pages.
        # NOTE(review): unlike the sync ApiClient.call_api_paginated, this uses
        # endpoint.get_pagination_params(...) and performs no explicit
        # page_param reset — confirm that helper initializes the page counter.
        params = pagination["endpoint"].get_pagination_params(pagination["kwargs"])
        while True:
            response = await self.call_api(
                resource_path,
                method,
                params["path"],
                params["query"],
                params["header"],
                body=params["body"],
                post_params=params["form"],
                files=params["file"],
                response_type=response_type,
                check_type=check_type,
                return_http_data_only=True,
                preload_content=True,
                request_timeout=request_timeout,
                host=host,
                collection_formats=params["collection_format"],
            )
            # Yield the current page's items one by one.
            for item in get_attribute_from_path(response, pagination.get("results_path")):
                yield item
            # A page shorter than the limit means there are no further pages.
            if len(get_attribute_from_path(response, pagination.get("results_path"))) < pagination["limit_value"]:
                break
            params = self._update_paginated_params(pagination, response)
| (configuration: datadog_api_client.configuration.Configuration) |
21,827 | datadog_api_client.api_client | __aenter__ | null | def _build_rest_client(self):
# NOTE(review): this record's name field says `__aenter__` but the code shown
# is `_build_rest_client` — the row's name and code fields disagree.
return rest.AsyncRESTClientObject(self.configuration)
| (self) -> typing_extensions.Self |
21,846 | datadog_api_client.configuration | Configuration |
:param host: Base url.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
:param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
:param username: Username for HTTP basic authentication.
:param password: Password for HTTP basic authentication.
:param discard_unknown_keys: Boolean value indicating whether to discard
unknown properties. A server may send a response that includes additional
properties that are not known by the client in the following scenarios:
1. The OpenAPI document is incomplete, i.e. it does not match the server
implementation.
2. The client was generated using an older version of the OpenAPI document
and the server has been upgraded since then.
If a schema in the OpenAPI document defines the additionalProperties
attribute, then all undeclared properties received by the server are injected
into the additional properties map. In that case, there are undeclared
properties, and nothing to discard.
:param disabled_client_side_validations: Comma-separated list of
JSON schema validation keywords to disable JSON schema structural validation
rules. The following keywords may be specified: multipleOf, maximum,
exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
maxItems, minItems.
By default, the validation is performed for data generated locally by the client
and data received from the server, independent of any validation performed by
the server side. If the input data does not satisfy the JSON schema validation
rules specified in the OpenAPI document, an exception is raised.
If disabled_client_side_validations is set, structural validation is
disabled. This can be useful to troubleshoot data validation problem, such as
when the OpenAPI document validation rules do not match the actual API data
received by the server.
:type disabled_client_side_validations: str
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
variables with defined enum values before.
:param server_operation_index: Mapping from operation ID to an index to
server configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
The validation of enums is performed for variables with defined enum values before.
:param ssl_ca_cert: The path to a file of concatenated CA certificates
in PEM format.
:param compress: Boolean indicating whether encoded responses are accepted or not.
:type compress: bool
:param return_http_data_only: Response data without head status
code and headers. Default is True.
:type return_http_data_only: bool
:param preload_content: If False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
:type preload_content: bool
:param request_timeout: Timeout setting for this request. If one
number is provided, it will be total request timeout. It can also be a
pair (tuple) of (connection, read) timeouts. Default is None.
:type request_timeout: float/tuple
:param check_input_type: Specifies if type checking should be done on
the data sent to the server. Default is True.
:type check_input_type: bool
:param check_return_type: Specifies if type checking should be done
on the data received from the server. Default is True.
:type check_return_type: bool
:param spec_property_naming: Whether names in properties are expected to respect the spec or use snake case.
:type spec_property_naming: bool
:param enable_retry: If set, the client will retry requests on backend errors (5xx status codes), and 429.
On 429 it will use the returned headers to wait until the next request; otherwise it will retry using
the backoff factor.
:type enable_retry: bool
:param retry_backoff_factor: Factor used to space out retried requests on backend errors.
:type retry_backoff_factor: float
:param max_retries: The maximum number of times a single request can be retried.
:type max_retries: int
| class Configuration:
"""
:param host: Base url.
:param api_key: Dict to store API key(s).
Each entry in the dict specifies an API key.
The dict key is the name of the security scheme in the OAS specification.
The dict value is the API key secret.
:param api_key_prefix: Dict to store API prefix (e.g. Bearer).
The dict key is the name of the security scheme in the OAS specification.
The dict value is an API key prefix when generating the auth data.
:param username: Username for HTTP basic authentication.
:param password: Password for HTTP basic authentication.
:param discard_unknown_keys: Boolean value indicating whether to discard
unknown properties. A server may send a response that includes additional
properties that are not known by the client in the following scenarios:
1. The OpenAPI document is incomplete, i.e. it does not match the server
implementation.
2. The client was generated using an older version of the OpenAPI document
and the server has been upgraded since then.
If a schema in the OpenAPI document defines the additionalProperties
attribute, then all undeclared properties received by the server are injected
into the additional properties map. In that case, there are undeclared
properties, and nothing to discard.
:param disabled_client_side_validations: Comma-separated list of
JSON schema validation keywords to disable JSON schema structural validation
rules. The following keywords may be specified: multipleOf, maximum,
exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern,
maxItems, minItems.
By default, the validation is performed for data generated locally by the client
and data received from the server, independent of any validation performed by
the server side. If the input data does not satisfy the JSON schema validation
rules specified in the OpenAPI document, an exception is raised.
If disabled_client_side_validations is set, structural validation is
disabled. This can be useful to troubleshoot data validation problem, such as
when the OpenAPI document validation rules do not match the actual API data
received by the server.
:type disabled_client_side_validations: str
:param server_index: Index to servers configuration.
:param server_variables: Mapping with string values to replace variables in
templated server configuration. The validation of enums is performed for
variables with defined enum values before.
:param server_operation_index: Mapping from operation ID to an index to
server configuration.
:param server_operation_variables: Mapping from operation ID to a mapping with
string values to replace variables in templated server configuration.
The validation of enums is performed for variables with defined enum values before.
:param ssl_ca_cert: The path to a file of concatenated CA certificates
in PEM format.
:param compress: Boolean indicating whether encoded responses are accepted or not.
:type compress: bool
:param return_http_data_only: Response data without head status
code and headers. Default is True.
:type return_http_data_only: bool
:param preload_content: If False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
:type preload_content: bool
:param request_timeout: Timeout setting for this request. If one
number is provided, it will be total request timeout. It can also be a
pair (tuple) of (connection, read) timeouts. Default is None.
:type request_timeout: float/tuple
:param check_input_type: Specifies if type checking should be done on
the data sent to the server. Default is True.
:type check_input_type: bool
:param check_return_type: Specifies if type checking should be done
on the data received from the server. Default is True.
:type check_return_type: bool
:param spec_property_naming: Whether names in properties are expected to respect the spec or use snake case.
:type spec_property_naming: bool
:param enable_retry: If set, the client will retry requests on backend errors (5xx status codes), and 429.
On 429 it will use the returned headers to wait until the next request; otherwise it will retry using
the backoff factor.
:type enable_retry: bool
:param retry_backoff_factor: Factor used to space out retried requests on backend errors.
:type retry_backoff_factor: float
:param max_retries: The maximum number of times a single request can be retried.
:type max_retries: int
"""
def __init__(
self,
host=None,
api_key=None,
api_key_prefix=None,
access_token=None,
username=None,
password=None,
discard_unknown_keys=True,
disabled_client_side_validations="",
server_index=None,
server_variables=None,
server_operation_index=None,
server_operation_variables=None,
ssl_ca_cert=None,
compress=True,
return_http_data_only=True,
preload_content=True,
request_timeout=None,
check_input_type=True,
check_return_type=True,
spec_property_naming=False,
enable_retry=False,
retry_backoff_factor=2,
max_retries=3,
):
"""Constructor."""
self._base_path = "https://api.datadoghq.com" if host is None else host
self.server_index = 0 if server_index is None and host is None else server_index
self.server_operation_index = server_operation_index or {}
self.server_variables = server_variables or {}
self.server_operation_variables = server_operation_variables or {}
self.temp_folder_path = None
# Authentication Settings
self.access_token = access_token
self.api_key = {}
if api_key:
self.api_key = api_key
self.api_key_prefix = {}
if api_key_prefix:
self.api_key_prefix = api_key_prefix
self.refresh_api_key_hook = None
self.username = username
self.password = password
self.discard_unknown_keys = discard_unknown_keys
self.disabled_client_side_validations = disabled_client_side_validations
self.logger = {}
self.logger["package_logger"] = logging.getLogger("datadog_api_client")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
self.logger_format = "%(asctime)s %(levelname)s %(message)s"
self.logger_stream_handler = None
self.logger_file_handler = None
self.logger_file = None
self.debug = False
self.verify_ssl = True
self.ssl_ca_cert = ssl_ca_cert
self.cert_file = None
self.key_file = None
self.assert_hostname = None
self.proxy = None
self.proxy_headers = None
self.safe_chars_for_path_param = ""
# Enable client side validation
self.client_side_validation = True
# Options to pass down to the underlying urllib3 socket
self.socket_options = None
# Will translate to a Accept-Encoding header
self.compress = compress
self.return_http_data_only = return_http_data_only
self.preload_content = preload_content
self.request_timeout = request_timeout
self.check_input_type = check_input_type
self.check_return_type = check_return_type
self.spec_property_naming = spec_property_naming
# Options for http retry
self.enable_retry = enable_retry
self.retry_backoff_factor = retry_backoff_factor
self.max_retries = max_retries
# Keep track of unstable operations
self.unstable_operations = _UnstableOperations(
{
"v2.create_open_api": False,
"v2.delete_open_api": False,
"v2.get_open_api": False,
"v2.update_open_api": False,
"v2.get_active_billing_dimensions": False,
"v2.get_monthly_cost_attribution": False,
"v2.create_dora_deployment": False,
"v2.create_dora_incident": False,
"v2.create_incident": False,
"v2.create_incident_integration": False,
"v2.create_incident_todo": False,
"v2.delete_incident": False,
"v2.delete_incident_integration": False,
"v2.delete_incident_todo": False,
"v2.get_incident": False,
"v2.get_incident_integration": False,
"v2.get_incident_todo": False,
"v2.list_incident_attachments": False,
"v2.list_incident_integrations": False,
"v2.list_incidents": False,
"v2.list_incident_todos": False,
"v2.search_incidents": False,
"v2.update_incident": False,
"v2.update_incident_attachments": False,
"v2.update_incident_integration": False,
"v2.update_incident_todo": False,
"v2.query_scalar_data": False,
"v2.query_timeseries_data": False,
"v2.get_finding": False,
"v2.list_findings": False,
"v2.mute_findings": False,
"v2.create_scorecard_outcomes_batch": False,
"v2.create_scorecard_rule": False,
"v2.delete_scorecard_rule": False,
"v2.list_scorecard_outcomes": False,
"v2.list_scorecard_rules": False,
"v2.create_incident_service": False,
"v2.delete_incident_service": False,
"v2.get_incident_service": False,
"v2.list_incident_services": False,
"v2.update_incident_service": False,
"v2.create_slo_report_job": False,
"v2.get_slo_report": False,
"v2.get_slo_report_job_status": False,
"v2.create_incident_team": False,
"v2.delete_incident_team": False,
"v2.get_incident_team": False,
"v2.list_incident_teams": False,
"v2.update_incident_team": False,
}
)
# Load default values from environment
if "DD_SITE" in os.environ:
self.server_variables["site"] = os.environ["DD_SITE"]
if "DD_API_KEY" in os.environ and not self.api_key.get("apiKeyAuth"):
self.api_key["apiKeyAuth"] = os.environ["DD_API_KEY"]
if "DD_APP_KEY" in os.environ and not self.api_key.get("appKeyAuth"):
self.api_key["appKeyAuth"] = os.environ["DD_APP_KEY"]
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k not in ("logger", "logger_file_handler"):
setattr(result, k, copy.deepcopy(v, memo))
# Shallow copy of loggers
result.logger = copy.copy(self.logger)
# Use setters to configure loggers
result.logger_file = self.logger_file
result.debug = self.debug
return result
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
if name == "disabled_client_side_validations":
s = set(filter(None, value.split(",")))
for v in s:
if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
raise ApiValueError("Invalid keyword: '{0}''".format(v))
self._disabled_client_side_validations = s
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:return: The logger_file path.
:rtype: str
"""
return self._logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type value: str
"""
self._logger_file = value
if self._logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self._logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in self.logger.items():
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status.
:return: The debug status, True or False.
:rtype: bool
"""
return self._debug
@debug.setter
def debug(self, value):
"""Debug status.
:param value: The debug status, True or False.
:type value: bool
"""
self._debug = value
if self._debug:
# if debug status is True, turn on debug logging
for _, logger in self.logger.items():
logger.setLevel(logging.DEBUG)
# turn on http_client debug
http_client.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in self.logger.items():
logger.setLevel(logging.WARNING)
# turn off http_client debug
http_client.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:return: The format string.
:rtype: str
"""
return self._logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type value: str
"""
self._logger_format = value
self.logger_formatter = logging.Formatter(self._logger_format)
@property
def retry_backoff_factor(self):
"""Retry backoff factor.
:return: The backoff factor, float
:rtype: float
"""
return self._retry_backoff_factor
@retry_backoff_factor.setter
def retry_backoff_factor(self, value):
"""Retry backoff factor.
:param value: The backoff factor used to calculate intervals between retry attempts
:type value: float
"""
if value < 2:
raise ValueError("Retry backoff factor cannot be smaller than 2")
self._retry_backoff_factor = value
def get_api_key_with_prefix(self, identifier, alias=None):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:param alias: The alternative identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook is not None:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
return key
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
username = ""
if self.username is not None:
username = self.username
password = ""
if self.password is not None:
password = self.password
return urllib3.util.make_headers(basic_auth=username + ":" + password).get("authorization")
def get_host_settings(self):
"""Gets an array of host settings
:return: An array of host settings
"""
return [
{
"url": "https://{subdomain}.{site}",
"description": "No description provided",
"variables": {
"site": {
"description": "The regional site for Datadog customers.",
"default_value": "datadoghq.com",
"enum_values": [
"datadoghq.com",
"us3.datadoghq.com",
"us5.datadoghq.com",
"ap1.datadoghq.com",
"datadoghq.eu",
"ddog-gov.com",
],
},
"subdomain": {
"description": "The subdomain where the API is deployed.",
"default_value": "api",
},
},
},
{
"url": "{protocol}://{name}",
"description": "No description provided",
"variables": {
"name": {
"description": "Full site DNS name.",
"default_value": "api.datadoghq.com",
},
"protocol": {
"description": "The protocol for accessing the API.",
"default_value": "https",
},
},
},
{
"url": "https://{subdomain}.{site}",
"description": "No description provided",
"variables": {
"site": {
"description": "Any Datadog deployment.",
"default_value": "datadoghq.com",
},
"subdomain": {
"description": "The subdomain where the API is deployed.",
"default_value": "api",
},
},
},
]
def get_host_from_settings(self, index, variables=None, servers=None):
"""Gets host URL based on the index and variables.
:param index: Array index of the host settings.
:param variables: Hash of variable and the corresponding value.
:param servers: An array of host settings or None.
:return: URL based on host settings.
"""
if index is None:
return self._base_path
variables = {} if variables is None else variables
servers = self.get_host_settings() if servers is None else servers
try:
server = servers[index]
except IndexError:
raise ValueError(
"Invalid index {} when selecting the host settings. " "Must be less than {}".format(index, len(servers))
)
url = server["url"]
# go through variables and replace placeholders
for variable_name, variable in server.get("variables", {}).items():
used_value = variables.get(variable_name, variable["default_value"])
if "enum_values" in variable and used_value not in variable["enum_values"]:
raise ValueError(
"The variable `{}` in the host URL has invalid value " "{}. Must be {}.".format(
variable_name, variables[variable_name], variable["enum_values"]
)
)
url = url.replace(f"{{{variable_name}}}", used_value)
return url
@property
def host(self):
"""Return generated host."""
return self.get_host_from_settings(self.server_index, variables=self.server_variables)
@host.setter
def host(self, value):
"""Fix base path."""
self._base_path = value
self.server_index = None
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
auth = {}
if self.access_token is not None:
auth["AuthZ"] = {
"type": "oauth2",
"in": "header",
"key": "Authorization",
"value": "Bearer " + self.access_token,
}
if "apiKeyAuth" in self.api_key:
auth["apiKeyAuth"] = {
"type": "api_key",
"in": "header",
"key": "DD-API-KEY",
"value": self.get_api_key_with_prefix(
"apiKeyAuth",
),
}
if "appKeyAuth" in self.api_key:
auth["appKeyAuth"] = {
"type": "api_key",
"in": "header",
"key": "DD-APPLICATION-KEY",
"value": self.get_api_key_with_prefix(
"appKeyAuth",
),
}
return auth
| (host=None, api_key=None, api_key_prefix=None, access_token=None, username=None, password=None, discard_unknown_keys=True, disabled_client_side_validations='', server_index=None, server_variables=None, server_operation_index=None, server_operation_variables=None, ssl_ca_cert=None, compress=True, return_http_data_only=True, preload_content=True, request_timeout=None, check_input_type=True, check_return_type=True, spec_property_naming=False, enable_retry=False, retry_backoff_factor=2, max_retries=3) |
21,847 | datadog_api_client.configuration | __deepcopy__ | null | def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k not in ("logger", "logger_file_handler"):
setattr(result, k, copy.deepcopy(v, memo))
# Shallow copy of loggers
result.logger = copy.copy(self.logger)
# Use setters to configure loggers
result.logger_file = self.logger_file
result.debug = self.debug
return result
| (self, memo) |
21,848 | datadog_api_client.configuration | __init__ | Constructor. | def __init__(
self,
host=None,
api_key=None,
api_key_prefix=None,
access_token=None,
username=None,
password=None,
discard_unknown_keys=True,
disabled_client_side_validations="",
server_index=None,
server_variables=None,
server_operation_index=None,
server_operation_variables=None,
ssl_ca_cert=None,
compress=True,
return_http_data_only=True,
preload_content=True,
request_timeout=None,
check_input_type=True,
check_return_type=True,
spec_property_naming=False,
enable_retry=False,
retry_backoff_factor=2,
max_retries=3,
):
"""Constructor."""
self._base_path = "https://api.datadoghq.com" if host is None else host
self.server_index = 0 if server_index is None and host is None else server_index
self.server_operation_index = server_operation_index or {}
self.server_variables = server_variables or {}
self.server_operation_variables = server_operation_variables or {}
self.temp_folder_path = None
# Authentication Settings
self.access_token = access_token
self.api_key = {}
if api_key:
self.api_key = api_key
self.api_key_prefix = {}
if api_key_prefix:
self.api_key_prefix = api_key_prefix
self.refresh_api_key_hook = None
self.username = username
self.password = password
self.discard_unknown_keys = discard_unknown_keys
self.disabled_client_side_validations = disabled_client_side_validations
self.logger = {}
self.logger["package_logger"] = logging.getLogger("datadog_api_client")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
self.logger_format = "%(asctime)s %(levelname)s %(message)s"
self.logger_stream_handler = None
self.logger_file_handler = None
self.logger_file = None
self.debug = False
self.verify_ssl = True
self.ssl_ca_cert = ssl_ca_cert
self.cert_file = None
self.key_file = None
self.assert_hostname = None
self.proxy = None
self.proxy_headers = None
self.safe_chars_for_path_param = ""
# Enable client side validation
self.client_side_validation = True
# Options to pass down to the underlying urllib3 socket
self.socket_options = None
# Will translate to a Accept-Encoding header
self.compress = compress
self.return_http_data_only = return_http_data_only
self.preload_content = preload_content
self.request_timeout = request_timeout
self.check_input_type = check_input_type
self.check_return_type = check_return_type
self.spec_property_naming = spec_property_naming
# Options for http retry
self.enable_retry = enable_retry
self.retry_backoff_factor = retry_backoff_factor
self.max_retries = max_retries
# Keep track of unstable operations
self.unstable_operations = _UnstableOperations(
{
"v2.create_open_api": False,
"v2.delete_open_api": False,
"v2.get_open_api": False,
"v2.update_open_api": False,
"v2.get_active_billing_dimensions": False,
"v2.get_monthly_cost_attribution": False,
"v2.create_dora_deployment": False,
"v2.create_dora_incident": False,
"v2.create_incident": False,
"v2.create_incident_integration": False,
"v2.create_incident_todo": False,
"v2.delete_incident": False,
"v2.delete_incident_integration": False,
"v2.delete_incident_todo": False,
"v2.get_incident": False,
"v2.get_incident_integration": False,
"v2.get_incident_todo": False,
"v2.list_incident_attachments": False,
"v2.list_incident_integrations": False,
"v2.list_incidents": False,
"v2.list_incident_todos": False,
"v2.search_incidents": False,
"v2.update_incident": False,
"v2.update_incident_attachments": False,
"v2.update_incident_integration": False,
"v2.update_incident_todo": False,
"v2.query_scalar_data": False,
"v2.query_timeseries_data": False,
"v2.get_finding": False,
"v2.list_findings": False,
"v2.mute_findings": False,
"v2.create_scorecard_outcomes_batch": False,
"v2.create_scorecard_rule": False,
"v2.delete_scorecard_rule": False,
"v2.list_scorecard_outcomes": False,
"v2.list_scorecard_rules": False,
"v2.create_incident_service": False,
"v2.delete_incident_service": False,
"v2.get_incident_service": False,
"v2.list_incident_services": False,
"v2.update_incident_service": False,
"v2.create_slo_report_job": False,
"v2.get_slo_report": False,
"v2.get_slo_report_job_status": False,
"v2.create_incident_team": False,
"v2.delete_incident_team": False,
"v2.get_incident_team": False,
"v2.list_incident_teams": False,
"v2.update_incident_team": False,
}
)
# Load default values from environment
if "DD_SITE" in os.environ:
self.server_variables["site"] = os.environ["DD_SITE"]
if "DD_API_KEY" in os.environ and not self.api_key.get("apiKeyAuth"):
self.api_key["apiKeyAuth"] = os.environ["DD_API_KEY"]
if "DD_APP_KEY" in os.environ and not self.api_key.get("appKeyAuth"):
self.api_key["appKeyAuth"] = os.environ["DD_APP_KEY"]
| (self, host=None, api_key=None, api_key_prefix=None, access_token=None, username=None, password=None, discard_unknown_keys=True, disabled_client_side_validations='', server_index=None, server_variables=None, server_operation_index=None, server_operation_variables=None, ssl_ca_cert=None, compress=True, return_http_data_only=True, preload_content=True, request_timeout=None, check_input_type=True, check_return_type=True, spec_property_naming=False, enable_retry=False, retry_backoff_factor=2, max_retries=3) |
21,849 | datadog_api_client.configuration | __setattr__ | null | def __setattr__(self, name, value):
object.__setattr__(self, name, value)
if name == "disabled_client_side_validations":
s = set(filter(None, value.split(",")))
for v in s:
if v not in JSON_SCHEMA_VALIDATION_KEYWORDS:
raise ApiValueError("Invalid keyword: '{0}''".format(v))
self._disabled_client_side_validations = s
| (self, name, value) |
21,850 | datadog_api_client.configuration | auth_settings | Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
| def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
auth = {}
if self.access_token is not None:
auth["AuthZ"] = {
"type": "oauth2",
"in": "header",
"key": "Authorization",
"value": "Bearer " + self.access_token,
}
if "apiKeyAuth" in self.api_key:
auth["apiKeyAuth"] = {
"type": "api_key",
"in": "header",
"key": "DD-API-KEY",
"value": self.get_api_key_with_prefix(
"apiKeyAuth",
),
}
if "appKeyAuth" in self.api_key:
auth["appKeyAuth"] = {
"type": "api_key",
"in": "header",
"key": "DD-APPLICATION-KEY",
"value": self.get_api_key_with_prefix(
"appKeyAuth",
),
}
return auth
| (self) |
21,851 | datadog_api_client.configuration | get_api_key_with_prefix | Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:param alias: The alternative identifier of apiKey.
:return: The token for api key authentication.
| def get_api_key_with_prefix(self, identifier, alias=None):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:param alias: The alternative identifier of apiKey.
:return: The token for api key authentication.
"""
if self.refresh_api_key_hook is not None:
self.refresh_api_key_hook(self)
key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None)
if key:
prefix = self.api_key_prefix.get(identifier)
if prefix:
return "%s %s" % (prefix, key)
return key
| (self, identifier, alias=None) |
21,852 | datadog_api_client.configuration | get_basic_auth_token | Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
| def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
username = ""
if self.username is not None:
username = self.username
password = ""
if self.password is not None:
password = self.password
return urllib3.util.make_headers(basic_auth=username + ":" + password).get("authorization")
| (self) |
21,853 | datadog_api_client.configuration | get_host_from_settings | Gets host URL based on the index and variables.
:param index: Array index of the host settings.
:param variables: Hash of variable and the corresponding value.
:param servers: An array of host settings or None.
:return: URL based on host settings.
| def get_host_from_settings(self, index, variables=None, servers=None):
"""Gets host URL based on the index and variables.
:param index: Array index of the host settings.
:param variables: Hash of variable and the corresponding value.
:param servers: An array of host settings or None.
:return: URL based on host settings.
"""
if index is None:
return self._base_path
variables = {} if variables is None else variables
servers = self.get_host_settings() if servers is None else servers
try:
server = servers[index]
except IndexError:
raise ValueError(
"Invalid index {} when selecting the host settings. " "Must be less than {}".format(index, len(servers))
)
url = server["url"]
# go through variables and replace placeholders
for variable_name, variable in server.get("variables", {}).items():
used_value = variables.get(variable_name, variable["default_value"])
if "enum_values" in variable and used_value not in variable["enum_values"]:
raise ValueError(
"The variable `{}` in the host URL has invalid value " "{}. Must be {}.".format(
variable_name, variables[variable_name], variable["enum_values"]
)
)
url = url.replace(f"{{{variable_name}}}", used_value)
return url
| (self, index, variables=None, servers=None) |
21,854 | datadog_api_client.configuration | get_host_settings | Gets an array of host settings
:return: An array of host settings
| def get_host_settings(self):
"""Gets an array of host settings
:return: An array of host settings
"""
return [
{
"url": "https://{subdomain}.{site}",
"description": "No description provided",
"variables": {
"site": {
"description": "The regional site for Datadog customers.",
"default_value": "datadoghq.com",
"enum_values": [
"datadoghq.com",
"us3.datadoghq.com",
"us5.datadoghq.com",
"ap1.datadoghq.com",
"datadoghq.eu",
"ddog-gov.com",
],
},
"subdomain": {
"description": "The subdomain where the API is deployed.",
"default_value": "api",
},
},
},
{
"url": "{protocol}://{name}",
"description": "No description provided",
"variables": {
"name": {
"description": "Full site DNS name.",
"default_value": "api.datadoghq.com",
},
"protocol": {
"description": "The protocol for accessing the API.",
"default_value": "https",
},
},
},
{
"url": "https://{subdomain}.{site}",
"description": "No description provided",
"variables": {
"site": {
"description": "Any Datadog deployment.",
"default_value": "datadoghq.com",
},
"subdomain": {
"description": "The subdomain where the API is deployed.",
"default_value": "api",
},
},
},
]
| (self) |
21,855 | datadog_api_client.api_client | ThreadedApiClient | null | class ThreadedApiClient(ApiClient):
_pool = None
def __init__(self, configuration: Configuration, pool_threads: int = 1):
self.pool_threads = pool_threads
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
super().__init__(configuration)
def _build_rest_client(self):
return rest.RESTClientObject(self.configuration, maxsize=self.connection_pool_maxsize)
def close(self) -> None:
self.rest_client.pool_manager.clear()
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, "unregister"):
atexit.unregister(self.close)
@property
def pool(self) -> ThreadPool:
"""Create thread pool on first request
avoids instantiating unused threadpool for blocking clients.
"""
if self._pool is None:
atexit.register(self.close)
self._pool = ThreadPool(self.pool_threads)
return self._pool
def _call_api(
self,
method: str,
url: str,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple]] = None,
check_type: Optional[bool] = None,
):
return self.pool.apply_async(
super()._call_api,
(
method,
url,
query_params,
header_params,
body,
post_params,
response_type,
return_http_data_only,
preload_content,
request_timeout,
check_type,
),
)
| (configuration: datadog_api_client.configuration.Configuration, pool_threads: int = 1) |
21,858 | datadog_api_client.api_client | __init__ | null | def __init__(self, configuration: Configuration, pool_threads: int = 1):
self.pool_threads = pool_threads
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
super().__init__(configuration)
| (self, configuration: datadog_api_client.configuration.Configuration, pool_threads: int = 1) |
21,859 | datadog_api_client.api_client | _build_rest_client | null | def _build_rest_client(self):
return rest.RESTClientObject(self.configuration, maxsize=self.connection_pool_maxsize)
| (self) |
21,860 | datadog_api_client.api_client | _call_api | null | def _call_api(
self,
method: str,
url: str,
query_params: Optional[List[Tuple[str, Any]]] = None,
header_params: Optional[Dict[str, Any]] = None,
body: Optional[Any] = None,
post_params: Optional[List[Tuple[str, Any]]] = None,
response_type: Optional[Tuple[Any]] = None,
return_http_data_only: Optional[bool] = None,
preload_content: bool = True,
request_timeout: Optional[Union[int, float, Tuple]] = None,
check_type: Optional[bool] = None,
):
return self.pool.apply_async(
super()._call_api,
(
method,
url,
query_params,
header_params,
body,
post_params,
response_type,
return_http_data_only,
preload_content,
request_timeout,
check_type,
),
)
| (self, method: str, url: str, query_params: Optional[List[Tuple[str, Any]]] = None, header_params: Optional[Dict[str, Any]] = None, body: Optional[Any] = None, post_params: Optional[List[Tuple[str, Any]]] = None, response_type: Optional[Tuple[Any]] = None, return_http_data_only: Optional[bool] = None, preload_content: bool = True, request_timeout: Union[int, float, Tuple, NoneType] = None, check_type: Optional[bool] = None) |
21,864 | datadog_api_client.api_client | close | null | def close(self) -> None:
self.rest_client.pool_manager.clear()
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, "unregister"):
atexit.unregister(self.close)
| (self) -> NoneType |
21,878 | prefect_dask.task_runners | DaskTaskRunner |
A parallel task_runner that submits tasks to the `dask.distributed` scheduler.
By default a temporary `distributed.LocalCluster` is created (and
subsequently torn down) within the `start()` contextmanager. To use a
different cluster class (e.g.
[`dask_kubernetes.KubeCluster`](https://kubernetes.dask.org/)), you can
specify `cluster_class`/`cluster_kwargs`.
Alternatively, if you already have a dask cluster running, you can provide
the cluster object via the `cluster` kwarg or the address of the scheduler
via the `address` kwarg.
!!! warning "Multiprocessing safety"
Note that, because the `DaskTaskRunner` uses multiprocessing, calls to flows
in scripts must be guarded with `if __name__ == "__main__":` or warnings will
be displayed.
Args:
cluster (distributed.deploy.Cluster, optional): Currently running dask cluster;
if one is not provided (or specified via `address` kwarg), a temporary
cluster will be created in `DaskTaskRunner.start()`. Defaults to `None`.
address (string, optional): Address of a currently running dask
scheduler. Defaults to `None`.
cluster_class (string or callable, optional): The cluster class to use
when creating a temporary dask cluster. Can be either the full
class name (e.g. `"distributed.LocalCluster"`), or the class itself.
cluster_kwargs (dict, optional): Additional kwargs to pass to the
`cluster_class` when creating a temporary dask cluster.
adapt_kwargs (dict, optional): Additional kwargs to pass to `cluster.adapt`
when creating a temporary dask cluster. Note that adaptive scaling
is only enabled if `adapt_kwargs` are provided.
client_kwargs (dict, optional): Additional kwargs to use when creating a
[`dask.distributed.Client`](https://distributed.dask.org/en/latest/api.html#client).
Examples:
Using a temporary local dask cluster:
```python
from prefect import flow
from prefect_dask.task_runners import DaskTaskRunner
@flow(task_runner=DaskTaskRunner)
def my_flow():
...
```
Using a temporary cluster running elsewhere. Any Dask cluster class should
work, here we use [dask-cloudprovider](https://cloudprovider.dask.org):
```python
DaskTaskRunner(
cluster_class="dask_cloudprovider.FargateCluster",
cluster_kwargs={
"image": "prefecthq/prefect:latest",
"n_workers": 5,
},
)
```
Connecting to an existing dask cluster:
```python
DaskTaskRunner(address="192.0.2.255:8786")
```
| class DaskTaskRunner(BaseTaskRunner):
"""
A parallel task_runner that submits tasks to the `dask.distributed` scheduler.
By default a temporary `distributed.LocalCluster` is created (and
subsequently torn down) within the `start()` contextmanager. To use a
different cluster class (e.g.
[`dask_kubernetes.KubeCluster`](https://kubernetes.dask.org/)), you can
specify `cluster_class`/`cluster_kwargs`.
Alternatively, if you already have a dask cluster running, you can provide
the cluster object via the `cluster` kwarg or the address of the scheduler
via the `address` kwarg.
!!! warning "Multiprocessing safety"
Note that, because the `DaskTaskRunner` uses multiprocessing, calls to flows
in scripts must be guarded with `if __name__ == "__main__":` or warnings will
be displayed.
Args:
cluster (distributed.deploy.Cluster, optional): Currently running dask cluster;
if one is not provided (or specified via `address` kwarg), a temporary
cluster will be created in `DaskTaskRunner.start()`. Defaults to `None`.
address (string, optional): Address of a currently running dask
scheduler. Defaults to `None`.
cluster_class (string or callable, optional): The cluster class to use
when creating a temporary dask cluster. Can be either the full
class name (e.g. `"distributed.LocalCluster"`), or the class itself.
cluster_kwargs (dict, optional): Additional kwargs to pass to the
`cluster_class` when creating a temporary dask cluster.
adapt_kwargs (dict, optional): Additional kwargs to pass to `cluster.adapt`
when creating a temporary dask cluster. Note that adaptive scaling
is only enabled if `adapt_kwargs` are provided.
client_kwargs (dict, optional): Additional kwargs to use when creating a
[`dask.distributed.Client`](https://distributed.dask.org/en/latest/api.html#client).
Examples:
Using a temporary local dask cluster:
```python
from prefect import flow
from prefect_dask.task_runners import DaskTaskRunner
@flow(task_runner=DaskTaskRunner)
def my_flow():
...
```
Using a temporary cluster running elsewhere. Any Dask cluster class should
work, here we use [dask-cloudprovider](https://cloudprovider.dask.org):
```python
DaskTaskRunner(
cluster_class="dask_cloudprovider.FargateCluster",
cluster_kwargs={
"image": "prefecthq/prefect:latest",
"n_workers": 5,
},
)
```
Connecting to an existing dask cluster:
```python
DaskTaskRunner(address="192.0.2.255:8786")
```
"""
def __init__(
    self,
    cluster: Optional[distributed.deploy.Cluster] = None,
    address: str = None,
    cluster_class: Union[str, Callable] = None,
    cluster_kwargs: dict = None,
    adapt_kwargs: dict = None,
    client_kwargs: dict = None,
):
    """Validate the mutually-exclusive cluster settings and store them.

    Exactly one of `address`, `cluster`, or the `cluster_class` /
    `cluster_kwargs` pair may describe the cluster to use.
    """
    # `address` excludes every cluster-creation option.
    if address:
        if cluster or cluster_class or cluster_kwargs or adapt_kwargs:
            raise ValueError(
                "Cannot specify `address` and "
                "`cluster`/`cluster_class`/`cluster_kwargs`/`adapt_kwargs`"
            )
    elif cluster:
        # An externally managed cluster excludes the creation options and
        # must already be running in asynchronous mode.
        if cluster_class or cluster_kwargs:
            raise ValueError(
                "Cannot specify `cluster` and `cluster_class`/`cluster_kwargs`"
            )
        if not cluster.asynchronous:
            raise ValueError(
                "The cluster must have `asynchronous=True` to be "
                "used with `DaskTaskRunner`."
            )
    elif isinstance(cluster_class, str):
        # A dotted path is resolved to the class it names.
        cluster_class = from_qualified_name(cluster_class)

    # Copy incoming kwargs since they may be mutated below.
    cluster_kwargs = cluster_kwargs.copy() if cluster_kwargs else {}
    adapt_kwargs = adapt_kwargs.copy() if adapt_kwargs else {}
    client_kwargs = client_kwargs.copy() if client_kwargs else {}

    client_kwargs.setdefault("set_as_default", False)

    # The async mode is managed by Prefect; the user may not override it.
    if "asynchronous" in client_kwargs:
        raise ValueError(
            "`client_kwargs` cannot set `asynchronous`. "
            "This option is managed by Prefect."
        )
    if "asynchronous" in cluster_kwargs:
        raise ValueError(
            "`cluster_kwargs` cannot set `asynchronous`. "
            "This option is managed by Prefect."
        )

    # Persist the validated settings.
    self.address = address
    self.cluster_class = cluster_class
    self.cluster_kwargs = cluster_kwargs
    self.adapt_kwargs = adapt_kwargs
    self.client_kwargs = client_kwargs

    # Runtime state, populated by `_start`.
    self._client: "distributed.Client" = None
    self._cluster: "distributed.deploy.Cluster" = cluster
    self._dask_futures: Dict[str, "distributed.Future"] = {}

    super().__init__()
@property
def concurrency_type(self) -> TaskConcurrencyType:
    """Parallel when the cluster uses worker processes, concurrent otherwise."""
    if self.cluster_kwargs.get("processes"):
        return TaskConcurrencyType.PARALLEL
    return TaskConcurrencyType.CONCURRENT
def duplicate(self):
    """Create a new instance of the task runner with the same settings."""
    runner_cls = type(self)
    return runner_cls(
        address=self.address,
        cluster_class=self.cluster_class,
        cluster_kwargs=self.cluster_kwargs,
        adapt_kwargs=self.adapt_kwargs,
        client_kwargs=self.client_kwargs,
    )
def __eq__(self, other: object) -> bool:
    """
    Check if an instance has the same settings as this task runner.

    Returns NotImplemented for other types so Python can fall back to the
    reflected comparison.
    """
    # `is` is the idiomatic identity test for type objects;
    # `type(a) == type(b)` invokes metaclass equality needlessly.
    if type(self) is type(other):
        return (
            self.address == other.address
            and self.cluster_class == other.cluster_class
            and self.cluster_kwargs == other.cluster_kwargs
            and self.adapt_kwargs == other.adapt_kwargs
            and self.client_kwargs == other.client_kwargs
        )
    return NotImplemented
async def submit(
    self,
    key: UUID,
    call: Callable[..., Awaitable[State[R]]],
) -> None:
    """Schedule `call` on the Dask cluster, tracking its future under `key`.

    Raises:
        RuntimeError: if the task runner has not been started.
    """
    if not self._started:
        raise RuntimeError(
            "The task runner must be started before submitting work."
        )

    # Swap any Prefect futures in the bound kwargs for their underlying Dask
    # futures so Dask can schedule against them directly.
    call_kwargs = self._optimize_futures(call.keywords)

    if "task_run" in call_kwargs:
        task_run = call_kwargs["task_run"]
        flow_run = FlowRunContext.get().flow_run
        # Dask displays the text up to the first '-' as the name, so the key
        # leads with the task run name for readability in the Dask console.
        # The flow run count is appended so a rerun of a retried flow does
        # not collide with (and read from) the failed run's cached key.
        dask_key = f"{task_run.name}-{task_run.id.hex}-{flow_run.run_count}"
    else:
        dask_key = str(key)

    # `pure=True` makes the expectation explicit: if this task run is
    # submitted to Dask twice, the first run's result is returned rather
    # than executing again.
    self._dask_futures[key] = self._client.submit(
        call.func,
        key=dask_key,
        pure=True,
        **call_kwargs,
    )
def _get_dask_future(self, key: UUID) -> "distributed.Future":
    """
    Retrieve the dask future corresponding to a Prefect future.

    The Dask future is for the `run_fn`, which should return a `State`.
    """
    dask_future = self._dask_futures[key]
    return dask_future
def _optimize_futures(self, expr):
    """Replace Prefect futures inside `expr` with their tracked Dask futures.

    Nodes without a tracked Dask future are returned unaltered.
    """

    def _swap(node):
        if not isinstance(node, PrefectFuture):
            return node
        tracked = self._dask_futures.get(node.key)
        # Fall back to the original node when the future is untracked.
        return node if tracked is None else tracked

    return visit_collection(expr, visit_fn=_swap, return_data=True)
async def wait(self, key: UUID, timeout: float = None) -> Optional[State]:
    """Wait up to `timeout` seconds for the state of the task under `key`.

    Returns None on timeout; any other failure (including cancellation and
    interrupts) is converted into a crashed Prefect state rather than raised.
    """
    dask_future = self._get_dask_future(key)
    try:
        return await dask_future.result(timeout=timeout)
    except distributed.TimeoutError:
        return None
    except BaseException as exc:
        return await exception_to_crashed_state(exc)
async def _start(self, exit_stack: AsyncExitStack):
    """
    Start the task runner and prep for context exit.

    - Creates a cluster if an external address is not set.
    - Creates a client to connect to the cluster.
    - Pushes a call to wait for all running futures to complete on exit.
    """
    if self._cluster:
        # An externally owned cluster was handed in at construction time;
        # connect to it directly.
        self.logger.info(f"Connecting to existing Dask cluster {self._cluster}")
        self._connect_to = self._cluster
        if self.adapt_kwargs:
            self._cluster.adapt(**self.adapt_kwargs)
    elif self.address:
        # Only a scheduler address was given; no cluster object is owned.
        self.logger.info(
            f"Connecting to an existing Dask cluster at {self.address}"
        )
        self._connect_to = self.address
    else:
        # Create a temporary cluster whose lifetime is tied to the exit stack.
        self.cluster_class = self.cluster_class or distributed.LocalCluster
        self.logger.info(
            f"Creating a new Dask cluster with "
            f"`{to_qualified_name(self.cluster_class)}`"
        )
        self._connect_to = self._cluster = await exit_stack.enter_async_context(
            self.cluster_class(asynchronous=True, **self.cluster_kwargs)
        )
        if self.adapt_kwargs:
            # Adaptive scaling is only enabled when adapt kwargs were given;
            # some cluster implementations return an awaitable from `adapt`.
            adapt_response = self._cluster.adapt(**self.adapt_kwargs)
            if inspect.isawaitable(adapt_response):
                await adapt_response
    self._client = await exit_stack.enter_async_context(
        distributed.Client(
            self._connect_to, asynchronous=True, **self.client_kwargs
        )
    )
    if self._client.dashboard_link:
        self.logger.info(
            f"The Dask dashboard is available at {self._client.dashboard_link}",
        )
def __getstate__(self):
    """
    Allow the `DaskTaskRunner` to be serialized by dropping
    the `distributed.Client`, which contains locks.

    Must be deserialized on a dask worker.
    """
    state = dict(self.__dict__)
    for attr in ("_client", "_cluster", "_connect_to"):
        state[attr] = None
    return state
def __setstate__(self, data: dict):
    """
    Restore the `distributed.Client` by loading the client on a dask worker.
    """
    self.__dict__.update(data)
    # `__getstate__` nulled `_client`; re-acquire one from the dask runtime.
    self._client = distributed.get_client()
| (cluster: Optional[distributed.deploy.cluster.Cluster] = None, address: str = None, cluster_class: Union[str, Callable] = None, cluster_kwargs: dict = None, adapt_kwargs: dict = None, client_kwargs: dict = None) |
21,879 | prefect_dask.task_runners | __eq__ |
Check if an instance has the same settings as this task runner.
| def __eq__(self, other: object) -> bool:
"""
Check if an instance has the same settings as this task runner.
"""
if type(self) == type(other):
return (
self.address == other.address
and self.cluster_class == other.cluster_class
and self.cluster_kwargs == other.cluster_kwargs
and self.adapt_kwargs == other.adapt_kwargs
and self.client_kwargs == other.client_kwargs
)
else:
return NotImplemented
| (self, other: object) -> bool |
21,880 | prefect_dask.task_runners | __getstate__ |
Allow the `DaskTaskRunner` to be serialized by dropping
the `distributed.Client`, which contains locks.
Must be deserialized on a dask worker.
| def __getstate__(self):
"""
Allow the `DaskTaskRunner` to be serialized by dropping
the `distributed.Client`, which contains locks.
Must be deserialized on a dask worker.
"""
data = self.__dict__.copy()
data.update({k: None for k in {"_client", "_cluster", "_connect_to"}})
return data
| (self) |
21,881 | prefect_dask.task_runners | __init__ | null | def __init__(
self,
cluster: Optional[distributed.deploy.Cluster] = None,
address: str = None,
cluster_class: Union[str, Callable] = None,
cluster_kwargs: dict = None,
adapt_kwargs: dict = None,
client_kwargs: dict = None,
):
# Validate settings and infer defaults
if address:
if cluster or cluster_class or cluster_kwargs or adapt_kwargs:
raise ValueError(
"Cannot specify `address` and "
"`cluster`/`cluster_class`/`cluster_kwargs`/`adapt_kwargs`"
)
elif cluster:
if cluster_class or cluster_kwargs:
raise ValueError(
"Cannot specify `cluster` and `cluster_class`/`cluster_kwargs`"
)
if not cluster.asynchronous:
raise ValueError(
"The cluster must have `asynchronous=True` to be "
"used with `DaskTaskRunner`."
)
else:
if isinstance(cluster_class, str):
cluster_class = from_qualified_name(cluster_class)
else:
cluster_class = cluster_class
# Create a copies of incoming kwargs since we may mutate them
cluster_kwargs = cluster_kwargs.copy() if cluster_kwargs else {}
adapt_kwargs = adapt_kwargs.copy() if adapt_kwargs else {}
client_kwargs = client_kwargs.copy() if client_kwargs else {}
# Update kwargs defaults
client_kwargs.setdefault("set_as_default", False)
# The user cannot specify async/sync themselves
if "asynchronous" in client_kwargs:
raise ValueError(
"`client_kwargs` cannot set `asynchronous`. "
"This option is managed by Prefect."
)
if "asynchronous" in cluster_kwargs:
raise ValueError(
"`cluster_kwargs` cannot set `asynchronous`. "
"This option is managed by Prefect."
)
# Store settings
self.address = address
self.cluster_class = cluster_class
self.cluster_kwargs = cluster_kwargs
self.adapt_kwargs = adapt_kwargs
self.client_kwargs = client_kwargs
# Runtime attributes
self._client: "distributed.Client" = None
self._cluster: "distributed.deploy.Cluster" = cluster
self._dask_futures: Dict[str, "distributed.Future"] = {}
super().__init__()
| (self, cluster: Optional[distributed.deploy.cluster.Cluster] = None, address: Optional[str] = None, cluster_class: Union[str, Callable, NoneType] = None, cluster_kwargs: Optional[dict] = None, adapt_kwargs: Optional[dict] = None, client_kwargs: Optional[dict] = None) |
21,882 | prefect_dask.task_runners | __setstate__ |
Restore the `distributed.Client` by loading the client on a dask worker.
| def __setstate__(self, data: dict):
"""
Restore the `distributed.Client` by loading the client on a dask worker.
"""
self.__dict__.update(data)
self._client = distributed.get_client()
| (self, data: dict) |
21,883 | prefect.task_runners | __str__ | null | def __str__(self) -> str:
return type(self).__name__
| (self) -> str |
21,884 | prefect_dask.task_runners | _get_dask_future |
Retrieve the dask future corresponding to a Prefect future.
The Dask future is for the `run_fn`, which should return a `State`.
| def _get_dask_future(self, key: UUID) -> "distributed.Future":
"""
Retrieve the dask future corresponding to a Prefect future.
The Dask future is for the `run_fn`, which should return a `State`.
"""
return self._dask_futures[key]
| (self, key: uuid.UUID) -> distributed.client.Future |
21,885 | prefect_dask.task_runners | _optimize_futures | null | def _optimize_futures(self, expr):
def visit_fn(expr):
if isinstance(expr, PrefectFuture):
dask_future = self._dask_futures.get(expr.key)
if dask_future is not None:
return dask_future
# Fallback to return the expression unaltered
return expr
return visit_collection(expr, visit_fn=visit_fn, return_data=True)
| (self, expr) |
21,886 | prefect_dask.task_runners | _start |
Start the task runner and prep for context exit.
- Creates a cluster if an external address is not set.
- Creates a client to connect to the cluster.
- Pushes a call to wait for all running futures to complete on exit.
| def _optimize_futures(self, expr):
def visit_fn(expr):
if isinstance(expr, PrefectFuture):
dask_future = self._dask_futures.get(expr.key)
if dask_future is not None:
return dask_future
# Fallback to return the expression unaltered
return expr
return visit_collection(expr, visit_fn=visit_fn, return_data=True)
| (self, exit_stack: contextlib.AsyncExitStack) |
21,887 | prefect_dask.task_runners | duplicate |
Create a new instance of the task runner with the same settings.
| def duplicate(self):
"""
Create a new instance of the task runner with the same settings.
"""
return type(self)(
address=self.address,
cluster_class=self.cluster_class,
cluster_kwargs=self.cluster_kwargs,
adapt_kwargs=self.adapt_kwargs,
client_kwargs=self.client_kwargs,
)
| (self) |
21,888 | prefect.task_runners | start |
Start the task runner, preparing any resources necessary for task submission.
Children should implement `_start` to prepare and clean up resources.
Yields:
The prepared task runner
| def duplicate(self):
return type(self)()
| (self: ~T) -> AsyncIterator[~T] |
21,889 | prefect_dask.task_runners | submit | null | def __eq__(self, other: object) -> bool:
"""
Check if an instance has the same settings as this task runner.
"""
if type(self) == type(other):
return (
self.address == other.address
and self.cluster_class == other.cluster_class
and self.cluster_kwargs == other.cluster_kwargs
and self.adapt_kwargs == other.adapt_kwargs
and self.client_kwargs == other.client_kwargs
)
else:
return NotImplemented
| (self, key: uuid.UUID, call: Callable[..., Awaitable[prefect.client.schemas.objects.State[~R]]]) -> NoneType |
21,892 | prefect_dask.utils | get_async_dask_client |
Yields a temporary asynchronous dask client; this is useful
for parallelizing operations on dask collections,
such as a `dask.DataFrame` or `dask.Bag`.
Without invoking this, workers do not automatically get a client to connect
to the full cluster. Therefore, it will attempt to perform work within the
worker itself serially, potentially overwhelming the single worker.
Args:
timeout: Timeout after which to error out; has no effect in
flow run contexts because the client has already started;
Defaults to the `distributed.comm.timeouts.connect`
configuration value.
client_kwargs: Additional keyword arguments to pass to
`distributed.Client`, and overwrites inherited keyword arguments
from the task runner, if any.
Yields:
A temporary asynchronous dask client.
Examples:
Use `get_async_dask_client` to distribute work across workers.
```python
import dask
from prefect import flow, task
from prefect_dask import DaskTaskRunner, get_async_dask_client
@task
async def compute_task():
async with get_async_dask_client(timeout="120s") as client:
df = dask.datasets.timeseries("2000", "2001", partition_freq="4w")
summary_df = await client.compute(df.describe())
return summary_df
@flow(task_runner=DaskTaskRunner())
async def dask_flow():
prefect_future = await compute_task.submit()
return await prefect_future.result()
asyncio.run(dask_flow())
```
| null | (timeout: Union[int, float, str, datetime.timedelta, NoneType] = None, **client_kwargs: Dict[str, Any]) -> AsyncGenerator[distributed.client.Client, NoneType] |
21,893 | prefect_dask.utils | get_dask_client |
Yields a temporary synchronous dask client; this is useful
for parallelizing operations on dask collections,
such as a `dask.DataFrame` or `dask.Bag`.
Without invoking this, workers do not automatically get a client to connect
to the full cluster. Therefore, it will attempt to perform work within the
worker itself serially, potentially overwhelming the single worker.
When in an async context, we recommend using `get_async_dask_client` instead.
Args:
timeout: Timeout after which to error out; has no effect in
flow run contexts because the client has already started;
Defaults to the `distributed.comm.timeouts.connect`
configuration value.
client_kwargs: Additional keyword arguments to pass to
`distributed.Client`, and overwrites inherited keyword arguments
from the task runner, if any.
Yields:
A temporary synchronous dask client.
Examples:
Use `get_dask_client` to distribute work across workers.
```python
import dask
from prefect import flow, task
from prefect_dask import DaskTaskRunner, get_dask_client
@task
def compute_task():
with get_dask_client(timeout="120s") as client:
df = dask.datasets.timeseries("2000", "2001", partition_freq="4w")
summary_df = client.compute(df.describe()).result()
return summary_df
@flow(task_runner=DaskTaskRunner())
def dask_flow():
prefect_future = compute_task.submit()
return prefect_future.result()
dask_flow()
```
| null | (timeout: Union[int, float, str, datetime.timedelta, NoneType] = None, **client_kwargs: Dict[str, Any]) -> Generator[distributed.client.Client, NoneType, NoneType] |
21,896 | e3nn | get_optimization_defaults | Get the global default optimization settings. | def get_optimization_defaults() -> Dict[str, bool]:
r"""Get the global default optimization settings."""
return dict(_OPT_DEFAULTS)
| () -> Dict[str, bool] |
21,901 | e3nn | set_optimization_defaults | Globally set the default optimization settings.
Parameters
----------
**kwargs
Keyword arguments to set the default optimization settings.
| def set_optimization_defaults(**kwargs) -> None:
r"""Globally set the default optimization settings.
Parameters
----------
**kwargs
Keyword arguments to set the default optimization settings.
"""
for k, v in kwargs.items():
if k not in _OPT_DEFAULTS:
raise ValueError(f"Unknown optimization option: {k}")
_OPT_DEFAULTS[k] = v
| (**kwargs) -> NoneType |
21,903 | ep_tm | add_one | null | def add_one(number):
return number + 1
| (number) |
21,904 | twod.exceptions | ColinearPoints | null | class ColinearPoints(Exception):
    """Raised when three points expected to wind clockwise or
    counter-clockwise turn out to lie on one line (see ``Point.is_ccw``)."""
    pass
| null |
21,905 | twod.point | Point | Point(x: float = 0, y: float = 0) | class Point:
x: float = 0
y: float = 0
"""A two dimensional geometric point.
The Point class represents a geometric point with 'x' and 'y' attributes
and has many helpful properties and methods.
>>> p = Point()
>>> p.is_origin
True
"""
@classmethod
def from_polar(
cls,
radius: float,
theta: float,
is_radians: bool = True,
translate: PointType = None,
) -> PointType:
"""Returns a Point with polar coordinates (R, ϴ).
The point is genrated relative to the origin.
:param float radius:
:param float theta:
:param bool is_radians:
:return: Point
"""
theta = theta if is_radians else math.radians(theta)
point = cls()
point.polar = (radius, theta)
if translate:
point += translate
return point
@property
def is_origin(self) -> bool:
"""True if and only if x == 0 and y == 0."""
return self.x == 0 and self.y == 0
@property
def quadrant(self) -> Quadrant:
"""The quadrant in the cartesian plane this point is located in.
Possible values are:
- Quadrant.I
- Quadrant.II
- Quadrant.III
- Quadrant.IV
- Quadrant.ORIGIN.
"""
if self.x > 0:
if self.y > 0:
return Quadrant.I
if self.y < 0:
return Quadrant.IV
if self.x < 0:
if self.y > 0:
return Quadrant.II
if self.y < 0:
return Quadrant.III
return Quadrant.ORIGIN
def _polar_to_cartesian(self, radius: float, radians: float) -> None:
"""Computes cartesian coordinates from polar coordinates.
The polar coordinates are expected to be a dimensionless radius
and angle in radians.
:param float radius:
:param float radians:
"""
self.x = round(radius * math.cos(radians), EPSILON_EXP_MINUS_1)
self.y = round(radius * math.sin(radians), EPSILON_EXP_MINUS_1)
@property
def radius(self) -> float:
"""The distance from this point to the origin."""
return math.hypot(self.x, self.y)
@radius.setter
def radius(self, new_value: float) -> None:
self._polar_to_cartesian(new_value, self.radians)
@property
def radians(self) -> float:
"""The angle in radians measured counter-clockwise from 3 o'clock."""
return math.atan2(self.y, self.x)
@radians.setter
def radians(self, new_value: float) -> None:
self._polar_to_cartesian(self.radius, new_value)
@property
def degrees(self) -> float:
"""The angle in degrees measured counter-clockwise from 3 o'clock."""
return math.degrees(self.radians)
@degrees.setter
def degrees(self, new_value: float) -> None:
self._polar_to_cartesian(self.radius, math.radians(new_value))
@property
def polar(self) -> Tuple[float, float]:
"""Polar coordinates tuple: (R, ϴ).
R is the distance from the origin to this point.
ϴ is the angle (radians) measured counter-clockwise from 3 o'clock.
"""
return (self.radius, self.radians)
@polar.setter
def polar(self, new_values: Iterable[Numeric]) -> None:
try:
radius, radians, *_ = map(float, new_values)
self._polar_to_cartesian(radius, radians)
return
except (TypeError, ValueError):
pass
raise TypeError(
f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
)
@property
def polar_deg(self) -> Tuple[float, float]:
"""Polar coordinates tuple: (R, ϴ).
R is the distance from the origin to this point.
ϴ is the angle (degrees) measured counter-clockwise from 3 o'clock.
"""
radius, radians = self.polar
return (radius, math.degrees(radians))
@polar_deg.setter
def polar_deg(self, new_values: Tuple[Numeric, Numeric]) -> None:
try:
radius, degrees, *_ = map(float, new_values)
self._polar_to_cartesian(radius=radius, radians=math.radians(degrees))
return
except (TypeError, ValueError):
pass
raise TypeError(
f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
)
@property
def xy(self) -> Tuple[float, float]:
"""A tuple of this point's x and y coordinates."""
return (self.x, self.y)
@xy.setter
def xy(self, new_values: Iterable[Numeric]) -> None:
try:
self.x, self.y, *_ = map(float, new_values)
return
except (TypeError, ValueError):
pass
raise TypeError(
f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
)
def __iter__(self) -> Iterable[Tuple[float, float]]:
"""An iterator over x and y coordinates."""
return iter([self.x, self.y])
def __len__(self) -> int:
return 2
def __eq__(self, other: PointOrIterable) -> bool:
try:
return self.x == other.x and self.y == other.y
except AttributeError:
pass
return all(a == b for a, b in zip(self, other))
def __getitem__(self, key: Union[int, slice]) -> float:
if isinstance(key, int):
if key == 0:
return self.x
if key == 1:
return self.y
raise IndexError("Key out of range: {key}")
if isinstance(key, slice):
return [self.x, self.y][key][0]
raise TypeError(f"Expected int or slice key, not {type(key)}")
def __setitem__(self, key: int, value: Numeric):
if not isinstance(key, int):
raise TypeError(f"Expected int key, not {type(key)}")
if key == 0:
self.x = value
return
if key == 1:
self.y = value
return
raise IndexError(f"Key out of range: {key}")
def __op(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
""""""
try:
return Point(op(self.x, other.x), op(self.y, other.y))
except AttributeError:
pass
try:
return Point(*[op(a, b) for a, b in zip(self, other)])
except TypeError:
pass
return Point(op(self.x, other), op(self.y, other))
def __iop(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
""""""
try:
self.x = op(self.x, other.x)
self.y = op(self.y, other.y)
return self
except AttributeError:
pass
try:
self.x = op(self.x, other[0])
self.y = op(self.y, other[1])
return self
except TypeError:
pass
self.x = op(self.x, other)
self.y = op(self.y, other)
return self
def __add__(self, other: PointOrIterableOrScalar) -> PointType:
"""Add `other` to `self` and return a new Point."""
return self.__op(other, operator.add)
def __iadd__(self, other: PointOrIterableOrScalar) -> PointType:
"""Add `other` to `self` in-place and returns `self`."""
return self.__iop(other, operator.add)
def __sub__(self, other: PointOrIterableOrScalar) -> PointType:
"""Subtract `other` from `self` and return a new Point."""
return self.__op(other, operator.sub)
def __isub__(self, other: PointOrIterableOrScalar) -> PointType:
"""Subtract `other` from `self` in-place and return `self`."""
return self.__iop(other, operator.sub)
def __mul__(self, other: PointOrIterableOrScalar) -> PointType:
"""Multiply `self` with `other` and return a new Point."""
return self.__op(other, operator.mul)
def __imul__(self, other: PointOrIterableOrScalar) -> PointType:
"""Multiply `self` with `other` in-place and return `self`."""
return self.__iop(other, operator.mul)
def __truediv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` and return a new Point."""
return self.__op(other, operator.truediv)
def __itruediv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` in-place and return `self`."""
return self.__iop(other, operator.truediv)
def __floordiv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` and return a new Point."""
return self.__op(other, operator.floordiv)
def __ifloordiv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` in-place and return `self`."""
return self.__iop(other, operator.floordiv)
def __pow__(self, exponent: float) -> PointType:
"""Raise each coordinate by `exponent` and return a new Point."""
return Point(self.x ** exponent, self.y ** exponent)
def __ipow__(self, exponent: float) -> PointType:
"""Raise each coordinate by `exponent` in-place and return self."""
self.x **= exponent
self.y **= exponent
return self
def __abs__(self) -> PointType:
"""Apply absolute value to each coordinate and return a new Point."""
return Point(abs(self.x), abs(self.y))
def __neg__(self) -> PointType:
"""Negate each coordinate and return a new Point."""
return self * -1
def __invert__(self) -> PointType:
"""Inverts each coordinate and return a new Point."""
return Point(~self.x, ~self.y)
def distance(self, other: PointOrIterable = None) -> float:
"""Return the floating point distance between `self` and `other`.
If other is not given, the distance from self to the origin is
returned.
:param other: PointType
:return: float
"""
return (self.distance_squared(other or Point())) ** 0.5
def distance_squared(self, other: PointOrIterable = None) -> float:
"""Return the floating point squared distance between self and other.
If other is not given, the squared distance from self to the
origin is returned.
:param other: PointType
:return: float
"""
return sum((((other or Point()) - self) ** 2))
def dot(self, other: PointOrIterable) -> float:
"""Return a scalar dot product of self with other.
:param other: PointOrIterableOrScalar
:return: float
"""
return sum(self * other)
def cross(self, other: PointOrIterable) -> float:
"""Return a scalar cross product of self with other.
:param other: PointOrIterableOrScalar
:return: float
"""
try:
return (self.x * other.y) + (self.y * other.x)
except AttributeError:
pass
return (self.x * other[1]) + (self.y * other[0])
def ccw(self, b: PointOrIterable, c: PointOrIterable) -> float:
"""Return a floating point value indicating the winding
direction of the points [self, b, c].
If ccw < 0, clock-wise winding
If ccw > 0, counter clock-wise winding
If ccw == 0, the three points are colinear
Note: ccw is also 2*area of the triangle [self, b, c].
:param b: Point
:param c: Point
:return: float
"""
try:
return ((b.x - self.x) * (c.y - self.y)) - ((c.x - self.x) * (b.y - self.y))
except AttributeError:
pass
return ((b[0] - self.x) * (c[1] - self.y)) - ((c[0] - self.x) * (b[1] - self.y))
def is_ccw(self, b: PointOrIterable, c: PointOrIterable) -> bool:
"""Return True if the angle [self, b, c] has counter clock-wise
winding, else False.
Raises the exception `ColinearPoints` if the points compose a line.
:param b: Point
:param c: Point
:return: bool
"""
result = self.ccw(b, c)
if result == 0:
raise ColinearPoints(self, b, c)
return result > 0
def is_colinear(self, b: PointType, c: PointType) -> bool:
"""True if the angle [self, b, c ] is a line, else False.
:param b: Point
:param c: Point
:return: bool
"""
return self.ccw(b, c) == 0
def midpoint(self, other: PointType = None) -> PointType:
"""Return a new Point midway between `self` and `other`.
If other is not given, the midpoint between self and the
origin is returned.
:param other: Point
:return: Point
"""
return (self + (other or Point())) / 2
def between(self, p: PointType, q: PointType) -> bool:
"""True if self is bounded by the points [p, q], else False
The bounds are checked by less than or equal to (<=) so self is
considered between if it resides on any of the lines constructed
using [p,q].
:param p: Point
:param q: Point
:return: bool
"""
i = min(p.x, q.x) <= self.x <= max(p.x, q.x)
j = min(p.y, q.y) <= self.y <= max(p.y, q.y)
return i and j
def inside(self, p: PointType, q: PointType) -> bool:
"""True if self is bounded by the points (p, q), else False
The bounds are checked by less than (<) so self is considered
inside if it does not reside on any of the lines constructed
using (p,q).
:param p: Point
:param q: Point
:return: bool
"""
# XXX re-implement with ccw and a list of points instead of a pair
i = min(p.x, q.x) < self.x < max(p.x, q.x)
j = min(p.y, q.y) < self.y < max(p.y, q.y)
return i and j
| (x: float = 0, y: float = 0) -> None |
21,906 | twod.point | __iop | def __iop(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
""""""
try:
self.x = op(self.x, other.x)
self.y = op(self.y, other.y)
return self
except AttributeError:
pass
try:
self.x = op(self.x, other[0])
self.y = op(self.y, other[1])
return self
except TypeError:
pass
self.x = op(self.x, other)
self.y = op(self.y, other)
return self
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int], op: Callable) -> ~PointType |
|
21,907 | twod.point | __op | def __op(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
""""""
try:
return Point(op(self.x, other.x), op(self.y, other.y))
except AttributeError:
pass
try:
return Point(*[op(a, b) for a, b in zip(self, other)])
except TypeError:
pass
return Point(op(self.x, other), op(self.y, other))
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int], op: Callable) -> ~PointType |
|
21,908 | twod.point | __abs__ | Apply absolute value to each coordinate and return a new Point. | def __abs__(self) -> PointType:
"""Apply absolute value to each coordinate and return a new Point."""
return Point(abs(self.x), abs(self.y))
| (self) -> ~PointType |
21,909 | twod.point | __add__ | Add `other` to `self` and return a new Point. | def __add__(self, other: PointOrIterableOrScalar) -> PointType:
"""Add `other` to `self` and return a new Point."""
return self.__op(other, operator.add)
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int]) -> ~PointType |
21,910 | twod.point | __eq__ | null | def __eq__(self, other: PointOrIterable) -> bool:
try:
return self.x == other.x and self.y == other.y
except AttributeError:
pass
return all(a == b for a, b in zip(self, other))
| (self, other: Union[~PointType, Iterable[Union[float, int]]]) -> bool |
21,911 | twod.point | __floordiv__ | Divide `self` by `other` and return a new Point. | def __floordiv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` and return a new Point."""
return self.__op(other, operator.floordiv)
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int]) -> ~PointType |
21,912 | twod.point | __getitem__ | null | def __getitem__(self, key: Union[int, slice]) -> float:
if isinstance(key, int):
if key == 0:
return self.x
if key == 1:
return self.y
raise IndexError("Key out of range: {key}")
if isinstance(key, slice):
return [self.x, self.y][key][0]
raise TypeError(f"Expected int or slice key, not {type(key)}")
| (self, key: Union[int, slice]) -> float |
21,913 | twod.point | __iadd__ | Add `other` to `self` in-place and returns `self`. | def __iadd__(self, other: PointOrIterableOrScalar) -> PointType:
"""Add `other` to `self` in-place and returns `self`."""
return self.__iop(other, operator.add)
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int]) -> ~PointType |
21,914 | twod.point | __ifloordiv__ | Divide `self` by `other` in-place and return `self`. | def __ifloordiv__(self, other: PointOrIterableOrScalar) -> PointType:
"""Divide `self` by `other` in-place and return `self`."""
return self.__iop(other, operator.floordiv)
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int]) -> ~PointType |
21,915 | twod.point | __imul__ | Multiply `self` with `other` in-place and return `self`. | def __imul__(self, other: PointOrIterableOrScalar) -> PointType:
"""Multiply `self` with `other` in-place and return `self`."""
return self.__iop(other, operator.mul)
| (self, other: Union[~PointType, Iterable[Union[float, int]], float, int]) -> ~PointType |
21,916 | twod.point | __init__ | null | """ a two-dimensional point for humans™.
"""
import math
import operator
import sys
from dataclasses import dataclass
from typing import Callable, Iterable, Tuple, Union
from .types import Numeric
from .types import PointType
from .types import PointOrIterable
from .types import PointOrIterableOrScalar
from .constants import EPSILON_EXP_MINUS_1, Quadrant
from .exceptions import ColinearPoints
@dataclass
class Point:
    """A two dimensional geometric point.

    Cartesian coordinates live in `x` and `y`; polar views are available
    through `radius`, `radians`, `degrees`, `polar` and `polar_deg`.
    Arithmetic operators work elementwise against another point, any
    2-iterable of numbers, or a scalar (broadcast to both coordinates).

    >>> p = Point()
    >>> p.is_origin
    True
    """
    # NOTE: the docstring is now the first statement so it becomes
    # Point.__doc__ (it previously followed the fields and was a no-op).

    x: float = 0
    y: float = 0

    @classmethod
    def from_polar(
        cls,
        radius: float,
        theta: float,
        is_radians: bool = True,
        translate: PointType = None,
    ) -> PointType:
        """Return a Point with polar coordinates (R, ϴ).

        The point is generated relative to the origin and, if `translate`
        is given (and truthy), shifted by it.

        :param float radius:
        :param float theta: angle; interpreted as radians unless `is_radians` is False
        :param bool is_radians:
        :param translate: optional point-like offset
        :return: Point
        """
        theta = theta if is_radians else math.radians(theta)
        point = cls()
        point.polar = (radius, theta)
        if translate:
            point += translate
        return point

    @property
    def is_origin(self) -> bool:
        """True if and only if x == 0 and y == 0."""
        return self.x == 0 and self.y == 0

    @property
    def quadrant(self) -> Quadrant:
        """The cartesian quadrant this point is located in.

        One of Quadrant.I, II, III, IV, or ORIGIN.  Points on either
        axis (x == 0 or y == 0) fall through to Quadrant.ORIGIN.
        """
        if self.x > 0:
            if self.y > 0:
                return Quadrant.I
            if self.y < 0:
                return Quadrant.IV
        if self.x < 0:
            if self.y > 0:
                return Quadrant.II
            if self.y < 0:
                return Quadrant.III
        return Quadrant.ORIGIN

    def _polar_to_cartesian(self, radius: float, radians: float) -> None:
        """Set x and y from a dimensionless radius and an angle in radians.

        Results are rounded to EPSILON_EXP_MINUS_1 decimal places to damp
        floating-point noise near the axes.
        """
        self.x = round(radius * math.cos(radians), EPSILON_EXP_MINUS_1)
        self.y = round(radius * math.sin(radians), EPSILON_EXP_MINUS_1)

    @property
    def radius(self) -> float:
        """The distance from this point to the origin."""
        return math.hypot(self.x, self.y)

    @radius.setter
    def radius(self, new_value: float) -> None:
        self._polar_to_cartesian(new_value, self.radians)

    @property
    def radians(self) -> float:
        """The angle in radians measured counter-clockwise from 3 o'clock."""
        return math.atan2(self.y, self.x)

    @radians.setter
    def radians(self, new_value: float) -> None:
        self._polar_to_cartesian(self.radius, new_value)

    @property
    def degrees(self) -> float:
        """The angle in degrees measured counter-clockwise from 3 o'clock."""
        return math.degrees(self.radians)

    @degrees.setter
    def degrees(self, new_value: float) -> None:
        self._polar_to_cartesian(self.radius, math.radians(new_value))

    @property
    def polar(self) -> Tuple[float, float]:
        """Polar coordinates tuple (R, ϴ) with ϴ in radians."""
        return (self.radius, self.radians)

    @polar.setter
    def polar(self, new_values: Iterable[Numeric]) -> None:
        try:
            radius, radians, *_ = map(float, new_values)
            self._polar_to_cartesian(radius, radians)
            return
        except (TypeError, ValueError):
            pass
        raise TypeError(
            f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
        )

    @property
    def polar_deg(self) -> Tuple[float, float]:
        """Polar coordinates tuple (R, ϴ) with ϴ in degrees."""
        radius, radians = self.polar
        return (radius, math.degrees(radians))

    @polar_deg.setter
    def polar_deg(self, new_values: Tuple[Numeric, Numeric]) -> None:
        try:
            radius, degrees, *_ = map(float, new_values)
            self._polar_to_cartesian(radius=radius, radians=math.radians(degrees))
            return
        except (TypeError, ValueError):
            pass
        raise TypeError(
            f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
        )

    @property
    def xy(self) -> Tuple[float, float]:
        """Tuple of this point's (x, y) coordinates."""
        return (self.x, self.y)

    @xy.setter
    def xy(self, new_values: Iterable[Numeric]) -> None:
        try:
            self.x, self.y, *_ = map(float, new_values)
            return
        except (TypeError, ValueError):
            pass
        raise TypeError(
            f"Expected a Iterable[Union[int, float]], got {type(new_values)}"
        )

    def __iter__(self) -> Iterable[Tuple[float, float]]:
        """Iterate x first, then y."""
        return iter([self.x, self.y])

    def __len__(self) -> int:
        # A point always has exactly two coordinates.
        return 2

    def __eq__(self, other: PointOrIterable) -> bool:
        """Compare against another point-like object or any 2-iterable."""
        try:
            return self.x == other.x and self.y == other.y
        except AttributeError:
            pass
        return all(a == b for a, b in zip(self, other))

    def __getitem__(self, key: Union[int, slice]) -> Union[float, list]:
        """Return coordinate(s) by index: 0 -> x, 1 -> y; slices return a list.

        :raises IndexError: for integer keys other than 0 or 1
        :raises TypeError: for keys that are neither int nor slice
        """
        if isinstance(key, int):
            if key == 0:
                return self.x
            if key == 1:
                return self.y
            # BUGFIX: message was a literal "{key}" (missing f-prefix).
            raise IndexError(f"Key out of range: {key}")
        if isinstance(key, slice):
            # BUGFIX: previously returned only the first element of the slice.
            return [self.x, self.y][key]
        raise TypeError(f"Expected int or slice key, not {type(key)}")

    def __setitem__(self, key: int, value: Numeric):
        """Assign coordinate 0 (x) or 1 (y).

        :raises TypeError: for non-integer keys
        :raises IndexError: for integer keys other than 0 or 1
        """
        if not isinstance(key, int):
            raise TypeError(f"Expected int key, not {type(key)}")
        if key == 0:
            self.x = value
            return
        if key == 1:
            self.y = value
            return
        raise IndexError(f"Key out of range: {key}")

    def __op(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
        """Apply binary `op` elementwise, returning a new Point.

        Resolution order: point-like (.x/.y), then 2-iterable, then
        scalar broadcast to both coordinates.
        """
        try:
            return Point(op(self.x, other.x), op(self.y, other.y))
        except AttributeError:
            pass
        try:
            return Point(*[op(a, b) for a, b in zip(self, other)])
        except TypeError:
            pass
        return Point(op(self.x, other), op(self.y, other))

    def __iop(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
        """Apply binary `op` elementwise in place; return self.

        Same resolution order as `__op`.
        """
        try:
            self.x = op(self.x, other.x)
            self.y = op(self.y, other.y)
            return self
        except AttributeError:
            pass
        try:
            self.x = op(self.x, other[0])
            self.y = op(self.y, other[1])
            return self
        except TypeError:
            pass
        self.x = op(self.x, other)
        self.y = op(self.y, other)
        return self

    def __add__(self, other: PointOrIterableOrScalar) -> PointType:
        """Add `other` to `self` and return a new Point."""
        return self.__op(other, operator.add)

    def __iadd__(self, other: PointOrIterableOrScalar) -> PointType:
        """Add `other` to `self` in place and return `self`."""
        return self.__iop(other, operator.add)

    def __sub__(self, other: PointOrIterableOrScalar) -> PointType:
        """Subtract `other` from `self` and return a new Point."""
        return self.__op(other, operator.sub)

    def __isub__(self, other: PointOrIterableOrScalar) -> PointType:
        """Subtract `other` from `self` in place and return `self`."""
        return self.__iop(other, operator.sub)

    def __mul__(self, other: PointOrIterableOrScalar) -> PointType:
        """Multiply `self` with `other` and return a new Point."""
        return self.__op(other, operator.mul)

    def __imul__(self, other: PointOrIterableOrScalar) -> PointType:
        """Multiply `self` with `other` in place and return `self`."""
        return self.__iop(other, operator.mul)

    def __truediv__(self, other: PointOrIterableOrScalar) -> PointType:
        """Divide `self` by `other` and return a new Point."""
        return self.__op(other, operator.truediv)

    def __itruediv__(self, other: PointOrIterableOrScalar) -> PointType:
        """Divide `self` by `other` in place and return `self`."""
        return self.__iop(other, operator.truediv)

    def __floordiv__(self, other: PointOrIterableOrScalar) -> PointType:
        """Floor-divide `self` by `other` and return a new Point."""
        return self.__op(other, operator.floordiv)

    def __ifloordiv__(self, other: PointOrIterableOrScalar) -> PointType:
        """Floor-divide `self` by `other` in place and return `self`."""
        return self.__iop(other, operator.floordiv)

    def __pow__(self, exponent: float) -> PointType:
        """Raise each coordinate to `exponent` and return a new Point."""
        return Point(self.x ** exponent, self.y ** exponent)

    def __ipow__(self, exponent: float) -> PointType:
        """Raise each coordinate to `exponent` in place; return self."""
        self.x **= exponent
        self.y **= exponent
        return self

    def __abs__(self) -> PointType:
        """Return a new Point with the absolute value of each coordinate."""
        return Point(abs(self.x), abs(self.y))

    def __neg__(self) -> PointType:
        """Return a new Point with both coordinates negated."""
        return self * -1

    def __invert__(self) -> PointType:
        """Bitwise-invert each coordinate and return a new Point.

        NOTE(review): `~` is only defined for integer coordinates; this
        raises TypeError when x or y is a float — confirm intended.
        """
        return Point(~self.x, ~self.y)

    def distance(self, other: PointOrIterable = None) -> float:
        """Return the Euclidean distance between `self` and `other`.

        If `other` is omitted (or falsy) the distance to the origin is
        returned.

        :param other: PointType
        :return: float
        """
        return (self.distance_squared(other or Point())) ** 0.5

    def distance_squared(self, other: PointOrIterable = None) -> float:
        """Return the squared Euclidean distance between `self` and `other`.

        If `other` is omitted (or falsy) the squared distance to the
        origin is returned.

        :param other: PointType
        :return: float
        """
        return sum((((other or Point()) - self) ** 2))

    def dot(self, other: PointOrIterable) -> float:
        """Return the scalar dot product of self with other (x1*x2 + y1*y2).

        :param other: PointOrIterable
        :return: float
        """
        return sum(self * other)

    def cross(self, other: PointOrIterable) -> float:
        """Return the scalar (z-component) cross product of self with other.

        BUGFIX: previously computed x1*y2 + y1*x2; the 2-D cross product
        is x1*y2 - y1*x2, matching the winding computation in `ccw`.

        :param other: PointOrIterable
        :return: float
        """
        try:
            return (self.x * other.y) - (self.y * other.x)
        except AttributeError:
            pass
        return (self.x * other[1]) - (self.y * other[0])

    def ccw(self, b: PointOrIterable, c: PointOrIterable) -> float:
        """Return a float indicating the winding direction of [self, b, c].

        If ccw < 0, clock-wise winding.
        If ccw > 0, counter clock-wise winding.
        If ccw == 0, the three points are colinear.
        Note: ccw is also 2*area of the triangle [self, b, c].

        :param b: Point
        :param c: Point
        :return: float
        """
        try:
            return ((b.x - self.x) * (c.y - self.y)) - ((c.x - self.x) * (b.y - self.y))
        except AttributeError:
            pass
        return ((b[0] - self.x) * (c[1] - self.y)) - ((c[0] - self.x) * (b[1] - self.y))

    def is_ccw(self, b: PointOrIterable, c: PointOrIterable) -> bool:
        """True if [self, b, c] wind counter clock-wise, else False.

        :param b: Point
        :param c: Point
        :raises ColinearPoints: if the three points lie on one line
        :return: bool
        """
        result = self.ccw(b, c)
        if result == 0:
            raise ColinearPoints(self, b, c)
        return result > 0

    def is_colinear(self, b: PointType, c: PointType) -> bool:
        """True if self, b and c lie on a single line (ccw == 0)."""
        return self.ccw(b, c) == 0

    def midpoint(self, other: PointType = None) -> PointType:
        """Return a new Point midway between `self` and `other`.

        If `other` is omitted, the midpoint between self and the origin
        is returned.
        """
        return (self + (other or Point())) / 2

    def between(self, p: PointType, q: PointType) -> bool:
        """True if self is bounded by the points [p, q], else False.

        Inclusive bounds (<=): a point on the bounding box edges counts
        as between.
        """
        i = min(p.x, q.x) <= self.x <= max(p.x, q.x)
        j = min(p.y, q.y) <= self.y <= max(p.y, q.y)
        return i and j

    def inside(self, p: PointType, q: PointType) -> bool:
        """True if self is strictly bounded by the points (p, q), else False.

        Exclusive bounds (<): a point on the bounding box edges is NOT
        inside.
        """
        # XXX re-implement with ccw and a list of points instead of a pair
        i = min(p.x, q.x) < self.x < max(p.x, q.x)
        j = min(p.y, q.y) < self.y < max(p.y, q.y)
        return i and j
| (self, x: float = 0, y: float = 0) -> NoneType |
def __invert__(self) -> PointType:
    """Bitwise-invert each coordinate and return a new Point.

    NOTE(review): `~` is only defined for integers; this raises TypeError
    when x or y is a float — confirm intended.
    """
    return Point(~self.x, ~self.y)
def __ipow__(self, exponent: float) -> PointType:
    """Raise both coordinates to `exponent` in place; return self."""
    self.x = self.x ** exponent
    self.y = self.y ** exponent
    return self
def __isub__(self, other: PointOrIterableOrScalar) -> PointType:
    """Subtract `other` (point, 2-iterable, or scalar) from `self` in place; return `self`."""
    return self.__iop(other, operator.sub)
21,920 | twod.point | __iter__ | An iterator over x and y coordinates. | def __iter__(self) -> Iterable[Tuple[float, float]]:
"""An iterator over x and y coordinates."""
return iter([self.x, self.y])
| (self) -> Iterable[Tuple[float, float]] |
def __itruediv__(self, other: PointOrIterableOrScalar) -> PointType:
    """True-divide `self` by `other` in place; return `self`."""
    return self.__iop(other, operator.truediv)
def __len__(self) -> int:
    """A point always has exactly two coordinates."""
    return 2
def __mul__(self, other: PointOrIterableOrScalar) -> PointType:
    """Multiply `self` by `other` elementwise and return a new Point."""
    return self.__op(other, operator.mul)
def __neg__(self) -> PointType:
    """Return a new Point with both coordinates negated (delegates to `__mul__`)."""
    return self * -1
def __pow__(self, exponent: float) -> PointType:
    """Return a new Point with each coordinate raised to `exponent`."""
    return Point(self.x ** exponent, self.y ** exponent)
def __op(self, other: PointOrIterableOrScalar, op: Callable) -> PointType:
    """Apply binary `op` elementwise, returning a new Point.

    Resolution order: point-like (.x/.y attributes), then 2-iterable,
    then scalar broadcast to both coordinates.

    NOTE(review): this dataset row is labeled `__repr__` but the code is
    the private helper `Point.__op`.
    """
    try:
        return Point(op(self.x, other.x), op(self.y, other.y))
    except AttributeError:
        pass
    try:
        return Point(*[op(a, b) for a, b in zip(self, other)])
    except TypeError:
        pass
    return Point(op(self.x, other), op(self.y, other))
def __setitem__(self, key: int, value: Numeric):
    """Assign coordinate 0 (x) or 1 (y).

    :raises TypeError: for non-integer keys
    :raises IndexError: for integer keys other than 0 or 1
    """
    if not isinstance(key, int):
        raise TypeError(f"Expected int key, not {type(key)}")
    if key not in (0, 1):
        raise IndexError(f"Key out of range: {key}")
    if key == 0:
        self.x = value
    else:
        self.y = value
def __sub__(self, other: PointOrIterableOrScalar) -> PointType:
    """Subtract `other` from `self` elementwise and return a new Point."""
    return self.__op(other, operator.sub)
def __truediv__(self, other: PointOrIterableOrScalar) -> PointType:
    """True-divide `self` by `other` elementwise and return a new Point."""
    return self.__op(other, operator.truediv)
def _polar_to_cartesian(self, radius: float, radians: float) -> None:
    """Set x and y from polar coordinates.

    The polar coordinates are a dimensionless radius and an angle in
    radians.  Results are rounded to EPSILON_EXP_MINUS_1 decimal places
    to damp floating-point noise near the axes.

    :param float radius:
    :param float radians:
    """
    self.x = round(radius * math.cos(radians), EPSILON_EXP_MINUS_1)
    self.y = round(radius * math.sin(radians), EPSILON_EXP_MINUS_1)
def between(self, p: PointType, q: PointType) -> bool:
    """True if self lies within the axis-aligned box spanned by [p, q].

    Bounds are inclusive (<=), so a point on any edge of the box built
    from [p, q] counts as between.

    :param p: Point
    :param q: Point
    :return: bool
    """
    x_ok = min(p.x, q.x) <= self.x <= max(p.x, q.x)
    y_ok = min(p.y, q.y) <= self.y <= max(p.y, q.y)
    return x_ok and y_ok
def ccw(self, b: PointOrIterable, c: PointOrIterable) -> float:
    """Return a float indicating the winding direction of [self, b, c].

    If ccw < 0, clock-wise winding.
    If ccw > 0, counter clock-wise winding.
    If ccw == 0, the three points are colinear.
    Note: ccw is also 2*area of the triangle [self, b, c].

    :param b: Point or 2-indexable
    :param c: Point or 2-indexable
    :return: float
    """
    try:
        bx, by, cx, cy = b.x, b.y, c.x, c.y
    except AttributeError:
        # Fall back to index access for tuple/list-like arguments.
        bx, by, cx, cy = b[0], b[1], c[0], c[1]
    return ((bx - self.x) * (cy - self.y)) - ((cx - self.x) * (by - self.y))
def cross(self, other: "PointOrIterable") -> float:
    """Return the scalar (z-component) cross product of self with other.

    BUGFIX: previously computed x1*y2 + y1*x2; the 2-D cross product is
    x1*y2 - y1*x2, consistent with the winding computation in `ccw`.

    :param other: point-like object with .x/.y, or a 2-indexable
    :return: float
    """
    try:
        return (self.x * other.y) - (self.y * other.x)
    except AttributeError:
        pass
    return (self.x * other[1]) - (self.y * other[0])
def distance(self, other: PointOrIterable = None) -> float:
    """Return the Euclidean distance between `self` and `other`.

    If `other` is omitted (or falsy) the distance from self to the
    origin is returned.

    :param other: PointType
    :return: float
    """
    return (self.distance_squared(other or Point())) ** 0.5
def distance_squared(self, other: PointOrIterable = None) -> float:
    """Return the squared Euclidean distance between self and other.

    If `other` is omitted (or falsy) the squared distance from self to
    the origin is returned.  Relies on elementwise subtraction,
    `__pow__`, and point iteration.

    :param other: PointType
    :return: float
    """
    return sum((((other or Point()) - self) ** 2))
def dot(self, other: PointOrIterable) -> float:
    """Return the scalar dot product x1*x2 + y1*y2.

    Computed via elementwise `__mul__` followed by summing the
    resulting point's coordinates.

    :param other: PointOrIterable
    :return: float
    """
    return sum(self * other)
def inside(self, p: PointType, q: PointType) -> bool:
    """True if self lies strictly inside the box spanned by (p, q).

    Bounds are exclusive (<): a point on any edge of the bounding box
    built from (p, q) is NOT inside.

    :param p: Point
    :param q: Point
    :return: bool
    """
    # XXX re-implement with ccw and a list of points instead of a pair
    x_ok = min(p.x, q.x) < self.x < max(p.x, q.x)
    y_ok = min(p.y, q.y) < self.y < max(p.y, q.y)
    return x_ok and y_ok
def is_ccw(self, b: PointOrIterable, c: PointOrIterable) -> bool:
    """True if the points [self, b, c] wind counter clock-wise, else False.

    :param b: Point
    :param c: Point
    :raises ColinearPoints: if the three points lie on a single line
    :return: bool
    """
    result = self.ccw(b, c)
    if result == 0:
        raise ColinearPoints(self, b, c)
    return result > 0
def is_colinear(self, b: PointType, c: PointType) -> bool:
    """True if self, b and c lie on a single line (ccw == 0), else False.

    :param b: Point
    :param c: Point
    :return: bool
    """
    return self.ccw(b, c) == 0
def midpoint(self, other: PointType = None) -> PointType:
    """Return a new Point midway between `self` and `other`.

    If `other` is omitted (or falsy) the midpoint between self and the
    origin is returned.

    :param other: Point
    :return: Point
    """
    return (self + (other or Point())) / 2
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.