Dataset schema — six fields per record, repeated in the order below:

    column     type           min  max
    index      int64          0    731k
    package    string length  2    98
    name       string length  1    76
    docstring  string length  0    281k
    code       string length  4    1.07M
    signature  string length  2    42.8k
index: 9,721
package: cloudscraper.cloudflare
name: is_New_Captcha_Challenge
docstring: null
code:

    def is_New_Captcha_Challenge(self, resp):
        try:
            return (
                self.is_Captcha_Challenge(resp)
                and re.search(
                    r'''cpo.src\s*=\s*['"]/cdn-cgi/challenge-platform/\S+orchestrate/(captcha|managed)/v1''',
                    resp.text,
                    re.M | re.S
                )
            )
        except AttributeError:
            pass

        return False

signature: (self, resp)
index: 9,722
package: cloudscraper.cloudflare
name: is_New_IUAM_Challenge
docstring: null
code:

    def is_New_IUAM_Challenge(self, resp):
        try:
            return (
                self.is_IUAM_Challenge(resp)
                and re.search(
                    r'''cpo.src\s*=\s*['"]/cdn-cgi/challenge-platform/\S+orchestrate/jsch/v1''',
                    resp.text,
                    re.M | re.S
                )
            )
        except AttributeError:
            pass

        return False

signature: (self, resp)
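Both detectors above reduce to the same orchestrate-script regex applied to the response body. A standalone sketch of that check (the combined pattern and the `looks_like_v2_challenge` helper are illustrative, not part of cloudscraper):

    import re
    import requests

    # Combined form of the two patterns above: 'jsch' covers the IUAM variant,
    # 'captcha' and 'managed' cover the captcha variant.
    V2_ORCHESTRATE = re.compile(
        r'''cpo.src\s*=\s*['"]/cdn-cgi/challenge-platform/\S+orchestrate/(captcha|managed|jsch)/v1''',
        re.M | re.S
    )

    def looks_like_v2_challenge(resp: requests.Response) -> bool:
        # Unlike the methods above, this skips the is_Captcha_Challenge /
        # is_IUAM_Challenge pre-checks and only runs the regex.
        return bool(V2_ORCHESTRATE.search(resp.text))

    resp = requests.get('https://example.com', timeout=10)
    print(looks_like_v2_challenge(resp))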
index: 9,723
package: cloudscraper.cloudflare
name: unescape
docstring: null
code:

    @staticmethod
    def unescape(html_text):
        if sys.version_info >= (3, 0):
            if sys.version_info >= (3, 4):
                return html.unescape(html_text)

            return HTMLParser().unescape(html_text)

        return HTMLParser().unescape(html_text)

signature: (html_text)
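On any modern interpreter the Python 3.4+ branch is taken, so the method reduces to the standard library call; a quick check:

    import html

    # html.unescape handles named, decimal, and hex character references.
    print(html.unescape('&lt;b&gt;Tom &amp; Jerry&lt;/b&gt; &#39;quoted&#39;'))
    # -> <b>Tom & Jerry</b> 'quoted'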
index: 9,724
package: cloudscraper.exceptions
name: CloudflareIUAMError
docstring: Raise an error for problems extracting IUAM parameters from the Cloudflare payload
code:

    class CloudflareIUAMError(CloudflareException):
        """
        Raise an error for problems extracting IUAM parameters from the Cloudflare payload
        """

signature: null
index: 9,725
package: cloudscraper.exceptions
name: CloudflareLoopProtection
docstring: Raise an exception for recursive depth protection
code:

    class CloudflareLoopProtection(CloudflareException):
        """
        Raise an exception for recursive depth protection
        """

signature: null
index: 9,726
package: requests.adapters
name: HTTPAdapter
docstring:

    The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS URLs by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection should
        attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)

code:

    class HTTPAdapter(BaseAdapter):
        """The built-in HTTP Adapter for urllib3.

        Provides a general-case interface for Requests sessions to contact HTTP
        and HTTPS URLs by implementing the Transport Adapter interface. This
        class will usually be created by the :class:`Session <Session>` class
        under the covers.

        :param pool_connections: The number of urllib3 connection pools to cache.
        :param pool_maxsize: The maximum number of connections to save in the pool.
        :param max_retries: The maximum number of retries each connection
            should attempt. Note, this applies only to failed DNS lookups,
            socket connections and connection timeouts, never to requests where
            data has made it to the server. By default, Requests does not retry
            failed connections. If you need granular control over the
            conditions under which we retry a request, import urllib3's
            ``Retry`` class and pass that instead.
        :param pool_block: Whether the connection pool should block for connections.

        Usage::

          >>> import requests
          >>> s = requests.Session()
          >>> a = requests.adapters.HTTPAdapter(max_retries=3)
          >>> s.mount('http://', a)
        """

        __attrs__ = [
            "max_retries",
            "config",
            "_pool_connections",
            "_pool_maxsize",
            "_pool_block",
        ]

        def __init__(
            self,
            pool_connections=DEFAULT_POOLSIZE,
            pool_maxsize=DEFAULT_POOLSIZE,
            max_retries=DEFAULT_RETRIES,
            pool_block=DEFAULT_POOLBLOCK,
        ):
            if max_retries == DEFAULT_RETRIES:
                self.max_retries = Retry(0, read=False)
            else:
                self.max_retries = Retry.from_int(max_retries)
            self.config = {}
            self.proxy_manager = {}

            super().__init__()

            self._pool_connections = pool_connections
            self._pool_maxsize = pool_maxsize
            self._pool_block = pool_block

            self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

        def __getstate__(self):
            return {attr: getattr(self, attr, None) for attr in self.__attrs__}

        def __setstate__(self, state):
            # Can't handle by adding 'proxy_manager' to self.__attrs__ because
            # self.poolmanager uses a lambda function, which isn't pickleable.
            self.proxy_manager = {}
            self.config = {}

            for attr, value in state.items():
                setattr(self, attr, value)

            self.init_poolmanager(
                self._pool_connections, self._pool_maxsize, block=self._pool_block
            )

        def init_poolmanager(
            self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
        ):
            """Initializes a urllib3 PoolManager.

            This method should not be called from user code, and is only
            exposed for use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param connections: The number of urllib3 connection pools to cache.
            :param maxsize: The maximum number of connections to save in the pool.
            :param block: Block when no free connections are available.
            :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
            """
            # save these values for pickling
            self._pool_connections = connections
            self._pool_maxsize = maxsize
            self._pool_block = block

            self.poolmanager = PoolManager(
                num_pools=connections,
                maxsize=maxsize,
                block=block,
                **pool_kwargs,
            )

        def proxy_manager_for(self, proxy, **proxy_kwargs):
            """Return urllib3 ProxyManager for the given proxy.

            This method should not be called from user code, and is only
            exposed for use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param proxy: The proxy to return a urllib3 ProxyManager for.
            :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
            :returns: ProxyManager
            :rtype: urllib3.ProxyManager
            """
            if proxy in self.proxy_manager:
                manager = self.proxy_manager[proxy]
            elif proxy.lower().startswith("socks"):
                username, password = get_auth_from_url(proxy)
                manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                    proxy,
                    username=username,
                    password=password,
                    num_pools=self._pool_connections,
                    maxsize=self._pool_maxsize,
                    block=self._pool_block,
                    **proxy_kwargs,
                )
            else:
                proxy_headers = self.proxy_headers(proxy)
                manager = self.proxy_manager[proxy] = proxy_from_url(
                    proxy,
                    proxy_headers=proxy_headers,
                    num_pools=self._pool_connections,
                    maxsize=self._pool_maxsize,
                    block=self._pool_block,
                    **proxy_kwargs,
                )

            return manager

        def cert_verify(self, conn, url, verify, cert):
            """Verify an SSL certificate. This method should not be called from
            user code, and is only exposed for use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param conn: The urllib3 connection object associated with the cert.
            :param url: The requested URL.
            :param verify: Either a boolean, in which case it controls whether
                we verify the server's TLS certificate, or a string, in which
                case it must be a path to a CA bundle to use
            :param cert: The SSL certificate to verify.
            """
            if url.lower().startswith("https") and verify:
                cert_loc = None

                # Allow self-specified cert location.
                if verify is not True:
                    cert_loc = verify

                if not cert_loc:
                    cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)

                if not cert_loc or not os.path.exists(cert_loc):
                    raise OSError(
                        f"Could not find a suitable TLS CA certificate bundle, "
                        f"invalid path: {cert_loc}"
                    )

                conn.cert_reqs = "CERT_REQUIRED"

                if not os.path.isdir(cert_loc):
                    conn.ca_certs = cert_loc
                else:
                    conn.ca_cert_dir = cert_loc
            else:
                conn.cert_reqs = "CERT_NONE"
                conn.ca_certs = None
                conn.ca_cert_dir = None

            if cert:
                if not isinstance(cert, basestring):
                    conn.cert_file = cert[0]
                    conn.key_file = cert[1]
                else:
                    conn.cert_file = cert
                    conn.key_file = None
                if conn.cert_file and not os.path.exists(conn.cert_file):
                    raise OSError(
                        f"Could not find the TLS certificate file, "
                        f"invalid path: {conn.cert_file}"
                    )
                if conn.key_file and not os.path.exists(conn.key_file):
                    raise OSError(
                        f"Could not find the TLS key file, invalid path: {conn.key_file}"
                    )

        def build_response(self, req, resp):
            """Builds a :class:`Response <requests.Response>` object from a
            urllib3 response. This should not be called from user code, and is
            only exposed for use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

            :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
            :param resp: The urllib3 response object.
            :rtype: requests.Response
            """
            response = Response()

            # Fallback to None if there's no status_code, for whatever reason.
            response.status_code = getattr(resp, "status", None)

            # Make headers case-insensitive.
            response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))

            # Set encoding.
            response.encoding = get_encoding_from_headers(response.headers)
            response.raw = resp
            response.reason = response.raw.reason

            if isinstance(req.url, bytes):
                response.url = req.url.decode("utf-8")
            else:
                response.url = req.url

            # Add new cookies from the server.
            extract_cookies_to_jar(response.cookies, req, resp)

            # Give the Response some context.
            response.request = req
            response.connection = self

            return response

        def get_connection(self, url, proxies=None):
            """Returns a urllib3 connection for the given URL. This should not
            be called from user code, and is only exposed for use when
            subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param url: The URL to connect to.
            :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
            :rtype: urllib3.ConnectionPool
            """
            proxy = select_proxy(url, proxies)

            if proxy:
                proxy = prepend_scheme_if_needed(proxy, "http")
                proxy_url = parse_url(proxy)
                if not proxy_url.host:
                    raise InvalidProxyURL(
                        "Please check proxy URL. It is malformed "
                        "and could be missing the host."
                    )
                proxy_manager = self.proxy_manager_for(proxy)
                conn = proxy_manager.connection_from_url(url)
            else:
                # Only scheme should be lower case
                parsed = urlparse(url)
                url = parsed.geturl()
                conn = self.poolmanager.connection_from_url(url)

            return conn

        def close(self):
            """Disposes of any internal state.

            Currently, this closes the PoolManager and any active ProxyManager,
            which closes any pooled connections.
            """
            self.poolmanager.clear()
            for proxy in self.proxy_manager.values():
                proxy.clear()

        def request_url(self, request, proxies):
            """Obtain the url to use when making the final request.

            If the message is being sent through an HTTP proxy, the full URL
            has to be used. Otherwise, we should only use the path portion of
            the URL.

            This should not be called from user code, and is only exposed for
            use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
            :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
            :rtype: str
            """
            proxy = select_proxy(request.url, proxies)
            scheme = urlparse(request.url).scheme

            is_proxied_http_request = proxy and scheme != "https"
            using_socks_proxy = False
            if proxy:
                proxy_scheme = urlparse(proxy).scheme.lower()
                using_socks_proxy = proxy_scheme.startswith("socks")

            url = request.path_url
            if is_proxied_http_request and not using_socks_proxy:
                url = urldefragauth(request.url)

            return url

        def add_headers(self, request, **kwargs):
            """Add any headers needed by the connection. As of v2.0 this does
            nothing by default, but is left for overriding by users that
            subclass the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            This should not be called from user code, and is only exposed for
            use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
            :param kwargs: The keyword arguments from the call to send().
            """
            pass

        def proxy_headers(self, proxy):
            """Returns a dictionary of the headers to add to any request sent
            through a proxy. This works with urllib3 magic to ensure that they
            are correctly sent to the proxy, rather than in a tunnelled request
            if CONNECT is being used.

            This should not be called from user code, and is only exposed for
            use when subclassing the
            :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

            :param proxy: The url of the proxy being used for this request.
            :rtype: dict
            """
            headers = {}
            username, password = get_auth_from_url(proxy)

            if username:
                headers["Proxy-Authorization"] = _basic_auth_str(username, password)

            return headers

        def send(
            self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
        ):
            """Sends a PreparedRequest object. Returns a Response object.

            :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
            :param stream: (optional) Whether to stream the request content.
            :param timeout: (optional) How long to wait for the server to send
                data before giving up, as a float, or a :ref:`(connect timeout,
                read timeout) <timeouts>` tuple.
            :type timeout: float or tuple or urllib3 Timeout object
            :param verify: (optional) Either a boolean, in which case it
                controls whether we verify the server's TLS certificate, or a
                string, in which case it must be a path to a CA bundle to use
            :param cert: (optional) Any user-provided SSL certificate to be trusted.
            :param proxies: (optional) The proxies dictionary to apply to the request.
            :rtype: requests.Response
            """
            try:
                conn = self.get_connection(request.url, proxies)
            except LocationValueError as e:
                raise InvalidURL(e, request=request)

            self.cert_verify(conn, request.url, verify, cert)
            url = self.request_url(request, proxies)
            self.add_headers(
                request,
                stream=stream,
                timeout=timeout,
                verify=verify,
                cert=cert,
                proxies=proxies,
            )

            chunked = not (request.body is None or "Content-Length" in request.headers)

            if isinstance(timeout, tuple):
                try:
                    connect, read = timeout
                    timeout = TimeoutSauce(connect=connect, read=read)
                except ValueError:
                    raise ValueError(
                        f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                        f"or a single float to set both timeouts to the same value."
                    )
            elif isinstance(timeout, TimeoutSauce):
                pass
            else:
                timeout = TimeoutSauce(connect=timeout, read=timeout)

            try:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout,
                    chunked=chunked,
                )
            except (ProtocolError, OSError) as err:
                raise ConnectionError(err, request=request)
            except MaxRetryError as e:
                if isinstance(e.reason, ConnectTimeoutError):
                    # TODO: Remove this in 3.0.0: see #2811
                    if not isinstance(e.reason, NewConnectionError):
                        raise ConnectTimeout(e, request=request)
                if isinstance(e.reason, ResponseError):
                    raise RetryError(e, request=request)
                if isinstance(e.reason, _ProxyError):
                    raise ProxyError(e, request=request)
                if isinstance(e.reason, _SSLError):
                    # This branch is for urllib3 v1.22 and later.
                    raise SSLError(e, request=request)
                raise ConnectionError(e, request=request)
            except ClosedPoolError as e:
                raise ConnectionError(e, request=request)
            except _ProxyError as e:
                raise ProxyError(e)
            except (_SSLError, _HTTPError) as e:
                if isinstance(e, _SSLError):
                    # This branch is for urllib3 versions earlier than v1.22
                    raise SSLError(e, request=request)
                elif isinstance(e, ReadTimeoutError):
                    raise ReadTimeout(e, request=request)
                elif isinstance(e, _InvalidHeader):
                    raise InvalidHeader(e, request=request)
                else:
                    raise

            return self.build_response(request, resp)

signature: (pool_connections=10, pool_maxsize=10, max_retries=0, pool_block=False)
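The docstring's usage snippet extends naturally to urllib3's ``Retry`` for finer retry control, as the ``max_retries`` parameter suggests; a small sketch (the retry counts, backoff, and pool sizes here are arbitrary):

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    # Retry idempotent requests up to 3 times on common gateway errors,
    # backing off 0.5s, 1s, 2s between attempts.
    retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503, 504])
    adapter = HTTPAdapter(max_retries=retries, pool_connections=20, pool_maxsize=20)

    s = requests.Session()
    # Mount for both schemes so every request goes through the tuned adapter.
    s.mount('https://', adapter)
    s.mount('http://', adapter)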
index: 9,728
package: requests.adapters
name: __init__
docstring: null
code:

    def __init__(
        self,
        pool_connections=DEFAULT_POOLSIZE,
        pool_maxsize=DEFAULT_POOLSIZE,
        max_retries=DEFAULT_RETRIES,
        pool_block=DEFAULT_POOLBLOCK,
    ):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super().__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

signature: (self, pool_connections=10, pool_maxsize=10, max_retries=0, pool_block=False)
index: 9,735
package: requests.adapters
name: init_poolmanager
docstring:

    Initializes a urllib3 PoolManager.

    This method should not be called from user code, and is only exposed for
    use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param connections: The number of urllib3 connection pools to cache.
    :param maxsize: The maximum number of connections to save in the pool.
    :param block: Block when no free connections are available.
    :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.

code:

    def init_poolmanager(
        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
    ):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            **pool_kwargs,
        )

signature: (self, connections, maxsize, block=False, **pool_kwargs)
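Because ``pool_kwargs`` is forwarded straight to urllib3's ``PoolManager``, a common subclassing pattern is to override this hook to inject TLS settings; a sketch (the ``TLS12Adapter`` class is illustrative, not part of requests):

    import ssl
    import requests
    from requests.adapters import HTTPAdapter

    class TLS12Adapter(HTTPAdapter):
        # Hypothetical subclass: pins the minimum TLS version by handing
        # urllib3's PoolManager a pre-built SSLContext via pool_kwargs.
        def init_poolmanager(self, *args, **kwargs):
            ctx = ssl.create_default_context()
            ctx.minimum_version = ssl.TLSVersion.TLSv1_2
            kwargs['ssl_context'] = ctx
            return super().init_poolmanager(*args, **kwargs)

    s = requests.Session()
    s.mount('https://', TLS12Adapter())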
index: 9,737
package: requests.adapters
name: proxy_manager_for
docstring:

    Return urllib3 ProxyManager for the given proxy.

    This method should not be called from user code, and is only exposed for
    use when subclassing the
    :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

    :param proxy: The proxy to return a urllib3 ProxyManager for.
    :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
    :returns: ProxyManager
    :rtype: urllib3.ProxyManager

code:

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith("socks"):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs,
            )

        return manager

signature: (self, proxy, **proxy_kwargs)
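The ``socks`` branch above only works when the optional dependency is installed (``pip install requests[socks]``); from user code the manager is selected implicitly through the proxies mapping rather than by calling this method:

    import requests

    proxies = {
        'http': 'socks5://user:pass@127.0.0.1:1080',
        'https': 'socks5://user:pass@127.0.0.1:1080',
    }
    # Internally the adapter routes this through SOCKSProxyManager;
    # an http:// or https:// proxy URL would take the proxy_from_url branch.
    r = requests.get('https://example.com', proxies=proxies, timeout=10)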
index: 9,740
package: requests.sessions
name: Session
docstring:

    A Requests session.

    Provides cookie persistence, connection-pooling, and configuration.

    Basic Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> s.get('https://httpbin.org/get')
      <Response [200]>

    Or as a context manager::

      >>> with requests.Session() as s:
      ...     s.get('https://httpbin.org/get')
      <Response [200]>

code:

    class Session(SessionRedirectMixin):
        """A Requests session.

        Provides cookie persistence, connection-pooling, and configuration.

        Basic Usage::

          >>> import requests
          >>> s = requests.Session()
          >>> s.get('https://httpbin.org/get')
          <Response [200]>

        Or as a context manager::

          >>> with requests.Session() as s:
          ...     s.get('https://httpbin.org/get')
          <Response [200]>
        """

        __attrs__ = [
            "headers",
            "cookies",
            "auth",
            "proxies",
            "hooks",
            "params",
            "verify",
            "cert",
            "adapters",
            "stream",
            "trust_env",
            "max_redirects",
        ]

        def __init__(self):
            #: A case-insensitive dictionary of headers to be sent on each
            #: :class:`Request <Request>` sent from this
            #: :class:`Session <Session>`.
            self.headers = default_headers()

            #: Default Authentication tuple or object to attach to
            #: :class:`Request <Request>`.
            self.auth = None

            #: Dictionary mapping protocol or protocol and host to the URL of the proxy
            #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
            #: be used on each :class:`Request <Request>`.
            self.proxies = {}

            #: Event-handling hooks.
            self.hooks = default_hooks()

            #: Dictionary of querystring data to attach to each
            #: :class:`Request <Request>`. The dictionary values may be lists for
            #: representing multivalued query parameters.
            self.params = {}

            #: Stream response content default.
            self.stream = False

            #: SSL Verification default.
            #: Defaults to `True`, requiring requests to verify the TLS certificate at the
            #: remote end.
            #: If verify is set to `False`, requests will accept any TLS certificate
            #: presented by the server, and will ignore hostname mismatches and/or
            #: expired certificates, which will make your application vulnerable to
            #: man-in-the-middle (MitM) attacks.
            #: Only set this to `False` for testing.
            self.verify = True

            #: SSL client certificate default, if String, path to ssl client
            #: cert file (.pem). If Tuple, ('cert', 'key') pair.
            self.cert = None

            #: Maximum number of redirects allowed. If the request exceeds this
            #: limit, a :class:`TooManyRedirects` exception is raised.
            #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
            #: 30.
            self.max_redirects = DEFAULT_REDIRECT_LIMIT

            #: Trust environment settings for proxy configuration, default
            #: authentication and similar.
            self.trust_env = True

            #: A CookieJar containing all currently outstanding cookies set on this
            #: session. By default it is a
            #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
            #: may be any other ``cookielib.CookieJar`` compatible object.
            self.cookies = cookiejar_from_dict({})

            # Default connection adapters.
            self.adapters = OrderedDict()
            self.mount("https://", HTTPAdapter())
            self.mount("http://", HTTPAdapter())

        def __enter__(self):
            return self

        def __exit__(self, *args):
            self.close()

        def prepare_request(self, request):
            """Constructs a :class:`PreparedRequest <PreparedRequest>` for
            transmission and returns it. The :class:`PreparedRequest` has settings
            merged from the :class:`Request <Request>` instance and those of the
            :class:`Session`.

            :param request: :class:`Request` instance to prepare with this
                session's settings.
            :rtype: requests.PreparedRequest
            """
            cookies = request.cookies or {}

            # Bootstrap CookieJar.
            if not isinstance(cookies, cookielib.CookieJar):
                cookies = cookiejar_from_dict(cookies)

            # Merge with session cookies
            merged_cookies = merge_cookies(
                merge_cookies(RequestsCookieJar(), self.cookies), cookies
            )

            # Set environment's basic authentication if not explicitly set.
            auth = request.auth
            if self.trust_env and not auth and not self.auth:
                auth = get_netrc_auth(request.url)

            p = PreparedRequest()
            p.prepare(
                method=request.method.upper(),
                url=request.url,
                files=request.files,
                data=request.data,
                json=request.json,
                headers=merge_setting(
                    request.headers, self.headers, dict_class=CaseInsensitiveDict
                ),
                params=merge_setting(request.params, self.params),
                auth=merge_setting(auth, self.auth),
                cookies=merged_cookies,
                hooks=merge_hooks(request.hooks, self.hooks),
            )
            return p

        def request(
            self,
            method,
            url,
            params=None,
            data=None,
            headers=None,
            cookies=None,
            files=None,
            auth=None,
            timeout=None,
            allow_redirects=True,
            proxies=None,
            hooks=None,
            stream=None,
            verify=None,
            cert=None,
            json=None,
        ):
            """Constructs a :class:`Request <Request>`, prepares it and sends it.
            Returns :class:`Response <Response>` object.

            :param method: method for the new :class:`Request` object.
            :param url: URL for the new :class:`Request` object.
            :param params: (optional) Dictionary or bytes to be sent in the query
                string for the :class:`Request`.
            :param data: (optional) Dictionary, list of tuples, bytes, or file-like
                object to send in the body of the :class:`Request`.
            :param json: (optional) json to send in the body of the
                :class:`Request`.
            :param headers: (optional) Dictionary of HTTP Headers to send with the
                :class:`Request`.
            :param cookies: (optional) Dict or CookieJar object to send with the
                :class:`Request`.
            :param files: (optional) Dictionary of ``'filename': file-like-objects``
                for multipart encoding upload.
            :param auth: (optional) Auth tuple or callable to enable
                Basic/Digest/Custom HTTP Auth.
            :param timeout: (optional) How long to wait for the server to send
                data before giving up, as a float, or a :ref:`(connect timeout,
                read timeout) <timeouts>` tuple.
            :type timeout: float or tuple
            :param allow_redirects: (optional) Set to True by default.
            :type allow_redirects: bool
            :param proxies: (optional) Dictionary mapping protocol or protocol and
                hostname to the URL of the proxy.
            :param stream: (optional) whether to immediately download the response
                content. Defaults to ``False``.
            :param verify: (optional) Either a boolean, in which case it controls
                whether we verify the server's TLS certificate, or a string, in
                which case it must be a path to a CA bundle to use. Defaults to
                ``True``. When set to ``False``, requests will accept any TLS
                certificate presented by the server, and will ignore hostname
                mismatches and/or expired certificates, which will make your
                application vulnerable to man-in-the-middle (MitM) attacks.
                Setting verify to ``False`` may be useful during local development
                or testing.
            :param cert: (optional) if String, path to ssl client cert file (.pem).
                If Tuple, ('cert', 'key') pair.
            :rtype: requests.Response
            """
            # Create the Request.
            req = Request(
                method=method.upper(),
                url=url,
                headers=headers,
                files=files,
                data=data or {},
                json=json,
                params=params or {},
                auth=auth,
                cookies=cookies,
                hooks=hooks,
            )
            prep = self.prepare_request(req)

            proxies = proxies or {}

            settings = self.merge_environment_settings(
                prep.url, proxies, stream, verify, cert
            )

            # Send the request.
            send_kwargs = {
                "timeout": timeout,
                "allow_redirects": allow_redirects,
            }
            send_kwargs.update(settings)
            resp = self.send(prep, **send_kwargs)

            return resp

        def get(self, url, **kwargs):
            r"""Sends a GET request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            kwargs.setdefault("allow_redirects", True)
            return self.request("GET", url, **kwargs)

        def options(self, url, **kwargs):
            r"""Sends an OPTIONS request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            kwargs.setdefault("allow_redirects", True)
            return self.request("OPTIONS", url, **kwargs)

        def head(self, url, **kwargs):
            r"""Sends a HEAD request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            kwargs.setdefault("allow_redirects", False)
            return self.request("HEAD", url, **kwargs)

        def post(self, url, data=None, json=None, **kwargs):
            r"""Sends a POST request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param data: (optional) Dictionary, list of tuples, bytes, or file-like
                object to send in the body of the :class:`Request`.
            :param json: (optional) json to send in the body of the :class:`Request`.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            return self.request("POST", url, data=data, json=json, **kwargs)

        def put(self, url, data=None, **kwargs):
            r"""Sends a PUT request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param data: (optional) Dictionary, list of tuples, bytes, or file-like
                object to send in the body of the :class:`Request`.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            return self.request("PUT", url, data=data, **kwargs)

        def patch(self, url, data=None, **kwargs):
            r"""Sends a PATCH request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param data: (optional) Dictionary, list of tuples, bytes, or file-like
                object to send in the body of the :class:`Request`.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            return self.request("PATCH", url, data=data, **kwargs)

        def delete(self, url, **kwargs):
            r"""Sends a DELETE request. Returns :class:`Response` object.

            :param url: URL for the new :class:`Request` object.
            :param \*\*kwargs: Optional arguments that ``request`` takes.
            :rtype: requests.Response
            """
            return self.request("DELETE", url, **kwargs)

        def send(self, request, **kwargs):
            """Send a given PreparedRequest.

            :rtype: requests.Response
            """
            # Set defaults that the hooks can utilize to ensure they always have
            # the correct parameters to reproduce the previous request.
            kwargs.setdefault("stream", self.stream)
            kwargs.setdefault("verify", self.verify)
            kwargs.setdefault("cert", self.cert)
            if "proxies" not in kwargs:
                kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env)

            # It's possible that users might accidentally send a Request object.
            # Guard against that specific failure case.
            if isinstance(request, Request):
                raise ValueError("You can only send PreparedRequests.")

            # Set up variables needed for resolve_redirects and dispatching of hooks
            allow_redirects = kwargs.pop("allow_redirects", True)
            stream = kwargs.get("stream")
            hooks = request.hooks

            # Get the appropriate adapter to use
            adapter = self.get_adapter(url=request.url)

            # Start time (approximately) of the request
            start = preferred_clock()

            # Send the request
            r = adapter.send(request, **kwargs)

            # Total elapsed time of the request (approximately)
            elapsed = preferred_clock() - start
            r.elapsed = timedelta(seconds=elapsed)

            # Response manipulation hooks
            r = dispatch_hook("response", hooks, r, **kwargs)

            # Persist cookies
            if r.history:
                # If the hooks create history then we want those cookies too
                for resp in r.history:
                    extract_cookies_to_jar(self.cookies, resp.request, resp.raw)

            extract_cookies_to_jar(self.cookies, request, r.raw)

            # Resolve redirects if allowed.
            if allow_redirects:
                # Redirect resolving generator.
                gen = self.resolve_redirects(r, request, **kwargs)
                history = [resp for resp in gen]
            else:
                history = []

            # Shuffle things around if there's history.
            if history:
                # Insert the first (original) request at the start
                history.insert(0, r)
                # Get the last request made
                r = history.pop()
                r.history = history

            # If redirects aren't being followed, store the response on the Request for Response.next().
            if not allow_redirects:
                try:
                    r._next = next(
                        self.resolve_redirects(r, request, yield_requests=True, **kwargs)
                    )
                except StopIteration:
                    pass

            if not stream:
                r.content

            return r

        def merge_environment_settings(self, url, proxies, stream, verify, cert):
            """
            Check the environment and merge it with some settings.

            :rtype: dict
            """
            # Gather clues from the surrounding environment.
            if self.trust_env:
                # Set environment's proxies.
                no_proxy = proxies.get("no_proxy") if proxies is not None else None
                env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
                for (k, v) in env_proxies.items():
                    proxies.setdefault(k, v)

                # Look for requests environment configuration
                # and be compatible with cURL.
                if verify is True or verify is None:
                    verify = (
                        os.environ.get("REQUESTS_CA_BUNDLE")
                        or os.environ.get("CURL_CA_BUNDLE")
                        or verify
                    )

            # Merge all the kwargs.
            proxies = merge_setting(proxies, self.proxies)
            stream = merge_setting(stream, self.stream)
            verify = merge_setting(verify, self.verify)
            cert = merge_setting(cert, self.cert)

            return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert}

        def get_adapter(self, url):
            """
            Returns the appropriate connection adapter for the given URL.

            :rtype: requests.adapters.BaseAdapter
            """
            for (prefix, adapter) in self.adapters.items():
                if url.lower().startswith(prefix.lower()):
                    return adapter

            # Nothing matches :-/
            raise InvalidSchema(f"No connection adapters were found for {url!r}")

        def close(self):
            """Closes all adapters and as such the session"""
            for v in self.adapters.values():
                v.close()

        def mount(self, prefix, adapter):
            """Registers a connection adapter to a prefix.

            Adapters are sorted in descending order by prefix length.
            """
            self.adapters[prefix] = adapter
            keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]

            for key in keys_to_move:
                self.adapters[key] = self.adapters.pop(key)

        def __getstate__(self):
            state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
            return state

        def __setstate__(self, state):
            for attr, value in state.items():
                setattr(self, attr, value)

signature: ()
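Beyond the docstring's examples, the practical point of a Session is that headers, cookies, and pooled connections persist across calls; a short illustration:

    import requests

    with requests.Session() as s:
        s.headers.update({'X-Api-Key': 'demo-key'})         # sent on every request
        s.get('https://httpbin.org/cookies/set?token=abc')  # cookie stored in s.cookies
        r = s.get('https://httpbin.org/cookies')
        print(r.json())  # {'cookies': {'token': 'abc'}}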
index: 9,743
package: requests.sessions
name: __getstate__
docstring: null
code:

    def __getstate__(self):
        state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
        return state

signature: (self)
index: 9,744
package: requests.sessions
name: __init__
docstring: null
code:

    def __init__(self):
        #: A case-insensitive dictionary of headers to be sent on each
        #: :class:`Request <Request>` sent from this
        #: :class:`Session <Session>`.
        self.headers = default_headers()

        #: Default Authentication tuple or object to attach to
        #: :class:`Request <Request>`.
        self.auth = None

        #: Dictionary mapping protocol or protocol and host to the URL of the proxy
        #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
        #: be used on each :class:`Request <Request>`.
        self.proxies = {}

        #: Event-handling hooks.
        self.hooks = default_hooks()

        #: Dictionary of querystring data to attach to each
        #: :class:`Request <Request>`. The dictionary values may be lists for
        #: representing multivalued query parameters.
        self.params = {}

        #: Stream response content default.
        self.stream = False

        #: SSL Verification default.
        #: Defaults to `True`, requiring requests to verify the TLS certificate at the
        #: remote end.
        #: If verify is set to `False`, requests will accept any TLS certificate
        #: presented by the server, and will ignore hostname mismatches and/or
        #: expired certificates, which will make your application vulnerable to
        #: man-in-the-middle (MitM) attacks.
        #: Only set this to `False` for testing.
        self.verify = True

        #: SSL client certificate default, if String, path to ssl client
        #: cert file (.pem). If Tuple, ('cert', 'key') pair.
        self.cert = None

        #: Maximum number of redirects allowed. If the request exceeds this
        #: limit, a :class:`TooManyRedirects` exception is raised.
        #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is
        #: 30.
        self.max_redirects = DEFAULT_REDIRECT_LIMIT

        #: Trust environment settings for proxy configuration, default
        #: authentication and similar.
        self.trust_env = True

        #: A CookieJar containing all currently outstanding cookies set on this
        #: session. By default it is a
        #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but
        #: may be any other ``cookielib.CookieJar`` compatible object.
        self.cookies = cookiejar_from_dict({})

        # Default connection adapters.
        self.adapters = OrderedDict()
        self.mount("https://", HTTPAdapter())
        self.mount("http://", HTTPAdapter())

signature: (self)
index: 9,762
package: requests.sessions
name: request
docstring:

    Constructs a :class:`Request <Request>`, prepares it and sends it.
    Returns :class:`Response <Response>` object.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query
        string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the
        :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the
        :class:`Request`.
    :param files: (optional) Dictionary of ``'filename': file-like-objects``
        for multipart encoding upload.
    :param auth: (optional) Auth tuple or callable to enable
        Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How long to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Set to True by default.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol or protocol and
        hostname to the URL of the proxy.
    :param stream: (optional) whether to immediately download the response
        content. Defaults to ``False``.
    :param verify: (optional) Either a boolean, in which case it controls
        whether we verify the server's TLS certificate, or a string, in which
        case it must be a path to a CA bundle to use. Defaults to ``True``.
        When set to ``False``, requests will accept any TLS certificate
        presented by the server, and will ignore hostname mismatches and/or
        expired certificates, which will make your application vulnerable to
        man-in-the-middle (MitM) attacks. Setting verify to ``False`` may be
        useful during local development or testing.
    :param cert: (optional) if String, path to ssl client cert file (.pem).
        If Tuple, ('cert', 'key') pair.
    :rtype: requests.Response

code:

    def request(
        self,
        method,
        url,
        params=None,
        data=None,
        headers=None,
        cookies=None,
        files=None,
        auth=None,
        timeout=None,
        allow_redirects=True,
        proxies=None,
        hooks=None,
        stream=None,
        verify=None,
        cert=None,
        json=None,
    ):
        """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.

        (Docstring parameters as listed in the docstring field above.)
        """
        # Create the Request.
        req = Request(
            method=method.upper(),
            url=url,
            headers=headers,
            files=files,
            data=data or {},
            json=json,
            params=params or {},
            auth=auth,
            cookies=cookies,
            hooks=hooks,
        )
        prep = self.prepare_request(req)

        proxies = proxies or {}

        settings = self.merge_environment_settings(
            prep.url, proxies, stream, verify, cert
        )

        # Send the request.
        send_kwargs = {
            "timeout": timeout,
            "allow_redirects": allow_redirects,
        }
        send_kwargs.update(settings)
        resp = self.send(prep, **send_kwargs)

        return resp

signature: (self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None)
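A minimal call showing the (connect, read) timeout form the docstring describes:

    import requests

    s = requests.Session()
    # 3.05s to establish the connection, 27s for the server to start sending data.
    r = s.request('GET', 'https://httpbin.org/get', params={'q': 'x'}, timeout=(3.05, 27))
    print(r.status_code, r.url)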
index: 9,766
package: cloudscraper.user_agent
name: User_Agent
docstring: null
code:

    class User_Agent():

        # ------------------------------------------------------------------------------- #

        def __init__(self, *args, **kwargs):
            self.headers = None
            self.cipherSuite = []
            self.loadUserAgent(*args, **kwargs)

        # ------------------------------------------------------------------------------- #

        def filterAgents(self, user_agents):
            filtered = {}

            if self.mobile:
                if self.platform in user_agents['mobile'] and user_agents['mobile'][self.platform]:
                    filtered.update(user_agents['mobile'][self.platform])

            if self.desktop:
                if self.platform in user_agents['desktop'] and user_agents['desktop'][self.platform]:
                    filtered.update(user_agents['desktop'][self.platform])

            return filtered

        # ------------------------------------------------------------------------------- #

        def tryMatchCustom(self, user_agents):
            for device_type in user_agents['user_agents']:
                for platform in user_agents['user_agents'][device_type]:
                    for browser in user_agents['user_agents'][device_type][platform]:
                        if re.search(re.escape(self.custom), ' '.join(user_agents['user_agents'][device_type][platform][browser])):
                            self.headers = user_agents['headers'][browser]
                            self.headers['User-Agent'] = self.custom
                            self.cipherSuite = user_agents['cipherSuite'][browser]
                            return True
            return False

        # ------------------------------------------------------------------------------- #

        def loadUserAgent(self, *args, **kwargs):
            self.browser = kwargs.pop('browser', None)

            self.platforms = ['linux', 'windows', 'darwin', 'android', 'ios']
            self.browsers = ['chrome', 'firefox']

            if isinstance(self.browser, dict):
                self.custom = self.browser.get('custom', None)
                self.platform = self.browser.get('platform', None)
                self.desktop = self.browser.get('desktop', True)
                self.mobile = self.browser.get('mobile', True)
                self.browser = self.browser.get('browser', None)
            else:
                self.custom = kwargs.pop('custom', None)
                self.platform = kwargs.pop('platform', None)
                self.desktop = kwargs.pop('desktop', True)
                self.mobile = kwargs.pop('mobile', True)

            if not self.desktop and not self.mobile:
                sys.tracebacklimit = 0
                raise RuntimeError("Sorry you can't have mobile and desktop disabled at the same time.")

            with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as fp:
                user_agents = json.load(
                    fp,
                    object_pairs_hook=OrderedDict
                )

            if self.custom:
                if not self.tryMatchCustom(user_agents):
                    self.cipherSuite = [
                        ssl._DEFAULT_CIPHERS,
                        '!AES128-SHA',
                        '!ECDHE-RSA-AES256-SHA',
                    ]
                    self.headers = OrderedDict([
                        ('User-Agent', self.custom),
                        ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'),
                        ('Accept-Language', 'en-US,en;q=0.9'),
                        ('Accept-Encoding', 'gzip, deflate, br')
                    ])
            else:
                if self.browser and self.browser not in self.browsers:
                    sys.tracebacklimit = 0
                    raise RuntimeError(f'Sorry "{self.browser}" browser is not valid, valid browsers are [{", ".join(self.browsers)}].')

                if not self.platform:
                    self.platform = random.SystemRandom().choice(self.platforms)

                if self.platform not in self.platforms:
                    sys.tracebacklimit = 0
                    raise RuntimeError(f'Sorry the platform "{self.platform}" is not valid, valid platforms are [{", ".join(self.platforms)}]')

                filteredAgents = self.filterAgents(user_agents['user_agents'])

                if not self.browser:
                    # has to be at least one in there...
                    while not filteredAgents.get(self.browser):
                        self.browser = random.SystemRandom().choice(list(filteredAgents.keys()))

                if not filteredAgents[self.browser]:
                    sys.tracebacklimit = 0
                    raise RuntimeError(f'Sorry "{self.browser}" browser was not found with a platform of "{self.platform}".')

                self.cipherSuite = user_agents['cipherSuite'][self.browser]
                self.headers = user_agents['headers'][self.browser]

                self.headers['User-Agent'] = random.SystemRandom().choice(filteredAgents[self.browser])

            if not kwargs.get('allow_brotli', False) and 'br' in self.headers['Accept-Encoding']:
                self.headers['Accept-Encoding'] = ','.join([
                    encoding for encoding in self.headers['Accept-Encoding'].split(',') if encoding.strip() != 'br'
                ]).strip()

signature: (*args, **kwargs)
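The class is normally constructed by cloudscraper itself, but it can be exercised directly; a sketch assuming cloudscraper is installed (the keyword set mirrors the ``browser`` dict handled by loadUserAgent above):

    from cloudscraper.user_agent import User_Agent

    # Pick a random desktop Chrome user agent on Windows; with the default
    # allow_brotli=False, 'br' is stripped from Accept-Encoding.
    ua = User_Agent(browser={'browser': 'chrome', 'platform': 'windows', 'mobile': False})
    print(ua.headers['User-Agent'])
    print(ua.headers['Accept-Encoding'])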
index: 9,767
package: cloudscraper.user_agent
name: __init__
docstring: null
code:

    def __init__(self, *args, **kwargs):
        self.headers = None
        self.cipherSuite = []
        self.loadUserAgent(*args, **kwargs)

signature: (self, *args, **kwargs)
index: 9,768
package: cloudscraper.user_agent
name: filterAgents
docstring: null
code:

    def filterAgents(self, user_agents):
        filtered = {}

        if self.mobile:
            if self.platform in user_agents['mobile'] and user_agents['mobile'][self.platform]:
                filtered.update(user_agents['mobile'][self.platform])

        if self.desktop:
            if self.platform in user_agents['desktop'] and user_agents['desktop'][self.platform]:
                filtered.update(user_agents['desktop'][self.platform])

        return filtered

signature: (self, user_agents)
index: 9,769
package: cloudscraper.user_agent
name: loadUserAgent
docstring: null
code:

    def loadUserAgent(self, *args, **kwargs):
        self.browser = kwargs.pop('browser', None)

        self.platforms = ['linux', 'windows', 'darwin', 'android', 'ios']
        self.browsers = ['chrome', 'firefox']

        if isinstance(self.browser, dict):
            self.custom = self.browser.get('custom', None)
            self.platform = self.browser.get('platform', None)
            self.desktop = self.browser.get('desktop', True)
            self.mobile = self.browser.get('mobile', True)
            self.browser = self.browser.get('browser', None)
        else:
            self.custom = kwargs.pop('custom', None)
            self.platform = kwargs.pop('platform', None)
            self.desktop = kwargs.pop('desktop', True)
            self.mobile = kwargs.pop('mobile', True)

        if not self.desktop and not self.mobile:
            sys.tracebacklimit = 0
            raise RuntimeError("Sorry you can't have mobile and desktop disabled at the same time.")

        with open(os.path.join(os.path.dirname(__file__), 'browsers.json'), 'r') as fp:
            user_agents = json.load(
                fp,
                object_pairs_hook=OrderedDict
            )

        if self.custom:
            if not self.tryMatchCustom(user_agents):
                self.cipherSuite = [
                    ssl._DEFAULT_CIPHERS,
                    '!AES128-SHA',
                    '!ECDHE-RSA-AES256-SHA',
                ]
                self.headers = OrderedDict([
                    ('User-Agent', self.custom),
                    ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'),
                    ('Accept-Language', 'en-US,en;q=0.9'),
                    ('Accept-Encoding', 'gzip, deflate, br')
                ])
        else:
            if self.browser and self.browser not in self.browsers:
                sys.tracebacklimit = 0
                raise RuntimeError(f'Sorry "{self.browser}" browser is not valid, valid browsers are [{", ".join(self.browsers)}].')

            if not self.platform:
                self.platform = random.SystemRandom().choice(self.platforms)

            if self.platform not in self.platforms:
                sys.tracebacklimit = 0
                raise RuntimeError(f'Sorry the platform "{self.platform}" is not valid, valid platforms are [{", ".join(self.platforms)}]')

            filteredAgents = self.filterAgents(user_agents['user_agents'])

            if not self.browser:
                # has to be at least one in there...
                while not filteredAgents.get(self.browser):
                    self.browser = random.SystemRandom().choice(list(filteredAgents.keys()))

            if not filteredAgents[self.browser]:
                sys.tracebacklimit = 0
                raise RuntimeError(f'Sorry "{self.browser}" browser was not found with a platform of "{self.platform}".')

            self.cipherSuite = user_agents['cipherSuite'][self.browser]
            self.headers = user_agents['headers'][self.browser]

            self.headers['User-Agent'] = random.SystemRandom().choice(filteredAgents[self.browser])

        if not kwargs.get('allow_brotli', False) and 'br' in self.headers['Accept-Encoding']:
            self.headers['Accept-Encoding'] = ','.join([
                encoding for encoding in self.headers['Accept-Encoding'].split(',') if encoding.strip() != 'br'
            ]).strip()

signature: (self, *args, **kwargs)
index: 9,770
package: cloudscraper.user_agent
name: tryMatchCustom
docstring: null
code:

    def tryMatchCustom(self, user_agents):
        for device_type in user_agents['user_agents']:
            for platform in user_agents['user_agents'][device_type]:
                for browser in user_agents['user_agents'][device_type][platform]:
                    if re.search(re.escape(self.custom), ' '.join(user_agents['user_agents'][device_type][platform][browser])):
                        self.headers = user_agents['headers'][browser]
                        self.headers['User-Agent'] = self.custom
                        self.cipherSuite = user_agents['cipherSuite'][browser]
                        return True
        return False

signature: (self, user_agents)
index: 9,783
package: exhale
name: cleanup_files
docstring: null
code:

    def cleanup_files(app, env, docname):
        raise RuntimeError("you made it.")

signature: (app, env, docname)
index: 9,784
package: exhale
name: environment_ready
docstring: null
code:

    def environment_ready(app):
        # Defer importing configs until sphinx is running.
        from . import configs
        from . import utils
        from . import deploy

        # First, setup the extension and verify all of the configurations.
        configs.apply_sphinx_configurations(app)
        ####### Next, perform any cleanup

        # Generate the full API!
        try:
            deploy.explode()
        except:
            utils.fancyError("Exhale: could not generate reStructuredText documents :/")

signature: (app)
index: 9,785
package: exhale
name: setup
docstring: null
code:

    def setup(app):
        app.setup_extension("breathe")
        app.add_config_value("exhale_args", {}, "env")

        app.connect("builder-inited", environment_ready)
        # app.connect("env-purge-doc", cleanup_files)

        return {
            "version": __version__,
            # Because Exhale hooks into / generates *BEFORE* any reading or writing occurs,
            # it is parallel safe by default.
            "parallel_read_safe": True,
            "parallel_write_safe": True
        }

signature: (app)
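Downstream, this registration means a Sphinx project enables Exhale from conf.py; a minimal sketch based on Exhale's documented configuration (paths and titles are placeholders, and the exact ``exhale_args`` key set varies by version):

    # conf.py (hypothetical project layout)
    extensions = ['breathe', 'exhale']

    breathe_projects = {'MyProject': './_doxygen/xml'}
    breathe_default_project = 'MyProject'

    exhale_args = {
        'containmentFolder': './api',
        'rootFileName': 'library_root.rst',
        'rootFileTitle': 'Library API',
        'doxygenStripFromPath': '..',
    }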
index: 9,786
package: oc_ocdm.reader
name: Reader
docstring: null
code:

    class Reader(object):

        def __init__(self, repok: Reporter = None, reperr: Reporter = None,
                     context_map: Dict[str, Any] = None) -> None:
            if context_map is not None:
                self.context_map: Dict[str, Any] = context_map
            else:
                self.context_map: Dict[str, Any] = {}
            for context_url in self.context_map:
                ctx_file_path: Any = self.context_map[context_url]
                if type(ctx_file_path) == str and os.path.isfile(ctx_file_path):
                    # This expensive operation is done only when it's really needed
                    with open(ctx_file_path, 'rt', encoding='utf-8') as ctx_f:
                        self.context_map[context_url] = json.load(ctx_f)

            if repok is None:
                self.repok: Reporter = Reporter(prefix="[Reader: INFO] ")
            else:
                self.repok: Reporter = repok

            if reperr is None:
                self.reperr: Reporter = Reporter(prefix="[Reader: ERROR] ")
            else:
                self.reperr: Reporter = reperr

        def load(self, rdf_file_path: str) -> Optional[ConjunctiveGraph]:
            self.repok.new_article()
            self.reperr.new_article()

            loaded_graph: Optional[ConjunctiveGraph] = None
            if os.path.isfile(rdf_file_path):
                try:
                    loaded_graph = self._load_graph(rdf_file_path)
                except Exception as e:
                    self.reperr.add_sentence("[1] "
                                             "It was impossible to handle the format used for "
                                             "storing the file (stored in the temporary path) "
                                             f"'{rdf_file_path}'. Additional details: {e}")
            else:
                self.reperr.add_sentence("[2] "
                                         f"The file specified ('{rdf_file_path}') doesn't exist.")
            return loaded_graph

        def _load_graph(self, file_path: str) -> ConjunctiveGraph:
            formats = ["json-ld", "rdfxml", "turtle", "trig", "nt11", "nquads"]
            loaded_graph = ConjunctiveGraph()

            if file_path.endswith('.zip'):
                try:
                    with ZipFile(file=file_path, mode="r") as archive:
                        for zf_name in archive.namelist():
                            with archive.open(zf_name) as f:
                                if self._try_parse(loaded_graph, f, formats):
                                    return loaded_graph
                except Exception as e:
                    raise IOError(f"Error opening or reading zip file '{file_path}': {e}")
            else:
                try:
                    with open(file_path, 'rt', encoding='utf-8') as f:
                        if self._try_parse(loaded_graph, f, formats):
                            return loaded_graph
                except Exception as e:
                    raise IOError(f"Error opening or reading file '{file_path}': {e}")

            raise IOError(f"It was impossible to load the file '{file_path}' with supported formats.")

        def _try_parse(self, graph: ConjunctiveGraph, file_obj, formats: List[str]) -> bool:
            for cur_format in formats:
                file_obj.seek(0)  # Reset file pointer to the beginning for each new attempt
                try:
                    if cur_format == "json-ld":
                        json_ld_file = json.load(file_obj)
                        if isinstance(json_ld_file, dict):
                            json_ld_file = [json_ld_file]
                        for json_ld_resource in json_ld_file:
                            if "@context" in json_ld_resource and json_ld_resource["@context"] in self.context_map:
                                json_ld_resource["@context"] = self.context_map[json_ld_resource["@context"]]["@context"]
                        data = json.dumps(json_ld_file, ensure_ascii=False)
                        graph.parse(data=data, format=cur_format)
                    else:
                        graph.parse(file=file_obj, format=cur_format)
                    return True  # Success, no need to try other formats
                except Exception as e:
                    continue  # Try the next format
            return False  # None of the formats succeeded

        @staticmethod
        def get_graph_from_subject(graph: Graph, subject: URIRef) -> Graph:
            g: Graph = Graph(identifier=graph.identifier)
            for p, o in graph.predicate_objects(subject, unique=True):
                g.add((subject, p, o))
            return g

        @staticmethod
        def _extract_subjects(graph: Graph) -> Set[URIRef]:
            subjects: Set[URIRef] = set()
            for s in graph.subjects(unique=True):
                subjects.add(s)
            return subjects

        def graph_validation(self, graph: Graph, closed: bool = False) -> Graph:
            valid_graph: Graph = Graph(identifier=graph.identifier)
            sg = Graph()
            if closed:
                sg.parse(os.path.join('oc_ocdm', 'resources', 'shacle_closed.ttl'))
            else:
                sg.parse(os.path.join('oc_ocdm', 'resources', 'shacle.ttl'))
            _, report_g, _ = validate(graph,
                                      shacl_graph=sg,
                                      ont_graph=None,
                                      inference=None,
                                      abort_on_first=False,
                                      allow_infos=False,
                                      allow_warnings=False,
                                      meta_shacl=False,
                                      advanced=False,
                                      js=False,
                                      debug=False)
            invalid_nodes = set()
            for triple in report_g.triples((None, URIRef('http://www.w3.org/ns/shacl#focusNode'), None)):
                invalid_nodes.add(triple[2])
            for subject in self._extract_subjects(graph):
                if subject not in invalid_nodes:
                    for valid_subject_triple in graph.triples((subject, None, None)):
                        valid_graph.add(valid_subject_triple)
            return valid_graph

        @staticmethod
        def import_entities_from_graph(g_set: GraphSet, results: List[Dict], resp_agent: str,
                                       enable_validation: bool = False, closed: bool = False) -> List[GraphEntity]:
            graph = build_graph_from_results(results)
            if enable_validation:
                reader = Reader()
                graph = reader.graph_validation(graph, closed)
            imported_entities: List[GraphEntity] = []
            for subject in Reader._extract_subjects(graph):
                types = []
                for o in graph.objects(subject, RDF.type):
                    types.append(o)

                # ReferenceAnnotation
                if GraphEntity.iri_note in types:
                    imported_entities.append(g_set.add_an(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # AgentRole
                elif GraphEntity.iri_role_in_time in types:
                    imported_entities.append(g_set.add_ar(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # BibliographicReference
                elif GraphEntity.iri_bibliographic_reference in types:
                    imported_entities.append(g_set.add_be(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # BibliographicResource
                elif GraphEntity.iri_expression in types:
                    imported_entities.append(g_set.add_br(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # Citation
                elif GraphEntity.iri_citation in types:
                    imported_entities.append(g_set.add_ci(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # DiscourseElement
                elif GraphEntity.iri_discourse_element in types:
                    imported_entities.append(g_set.add_de(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # Identifier
                elif GraphEntity.iri_identifier in types:
                    imported_entities.append(g_set.add_id(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # PointerList
                elif GraphEntity.iri_singleloc_pointer_list in types:
                    imported_entities.append(g_set.add_pl(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # ResponsibleAgent
                elif GraphEntity.iri_agent in types:
                    imported_entities.append(g_set.add_ra(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # ResourceEmbodiment
                elif GraphEntity.iri_manifestation in types:
                    imported_entities.append(g_set.add_re(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
                # ReferencePointer
                elif GraphEntity.iri_intextref_pointer in types:
                    imported_entities.append(g_set.add_rp(resp_agent=resp_agent, res=subject,
                                                          preexisting_graph=Reader.get_graph_from_subject(graph, subject)))
            return imported_entities

        @staticmethod
        def import_entity_from_triplestore(g_set: GraphSet, ts_url: str, res: URIRef, resp_agent: str,
                                           enable_validation: bool = False) -> GraphEntity:
            sparql: SPARQLWrapper = SPARQLWrapper(ts_url)
            query: str = f"SELECT ?s ?p ?o WHERE {{BIND (<{res}> AS ?s). ?s ?p ?o.}}"
            sparql.setQuery(query)
            sparql.setMethod('GET')
            sparql.setReturnFormat(JSON)
            result: ConjunctiveGraph = sparql.queryAndConvert()['results']['bindings']
            if result:
                imported_entities: List[GraphEntity] = Reader.import_entities_from_graph(g_set, result,
                                                                                         resp_agent, enable_validation)
                if len(imported_entities) <= 0:
                    raise ValueError("The requested entity was not found or was not recognized as a proper OCDM entity.")
                else:
                    return imported_entities[0]

signature: (repok: 'Reporter' = None, reperr: 'Reporter' = None, context_map: 'Dict[str, Any]' = None) -> 'None'
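A short sketch of the load path (the file name is a placeholder; oc_ocdm and rdflib must be installed):

    from oc_ocdm.reader import Reader

    reader = Reader()
    # load() tries json-ld, rdfxml, turtle, trig, nt11, and nquads in order
    # (see _load_graph below) and returns None on failure, logging via reperr.
    g = reader.load('data.json')
    if g is not None:
        print(len(g), 'triples loaded')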
9,787
oc_ocdm.reader
__init__
null
def __init__(self, repok: Reporter = None, reperr: Reporter = None, context_map: Dict[str, Any] = None) -> None: if context_map is not None: self.context_map: Dict[str, Any] = context_map else: self.context_map: Dict[str, Any] = {} for context_url in self.context_map: ctx_file_path: Any = self.context_map[context_url] if isinstance(ctx_file_path, str) and os.path.isfile(ctx_file_path): # This expensive operation is done only when it's really needed with open(ctx_file_path, 'rt', encoding='utf-8') as ctx_f: self.context_map[context_url] = json.load(ctx_f) if repok is None: self.repok: Reporter = Reporter(prefix="[Reader: INFO] ") else: self.repok: Reporter = repok if reperr is None: self.reperr: Reporter = Reporter(prefix="[Reader: ERROR] ") else: self.reperr: Reporter = reperr
(self, repok: 'Reporter' = None, reperr: 'Reporter' = None, context_map: 'Dict[str, Any]' = None) -> 'None'
9,788
oc_ocdm.reader
_extract_subjects
null
@staticmethod def _extract_subjects(graph: Graph) -> Set[URIRef]: subjects: Set[URIRef] = set() for s in graph.subjects(unique=True): subjects.add(s) return subjects
(graph: 'Graph') -> 'Set[URIRef]'
9,789
oc_ocdm.reader
_load_graph
null
def _load_graph(self, file_path: str) -> ConjunctiveGraph: formats = ["json-ld", "xml", "turtle", "trig", "nt11", "nquads"] # 'xml' is rdflib's registered parser name for RDF/XML loaded_graph = ConjunctiveGraph() if file_path.endswith('.zip'): try: with ZipFile(file=file_path, mode="r") as archive: for zf_name in archive.namelist(): with archive.open(zf_name) as f: if self._try_parse(loaded_graph, f, formats): return loaded_graph except Exception as e: raise IOError(f"Error opening or reading zip file '{file_path}': {e}") else: try: with open(file_path, 'rt', encoding='utf-8') as f: if self._try_parse(loaded_graph, f, formats): return loaded_graph except Exception as e: raise IOError(f"Error opening or reading file '{file_path}': {e}") raise IOError(f"It was impossible to load the file '{file_path}' with supported formats.")
(self, file_path: str) -> rdflib.graph.ConjunctiveGraph
9,790
oc_ocdm.reader
_try_parse
null
def _try_parse(self, graph: ConjunctiveGraph, file_obj, formats: List[str]) -> bool: for cur_format in formats: file_obj.seek(0) # Reset file pointer to the beginning for each new attempt try: if cur_format == "json-ld": json_ld_file = json.load(file_obj) if isinstance(json_ld_file, dict): json_ld_file = [json_ld_file] for json_ld_resource in json_ld_file: if "@context" in json_ld_resource and json_ld_resource["@context"] in self.context_map: json_ld_resource["@context"] = self.context_map[json_ld_resource["@context"]]["@context"] data = json.dumps(json_ld_file, ensure_ascii=False) graph.parse(data=data, format=cur_format) else: graph.parse(file=file_obj, format=cur_format) return True # Success, no need to try other formats except Exception: continue # Try the next format return False # None of the formats succeeded
(self, graph: 'ConjunctiveGraph', file_obj, formats: 'List[str]') -> 'bool'
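The method above illustrates a general fallback-parsing pattern: rewind the file object before each attempt and stop at the first serialization that parses. A minimal self-contained sketch of the same idea with plain rdflib (the format list and the file name are assumptions):

from rdflib import ConjunctiveGraph

def parse_any(path: str) -> ConjunctiveGraph:
    graph = ConjunctiveGraph()
    with open(path, "rb") as f:
        for fmt in ("json-ld", "xml", "turtle", "trig", "nt11", "nquads"):
            f.seek(0)  # rewind before every attempt
            try:
                graph.parse(file=f, format=fmt)
                return graph
            except Exception:
                continue  # try the next serialization
    raise IOError(f"No supported RDF serialization matched '{path}'")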
9,791
oc_ocdm.reader
get_graph_from_subject
null
@staticmethod def get_graph_from_subject(graph: Graph, subject: URIRef) -> Graph: g: Graph = Graph(identifier=graph.identifier) for p, o in graph.predicate_objects(subject, unique=True): g.add((subject, p, o)) return g
(graph: rdflib.graph.Graph, subject: rdflib.term.URIRef) -> rdflib.graph.Graph
9,792
oc_ocdm.reader
graph_validation
null
def graph_validation(self, graph: Graph, closed: bool = False) -> Graph: valid_graph: Graph = Graph(identifier=graph.identifier) sg = Graph() if closed: sg.parse(os.path.join('oc_ocdm', 'resources', 'shacle_closed.ttl')) else: sg.parse(os.path.join('oc_ocdm', 'resources', 'shacle.ttl')) _, report_g, _ = validate(graph, shacl_graph=sg, ont_graph=None, inference=None, abort_on_first=False, allow_infos=False, allow_warnings=False, meta_shacl=False, advanced=False, js=False, debug=False) invalid_nodes = set() for triple in report_g.triples((None, URIRef('http://www.w3.org/ns/shacl#focusNode'), None)): invalid_nodes.add(triple[2]) for subject in self._extract_subjects(graph): if subject not in invalid_nodes: for valid_subject_triple in graph.triples((subject, None, None)): valid_graph.add(valid_subject_triple) return valid_graph
(self, graph: rdflib.graph.Graph, closed: bool = False) -> rdflib.graph.Graph
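A minimal usage sketch for the SHACL-based filtering above; the input file is an assumption, and note that the shapes files are resolved relative to the current working directory:

from rdflib import Graph
from oc_ocdm.reader import Reader

g = Graph()
g.parse("ocdm_data.ttl")  # assumed input file
valid = Reader().graph_validation(g, closed=False)
print(f"{len(valid)} of {len(g)} triples belong to subjects that passed validation")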
9,793
oc_ocdm.reader
import_entities_from_graph
null
@staticmethod def import_entities_from_graph(g_set: GraphSet, results: List[Dict], resp_agent: str, enable_validation: bool = False, closed: bool = False) -> List[GraphEntity]: graph = build_graph_from_results(results) if enable_validation: reader = Reader() graph = reader.graph_validation(graph, closed) imported_entities: List[GraphEntity] = [] for subject in Reader._extract_subjects(graph): types = [] for o in graph.objects(subject, RDF.type): types.append(o) # ReferenceAnnotation if GraphEntity.iri_note in types: imported_entities.append(g_set.add_an(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # AgentRole elif GraphEntity.iri_role_in_time in types: imported_entities.append(g_set.add_ar(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # BibliographicReference elif GraphEntity.iri_bibliographic_reference in types: imported_entities.append(g_set.add_be(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # BibliographicResource elif GraphEntity.iri_expression in types: imported_entities.append(g_set.add_br(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # Citation elif GraphEntity.iri_citation in types: imported_entities.append(g_set.add_ci(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # DiscourseElement elif GraphEntity.iri_discourse_element in types: imported_entities.append(g_set.add_de(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # Identifier elif GraphEntity.iri_identifier in types: imported_entities.append(g_set.add_id(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # PointerList elif GraphEntity.iri_singleloc_pointer_list in types: imported_entities.append(g_set.add_pl(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # ResponsibleAgent elif GraphEntity.iri_agent in types: imported_entities.append(g_set.add_ra(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # ResourceEmbodiment elif GraphEntity.iri_manifestation in types: imported_entities.append(g_set.add_re(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) # ReferencePointer elif GraphEntity.iri_intextref_pointer in types: imported_entities.append(g_set.add_rp(resp_agent=resp_agent, res=subject, preexisting_graph=Reader.get_graph_from_subject(graph, subject))) return imported_entities
(g_set: 'GraphSet', results: 'List[Dict]', resp_agent: 'str', enable_validation: 'bool' = False, closed: 'bool' = False) -> 'List[GraphEntity]'
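A hedged sketch of feeding the type dispatcher above with SPARQL JSON bindings; the GraphSet base IRI and the responsible agent are placeholder assumptions:

from oc_ocdm.graph import GraphSet
from oc_ocdm.reader import Reader

results = [{  # one SPARQL JSON binding (s/p/o keys): an rdf:type triple for an agent
    "s": {"type": "uri", "value": "https://w3id.org/oc/meta/ra/0601"},
    "p": {"type": "uri", "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type"},
    "o": {"type": "uri", "value": "http://xmlns.com/foaf/0.1/Agent"},
}]
g_set = GraphSet("https://w3id.org/oc/meta/")  # assumed base IRI
entities = Reader.import_entities_from_graph(
    g_set, results,
    resp_agent="https://orcid.org/0000-0000-0000-0000",  # placeholder agent
)
for entity in entities:
    print(entity.res)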
9,794
oc_ocdm.reader
import_entity_from_triplestore
null
@staticmethod def import_entity_from_triplestore(g_set: GraphSet, ts_url: str, res: URIRef, resp_agent: str, enable_validation: bool = False) -> GraphEntity: sparql: SPARQLWrapper = SPARQLWrapper(ts_url) query: str = f"SELECT ?s ?p ?o WHERE {{BIND (<{res}> AS ?s). ?s ?p ?o.}}" sparql.setQuery(query) sparql.setMethod('GET') sparql.setReturnFormat(JSON) result: List[Dict] = sparql.queryAndConvert()['results']['bindings'] if result: imported_entities: List[GraphEntity] = Reader.import_entities_from_graph(g_set, result, resp_agent, enable_validation) if len(imported_entities) <= 0: raise ValueError("The requested entity was not found or was not recognized as a proper OCDM entity.") else: return imported_entities[0]
(g_set: 'GraphSet', ts_url: 'str', res: 'URIRef', resp_agent: 'str', enable_validation: 'bool' = False) -> 'GraphEntity'
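Usage sketch for the triplestore import above; the endpoint URL and the resource IRI are placeholder assumptions:

from rdflib import URIRef
from oc_ocdm.graph import GraphSet
from oc_ocdm.reader import Reader

g_set = GraphSet("https://w3id.org/oc/meta/")  # assumed base IRI
br = Reader.import_entity_from_triplestore(
    g_set,
    "http://localhost:8890/sparql",              # assumed SPARQL endpoint
    URIRef("https://w3id.org/oc/meta/br/0601"),  # assumed resource IRI
    resp_agent="https://orcid.org/0000-0000-0000-0000",
)
print(br.res)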
9,795
oc_ocdm.reader
load
null
def load(self, rdf_file_path: str) -> Optional[ConjunctiveGraph]: self.repok.new_article() self.reperr.new_article() loaded_graph: Optional[ConjunctiveGraph] = None if os.path.isfile(rdf_file_path): try: loaded_graph = self._load_graph(rdf_file_path) except Exception as e: self.reperr.add_sentence("[1] " "It was impossible to handle the format used for " "storing the file (stored in the temporary path) " f"'{rdf_file_path}'. Additional details: {e}") else: self.reperr.add_sentence("[2] " f"The file specified ('{rdf_file_path}') doesn't exist.") return loaded_graph
(self, rdf_file_path: 'str') -> 'Optional[ConjunctiveGraph]'
9,796
oc_ocdm.storer
Storer
null
class Storer(object): def __init__(self, abstract_set: AbstractSet, repok: Reporter = None, reperr: Reporter = None, context_map: Dict[str, Any] = None, default_dir: str = "_", dir_split: int = 0, n_file_item: int = 1, output_format: str = "json-ld", zip_output: bool = False, modified_entities: set = None) -> None: # We only accept format strings that: # 1. are supported by rdflib # 2. correspond to an output format which is effectively either NT or NQ # The only exception to this rule is the 'json-ld' format, which is the default value of 'output_format'. supported_formats: Set[str] = {'application/n-triples', 'ntriples', 'nt', 'nt11', 'application/n-quads', 'nquads', 'json-ld'} if output_format not in supported_formats: # The parameter must be used here: 'self.output_format' is not yet set at this point raise ValueError(f"Given output_format '{output_format}' is not supported." f" Available formats: {supported_formats}.") else: self.output_format: str = output_format self.zip_output = zip_output self.dir_split: int = dir_split self.n_file_item: int = n_file_item self.default_dir: str = default_dir if default_dir != "" else "_" self.a_set: AbstractSet = abstract_set self.modified_entities = modified_entities if context_map is not None: self.context_map: Dict[str, Any] = context_map else: self.context_map: Dict[str, Any] = {} if self.output_format == "json-ld": for context_url in self.context_map: ctx_file_path: Any = self.context_map[context_url] if isinstance(ctx_file_path, str) and os.path.isfile(ctx_file_path): # This expensive operation is done only when it's really needed with open(ctx_file_path, 'rt', encoding='utf-8') as ctx_f: self.context_map[context_url] = json.load(ctx_f) if repok is None: self.repok: Reporter = Reporter(prefix="[Storer: INFO] ") else: self.repok: Reporter = repok if reperr is None: self.reperr: Reporter = Reporter(prefix="[Storer: ERROR] ") else: self.reperr: Reporter = reperr def store_graphs_in_file(self, file_path: str, context_path: str = None) -> None: self.repok.new_article() self.reperr.new_article() self.repok.add_sentence("Store the graphs into a file: starting process") cg: ConjunctiveGraph = ConjunctiveGraph() for g in self.a_set.graphs(): cg.addN([item + (g.identifier,) for item in list(g)]) self._store_in_file(cg, file_path, context_path) def _store_in_file(self, cur_g: ConjunctiveGraph, cur_file_path: str, context_path: str = None) -> None: # Note: the following lines from here and until 'cur_json_ld' are a sort of hack for including all # the triples of the input graph into the final stored file. Somehow, some of them are not written # in such file otherwise - in particular the provenance ones.
new_g: ConjunctiveGraph = ConjunctiveGraph() for s, p, o in cur_g.triples((None, None, None)): g_iri: Optional[URIRef] = None for g_context in cur_g.contexts((s, p, o)): g_iri = g_context.identifier break new_g.addN([(s, p, o, g_iri)]) zip_file_path = os.path.splitext(cur_file_path)[0] + ".zip" # swap only the extension for '.zip', leaving the rest of the path untouched if self.zip_output: with ZipFile(zip_file_path, mode="w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file: self._write_graph(new_g, zip_file, cur_file_path, context_path) else: # Handle non-zipped output directly to a file self._write_graph(new_g, None, cur_file_path, context_path) self.repok.add_sentence(f"File '{cur_file_path}' added.") def _write_graph(self, graph: ConjunctiveGraph, zip_file: ZipFile, cur_file_path, context_path): if self.output_format == "json-ld": # Serialize the graph in JSON-LD format cur_json_ld = json.loads(graph.serialize(format="json-ld", context=self.context_map.get(context_path))) if context_path is not None and context_path in self.context_map: if isinstance(cur_json_ld, dict): cur_json_ld["@context"] = context_path else: # When cur_json_ld is a list for item in cur_json_ld: item["@context"] = context_path # Determine how to write based on zip file presence if zip_file is not None: dumped_json = json.dumps(cur_json_ld, ensure_ascii=False).encode('utf-8') zip_file.writestr(zinfo_or_arcname=os.path.basename(cur_file_path), data=dumped_json) else: with open(cur_file_path, 'wt', encoding='utf-8') as f: json.dump(cur_json_ld, f, ensure_ascii=False) else: # Handle other RDF formats if zip_file is not None: rdf_serialization = graph.serialize(destination=None, format=self.output_format, encoding="utf-8") zip_file.writestr(zinfo_or_arcname=os.path.basename(cur_file_path), data=rdf_serialization) else: graph.serialize(destination=cur_file_path, format=self.output_format, encoding="utf-8") def store_all(self, base_dir: str, base_iri: str, context_path: str = None, process_id: int|str = None) -> List[str]: self.repok.new_article() self.reperr.new_article() self.repok.add_sentence("Starting the process") relevant_paths: Dict[str, list] = dict() for entity in self.a_set.res_to_entity.values(): is_relevant = True if self.modified_entities is not None and entity.res not in self.modified_entities: is_relevant = False if is_relevant: cur_dir_path, cur_file_path = self._dir_and_file_paths(entity.res, base_dir, base_iri, process_id) if not os.path.exists(cur_dir_path): os.makedirs(cur_dir_path) relevant_paths.setdefault(cur_file_path, list()) relevant_paths[cur_file_path].append(entity) for relevant_path, entities_in_path in relevant_paths.items(): stored_g = None # Here we try to obtain a reference to the currently stored graph output_filepath = os.path.splitext(relevant_path)[0] + ".zip" if self.zip_output else relevant_path lock = FileLock(f"{output_filepath}.lock") with lock: if os.path.exists(output_filepath): stored_g = Reader(context_map=self.context_map).load(output_filepath) if stored_g is None: stored_g = ConjunctiveGraph() for entity_in_path in entities_in_path: self.store(entity_in_path, stored_g, relevant_path, context_path, False) self._store_in_file(stored_g, relevant_path, context_path) return list(relevant_paths.keys()) def store(self, entity: AbstractEntity, destination_g: ConjunctiveGraph, cur_file_path: str, context_path: str = None, store_now: bool = True) -> ConjunctiveGraph: self.repok.new_article() self.reperr.new_article() try: if isinstance(entity, ProvEntity): quads: List[Tuple] = [] graph_identifier: URIRef =
entity.g.identifier for triple in entity.g.triples((entity.res, None, None)): quads.append((*triple, graph_identifier)) destination_g.addN(quads) elif isinstance(entity, GraphEntity) or isinstance(entity, MetadataEntity): if entity.to_be_deleted: destination_g.remove((entity.res, None, None, None)) else: if len(entity.preexisting_graph) > 0: """ We're not in 'append mode', so we need to remove the entity that we're going to overwrite. """ destination_g.remove((entity.res, None, None, None)) """ Here we copy data from the entity into the stored graph. If the entity was marked as to be deleted, then we're done because we already removed all of its triples. """ quads: List[Tuple] = [] graph_identifier: URIRef = entity.g.identifier for triple in entity.g.triples((entity.res, None, None)): quads.append((*triple, graph_identifier)) destination_g.addN(quads) if store_now: self._store_in_file(destination_g, cur_file_path, context_path) return destination_g except Exception as e: self.reperr.add_sentence(f"[1] It was impossible to store the RDF statements in {cur_file_path}. {e}") def upload_and_store(self, base_dir: str, triplestore_url: str, base_iri: str, context_path: str = None, batch_size: int = 10) -> None: stored_graph_path: List[str] = self.store_all(base_dir, base_iri, context_path) # If some graphs were not stored properly, then none will be uploaded to the triplestore # Anyway, we should highlight those that could have been added in principle, by # mentioning them with a ".notuploaded" marker if None in stored_graph_path: for file_path in stored_graph_path: if file_path is not None: # Create a marker for the file not uploaded in the triplestore open(f'{file_path}.notuploaded', 'wt', encoding='utf-8').close() self.reperr.add_sentence("[2] " f"The statements contained in the JSON-LD file '{file_path}' " "were not uploaded into the triplestore.") else: # All the files have been stored self.upload_all(triplestore_url, base_dir, batch_size) def _dir_and_file_paths(self, res: URIRef, base_dir: str, base_iri: str, process_id: int|str = None) -> Tuple[str, str]: is_json: bool = (self.output_format == "json-ld") return find_paths(res, base_dir, base_iri, self.default_dir, self.dir_split, self.n_file_item, is_json=is_json, process_id=process_id) @staticmethod def _class_to_entity_type(entity: AbstractEntity) -> Optional[str]: if isinstance(entity, GraphEntity): return "graph" elif isinstance(entity, ProvEntity): return "prov" elif isinstance(entity, MetadataEntity): return "metadata" else: return None def upload_all(self, triplestore_url: str, base_dir: str = None, batch_size: int = 10) -> bool: self.repok.new_article() self.reperr.new_article() if batch_size <= 0: batch_size = 10 query_string: str = "" added_statements: int = 0 removed_statements: int = 0 skipped_queries: int = 0 result: bool = True for idx, entity in enumerate(self.a_set.res_to_entity.values()): update_query, n_added, n_removed = get_update_query(entity, entity_type=self._class_to_entity_type(entity)) if update_query == "": skipped_queries += 1 else: index = idx - skipped_queries if index == 0: # First query query_string = update_query added_statements = n_added removed_statements = n_removed elif index % batch_size == 0: # batch_size-multiple query result &= self._query(query_string, triplestore_url, base_dir, added_statements, removed_statements) query_string = update_query added_statements = n_added removed_statements = n_removed else: # Accumulated query query_string += " ; " + update_query added_statements +=
n_added removed_statements += n_removed if query_string != "": result &= self._query(query_string, triplestore_url, base_dir, added_statements, removed_statements) return result def upload(self, entity: AbstractEntity, triplestore_url: str, base_dir: str = None) -> bool: self.repok.new_article() self.reperr.new_article() update_query, n_added, n_removed = get_update_query(entity, entity_type=self._class_to_entity_type(entity)) return self._query(update_query, triplestore_url, base_dir, n_added, n_removed) def execute_query(self, query_string: str, triplestore_url: str) -> bool: self.repok.new_article() self.reperr.new_article() return self._query(query_string, triplestore_url) def _query(self, query_string: str, triplestore_url: str, base_dir: str = None, added_statements: int = 0, removed_statements: int = 0) -> bool: if query_string != "": try: sparql: SPARQLWrapper = SPARQLWrapper(triplestore_url) sparql.setQuery(query_string) sparql.setMethod('POST') sparql.query() self.repok.add_sentence( f"Triplestore updated with {added_statements} added statements and " f"with {removed_statements} removed statements.") return True except Exception as e: self.reperr.add_sentence("[3] " "Graph was not loaded into the " f"triplestore due to communication problems: {e}") if base_dir is not None: tp_err_dir: str = base_dir + os.sep + "tp_err" if not os.path.exists(tp_err_dir): os.makedirs(tp_err_dir) cur_file_err: str = tp_err_dir + os.sep + \ datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%f_not_uploaded.txt') with open(cur_file_err, 'wt', encoding='utf-8') as f: f.write(query_string) return False
(abstract_set: 'AbstractSet', repok: 'Reporter' = None, reperr: 'Reporter' = None, context_map: 'Dict[str, Any]' = None, default_dir: 'str' = '_', dir_split: 'int' = 0, n_file_item: 'int' = 1, output_format: 'str' = 'json-ld', zip_output: 'bool' = False, modified_entities: 'set' = None) -> 'None'
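A hedged end-to-end sketch of the Storer above; the base IRI, output directory and splitting parameters are illustrative assumptions:

from oc_ocdm.graph import GraphSet
from oc_ocdm.storer import Storer

g_set = GraphSet("https://w3id.org/oc/meta/")  # assumed base IRI
# ... populate g_set through its add_* factory methods ...
storer = Storer(g_set, output_format="json-ld", dir_split=10000, n_file_item=1000)
written = storer.store_all("./rdf/", "https://w3id.org/oc/meta/")
print(written)  # paths of the files that were (re)written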
9,797
oc_ocdm.storer
__init__
null
def __init__(self, abstract_set: AbstractSet, repok: Reporter = None, reperr: Reporter = None, context_map: Dict[str, Any] = None, default_dir: str = "_", dir_split: int = 0, n_file_item: int = 1, output_format: str = "json-ld", zip_output: bool = False, modified_entities: set = None) -> None: # We only accept format strings that: # 1. are supported by rdflib # 2. correspond to an output format which is effectively either NT or NQ # The only exception to this rule is the 'json-ld' format, which is the default value of 'output_format'. supported_formats: Set[str] = {'application/n-triples', 'ntriples', 'nt', 'nt11', 'application/n-quads', 'nquads', 'json-ld'} if output_format not in supported_formats: # The parameter must be used here: 'self.output_format' is not yet set at this point raise ValueError(f"Given output_format '{output_format}' is not supported." f" Available formats: {supported_formats}.") else: self.output_format: str = output_format self.zip_output = zip_output self.dir_split: int = dir_split self.n_file_item: int = n_file_item self.default_dir: str = default_dir if default_dir != "" else "_" self.a_set: AbstractSet = abstract_set self.modified_entities = modified_entities if context_map is not None: self.context_map: Dict[str, Any] = context_map else: self.context_map: Dict[str, Any] = {} if self.output_format == "json-ld": for context_url in self.context_map: ctx_file_path: Any = self.context_map[context_url] if isinstance(ctx_file_path, str) and os.path.isfile(ctx_file_path): # This expensive operation is done only when it's really needed with open(ctx_file_path, 'rt', encoding='utf-8') as ctx_f: self.context_map[context_url] = json.load(ctx_f) if repok is None: self.repok: Reporter = Reporter(prefix="[Storer: INFO] ") else: self.repok: Reporter = repok if reperr is None: self.reperr: Reporter = Reporter(prefix="[Storer: ERROR] ") else: self.reperr: Reporter = reperr
(self, abstract_set: 'AbstractSet', repok: 'Reporter' = None, reperr: 'Reporter' = None, context_map: 'Dict[str, Any]' = None, default_dir: 'str' = '_', dir_split: 'int' = 0, n_file_item: 'int' = 1, output_format: 'str' = 'json-ld', zip_output: 'bool' = False, modified_entities: 'set' = None) -> 'None'
9,798
oc_ocdm.storer
_class_to_entity_type
null
@staticmethod def _class_to_entity_type(entity: AbstractEntity) -> Optional[str]: if isinstance(entity, GraphEntity): return "graph" elif isinstance(entity, ProvEntity): return "prov" elif isinstance(entity, MetadataEntity): return "metadata" else: return None
(entity: 'AbstractEntity') -> 'Optional[str]'
9,799
oc_ocdm.storer
_dir_and_file_paths
null
def _dir_and_file_paths(self, res: URIRef, base_dir: str, base_iri: str, process_id: int|str = None) -> Tuple[str, str]: is_json: bool = (self.output_format == "json-ld") return find_paths(res, base_dir, base_iri, self.default_dir, self.dir_split, self.n_file_item, is_json=is_json, process_id=process_id)
(self, res: 'URIRef', base_dir: 'str', base_iri: 'str', process_id: 'int | str' = None) -> 'Tuple[str, str]'
9,800
oc_ocdm.storer
_query
null
def _query(self, query_string: str, triplestore_url: str, base_dir: str = None, added_statements: int = 0, removed_statements: int = 0) -> bool: if query_string != "": try: sparql: SPARQLWrapper = SPARQLWrapper(triplestore_url) sparql.setQuery(query_string) sparql.setMethod('POST') sparql.query() self.repok.add_sentence( f"Triplestore updated with {added_statements} added statements and " f"with {removed_statements} removed statements.") return True except Exception as e: self.reperr.add_sentence("[3] " "Graph was not loaded into the " f"triplestore due to communication problems: {e}") if base_dir is not None: tp_err_dir: str = base_dir + os.sep + "tp_err" if not os.path.exists(tp_err_dir): os.makedirs(tp_err_dir) cur_file_err: str = tp_err_dir + os.sep + \ datetime.now().strftime('%Y-%m-%d-%H-%M-%S-%f_not_uploaded.txt') with open(cur_file_err, 'wt', encoding='utf-8') as f: f.write(query_string) return False
(self, query_string: str, triplestore_url: str, base_dir: Optional[str] = None, added_statements: int = 0, removed_statements: int = 0) -> bool
9,801
oc_ocdm.storer
_store_in_file
null
def _store_in_file(self, cur_g: ConjunctiveGraph, cur_file_path: str, context_path: str = None) -> None: # Note: the following lines from here and until 'cur_json_ld' are a sort of hack for including all # the triples of the input graph into the final stored file. Somehow, some of them are not written # in such file otherwise - in particular the provenance ones. new_g: ConjunctiveGraph = ConjunctiveGraph() for s, p, o in cur_g.triples((None, None, None)): g_iri: Optional[URIRef] = None for g_context in cur_g.contexts((s, p, o)): g_iri = g_context.identifier break new_g.addN([(s, p, o, g_iri)]) zip_file_path = os.path.splitext(cur_file_path)[0] + ".zip" # swap only the extension for '.zip', leaving the rest of the path untouched if self.zip_output: with ZipFile(zip_file_path, mode="w", compression=ZIP_DEFLATED, allowZip64=True) as zip_file: self._write_graph(new_g, zip_file, cur_file_path, context_path) else: # Handle non-zipped output directly to a file self._write_graph(new_g, None, cur_file_path, context_path) self.repok.add_sentence(f"File '{cur_file_path}' added.")
(self, cur_g: rdflib.graph.ConjunctiveGraph, cur_file_path: str, context_path: Optional[str] = None) -> NoneType
9,802
oc_ocdm.storer
_write_graph
null
def _write_graph(self, graph: ConjunctiveGraph, zip_file: ZipFile, cur_file_path, context_path): if self.output_format == "json-ld": # Serialize the graph in JSON-LD format cur_json_ld = json.loads(graph.serialize(format="json-ld", context=self.context_map.get(context_path))) if context_path is not None and context_path in self.context_map: if isinstance(cur_json_ld, dict): cur_json_ld["@context"] = context_path else: # When cur_json_ld is a list for item in cur_json_ld: item["@context"] = context_path # Determine how to write based on zip file presence if zip_file is not None: dumped_json = json.dumps(cur_json_ld, ensure_ascii=False).encode('utf-8') zip_file.writestr(zinfo_or_arcname=os.path.basename(cur_file_path), data=dumped_json) else: with open(cur_file_path, 'wt', encoding='utf-8') as f: json.dump(cur_json_ld, f, ensure_ascii=False) else: # Handle other RDF formats if zip_file is not None: rdf_serialization = graph.serialize(destination=None, format=self.output_format, encoding="utf-8") zip_file.writestr(zinfo_or_arcname=os.path.basename(cur_file_path), data=rdf_serialization) else: graph.serialize(destination=cur_file_path, format=self.output_format, encoding="utf-8")
(self, graph: rdflib.graph.ConjunctiveGraph, zip_file: zipfile.ZipFile, cur_file_path, context_path)
9,803
oc_ocdm.storer
execute_query
null
def execute_query(self, query_string: str, triplestore_url: str) -> bool: self.repok.new_article() self.reperr.new_article() return self._query(query_string, triplestore_url)
(self, query_string: str, triplestore_url: str) -> bool
9,804
oc_ocdm.storer
store
null
def store(self, entity: AbstractEntity, destination_g: ConjunctiveGraph, cur_file_path: str, context_path: str = None, store_now: bool = True) -> ConjunctiveGraph: self.repok.new_article() self.reperr.new_article() try: if isinstance(entity, ProvEntity): quads: List[Tuple] = [] graph_identifier: URIRef = entity.g.identifier for triple in entity.g.triples((entity.res, None, None)): quads.append((*triple, graph_identifier)) destination_g.addN(quads) elif isinstance(entity, GraphEntity) or isinstance(entity, MetadataEntity): if entity.to_be_deleted: destination_g.remove((entity.res, None, None, None)) else: if len(entity.preexisting_graph) > 0: """ We're not in 'append mode', so we need to remove the entity that we're going to overwrite. """ destination_g.remove((entity.res, None, None, None)) """ Here we copy data from the entity into the stored graph. If the entity was marked as to be deleted, then we're done because we already removed all of its triples. """ quads: List[Tuple] = [] graph_identifier: URIRef = entity.g.identifier for triple in entity.g.triples((entity.res, None, None)): quads.append((*triple, graph_identifier)) destination_g.addN(quads) if store_now: self._store_in_file(destination_g, cur_file_path, context_path) return destination_g except Exception as e: self.reperr.add_sentence(f"[1] It was impossible to store the RDF statements in {cur_file_path}. {e}")
(self, entity: 'AbstractEntity', destination_g: 'ConjunctiveGraph', cur_file_path: 'str', context_path: 'str' = None, store_now: 'bool' = True) -> 'ConjunctiveGraph'
9,805
oc_ocdm.storer
store_all
null
def store_all(self, base_dir: str, base_iri: str, context_path: str = None, process_id: int|str = None) -> List[str]: self.repok.new_article() self.reperr.new_article() self.repok.add_sentence("Starting the process") relevant_paths: Dict[str, list] = dict() for entity in self.a_set.res_to_entity.values(): is_relevant = True if self.modified_entities is not None and entity.res not in self.modified_entities: is_relevant = False if is_relevant: cur_dir_path, cur_file_path = self._dir_and_file_paths(entity.res, base_dir, base_iri, process_id) if not os.path.exists(cur_dir_path): os.makedirs(cur_dir_path) relevant_paths.setdefault(cur_file_path, list()) relevant_paths[cur_file_path].append(entity) for relevant_path, entities_in_path in relevant_paths.items(): stored_g = None # Here we try to obtain a reference to the currently stored graph output_filepath = os.path.splitext(relevant_path)[0] + ".zip" if self.zip_output else relevant_path lock = FileLock(f"{output_filepath}.lock") with lock: if os.path.exists(output_filepath): stored_g = Reader(context_map=self.context_map).load(output_filepath) if stored_g is None: stored_g = ConjunctiveGraph() for entity_in_path in entities_in_path: self.store(entity_in_path, stored_g, relevant_path, context_path, False) self._store_in_file(stored_g, relevant_path, context_path) return list(relevant_paths.keys())
(self, base_dir: 'str', base_iri: 'str', context_path: 'str' = None, process_id: 'int | str' = None) -> 'List[str]'
9,806
oc_ocdm.storer
store_graphs_in_file
null
def store_graphs_in_file(self, file_path: str, context_path: str = None) -> None: self.repok.new_article() self.reperr.new_article() self.repok.add_sentence("Store the graphs into a file: starting process") cg: ConjunctiveGraph = ConjunctiveGraph() for g in self.a_set.graphs(): cg.addN([item + (g.identifier,) for item in list(g)]) self._store_in_file(cg, file_path, context_path)
(self, file_path: str, context_path: Optional[str] = None) -> NoneType
9,807
oc_ocdm.storer
upload
null
def upload(self, entity: AbstractEntity, triplestore_url: str, base_dir: str = None) -> bool: self.repok.new_article() self.reperr.new_article() update_query, n_added, n_removed = get_update_query(entity, entity_type=self._class_to_entity_type(entity)) return self._query(update_query, triplestore_url, base_dir, n_added, n_removed)
(self, entity: 'AbstractEntity', triplestore_url: 'str', base_dir: 'str' = None) -> 'bool'
9,808
oc_ocdm.storer
upload_all
null
def upload_all(self, triplestore_url: str, base_dir: str = None, batch_size: int = 10) -> bool: self.repok.new_article() self.reperr.new_article() if batch_size <= 0: batch_size = 10 query_string: str = "" added_statements: int = 0 removed_statements: int = 0 skipped_queries: int = 0 result: bool = True for idx, entity in enumerate(self.a_set.res_to_entity.values()): update_query, n_added, n_removed = get_update_query(entity, entity_type=self._class_to_entity_type(entity)) if update_query == "": skipped_queries += 1 else: index = idx - skipped_queries if index == 0: # First query query_string = update_query added_statements = n_added removed_statements = n_removed elif index % batch_size == 0: # batch_size-multiple query result &= self._query(query_string, triplestore_url, base_dir, added_statements, removed_statements) query_string = update_query added_statements = n_added removed_statements = n_removed else: # Accumulated query query_string += " ; " + update_query added_statements += n_added removed_statements += n_removed if query_string != "": result &= self._query(query_string, triplestore_url, base_dir, added_statements, removed_statements) return result
(self, triplestore_url: str, base_dir: Optional[str] = None, batch_size: int = 10) -> bool
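The batch counter above advances only on non-empty update queries (index = idx - skipped_queries), so every flushed batch contains exactly batch_size real queries. A small self-contained sketch of the same flush rule (names are illustrative, not part of the library):

def batch_flush_points(queries, batch_size=10):
    """Yield batches of non-empty queries, mirroring the index arithmetic above."""
    batch, kept = [], 0
    for q in queries:
        if not q:            # skipped queries do not advance the batch counter
            continue
        if kept and kept % batch_size == 0:
            yield batch      # flush a full batch before starting the next one
            batch = []
        batch.append(q)
        kept += 1
    if batch:
        yield batch          # flush whatever is left at the end

# With batch_size=2: ["a", "", "b", "c", "d"] -> [["a", "b"], ["c", "d"]]
print(list(batch_flush_points(["a", "", "b", "c", "d"], batch_size=2)))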
9,809
oc_ocdm.storer
upload_and_store
null
def upload_and_store(self, base_dir: str, triplestore_url: str, base_iri: str, context_path: str = None, batch_size: int = 10) -> None: stored_graph_path: List[str] = self.store_all(base_dir, base_iri, context_path) # If some graphs were not stored properly, then none will be uploaded to the triplestore # Anyway, we should highlight those that could have been added in principle, by # mentioning them with a ".notuploaded" marker if None in stored_graph_path: for file_path in stored_graph_path: if file_path is not None: # Create a marker for the file not uploaded in the triplestore open(f'{file_path}.notuploaded', 'wt', encoding='utf-8').close() self.reperr.add_sentence("[2] " f"The statements contained in the JSON-LD file '{file_path}' " "were not uploaded into the triplestore.") else: # All the files have been stored self.upload_all(triplestore_url, base_dir, batch_size)
(self, base_dir: str, triplestore_url: str, base_iri: str, context_path: Optional[str] = None, batch_size: int = 10) -> NoneType
9,820
myx.client
Client
All client functions return the response in JSON format, except for get_file
class Client(): """All client functions return the response in JSON format, except for get_file""" def __init__(self, email: str, password: str, base_url="https://platform.myxrobotics.com"): self.base_url = base_url self.session = requests.Session() r = self.session.post(f'{self.base_url}/users/login/', data={'email': email, 'password': password}, allow_redirects=False ) if r.headers.get('location') != '/dashboard/': # .get() avoids a KeyError when the login response carries no redirect raise Exception("Failed to authenticate. Check your email and password are correct.") def get_file(self, twin_id: int, file_path: str): """ Download a file if it exists. Returns a file-like object from which you can read in binary mode. Some twin files are generated by MYX.""" return self.session.get(f'{self.base_url}/twins/{twin_id}/data/{file_path}', stream=True).raw def upload_dir(self, directory, show_status=True): """ Upload all files in a directory. """ for root, subdirs, files in os.walk(directory): for fn in files: path = os.path.join(root, fn) if show_status: print(f"Uploading {path}") self.upload_file(path) def upload_file(self, filename): """ Upload a single file from the filesystem. """ with open(filename, 'rb') as f: return self.session.post(f'{self.base_url}/upload/file/', files={'file': f}) def finish_upload(self, twin_name='Twin from API', client_group=''): """ Give a name for the new twin and notify MYX that there will be no more images uploaded. This triggers the processing pipeline to start working on your data. The first return value is whether or not you had enough balance to complete the upload. The second value is the URL you should go to add more money to your account. """ r = self.session.post(f'{self.base_url}/upload/checkout/begin/', json={ 'twinName': twin_name, 'clientGroup': client_group, }) next_url = r.json()['url'] if next_url.startswith(f'{self.base_url}/upload/checkout/success/'): self.session.get(next_url) return True, '' else: return False, next_url def get_annotations(self, twin_id: int): """ Get a list of all annotations for a given twin. """ return self.session.get(f'{self.base_url}/twins/{twin_id}/annotations/').json() def make_new_annotation(self, twin_id: int, x: float, y: float, z: float, label: str, iframeURL: str, notes: str): """ Make a new annotation at the given position, with the given label, url and additional notes. """ return self.session.post(f'{self.base_url}/twins/{twin_id}/annotations/', json={ 'x': x, 'y': y, 'z': z, 'label': label, 'iframeURL': iframeURL, 'notes': notes, }).json() def get_twins(self): """ Get a list of all twins you own. """ return self.session.get(f'{self.base_url}/api/list_all/').json() #Twin(str(json['id']), json['name'], json['latitude'], json['longitude'], json['captureDate'])
(email: str, password: str, base_url='https://platform.myxrobotics.com')
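A hedged walk-through of the upload workflow exposed by the Client above; the credentials and directory are placeholders:

from myx.client import Client

client = Client("user@example.com", "secret")  # placeholder credentials
client.upload_dir("./survey_photos")           # assumed local image directory
ok, pay_url = client.finish_upload(twin_name="Survey twin")
if not ok:
    print(f"Insufficient balance; complete checkout at {pay_url}")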
9,821
myx.client
__init__
null
def __init__(self, email: str, password: str, base_url="https://platform.myxrobotics.com"): self.base_url = base_url self.session = requests.Session() r = self.session.post(f'{self.base_url}/users/login/', data={'email': email, 'password': password}, allow_redirects=False ) if r.headers.get('location') != '/dashboard/': # .get() avoids a KeyError when the login response carries no redirect raise Exception("Failed to authenticate. Check your email and password are correct.")
(self, email: str, password: str, base_url='https://platform.myxrobotics.com')
9,822
myx.client
finish_upload
Give a name for the new twin and notify MYX that there will be no more images uploaded. This triggers the processing pipeline to start working on your data. The first return value is whether or not you had enough balance to complete the upload. The second value is the URL you should go to add more money to your account.
def finish_upload(self, twin_name='Twin from API', client_group=''): """ Give a name for the new twin and notify MYX that there will be no more images uploaded. This triggers the processing pipeline to start working on your data. The first return value is whether or not you had enough balance to complete the upload. The second value is the URL you should go to add more money to your account. """ r = self.session.post(f'{self.base_url}/upload/checkout/begin/', json={ 'twinName': twin_name, 'clientGroup': client_group, }) next_url = r.json()['url'] if next_url.startswith(f'{self.base_url}/upload/checkout/success/'): self.session.get(next_url) return True, '' else: return False, next_url
(self, twin_name='Twin from API', client_group='')
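Since the method returns a (success, url) pair, a typical caller pattern looks like this; whether a later retry picks up the same pending upload is an assumption:

# 'client' is an authenticated Client instance (see the class sketch above)
ok, next_url = client.finish_upload(twin_name="Twin from API")
if ok:
    print("Upload finalized; processing pipeline started")
else:
    print(f"Complete payment at {next_url}, then retry finish_upload()")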
9,823
myx.client
get_annotations
Get a list of all annotations for a given twin.
def get_annotations(self, twin_id: int): """ Get a list of all annotations for a given twin. """ return self.session.get(f'{self.base_url}/twins/{twin_id}/annotations/').json()
(self, twin_id: int)
9,824
myx.client
get_file
Download a file if it exists. Returns a file-like object from which you can read in binary mode. Some twin files are generated by MYX.
def get_file(self, twin_id: int, file_path: str): """ Download a file if it exists. Returns a file-like object from which you can read in binary mode. Some twin files are generated by MYX.""" return self.session.get(f'{self.base_url}/twins/{twin_id}/data/{file_path}', stream=True).raw
(self, twin_id: int, file_path: str)
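Because get_file returns the raw binary stream, saving it to disk is a straightforward copy; a small sketch with an assumed twin id and remote path:

import shutil

from myx.client import Client

client = Client("user@example.com", "secret")  # placeholder credentials
raw = client.get_file(42, "mesh/model.obj")    # assumed twin id and file path
with open("model.obj", "wb") as out:
    shutil.copyfileobj(raw, out)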
9,825
myx.client
get_twins
Get a list of all twins you own.
def get_twins(self): """ Get a list of all twins you own. """ return self.session.get(f'{self.base_url}/api/list_all/').json() #Twin(str(json['id']), json['name'], json['latitude'], json['longitude'], json['captureDate'])
(self)
9,826
myx.client
make_new_annotation
Make a new annotation at the given position, with the given label, url and additional notes.
def make_new_annotation(self, twin_id: int, x: float, y: float, z: float, label: str, iframeURL: str, notes: str): """ Make a new annotation at the given position, with the given label, url and additional notes. """ return self.session.post(f'{self.base_url}/twins/{twin_id}/annotations/', json={ 'x': x, 'y': y, 'z': z, 'label': label, 'iframeURL': iframeURL, 'notes': notes, }).json()
(self, twin_id: int, x: float, y: float, z: float, label: str, iframeURL: str, notes: str)
9,827
myx.client
upload_dir
Upload all files in a directory.
def upload_dir(self, directory, show_status=True): """ Upload all files in a directory. """ for root, subdirs, files in os.walk(directory): for fn in files: path = os.path.join(root, fn) if show_status: print(f"Uploading {path}") self.upload_file(path)
(self, directory, show_status=True)
9,828
myx.client
upload_file
Upload a single file from the filesystem.
def upload_file(self, filename): """ Upload a single file from the filesystem. """ with open(filename, 'rb') as f: return self.session.post(f'{self.base_url}/upload/file/', files={'file': f})
(self, filename)
9,830
pygeoif.feature
Feature
Aggregates a geometry instance with associated user-defined properties. Attributes: ---------- geometry : object A geometry instance properties : dict A dictionary linking field keys with values associated with geometry instance Example: ------- >>> p = Point(1.0, -1.0) >>> props = {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a = Feature(p, props) >>> a.properties {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a.properties['Name'] 'Sample Point'
class Feature: """ Aggregates a geometry instance with associated user-defined properties. Attributes: ---------- geometry : object A geometry instance properties : dict A dictionary linking field keys with values associated with geometry instance Example: ------- >>> p = Point(1.0, -1.0) >>> props = {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a = Feature(p, props) >>> a.properties {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a.properties['Name'] 'Sample Point' """ def __init__( self, geometry: Geometry, properties: Optional[Dict[str, Any]] = None, feature_id: Optional[Union[str, int]] = None, ) -> None: """Initialize the feature.""" self._geometry = geometry self._properties = properties or {} self._feature_id = feature_id def __eq__(self, other: object) -> bool: """Check if the geointerfaces are equal.""" try: if not other.__geo_interface__.get( # type: ignore [attr-defined] "geometry", ): return False except AttributeError: return False return feature_geo_interface_equals( my_interface=self.__geo_interface__, other_interface=other.__geo_interface__, # type: ignore [attr-defined] ) def __repr__(self) -> str: """Return the representation.""" return ( f"{self.__class__.__name__}({self._geometry!r}," f" {self._properties}, {self._feature_id!r})" ) @property def id(self) -> Optional[Union[str, int]]: """Return the id of the feature.""" return self._feature_id @property def geometry(self) -> Geometry: """Return the geometry of the feature.""" return self._geometry @property def properties(self) -> Dict[str, Any]: """Return a dictionary of properties.""" return self._properties @property def __geo_interface__(self) -> GeoFeatureInterface: """Return the GeoInterface of the geometry with properties.""" geo_interface: GeoFeatureInterface = { "type": "Feature", "bbox": cast(Bounds, self._geometry.bounds), "geometry": self._geometry.__geo_interface__, "properties": self._properties, } if self._feature_id is not None: geo_interface["id"] = self._feature_id return geo_interface
(geometry: Union[pygeoif.geometry.Point, pygeoif.geometry.LineString, pygeoif.geometry.LinearRing, pygeoif.geometry.Polygon, pygeoif.geometry.MultiPoint, pygeoif.geometry.MultiLineString, pygeoif.geometry.MultiPolygon], properties: Optional[Dict[str, Any]] = None, feature_id: Union[str, int, NoneType] = None) -> None
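Because __geo_interface__ yields a complete GeoJSON Feature mapping (including the bbox and the optional id), serializing a Feature is a one-liner; a small sketch:

import json

from pygeoif.feature import Feature
from pygeoif.geometry import Point

feature = Feature(Point(1.0, -1.0), {"Name": "Sample Point"}, feature_id="p1")
print(json.dumps(feature.__geo_interface__))  # valid GeoJSON Feature text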
9,831
pygeoif.feature
__eq__
Check if the geointerfaces are equal.
def __eq__(self, other: object) -> bool: """Check if the geointerfaces are equal.""" try: if not other.__geo_interface__.get( # type: ignore [attr-defined] "geometry", ): return False except AttributeError: return False return feature_geo_interface_equals( my_interface=self.__geo_interface__, other_interface=other.__geo_interface__, # type: ignore [attr-defined] )
(self, other: object) -> bool
9,832
pygeoif.feature
__init__
Initialize the feature.
def __init__( self, geometry: Geometry, properties: Optional[Dict[str, Any]] = None, feature_id: Optional[Union[str, int]] = None, ) -> None: """Initialize the feature.""" self._geometry = geometry self._properties = properties or {} self._feature_id = feature_id
(self, geometry: Union[pygeoif.geometry.Point, pygeoif.geometry.LineString, pygeoif.geometry.LinearRing, pygeoif.geometry.Polygon, pygeoif.geometry.MultiPoint, pygeoif.geometry.MultiLineString, pygeoif.geometry.MultiPolygon], properties: Optional[Dict[str, Any]] = None, feature_id: Union[str, int, NoneType] = None) -> NoneType
9,833
pygeoif.feature
__repr__
Return the representation.
def __repr__(self) -> str: """Return the representation.""" return ( f"{self.__class__.__name__}({self._geometry!r}," f" {self._properties}, {self._feature_id!r})" )
(self) -> str
9,834
pygeoif.feature
FeatureCollection
A heterogeneous collection of Features. Attributes: ---------- features : sequence A sequence of feature instances Example: ------- >>> from pygeoif import geometry >>> p = geometry.Point(1.0, -1.0) >>> props = {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a = geometry.Feature(p, props) >>> p2 = geometry.Point(1.0, -1.0) >>> props2 = {'Name': 'Sample Point2', 'Other': 'Other Data2'} >>> b = geometry.Feature(p2, props2) >>> features = [a, b] >>> c = geometry.FeatureCollection(features) >>> c.__geo_interface__ {'type': 'FeatureCollection', 'features': [{'geometry': {'type': 'Point', 'coordinates': (1.0, -1.0)}, 'type': 'Feature', 'properties': {'Other': 'Other Data', 'Name': 'Sample Point'}}, {'geometry': {'type': 'Point', 'coordinates': (1.0, -1.0)}, 'type': 'Feature', 'properties': {'Other': 'Other Data2', 'Name': 'Sample Point2'}}]}
class FeatureCollection: """ A heterogeneous collection of Features. Attributes: ---------- features : sequence A sequence of feature instances Example: ------- >>> from pygeoif import geometry >>> p = geometry.Point(1.0, -1.0) >>> props = {'Name': 'Sample Point', 'Other': 'Other Data'} >>> a = geometry.Feature(p, props) >>> p2 = geometry.Point(1.0, -1.0) >>> props2 = {'Name': 'Sample Point2', 'Other': 'Other Data2'} >>> b = geometry.Feature(p2, props2) >>> features = [a, b] >>> c = geometry.FeatureCollection(features) >>> c.__geo_interface__ {'type': 'FeatureCollection', 'features': [{'geometry': {'type': 'Point', 'coordinates': (1.0, -1.0)}, 'type': 'Feature', 'properties': {'Other': 'Other Data', 'Name': 'Sample Point'}}, {'geometry': {'type': 'Point', 'coordinates': (1.0, -1.0)}, 'type': 'Feature', 'properties': {'Other': 'Other Data2', 'Name': 'Sample Point2'}}]} """ def __init__(self, features: Sequence[Feature]) -> None: """Initialize the feature collection.""" self._features = tuple(features) def __eq__(self, other: object) -> bool: """Check if the geointerfaces are equal.""" return self._check_interface(other) and all( ( feature_geo_interface_equals(my_interface=mine, other_interface=other) for mine, other in zip( self.__geo_interface__["features"], other.__geo_interface__["features"], # type: ignore [attr-defined] ) ), ) def __len__(self) -> int: """Return the number of features in this collection.""" return len(self._features) def __iter__(self) -> Iterator[Feature]: """Iterate over the features of the collection.""" return iter(self._features) def __repr__(self) -> str: """Return the representation.""" return f"{self.__class__.__name__}({self._features!r})" @property def features(self) -> Generator[Feature, None, None]: """Iterate over the features of the collection.""" yield from self._features @property def bounds(self) -> Bounds: """Return the X-Y bounding box.""" geom_bounds = list( zip(*(feature.geometry.bounds for feature in self._features)), ) return ( min(geom_bounds[0]), min(geom_bounds[1]), max(geom_bounds[2]), max(geom_bounds[3]), ) @property def __geo_interface__(self) -> GeoFeatureCollectionInterface: """Return the GeoInterface of the feature collection.""" return { "type": "FeatureCollection", "bbox": self.bounds, "features": tuple(feature.__geo_interface__ for feature in self._features), } def _check_interface(self, other: object) -> bool: try: return self.__geo_interface__[ "type" ] == other.__geo_interface__.get( # type: ignore [attr-defined] "type", ) and len( self.__geo_interface__["features"], ) == len( other.__geo_interface__.get( # type: ignore [attr-defined] "features", [], ), ) except AttributeError: return False
(features: Sequence[pygeoif.feature.Feature]) -> None
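A short sketch exercising the collection API above, including the aggregate bounding box:

from pygeoif.feature import Feature, FeatureCollection
from pygeoif.geometry import Point

fc = FeatureCollection([
    Feature(Point(0.0, 0.0), {"Name": "origin"}),
    Feature(Point(2.0, 3.0), {"Name": "corner"}),
])
print(len(fc))    # 2
print(fc.bounds)  # (0.0, 0.0, 2.0, 3.0)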
9,835
pygeoif.feature
__eq__
Check if the geointerfaces are equal.
def __eq__(self, other: object) -> bool: """Check if the geointerfaces are equal.""" return self._check_interface(other) and all( ( feature_geo_interface_equals(my_interface=mine, other_interface=other) for mine, other in zip( self.__geo_interface__["features"], other.__geo_interface__["features"], # type: ignore [attr-defined] ) ), )
(self, other: object) -> bool
9,836
pygeoif.feature
__init__
Initialize the feature collection.
def __init__(self, features: Sequence[Feature]) -> None: """Initialize the feature collection.""" self._features = tuple(features)
(self, features: Sequence[pygeoif.feature.Feature]) -> NoneType
9,837
pygeoif.feature
__iter__
Iterate over the features of the collection.
def __iter__(self) -> Iterator[Feature]: """Iterate over the features of the collection.""" return iter(self._features)
(self) -> Iterator[pygeoif.feature.Feature]
9,838
pygeoif.feature
__len__
Return the number of features in this collection.
def __len__(self) -> int: """Return the number of features in this collection.""" return len(self._features)
(self) -> int
9,839
pygeoif.feature
__repr__
Return the representation.
def __repr__(self) -> str: """Return the representation.""" return f"{self.__class__.__name__}({self._features!r})"
(self) -> str
9,840
pygeoif.feature
_check_interface
null
def _check_interface(self, other: object) -> bool: try: return self.__geo_interface__[ "type" ] == other.__geo_interface__.get( # type: ignore [attr-defined] "type", ) and len( self.__geo_interface__["features"], ) == len( other.__geo_interface__.get( # type: ignore [attr-defined] "features", [], ), ) except AttributeError: return False
(self, other: object) -> bool
9,841
pygeoif.geometry
GeometryCollection
A heterogeneous collection of geometries. Attributes: ---------- geoms : sequence A sequence of geometry instances Please note: GEOMETRYCOLLECTION isn't supported by the Shapefile format, and this subclass isn't generally supported by ordinary GIS software (viewers and so on), so it's very rarely used in the real GIS professional world. Example: ------- Initialize Geometries and construct a GeometryCollection >>> from pygeoif import geometry >>> p = geometry.Point(1.0, -1.0) >>> p2 = geometry.Point(1.0, -1.0) >>> geoms = [p, p2] >>> c = geometry.GeometryCollection(geoms) >>> c.__geo_interface__ {'type': 'GeometryCollection', 'geometries': [{'type': 'Point', 'coordinates': (1.0, -1.0)}, {'type': 'Point', 'coordinates': (1.0, -1.0)}]}
class GeometryCollection(_MultiGeometry): """ A heterogeneous collection of geometries. Attributes: ---------- geoms : sequence A sequence of geometry instances Please note: GEOMETRYCOLLECTION isn't supported by the Shapefile format, and this subclass isn't generally supported by ordinary GIS software (viewers and so on), so it's very rarely used in the real GIS professional world. Example: ------- Initialize Geometries and construct a GeometryCollection >>> from pygeoif import geometry >>> p = geometry.Point(1.0, -1.0) >>> p2 = geometry.Point(1.0, -1.0) >>> geoms = [p, p2] >>> c = geometry.GeometryCollection(geoms) >>> c.__geo_interface__ {'type': 'GeometryCollection', 'geometries': [{'type': 'Point', 'coordinates': (1.0, -1.0)}, {'type': 'Point', 'coordinates': (1.0, -1.0)}]} """ _geoms: Tuple[Union[Geometry, "GeometryCollection"], ...] def __init__( self, geometries: Iterable[Union[Geometry, "GeometryCollection"]], ) -> None: """ Initialize the MultiGeometry with Geometries. Args: ---- geometries (Iterable[Geometry]): the geometries to aggregate. """ object.__setattr__(self, "_geoms", tuple(geom for geom in geometries if geom)) def __eq__(self, other: object) -> bool: """ Return equality between collections. Types and coordinates from all contained geometries must be equal. """ try: if self.is_empty: return False if ( other.__geo_interface__.get("type") # type: ignore [attr-defined] != self.geom_type ): return False if len( other.__geo_interface__.get( # type: ignore [attr-defined] "geometries", [], ), ) != len( self, ): return False except AttributeError: return False return compare_geo_interface( first=self.__geo_interface__, second=other.__geo_interface__, # type: ignore [attr-defined] ) def __len__(self) -> int: """ Length of the collection. Returns ------- int: Number of geometries in the collection. """ return len(self._geoms) def __repr__(self) -> str: """Return the representation.""" return f"{self.geom_type}({tuple(self.geoms)})" @property def _wkt_coords(self) -> str: return ", ".join(geom.wkt for geom in self.geoms) @property def __geo_interface__(self) -> GeoCollectionInterface: # type: ignore [override] """Return the geo interface of the collection.""" return { "type": "GeometryCollection", "geometries": tuple(geom.__geo_interface__ for geom in self.geoms), } def _prepare_hull(self) -> Iterable[Point2D]: return chain.from_iterable( geom._prepare_hull() for geom in self.geoms # noqa: SLF001 )
(geometries: Iterable[Union[pygeoif.geometry.Point, pygeoif.geometry.LineString, pygeoif.geometry.LinearRing, pygeoif.geometry.Polygon, pygeoif.geometry.MultiPoint, pygeoif.geometry.MultiLineString, pygeoif.geometry.MultiPolygon, ForwardRef('GeometryCollection')]]) -> None
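A short sketch showing a mixed collection; note that falsy (empty) geometries are dropped by __init__:

from pygeoif.geometry import GeometryCollection, LineString, Point

gc = GeometryCollection([Point(1.0, -1.0), LineString([(0, 0), (1, 1)])])
print(len(gc))  # 2
print(gc.wkt)   # WKT of the whole collection, built from each member's wkt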
9,842
pygeoif.geometry
__bool__
null
def __bool__(self) -> bool: return self.is_empty is False
(self) -> bool
9,843
pygeoif.geometry
__delattr__
null
def __delattr__(self, *args: Any) -> NoReturn: # noqa: ANN401 msg = f"Attributes of {self.__class__.__name__} cannot be deleted" raise AttributeError( msg, )
(self, *args: Any) -> NoReturn
9,844
pygeoif.geometry
__eq__
Return equality between collections. Types and coordinates from all contained geometries must be equal.
def __eq__(self, other: object) -> bool: """ Return equality between collections. Types and coordinates from all contained geometries must be equal. """ try: if self.is_empty: return False if ( other.__geo_interface__.get("type") # type: ignore [attr-defined] != self.geom_type ): return False if len( other.__geo_interface__.get( # type: ignore [attr-defined] "geometries", [], ), ) != len( self, ): return False except AttributeError: return False return compare_geo_interface( first=self.__geo_interface__, second=other.__geo_interface__, # type: ignore [attr-defined] )
(self, other: object) -> bool
9,845
pygeoif.geometry
__init__
Initialize the MultiGeometry with Geometries. Args: ---- geometries (Iterable[Geometry]): the geometries to aggregate.
def __init__( self, geometries: Iterable[Union[Geometry, "GeometryCollection"]], ) -> None: """ Initialize the MultiGeometry with Geometries. Args: ---- geometries (Iterable[Geometry]): the geometries to aggregate. """ object.__setattr__(self, "_geoms", tuple(geom for geom in geometries if geom))
(self, geometries: Iterable[Union[pygeoif.geometry.Point, pygeoif.geometry.LineString, pygeoif.geometry.LinearRing, pygeoif.geometry.Polygon, pygeoif.geometry.MultiPoint, pygeoif.geometry.MultiLineString, pygeoif.geometry.MultiPolygon, pygeoif.geometry.GeometryCollection]]) -> NoneType
9,846
pygeoif.geometry
__len__
Length of the collection. Returns ------- int: Number of geometries in the collection.
def __len__(self) -> int: """ Length of the collection. Returns ------- int: Number of geometries in the collection. """ return len(self._geoms)
(self) -> int
9,847
pygeoif.geometry
__repr__
Return the representation.
def __repr__(self) -> str: """Return the representation.""" return f"{self.geom_type}({tuple(self.geoms)})"
(self) -> str
9,848
pygeoif.geometry
__setattr__
null
def __setattr__(self, *args: Any) -> NoReturn: # noqa: ANN401 msg = f"Attributes of {self.__class__.__name__} cannot be changed" raise AttributeError( msg, )
(self, *args: Any) -> NoReturn
9,849
pygeoif.geometry
__str__
null
def __str__(self) -> str: return self.wkt
(self) -> str
9,850
pygeoif.geometry
_get_bounds
Return the X-Y bounding box.
def _get_bounds(self) -> Bounds: """Return the X-Y bounding box.""" geom_bounds = list( zip(*(geom.bounds for geom in self.geoms)), ) return ( min(geom_bounds[0]), min(geom_bounds[1]), max(geom_bounds[2]), max(geom_bounds[3]), )
(self) -> Tuple[float, float, float, float]
9,851
pygeoif.geometry
_prepare_hull
null
def _prepare_hull(self) -> Iterable[Point2D]: return chain.from_iterable( geom._prepare_hull() for geom in self.geoms # noqa: SLF001 )
(self) -> Iterable[Tuple[float, float]]
9,852
pygeoif.geometry
LineString
A one-dimensional figure comprising one or more line segments. A LineString has non-zero length and zero area. It may approximate a curve and need not be straight. Unlike a LinearRing, a LineString is not closed. Attributes ---------- geoms : sequence A sequence of Points
class LineString(_Geometry):
    """
    A one-dimensional figure comprising one or more line segments.

    A LineString has non-zero length and zero area. It may approximate a curve
    and need not be straight. Unlike a LinearRing, a LineString is not closed.

    Attributes
    ----------
    geoms : sequence
        A sequence of Points

    """

    _geoms: Tuple[Point, ...]

    def __init__(self, coordinates: LineType) -> None:
        """
        Initialize a Linestring.

        Parameters
        ----------
        coordinates : sequence
            A sequence of (x, y [,z]) numeric coordinate pairs or triples

        Example
        -------
        Create a line with two segments

        >>> a = LineString([(0, 0), (1, 0), (1, 1)])

        """
        object.__setattr__(self, "_geoms", self._set_geoms(coordinates))

    def __repr__(self) -> str:
        """Return the representation."""
        return f"{self.geom_type}({self.coords})"

    @property
    def geoms(self) -> Tuple[Point, ...]:
        """Return the underlying geometries."""
        return self._geoms

    @property
    def coords(self) -> LineType:
        """Return the geometry coordinates."""
        return cast(
            LineType,
            tuple(point.coords[0] for point in self.geoms if point.coords),
        )

    @property
    def is_empty(self) -> bool:
        """
        Return if this geometry is empty.

        A Linestring is considered empty when it has no points.
        """
        return len(self._geoms) == 0

    @property
    def has_z(self) -> Optional[bool]:
        """Return True if the geometry's coordinate sequence(s) have z values."""
        return self._geoms[0].has_z if self.geoms else None

    @property
    def _wkt_coords(self) -> str:
        return ", ".join(point._wkt_coords for point in self.geoms)  # noqa: SLF001

    @property
    def __geo_interface__(self) -> GeoInterface:
        """Return the geo interface."""
        geo_interface = super().__geo_interface__
        geo_interface["coordinates"] = self.coords
        return geo_interface

    @classmethod
    def from_coordinates(cls, coordinates: LineType) -> "LineString":
        """Construct a linestring from coordinates."""
        return cls(coordinates)

    @classmethod
    def from_points(cls, *args: Point) -> "LineString":
        """Create a linestring from points."""
        return cls(
            cast(LineType, tuple(point.coords[0] for point in args if point.coords)),
        )

    @classmethod
    def _from_dict(cls, geo_interface: GeoInterface) -> "LineString":
        cls._check_dict(geo_interface)
        return cls(cast(LineType, geo_interface["coordinates"]))

    @staticmethod
    def _set_geoms(coordinates: LineType) -> Tuple[Point, ...]:
        geoms = []
        last_len = None
        for coord in dedupe(coordinates):
            if len(coord) != last_len and last_len is not None:
                msg = (  # type: ignore [unreachable]
                    "All coordinates must have the same dimension"
                )
                raise DimensionError(
                    msg,
                )
            last_len = len(coord)
            point = Point(*coord)
            if point:
                geoms.append(point)
        return tuple(geoms)

    def _get_bounds(self) -> Bounds:
        """Return the X-Y bounding box."""
        xy = list(zip(*((p.x, p.y) for p in self._geoms)))
        return (
            min(xy[0]),
            min(xy[1]),
            max(xy[0]),
            max(xy[1]),
        )

    def _prepare_hull(self) -> Iterable[Point2D]:
        return ((pt.x, pt.y) for pt in self._geoms)
(coordinates: Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]) -> None
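A usage sketch for LineString, extending the documented doctest; the exact WKT formatting may vary by release:

from pygeoif.geometry import LineString

line = LineString([(0, 0), (1, 0), (1, 1)])
assert len(line.geoms) == 3         # stored as a tuple of Points
assert line.bounds == (0, 0, 1, 1)  # X-Y bounding box
print(line.wkt)                     # e.g. LINESTRING (0 0, 1 0, 1 1)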
9,855
pygeoif.geometry
__eq__
Check if the geometry objects have the same coordinates and type.

Empty geometries are always considered as not equal.
def __eq__(self, other: object) -> bool:
    """
    Check if the geometry objects have the same coordinates and type.

    Empty geometries are always considered as not equal.
    """
    try:
        return all(
            (
                not self.is_empty,
                self.__geo_interface__["type"]
                == other.__geo_interface__.get(  # type: ignore [attr-defined]
                    "type",
                ),
                compare_coordinates(
                    coords=self.__geo_interface__["coordinates"],
                    other=other.__geo_interface__.get(  # type: ignore [attr-defined]
                        "coordinates",
                    ),
                ),
            ),
        )
    except AttributeError:
        return False
(self, other: object) -> bool
9,856
pygeoif.geometry
__init__
Initialize a Linestring.

Parameters
----------
coordinates : sequence
    A sequence of (x, y [,z]) numeric coordinate pairs or triples

Example
-------
Create a line with two segments

>>> a = LineString([(0, 0), (1, 0), (1, 1)])
def __init__(self, coordinates: LineType) -> None:
    """
    Initialize a Linestring.

    Parameters
    ----------
    coordinates : sequence
        A sequence of (x, y [,z]) numeric coordinate pairs or triples

    Example
    -------
    Create a line with two segments

    >>> a = LineString([(0, 0), (1, 0), (1, 1)])

    """
    object.__setattr__(self, "_geoms", self._set_geoms(coordinates))
(self, coordinates: Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]) -> NoneType
9,857
pygeoif.geometry
__repr__
Return the representation.
def __repr__(self) -> str:
    """Return the representation."""
    return f"{self.geom_type}({self.coords})"
(self) -> str
9,860
pygeoif.geometry
_get_bounds
Return the X-Y bounding box.
def _get_bounds(self) -> Bounds:
    """Return the X-Y bounding box."""
    xy = list(zip(*((p.x, p.y) for p in self._geoms)))
    return (
        min(xy[0]),
        min(xy[1]),
        max(xy[0]),
        max(xy[1]),
    )
(self) -> Tuple[float, float, float, float]
9,861
pygeoif.geometry
_prepare_hull
null
def _prepare_hull(self) -> Iterable[Point2D]:
    return ((pt.x, pt.y) for pt in self._geoms)
(self) -> Iterable[Tuple[float, float]]
9,862
pygeoif.geometry
_set_geoms
null
@staticmethod
def _set_geoms(coordinates: LineType) -> Tuple[Point, ...]:
    geoms = []
    last_len = None
    for coord in dedupe(coordinates):
        if len(coord) != last_len and last_len is not None:
            msg = (  # type: ignore [unreachable]
                "All coordinates must have the same dimension"
            )
            raise DimensionError(
                msg,
            )
        last_len = len(coord)
        point = Point(*coord)
        if point:
            geoms.append(point)
    return tuple(geoms)
(coordinates: Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]) -> Tuple[pygeoif.geometry.Point, ...]
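_set_geoms rejects mixed dimensionality while deduplicating consecutive coordinates. A sketch, assuming DimensionError is importable from pygeoif.exceptions as in recent releases:

from pygeoif.exceptions import DimensionError
from pygeoif.geometry import LineString

try:
    LineString([(0, 0), (1, 1, 1)])  # 2D and 3D coordinates mixed
except DimensionError as err:
    print(err)  # All coordinates must have the same dimension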
9,863
pygeoif.geometry
LinearRing
A closed one-dimensional geometry comprising one or more line segments.

A LinearRing that crosses itself or touches itself at a single point is
invalid and operations on it may fail.

A LinearRing is self-closing.
class LinearRing(LineString):
    """
    A closed one-dimensional geometry comprising one or more line segments.

    A LinearRing that crosses itself or touches itself at a single point is
    invalid and operations on it may fail.

    A LinearRing is self-closing.
    """

    def __init__(self, coordinates: LineType) -> None:
        """
        Initialize a LinearRing.

        Args:
        ----
        coordinates (Sequence):
            A sequence of (x, y [,z]) numeric coordinate pairs or triples

        """
        super().__init__(coordinates)
        if not self.is_empty and self._geoms[0].coords != self._geoms[-1].coords:
            object.__setattr__(self, "_geoms", (*self._geoms, self._geoms[0]))

    @property
    def centroid(self) -> Optional[Point]:
        """Return the centroid of the ring."""
        if self.has_z:
            msg = "Centroid is only implemented for 2D coordinates"
            raise DimensionError(msg)
        cent, area = centroid(self.coords)
        if any(math.isnan(coord) for coord in cent):
            return None
        return (
            Point(x=cent[0], y=cent[1])
            if math.isclose(a=area, b=signed_area(self.coords))
            else None
        )

    @property
    def is_ccw(self) -> bool:
        """Return True if the ring is oriented counter clock-wise."""
        return signed_area(self.coords) >= 0
(coordinates: Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]) -> None
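A sketch of the self-closing behaviour and the orientation test, assuming the standard imports:

from pygeoif.geometry import LinearRing

ring = LinearRing([(0, 0), (1, 0), (1, 1)])
assert ring.coords[0] == ring.coords[-1]  # first point appended to close the ring
assert ring.is_ccw                        # signed area of this ring is positive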
9,867
pygeoif.geometry
__init__
Initialize a LinearRing.

Args:
----
coordinates (Sequence):
    A sequence of (x, y [,z]) numeric coordinate pairs or triples
def __init__(self, coordinates: LineType) -> None:
    """
    Initialize a LinearRing.

    Args:
    ----
    coordinates (Sequence):
        A sequence of (x, y [,z]) numeric coordinate pairs or triples

    """
    super().__init__(coordinates)
    if not self.is_empty and self._geoms[0].coords != self._geoms[-1].coords:
        object.__setattr__(self, "_geoms", (*self._geoms, self._geoms[0]))
(self, coordinates: Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]) -> NoneType
9,874
pygeoif.geometry
MultiLineString
A collection of one or more line strings.

A MultiLineString has non-zero length and zero area.

Attributes
----------
geoms : sequence
    A sequence of LineStrings
class MultiLineString(_MultiGeometry):
    """
    A collection of one or more line strings.

    A MultiLineString has non-zero length and zero area.

    Attributes
    ----------
    geoms : sequence
        A sequence of LineStrings

    """

    _geoms: Tuple[LineString, ...]

    def __init__(self, lines: Sequence[LineType], unique: bool = False) -> None:
        """
        Initialize the MultiLineString.

        Parameters
        ----------
        lines : sequence
            A sequence of line-like coordinate sequences.
        unique: boolean,
            when unique is true duplicates will be removed,
            the ordering will not be preserved.

        Example
        -------
        Construct a collection containing one line string.

        >>> lines = MultiLineString([[[0.0, 0.0], [1.0, 2.0]]])

        """
        if unique:
            lines = {tuple(line) for line in lines}  # type: ignore [assignment]
        object.__setattr__(self, "_geoms", tuple(LineString(line) for line in lines))

    def __len__(self) -> int:
        """Return the number of lines in the collection."""
        return len(self._geoms)

    def __repr__(self) -> str:
        """Return the representation."""
        return f"{self.geom_type}({tuple(geom.coords for geom in self._geoms)})"

    @property
    def geoms(self) -> Iterator[LineString]:
        """Iterate over the lines."""
        yield from (cast(LineString, line) for line in super().geoms)

    @property
    def _wkt_coords(self) -> str:
        return ",".join(
            f"({linestring._wkt_coords})" for linestring in self.geoms  # noqa: SLF001
        )

    @property
    def __geo_interface__(self) -> GeoInterface:
        """Return the geo interface."""
        geo_interface = super().__geo_interface__
        geo_interface["coordinates"] = tuple(geom.coords for geom in self.geoms)
        return geo_interface

    @classmethod
    def from_linestrings(
        cls,
        *args: LineString,
        unique: bool = False,
    ) -> "MultiLineString":
        """Create a MultiLineString from LineStrings."""
        return cls([line.coords for line in args], unique=unique)

    @classmethod
    def _from_dict(cls, geo_interface: GeoInterface) -> "MultiLineString":
        cls._check_dict(geo_interface)
        return cls(cast(Sequence[LineType], geo_interface["coordinates"]))

    def _prepare_hull(self) -> Iterable[Point2D]:
        return (
            (pt.x, pt.y)
            for pt in chain.from_iterable(line.geoms for line in self.geoms)
        )
(lines: Sequence[Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]], unique: bool = False) -> None
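A usage sketch for MultiLineString, assuming the standard imports:

from pygeoif.geometry import LineString, MultiLineString

lines = MultiLineString(
    [[(0.0, 0.0), (1.0, 2.0)], [(0.0, 0.0), (1.0, 2.0)]],
    unique=True,
)
assert len(lines) == 1  # duplicates collapsed; ordering not preserved
same = MultiLineString.from_linestrings(LineString([(0.0, 0.0), (1.0, 2.0)]))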
9,878
pygeoif.geometry
__init__
Initialize the MultiLineString.

Parameters
----------
lines : sequence
    A sequence of line-like coordinate sequences.
unique: boolean,
    when unique is true duplicates will be removed,
    the ordering will not be preserved.

Example
-------
Construct a collection containing one line string.

>>> lines = MultiLineString([[[0.0, 0.0], [1.0, 2.0]]])
def __init__(self, lines: Sequence[LineType], unique: bool = False) -> None:
    """
    Initialize the MultiLineString.

    Parameters
    ----------
    lines : sequence
        A sequence of line-like coordinate sequences.
    unique: boolean,
        when unique is true duplicates will be removed,
        the ordering will not be preserved.

    Example
    -------
    Construct a collection containing one line string.

    >>> lines = MultiLineString([[[0.0, 0.0], [1.0, 2.0]]])

    """
    if unique:
        lines = {tuple(line) for line in lines}  # type: ignore [assignment]
    object.__setattr__(self, "_geoms", tuple(LineString(line) for line in lines))
(self, lines: Sequence[Union[Sequence[Tuple[float, float]], Sequence[Tuple[float, float, float]]]], unique: bool = False) -> NoneType
9,879
pygeoif.geometry
__len__
Return the number of lines in the collection.
def __len__(self) -> int:
    """Return the number of lines in the collection."""
    return len(self._geoms)
(self) -> int
9,880
pygeoif.geometry
__repr__
Return the representation.
def __repr__(self) -> str:
    """Return the representation."""
    return f"{self.geom_type}({tuple(geom.coords for geom in self._geoms)})"
(self) -> str
9,884
pygeoif.geometry
_prepare_hull
null
def _prepare_hull(self) -> Iterable[Point2D]:
    return (
        (pt.x, pt.y)
        for pt in chain.from_iterable(line.geoms for line in self.geoms)
    )
(self) -> Iterable[Tuple[float, float]]
9,885
pygeoif.geometry
MultiPoint
A collection of one or more points.

Attributes
----------
geoms : sequence
    A sequence of Points
class MultiPoint(_MultiGeometry):
    """
    A collection of one or more points.

    Attributes
    ----------
    geoms : sequence
        A sequence of Points

    """

    _geoms: Tuple[Point, ...]

    def __init__(self, points: Sequence[PointType], unique: bool = False) -> None:
        """
        Create a collection of one or more points.

        Parameters
        ----------
        points : sequence
            A sequence of (x, y [,z]) numeric coordinate pairs or triples.
        unique: boolean,
            when unique is true duplicates will be removed,
            the ordering will not be preserved.

        Example
        -------
        Construct a 2 point collection

        >>> ob = MultiPoint([[0.0, 0.0], [1.0, 2.0]])
        >>> len(ob.geoms)
        2
        >>> type(ob.geoms[0]) == Point
        True

        """
        if unique:
            points = set(points)  # type: ignore [assignment]
        object.__setattr__(self, "_geoms", tuple(Point(*point) for point in points))

    def __len__(self) -> int:
        """Return the number of points in this MultiPoint."""
        return len(self._geoms)

    def __repr__(self) -> str:
        """Return the representation."""
        return (
            f"{self.geom_type}"
            f"({tuple(geom.coords[0] for geom in self._geoms if geom.coords)})"
        )

    @property
    def geoms(self) -> Iterator[Point]:
        """Iterate over the points."""
        yield from (cast(Point, p) for p in super().geoms)

    @property
    def _wkt_coords(self) -> str:
        return ", ".join(point._wkt_coords for point in self.geoms)  # noqa: SLF001

    @property
    def __geo_interface__(self) -> GeoInterface:
        """Return the geo interface."""
        geo_interface = super().__geo_interface__
        geo_interface["coordinates"] = tuple(
            geom.coords[0] for geom in self.geoms if geom.coords
        )
        return geo_interface

    @classmethod
    def from_points(cls, *args: Point, unique: bool = False) -> "MultiPoint":
        """Create a MultiPoint from Points."""
        return cls([point.coords[0] for point in args if point.coords], unique=unique)

    @classmethod
    def _from_dict(cls, geo_interface: GeoInterface) -> "MultiPoint":
        cls._check_dict(geo_interface)
        return cls(cast(Sequence[PointType], geo_interface["coordinates"]))

    def _prepare_hull(self) -> Iterable[Point2D]:
        return ((pt.x, pt.y) for pt in self.geoms)
(points: Sequence[Union[Tuple[float, float], Tuple[float, float, float]]], unique: bool = False) -> None
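A usage sketch for MultiPoint, assuming the standard imports:

from pygeoif.geometry import MultiPoint, Point

mp = MultiPoint([(0.0, 0.0), (1.0, 2.0), (1.0, 2.0)], unique=True)
assert len(mp) == 2  # duplicate coordinates removed; ordering not preserved
mp2 = MultiPoint.from_points(Point(0, 0), Point(1, 2))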
9,889
pygeoif.geometry
__init__
Create a collection of one or more points.

Parameters
----------
points : sequence
    A sequence of (x, y [,z]) numeric coordinate pairs or triples.
unique: boolean,
    when unique is true duplicates will be removed,
    the ordering will not be preserved.

Example
-------
Construct a 2 point collection

>>> ob = MultiPoint([[0.0, 0.0], [1.0, 2.0]])
>>> len(ob.geoms)
2
>>> type(ob.geoms[0]) == Point
True
def __init__(self, points: Sequence[PointType], unique: bool = False) -> None:
    """
    Create a collection of one or more points.

    Parameters
    ----------
    points : sequence
        A sequence of (x, y [,z]) numeric coordinate pairs or triples.
    unique: boolean,
        when unique is true duplicates will be removed,
        the ordering will not be preserved.

    Example
    -------
    Construct a 2 point collection

    >>> ob = MultiPoint([[0.0, 0.0], [1.0, 2.0]])
    >>> len(ob.geoms)
    2
    >>> type(ob.geoms[0]) == Point
    True

    """
    if unique:
        points = set(points)  # type: ignore [assignment]
    object.__setattr__(self, "_geoms", tuple(Point(*point) for point in points))
(self, points: Sequence[Union[Tuple[float, float], Tuple[float, float, float]]], unique: bool = False) -> NoneType
9,890
pygeoif.geometry
__len__
Return the number of points in this MultiPoint.
def __len__(self) -> int:
    """Return the number of points in this MultiPoint."""
    return len(self._geoms)
(self) -> int
9,891
pygeoif.geometry
__repr__
Return the representation.
def __repr__(self) -> str:
    """Return the representation."""
    return (
        f"{self.geom_type}"
        f"({tuple(geom.coords[0] for geom in self._geoms if geom.coords)})"
    )
(self) -> str
9,895
pygeoif.geometry
_prepare_hull
null
def _prepare_hull(self) -> Iterable[Point2D]:
    return ((pt.x, pt.y) for pt in self.geoms)
(self) -> Iterable[Tuple[float, float]]
9,896
pygeoif.geometry
MultiPolygon
A collection of one or more polygons.

If component polygons overlap the collection is `invalid` and some
operations on it may fail.

Attributes
----------
geoms : sequence
    A sequence of `Polygon` instances
class MultiPolygon(_MultiGeometry):
    """
    A collection of one or more polygons.

    If component polygons overlap the collection is `invalid` and some
    operations on it may fail.

    Attributes
    ----------
    geoms : sequence
        A sequence of `Polygon` instances

    """

    _geoms: Tuple[Polygon, ...]

    def __init__(self, polygons: Sequence[PolygonType], unique: bool = False) -> None:
        """
        Initialize a Multipolygon.

        Parameters
        ----------
        polygons : sequence
            A sequence of (shell, holes) tuples where shell is the sequence
            representation of a linear ring and holes is a sequence of such
            linear rings
        unique: boolean,
            when unique is true duplicates will be removed,
            the ordering will not be preserved.

        Example
        -------
        Construct a collection from a sequence of coordinate tuples

        >>> ob = MultiPolygon([
        ...     (
        ...         ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)),
        ...         [((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1))]
        ...     )
        ... ])
        >>> len(ob.geoms)
        1
        >>> type(ob.geoms[0]) == Polygon
        True

        """
        if unique:
            polygons = set(polygons)  # type: ignore [assignment]
        object.__setattr__(
            self,
            "_geoms",
            tuple(
                Polygon(
                    shell=polygon[0],
                    holes=polygon[1] if len(polygon) == 2 else None,  # noqa: PLR2004
                )
                for polygon in polygons
            ),
        )

    def __len__(self) -> int:
        """Return the number of polygons in the collection."""
        return len(self._geoms)

    def __repr__(self) -> str:
        """Return the representation."""
        return f"{self.geom_type}({tuple(geom.coords for geom in self.geoms)})"

    @property
    def geoms(self) -> Iterator[Polygon]:
        """Iterate over the polygons."""
        yield from (cast(Polygon, p) for p in super().geoms)

    @property
    def _wkt_coords(self) -> str:
        return ",".join(f"({poly._wkt_coords})" for poly in self.geoms)  # noqa: SLF001

    @property
    def __geo_interface__(self) -> GeoInterface:
        """Return the geo interface."""
        geo_interface = super().__geo_interface__
        coords = tuple(
            (geom.exterior.coords, *tuple(hole.coords for hole in geom.interiors))
            for geom in self.geoms
        )
        geo_interface["coordinates"] = coords
        return geo_interface

    @classmethod
    def from_polygons(cls, *args: Polygon, unique: bool = False) -> "MultiPolygon":
        """Create a MultiPolygon from Polygons."""
        return cls([poly.coords for poly in args], unique=unique)

    @classmethod
    def _from_dict(cls, geo_interface: GeoInterface) -> "MultiPolygon":
        cls._check_dict(geo_interface)
        coords = tuple(
            (poly[0], poly[1:])  # type: ignore [index]
            for poly in geo_interface["coordinates"]
        )
        return cls(cast(Sequence[PolygonType], coords))

    def _prepare_hull(self) -> Iterable[Point2D]:
        return (
            (pt.x, pt.y)
            for pt in chain.from_iterable(poly.exterior.geoms for poly in self.geoms)
        )
(polygons: Sequence[Union[Tuple[Sequence[Tuple[float, float]], Sequence[Sequence[Tuple[float, float]]]], Tuple[Sequence[Tuple[float, float]]], Tuple[Sequence[Tuple[float, float, float]], Sequence[Sequence[Tuple[float, float, float]]]], Tuple[Sequence[Tuple[float, float, float]]]]], unique: bool = False) -> None
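A usage sketch for MultiPolygon, assuming the standard imports:

from pygeoif.geometry import MultiPolygon

polys = MultiPolygon([
    (
        ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)),    # shell
        [((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1))],  # one hole
    ),
])
assert len(polys) == 1
shell = next(polys.geoms).exterior  # the shell as a LinearRing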
9,900
pygeoif.geometry
__init__
Initialize a Multipolygon.

Parameters
----------
polygons : sequence
    A sequence of (shell, holes) tuples where shell is the sequence
    representation of a linear ring and holes is a sequence of such
    linear rings
unique: boolean,
    when unique is true duplicates will be removed,
    the ordering will not be preserved.

Example
-------
Construct a collection from a sequence of coordinate tuples

>>> ob = MultiPolygon([
...     (
...         ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)),
...         [((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1))]
...     )
... ])
>>> len(ob.geoms)
1
>>> type(ob.geoms[0]) == Polygon
True
def __init__(self, polygons: Sequence[PolygonType], unique: bool = False) -> None:
    """
    Initialize a Multipolygon.

    Parameters
    ----------
    polygons : sequence
        A sequence of (shell, holes) tuples where shell is the sequence
        representation of a linear ring and holes is a sequence of such
        linear rings
    unique: boolean,
        when unique is true duplicates will be removed,
        the ordering will not be preserved.

    Example
    -------
    Construct a collection from a sequence of coordinate tuples

    >>> ob = MultiPolygon([
    ...     (
    ...         ((0.0, 0.0), (0.0, 1.0), (1.0, 1.0), (1.0, 0.0)),
    ...         [((0.1, 0.1), (0.1, 0.2), (0.2, 0.2), (0.2, 0.1))]
    ...     )
    ... ])
    >>> len(ob.geoms)
    1
    >>> type(ob.geoms[0]) == Polygon
    True

    """
    if unique:
        polygons = set(polygons)  # type: ignore [assignment]
    object.__setattr__(
        self,
        "_geoms",
        tuple(
            Polygon(
                shell=polygon[0],
                holes=polygon[1] if len(polygon) == 2 else None,  # noqa: PLR2004
            )
            for polygon in polygons
        ),
    )
(self, polygons: Sequence[Union[Tuple[Sequence[Tuple[float, float]], Sequence[Sequence[Tuple[float, float]]]], Tuple[Sequence[Tuple[float, float]]], Tuple[Sequence[Tuple[float, float, float]], Sequence[Sequence[Tuple[float, float, float]]]], Tuple[Sequence[Tuple[float, float, float]]]]], unique: bool = False) -> NoneType
9,901
pygeoif.geometry
__len__
Return the number of polygons in the collection.
def __len__(self) -> int:
    """Return the number of polygons in the collection."""
    return len(self._geoms)
(self) -> int
9,902
pygeoif.geometry
__repr__
Return the representation.
def __repr__(self) -> str:
    """Return the representation."""
    return f"{self.geom_type}({tuple(geom.coords for geom in self.geoms)})"
(self) -> str
9,906
pygeoif.geometry
_prepare_hull
null
def _prepare_hull(self) -> Iterable[Point2D]:
    return (
        (pt.x, pt.y)
        for pt in chain.from_iterable(poly.exterior.geoms for poly in self.geoms)
    )
(self) -> Iterable[Tuple[float, float]]
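The private _prepare_hull methods stream 2D points into the library's convex-hull machinery. A sketch, assuming the public convex_hull property of pygeoif 1.x is what ultimately consumes them:

from pygeoif.geometry import MultiPolygon

polys = MultiPolygon([(((0.0, 0.0), (2.0, 0.0), (1.0, 2.0)),)])
hull = polys.convex_hull  # built from the exterior points yielded by _prepare_hull
print(hull.geom_type)     # Polygon, for three or more non-collinear points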