file_path
stringlengths
32
153
content
stringlengths
0
3.14M
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_io.py
"""Tests for h11's low-level wire-format readers and writers.

Exercises the READERS/WRITERS dispatch tables, the body readers/writers,
and incremental parsing through ReceiveBuffer. (Formatting reconstructed;
every token matches the original.)
"""

from typing import Any, Callable, Generator, List

import pytest

from .._events import (
    ConnectionClosed,
    Data,
    EndOfMessage,
    Event,
    InformationalResponse,
    Request,
    Response,
)
from .._headers import Headers, normalize_and_validate
from .._readers import (
    _obsolete_line_fold,
    ChunkedReader,
    ContentLengthReader,
    Http10Reader,
    READERS,
)
from .._receivebuffer import ReceiveBuffer
from .._state import (
    CLIENT,
    CLOSED,
    DONE,
    IDLE,
    MIGHT_SWITCH_PROTOCOL,
    MUST_CLOSE,
    SEND_BODY,
    SEND_RESPONSE,
    SERVER,
    SWITCHED_PROTOCOL,
)
from .._util import LocalProtocolError
from .._writers import (
    ChunkedWriter,
    ContentLengthWriter,
    Http10Writer,
    write_any_response,
    write_headers,
    write_request,
    WRITERS,
)
from .helpers import normalize_data_events

# ((role, state), event, wire bytes) triples that should round-trip through
# both the matching reader and the matching writer.
SIMPLE_CASES = [
    (
        (CLIENT, IDLE),
        Request(
            method="GET",
            target="/a",
            headers=[("Host", "foo"), ("Connection", "close")],
        ),
        b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n",
    ),
    (
        (SERVER, SEND_RESPONSE),
        Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"),
        b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n",
    ),
    (
        (SERVER, SEND_RESPONSE),
        Response(status_code=200, headers=[], reason=b"OK"),  # type: ignore[arg-type]
        b"HTTP/1.1 200 OK\r\n\r\n",
    ),
    (
        (SERVER, SEND_RESPONSE),
        InformationalResponse(
            status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade"
        ),
        b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n",
    ),
    (
        (SERVER, SEND_RESPONSE),
        InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"),  # type: ignore[arg-type]
        b"HTTP/1.1 101 Upgrade\r\n\r\n",
    ),
]


def dowrite(writer: Callable[..., None], obj: Any) -> bytes:
    # Drive a writer callback-style and collect everything it emits.
    got_list: List[bytes] = []
    writer(obj, got_list.append)
    return b"".join(got_list)


def tw(writer: Any, obj: Any, expected: Any) -> None:
    # "test writer": writing obj must produce exactly `expected`.
    got = dowrite(writer, obj)
    assert got == expected


def makebuf(data: bytes) -> ReceiveBuffer:
    # Convenience: a ReceiveBuffer pre-loaded with `data`.
    buf = ReceiveBuffer()
    buf += data
    return buf


def tr(reader: Any, data: bytes, expected: Any) -> None:
    # "test reader": feed `data` whole, byte-by-byte, and followed by trailing
    # garbage; the reader must produce `expected` in each scenario.
    def check(got: Any) -> None:
        assert got == expected
        # Headers should always be returned as bytes, not e.g. bytearray
        # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478
        for name, value in getattr(got, "headers", []):
            assert type(name) is bytes
            assert type(value) is bytes

    # Simple: consume whole thing
    buf = makebuf(data)
    check(reader(buf))
    assert not buf

    # Incrementally growing buffer
    buf = ReceiveBuffer()
    for i in range(len(data)):
        assert reader(buf) is None
        buf += data[i : i + 1]
    check(reader(buf))

    # Trailing data
    buf = makebuf(data)
    buf += b"trailing"
    check(reader(buf))
    assert bytes(buf) == b"trailing"


def test_writers_simple() -> None:
    for ((role, state), event, binary) in SIMPLE_CASES:
        tw(WRITERS[role, state], event, binary)


def test_readers_simple() -> None:
    for ((role, state), event, binary) in SIMPLE_CASES:
        tr(READERS[role, state], binary, event)


def test_writers_unusual() -> None:
    # Simple test of the write_headers utility routine
    tw(
        write_headers,
        normalize_and_validate([("foo", "bar"), ("baz", "quux")]),
        b"foo: bar\r\nbaz: quux\r\n\r\n",
    )
    tw(write_headers, Headers([]), b"\r\n")

    # We understand HTTP/1.0, but we don't speak it
    with pytest.raises(LocalProtocolError):
        tw(
            write_request,
            Request(
                method="GET",
                target="/",
                headers=[("Host", "foo"), ("Connection", "close")],
                http_version="1.0",
            ),
            None,
        )
    with pytest.raises(LocalProtocolError):
        tw(
            write_any_response,
            Response(
                status_code=200, headers=[("Connection", "close")], http_version="1.0"
            ),
            None,
        )


def test_readers_unusual() -> None:
    # Reading HTTP/1.0
    tr(
        READERS[CLIENT, IDLE],
        b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n",
        Request(
            method="HEAD",
            target="/foo",
            headers=[("Some", "header")],
            http_version="1.0",
        ),
    )

    # check no-headers, since it's only legal with HTTP/1.0
    tr(
        READERS[CLIENT, IDLE],
        b"HEAD /foo HTTP/1.0\r\n\r\n",
        Request(method="HEAD", target="/foo", headers=[], http_version="1.0"),  # type: ignore[arg-type]
    )

    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n",
        Response(
            status_code=200,
            headers=[("Some", "header")],
            http_version="1.0",
            reason=b"OK",
        ),
    )

    # single-character header values (actually disallowed by the ABNF in RFC
    # 7230 -- this is a bug in the standard that we originally copied...)
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n",
        Response(
            status_code=200,
            headers=[("Foo", "a a a a a")],
            http_version="1.0",
            reason=b"OK",
        ),
    )

    # Empty headers -- also legal
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n",
        Response(
            status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
        ),
    )

    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n",
        Response(
            status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK"
        ),
    )

    # Tolerate broken servers that leave off the response code
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n",
        Response(
            status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b""
        ),
    )

    # Tolerate headers line endings (\r\n and \n)
    # \n\r\b between headers and body
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n",
        Response(
            status_code=200,
            headers=[("SomeHeader", "val")],
            http_version="1.1",
            reason="OK",
        ),
    )

    # delimited only with \n
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n",
        Response(
            status_code=200,
            headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
            http_version="1.1",
            reason="OK",
        ),
    )

    # mixed \r\n and \n
    tr(
        READERS[SERVER, SEND_RESPONSE],
        b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n",
        Response(
            status_code=200,
            headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")],
            http_version="1.1",
            reason="OK",
        ),
    )

    # obsolete line folding
    tr(
        READERS[CLIENT, IDLE],
        b"HEAD /foo HTTP/1.1\r\n"
        b"Host: example.com\r\n"
        b"Some: multi-line\r\n"
        b" header\r\n"
        b"\tnonsense\r\n"
        b" \t \t\tI guess\r\n"
        b"Connection: close\r\n"
        b"More-nonsense: in the\r\n"
        b" last header \r\n\r\n",
        Request(
            method="HEAD",
            target="/foo",
            headers=[
                ("Host", "example.com"),
                ("Some", "multi-line header nonsense I guess"),
                ("Connection", "close"),
                ("More-nonsense", "in the last header"),
            ],
        ),
    )

    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n",
            None,
        )

    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n",
            None,
        )
    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
            None,
        )
    # NOTE(review): this case is byte-identical to the previous one --
    # presumably a copy/paste leftover; harmless, but one could be dropped.
    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n",
            None,
        )
    with pytest.raises(LocalProtocolError):
        tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None)


def test__obsolete_line_fold_bytes() -> None:
    # _obsolete_line_fold has a defensive cast to bytearray, which is
    # necessary to protect against O(n^2) behavior in case anyone ever passes
    # in regular bytestrings... but right now we never pass in regular
    # bytestrings. so this test just exists to get some coverage on that
    # defensive cast.
    assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [
        b"aaa",
        bytearray(b"bbb ccc"),
        b"ddd",
    ]


def _run_reader_iter(
    reader: Any, buf: bytes, do_eof: bool
) -> Generator[Any, None, None]:
    # Pump `reader` against `buf` until it stalls (None) or the message ends.
    while True:
        event = reader(buf)
        if event is None:
            break
        yield event
        # body readers have undefined behavior after returning EndOfMessage,
        # because this changes the state so they don't get called again
        if type(event) is EndOfMessage:
            break
    if do_eof:
        assert not buf
        yield reader.read_eof()


def _run_reader(*args: Any) -> List[Event]:
    events = list(_run_reader_iter(*args))
    return normalize_data_events(events)


def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None:
    # `thunk` builds a fresh body reader; run it over `data` whole, then
    # incrementally, then (if the message self-terminates) with trailing bytes.
    # Simple: consume whole thing
    print("Test 1")
    buf = makebuf(data)
    assert _run_reader(thunk(), buf, do_eof) == expected

    # Incrementally growing buffer
    print("Test 2")
    reader = thunk()
    buf = ReceiveBuffer()
    events = []
    for i in range(len(data)):
        events += _run_reader(reader, buf, False)
        buf += data[i : i + 1]
    events += _run_reader(reader, buf, do_eof)
    assert normalize_data_events(events) == expected

    is_complete = any(type(event) is EndOfMessage for event in expected)
    if is_complete and not do_eof:
        buf = makebuf(data + b"trailing")
        assert _run_reader(thunk(), buf, False) == expected


def test_ContentLengthReader() -> None:
    t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()])

    t_body_reader(
        lambda: ContentLengthReader(10),
        b"0123456789",
        [Data(data=b"0123456789"), EndOfMessage()],
    )


def test_Http10Reader() -> None:
    t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True)
    t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False)
    t_body_reader(
        Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True
    )


def test_ChunkedReader() -> None:
    t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()])

    t_body_reader(
        ChunkedReader,
        b"0\r\nSome: header\r\n\r\n",
        [EndOfMessage(headers=[("Some", "header")])],
    )

    t_body_reader(
        ChunkedReader,
        b"5\r\n01234\r\n"
        + b"10\r\n0123456789abcdef\r\n"
        + b"0\r\n"
        + b"Some: header\r\n\r\n",
        [
            Data(data=b"012340123456789abcdef"),
            EndOfMessage(headers=[("Some", "header")]),
        ],
    )

    t_body_reader(
        ChunkedReader,
        b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n",
        [Data(data=b"012340123456789abcdef"), EndOfMessage()],
    )

    # handles upper and lowercase hex
    t_body_reader(
        ChunkedReader,
        b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n",
        [Data(data=b"x" * 0xAA), EndOfMessage()],
    )

    # refuses arbitrarily long chunk integers
    with pytest.raises(LocalProtocolError):
        # Technically this is legal HTTP/1.1, but we refuse to process chunk
        # sizes that don't fit into 20 characters of hex
        t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")])

    # refuses garbage in the chunk count
    with pytest.raises(LocalProtocolError):
        t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None)

    # handles (and discards) "chunk extensions" omg wtf
    t_body_reader(
        ChunkedReader,
        b"5; hello=there\r\n"
        + b"xxxxx"
        + b"\r\n"
        + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n',
        [Data(data=b"xxxxx"), EndOfMessage()],
    )

    t_body_reader(
        ChunkedReader,
        b"5 \r\n01234\r\n" + b"0\r\n\r\n",
        [Data(data=b"01234"), EndOfMessage()],
    )


def test_ContentLengthWriter() -> None:
    w = ContentLengthWriter(5)
    assert dowrite(w, Data(data=b"123")) == b"123"
    assert dowrite(w, Data(data=b"45")) == b"45"
    assert dowrite(w, EndOfMessage()) == b""

    w = ContentLengthWriter(5)
    with pytest.raises(LocalProtocolError):
        dowrite(w, Data(data=b"123456"))

    w = ContentLengthWriter(5)
    dowrite(w, Data(data=b"123"))
    with pytest.raises(LocalProtocolError):
        dowrite(w, Data(data=b"456"))

    w = ContentLengthWriter(5)
    dowrite(w, Data(data=b"123"))
    with pytest.raises(LocalProtocolError):
        dowrite(w, EndOfMessage())

    w = ContentLengthWriter(5)
    # NOTE(review): the next two comparisons are bare expressions (no assert),
    # so they can never fail; they only serve to advance the writer's count.
    dowrite(w, Data(data=b"123")) == b"123"
    dowrite(w, Data(data=b"45")) == b"45"
    with pytest.raises(LocalProtocolError):
        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))


def test_ChunkedWriter() -> None:
    w = ChunkedWriter()
    assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
    assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"

    assert dowrite(w, Data(data=b"")) == b""

    assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"

    assert (
        dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
        == b"0\r\nEtag: asdf\r\na: b\r\n\r\n"
    )


def test_Http10Writer() -> None:
    w = Http10Writer()
    assert dowrite(w, Data(data=b"1234")) == b"1234"
    assert dowrite(w, EndOfMessage()) == b""

    with pytest.raises(LocalProtocolError):
        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))


def test_reject_garbage_after_request_line() -> None:
    with pytest.raises(LocalProtocolError):
        tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)


def test_reject_garbage_after_response_line() -> None:
    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
            None,
        )


def test_reject_garbage_in_header_line() -> None:
    with pytest.raises(LocalProtocolError):
        tr(
            READERS[CLIENT, IDLE],
            b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
            None,
        )


def test_reject_non_vchar_in_path() -> None:
    for bad_char in b"\x00\x20\x7f\xee":
        message = bytearray(b"HEAD /")
        message.append(bad_char)
        message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
        with pytest.raises(LocalProtocolError):
            tr(READERS[CLIENT, IDLE], message, None)


# https://github.com/python-hyper/h11/issues/57
def test_allow_some_garbage_in_cookies() -> None:
    tr(
        READERS[CLIENT, IDLE],
        b"HEAD /foo HTTP/1.1\r\n"
        b"Host: foo\r\n"
        b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
        b"\r\n",
        Request(
            method="HEAD",
            target="/foo",
            headers=[
                ("Host", "foo"),
                ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
            ],
        ),
    )


def test_host_comes_first() -> None:
    tw(
        write_headers,
        normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
        b"Host: example.com\r\nfoo: bar\r\n\r\n",
    )
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_events.py
"""Tests for h11's event classes: field validation and normalization.

(Formatting reconstructed; every token matches the original.)
"""

from http import HTTPStatus

import pytest

from .. import _events
from .._events import (
    ConnectionClosed,
    Data,
    EndOfMessage,
    Event,
    InformationalResponse,
    Request,
    Response,
)
from .._util import LocalProtocolError


def test_events() -> None:
    with pytest.raises(LocalProtocolError):
        # Missing Host:
        req = Request(
            method="GET", target="/", headers=[("a", "b")], http_version="1.1"
        )
    # But this is okay (HTTP/1.0)
    req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0")
    # fields are normalized
    assert req.method == b"GET"
    assert req.target == b"/"
    assert req.headers == [(b"a", b"b")]
    assert req.http_version == b"1.0"

    # This is also okay -- has a Host (with weird capitalization, which is ok)
    req = Request(
        method="GET",
        target="/",
        headers=[("a", "b"), ("hOSt", "example.com")],
        http_version="1.1",
    )
    # we normalize header capitalization
    assert req.headers == [(b"a", b"b"), (b"host", b"example.com")]

    # Multiple host is bad too
    with pytest.raises(LocalProtocolError):
        req = Request(
            method="GET",
            target="/",
            headers=[("Host", "a"), ("Host", "a")],
            http_version="1.1",
        )
    # Even for HTTP/1.0
    with pytest.raises(LocalProtocolError):
        req = Request(
            method="GET",
            target="/",
            headers=[("Host", "a"), ("Host", "a")],
            http_version="1.0",
        )

    # Header values are validated
    for bad_char in "\x00\r\n\f\v":
        with pytest.raises(LocalProtocolError):
            req = Request(
                method="GET",
                target="/",
                headers=[("Host", "a"), ("Foo", "asd" + bad_char)],
                http_version="1.0",
            )

    # But for compatibility we allow non-whitespace control characters, even
    # though they're forbidden by the spec.
    Request(
        method="GET",
        target="/",
        headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")],
        http_version="1.0",
    )

    # Request target is validated
    for bad_byte in b"\x00\x20\x7f\xee":
        target = bytearray(b"/")
        target.append(bad_byte)
        with pytest.raises(LocalProtocolError):
            Request(
                method="GET", target=target, headers=[("Host", "a")], http_version="1.1"
            )

    # Request method is validated
    with pytest.raises(LocalProtocolError):
        Request(
            method="GET / HTTP/1.1",
            target=target,
            headers=[("Host", "a")],
            http_version="1.1",
        )

    ir = InformationalResponse(status_code=100, headers=[("Host", "a")])
    assert ir.status_code == 100
    assert ir.headers == [(b"host", b"a")]
    assert ir.http_version == b"1.1"

    with pytest.raises(LocalProtocolError):
        InformationalResponse(status_code=200, headers=[("Host", "a")])

    resp = Response(status_code=204, headers=[], http_version="1.0")  # type: ignore[arg-type]
    assert resp.status_code == 204
    assert resp.headers == []
    assert resp.http_version == b"1.0"

    with pytest.raises(LocalProtocolError):
        resp = Response(status_code=100, headers=[], http_version="1.0")  # type: ignore[arg-type]

    with pytest.raises(LocalProtocolError):
        Response(status_code="100", headers=[], http_version="1.0")  # type: ignore[arg-type]

    with pytest.raises(LocalProtocolError):
        InformationalResponse(status_code=b"100", headers=[], http_version="1.0")  # type: ignore[arg-type]

    d = Data(data=b"asdf")
    assert d.data == b"asdf"

    eom = EndOfMessage()
    assert eom.headers == []

    cc = ConnectionClosed()
    assert repr(cc) == "ConnectionClosed()"


def test_intenum_status_code() -> None:
    # https://github.com/python-hyper/h11/issues/72
    r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0")  # type: ignore[arg-type]
    assert r.status_code == HTTPStatus.OK
    assert type(r.status_code) is not type(HTTPStatus.OK)
    assert type(r.status_code) is int


def test_header_casing() -> None:
    # Headers compare case-insensitively but preserve raw casing via raw_items().
    r = Request(
        method="GET",
        target="/",
        headers=[("Host", "example.org"), ("Connection", "keep-alive")],
        http_version="1.1",
    )
    assert len(r.headers) == 2
    assert r.headers[0] == (b"host", b"example.org")
    assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")]
    assert r.headers.raw_items() == [
        (b"Host", b"example.org"),
        (b"Connection", b"keep-alive"),
    ]
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_against_stdlib_http.py
"""End-to-end tests: h11 talking to Python's stdlib HTTP client and server.

(Formatting reconstructed; every token matches the original.)
"""

import json
import os.path
import socket
import socketserver
import threading
from contextlib import closing, contextmanager
from http.server import SimpleHTTPRequestHandler
from typing import Callable, Generator
from urllib.request import urlopen

import h11


@contextmanager
def socket_server(
    handler: Callable[..., socketserver.BaseRequestHandler]
) -> Generator[socketserver.TCPServer, None, None]:
    # Run a TCP server on an ephemeral localhost port in a daemon thread, and
    # shut it down when the `with` block exits.
    httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
    thread = threading.Thread(
        target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
    )
    thread.daemon = True
    try:
        thread.start()
        yield httpd
    finally:
        httpd.shutdown()


test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
with open(test_file_path, "rb") as f:
    test_file_data = f.read()


class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
    # Serves the same fixture file regardless of the requested path.
    def translate_path(self, path: str) -> str:
        return test_file_path


def test_h11_as_client() -> None:
    with socket_server(SingleMindedRequestHandler) as httpd:
        with closing(socket.create_connection(httpd.server_address)) as s:
            c = h11.Connection(h11.CLIENT)

            s.sendall(
                c.send(  # type: ignore[arg-type]
                    h11.Request(
                        method="GET", target="/foo", headers=[("Host", "localhost")]
                    )
                )
            )
            s.sendall(c.send(h11.EndOfMessage()))  # type: ignore[arg-type]

            data = bytearray()
            while True:
                event = c.next_event()
                print(event)
                if event is h11.NEED_DATA:
                    # Use a small read buffer to make things more challenging
                    # and exercise more paths :-)
                    c.receive_data(s.recv(10))
                    continue
                if type(event) is h11.Response:
                    assert event.status_code == 200
                if type(event) is h11.Data:
                    data += event.data
                if type(event) is h11.EndOfMessage:
                    break
            assert bytes(data) == test_file_data


class H11RequestHandler(socketserver.BaseRequestHandler):
    # Minimal h11-powered server: echoes the parsed request back as JSON.
    def handle(self) -> None:
        with closing(self.request) as s:
            c = h11.Connection(h11.SERVER)
            request = None
            while True:
                event = c.next_event()
                if event is h11.NEED_DATA:
                    # Use a small read buffer to make things more challenging
                    # and exercise more paths :-)
                    c.receive_data(s.recv(10))
                    continue
                if type(event) is h11.Request:
                    request = event
                if type(event) is h11.EndOfMessage:
                    break
            assert request is not None
            info = json.dumps(
                {
                    "method": request.method.decode("ascii"),
                    "target": request.target.decode("ascii"),
                    "headers": {
                        name.decode("ascii"): value.decode("ascii")
                        for (name, value) in request.headers
                    },
                }
            )
            s.sendall(c.send(h11.Response(status_code=200, headers=[])))  # type: ignore[arg-type]
            s.sendall(c.send(h11.Data(data=info.encode("ascii"))))
            s.sendall(c.send(h11.EndOfMessage()))


def test_h11_as_server() -> None:
    with socket_server(H11RequestHandler) as httpd:
        host, port = httpd.server_address
        url = "http://{}:{}/some-path".format(host, port)
        with closing(urlopen(url)) as f:
            assert f.getcode() == 200
            data = f.read()
    info = json.loads(data.decode("ascii"))
    print(info)
    assert info["method"] == "GET"
    assert info["target"] == "/some-path"
    assert "urllib" in info["headers"]["user-agent"]
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_util.py
"""Tests for h11's internal utilities: exceptions, validate(), Sentinel, bytesify().

(Formatting reconstructed; every token matches the original. The nesting of
`thunk` inside test_LocalProtocolError is reconstructed from context -- the
dump lost indentation -- TODO confirm against upstream.)
"""

import re
import sys
import traceback
from typing import NoReturn

import pytest

from .._util import (
    bytesify,
    LocalProtocolError,
    ProtocolError,
    RemoteProtocolError,
    Sentinel,
    validate,
)


def test_ProtocolError() -> None:
    # ProtocolError is abstract; instantiating it directly must fail.
    with pytest.raises(TypeError):
        ProtocolError("abstract base class")


def test_LocalProtocolError() -> None:
    try:
        raise LocalProtocolError("foo")
    except LocalProtocolError as e:
        assert str(e) == "foo"
        assert e.error_status_hint == 400

    try:
        raise LocalProtocolError("foo", error_status_hint=418)
    except LocalProtocolError as e:
        assert str(e) == "foo"
        assert e.error_status_hint == 418

    def thunk() -> NoReturn:
        raise LocalProtocolError("a", error_status_hint=420)

    # Re-raising as RemoteProtocolError must preserve args, the status hint,
    # and the original traceback.
    try:
        try:
            thunk()
        except LocalProtocolError as exc1:
            orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
            exc1._reraise_as_remote_protocol_error()
    except RemoteProtocolError as exc2:
        assert type(exc2) is RemoteProtocolError
        assert exc2.args == ("a",)
        assert exc2.error_status_hint == 420
        new_traceback = "".join(traceback.format_tb(sys.exc_info()[2]))
        assert new_traceback.endswith(orig_traceback)


def test_validate() -> None:
    my_re = re.compile(rb"(?P<group1>[0-9]+)\.(?P<group2>[0-9]+)")
    with pytest.raises(LocalProtocolError):
        validate(my_re, b"0.")

    groups = validate(my_re, b"0.1")
    assert groups == {"group1": b"0", "group2": b"1"}

    # successful partial matches are an error - must match whole string
    with pytest.raises(LocalProtocolError):
        validate(my_re, b"0.1xx")
    with pytest.raises(LocalProtocolError):
        validate(my_re, b"0.1\n")


def test_validate_formatting() -> None:
    my_re = re.compile(rb"foo")

    with pytest.raises(LocalProtocolError) as excinfo:
        validate(my_re, b"", "oops")
    assert "oops" in str(excinfo.value)

    with pytest.raises(LocalProtocolError) as excinfo:
        validate(my_re, b"", "oops {}")
    assert "oops {}" in str(excinfo.value)

    with pytest.raises(LocalProtocolError) as excinfo:
        validate(my_re, b"", "oops {} xx", 10)
    assert "oops 10 xx" in str(excinfo.value)


def test_make_sentinel() -> None:
    class S(Sentinel, metaclass=Sentinel):
        pass

    # Each Sentinel subclass is its own singleton: it is its own type.
    assert repr(S) == "S"
    assert S == S
    assert type(S).__name__ == "S"
    assert S in {S}
    assert type(S) is S

    class S2(Sentinel, metaclass=Sentinel):
        pass

    assert repr(S2) == "S2"
    assert S != S2
    assert S not in {S2}
    assert type(S) is not type(S2)


def test_bytesify() -> None:
    assert bytesify(b"123") == b"123"
    assert bytesify(bytearray(b"123")) == b"123"
    assert bytesify("123") == b"123"

    with pytest.raises(UnicodeEncodeError):
        bytesify("\u1234")

    with pytest.raises(TypeError):
        bytesify(10)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_receivebuffer.py
"""Tests for ReceiveBuffer, the incremental byte buffer the readers parse from.

(Formatting reconstructed; every token matches the original.)
"""

import re
from typing import Tuple

import pytest

from .._receivebuffer import ReceiveBuffer


def test_receivebuffer() -> None:
    b = ReceiveBuffer()
    assert not b
    assert len(b) == 0
    assert bytes(b) == b""

    b += b"123"
    assert b
    assert len(b) == 3
    # bytes() is repeatable -- peeking does not consume
    assert bytes(b) == b"123"
    assert bytes(b) == b"123"

    assert b.maybe_extract_at_most(2) == b"12"
    assert b
    assert len(b) == 1
    assert bytes(b) == b"3"
    assert bytes(b) == b"3"

    assert b.maybe_extract_at_most(10) == b"3"
    assert bytes(b) == b""

    assert b.maybe_extract_at_most(10) is None
    assert not b

    ################################################################
    # maybe_extract_until_next
    ################################################################

    b += b"123\n456\r\n789\r\n"

    assert b.maybe_extract_next_line() == b"123\n456\r\n"
    assert bytes(b) == b"789\r\n"

    assert b.maybe_extract_next_line() == b"789\r\n"
    assert bytes(b) == b""

    b += b"12\r"
    assert b.maybe_extract_next_line() is None
    assert bytes(b) == b"12\r"

    b += b"345\n\r"
    assert b.maybe_extract_next_line() is None
    assert bytes(b) == b"12\r345\n\r"

    # here we stopped at the middle of b"\r\n" delimiter

    b += b"\n6789aaa123\r\n"
    assert b.maybe_extract_next_line() == b"12\r345\n\r\n"
    assert b.maybe_extract_next_line() == b"6789aaa123\r\n"
    assert b.maybe_extract_next_line() is None
    assert bytes(b) == b""

    ################################################################
    # maybe_extract_lines
    ################################################################

    b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing"
    lines = b.maybe_extract_lines()
    assert lines == [b"123", b"a: b", b"foo:bar"]
    assert bytes(b) == b"trailing"

    assert b.maybe_extract_lines() is None

    b += b"\r\n\r"
    assert b.maybe_extract_lines() is None

    assert b.maybe_extract_at_most(100) == b"trailing\r\n\r"
    assert not b

    # Empty body case (as happens at the end of chunked encoding if there are
    # no trailing headers, e.g.)
    b += b"\r\ntrailing"
    assert b.maybe_extract_lines() == []
    assert bytes(b) == b"trailing"


@pytest.mark.parametrize(
    "data",
    [
        pytest.param(
            (
                b"HTTP/1.1 200 OK\r\n",
                b"Content-type: text/plain\r\n",
                b"Connection: close\r\n",
                b"\r\n",
                b"Some body",
            ),
            id="with_crlf_delimiter",
        ),
        pytest.param(
            (
                b"HTTP/1.1 200 OK\n",
                b"Content-type: text/plain\n",
                b"Connection: close\n",
                b"\n",
                b"Some body",
            ),
            id="with_lf_only_delimiter",
        ),
        pytest.param(
            (
                b"HTTP/1.1 200 OK\n",
                b"Content-type: text/plain\r\n",
                b"Connection: close\n",
                b"\n",
                b"Some body",
            ),
            id="with_mixed_crlf_and_lf",
        ),
    ],
)
def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None:
    # Line extraction must cope with \r\n, bare \n, and mixtures of the two.
    b = ReceiveBuffer()

    for line in data:
        b += line

    lines = b.maybe_extract_lines()

    assert lines == [
        b"HTTP/1.1 200 OK",
        b"Content-type: text/plain",
        b"Connection: close",
    ]
    assert bytes(b) == b"Some body"
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/h11/tests/test_helpers.py
"""Test for the normalize_data_events helper: adjacent Data events are merged.

(Formatting reconstructed; every token matches the original.)
"""

from .._events import (
    ConnectionClosed,
    Data,
    EndOfMessage,
    Event,
    InformationalResponse,
    Request,
    Response,
)
from .helpers import normalize_data_events


def test_normalize_data_events() -> None:
    # Runs of Data events are coalesced (bytearray payloads become bytes);
    # non-Data events act as merge boundaries.
    assert normalize_data_events(
        [
            Data(data=bytearray(b"1")),
            Data(data=b"2"),
            Response(status_code=200, headers=[]),  # type: ignore[arg-type]
            Data(data=b"3"),
            Data(data=b"4"),
            EndOfMessage(),
            Data(data=b"5"),
            Data(data=b"6"),
            Data(data=b"7"),
        ]
    ) == [
        Data(data=b"12"),
        Response(status_code=200, headers=[]),  # type: ignore[arg-type]
        Data(data=b"34"),
        EndOfMessage(),
        Data(data=b"567"),
    ]
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/util.py
"""QR Code data-layer utilities.

Mode selection, character-count bit packing, BCH format/version codes, the
eight mask functions, and the penalty ("lost point") scoring used to pick
the best mask.
"""
import math
import re
from typing import List

from qrcode import LUT, base, exceptions
from qrcode.base import RSBlock

# QR encoding modes.
MODE_NUMBER = 1 << 0
MODE_ALPHA_NUM = 1 << 1
MODE_8BIT_BYTE = 1 << 2
MODE_KANJI = 1 << 3

# Encoding mode sizes: bit width of the character-count field per mode,
# for the three version bands (small: 1-9, medium: 10-26, large: 27-40).
MODE_SIZE_SMALL = {
    MODE_NUMBER: 10,
    MODE_ALPHA_NUM: 9,
    MODE_8BIT_BYTE: 8,
    MODE_KANJI: 8,
}
MODE_SIZE_MEDIUM = {
    MODE_NUMBER: 12,
    MODE_ALPHA_NUM: 11,
    MODE_8BIT_BYTE: 16,
    MODE_KANJI: 10,
}
MODE_SIZE_LARGE = {
    MODE_NUMBER: 14,
    MODE_ALPHA_NUM: 13,
    MODE_8BIT_BYTE: 16,
    MODE_KANJI: 12,
}

# The 45-character alphanumeric-mode alphabet; index == encoded value.
ALPHA_NUM = b"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ $%*+-./:"
RE_ALPHA_NUM = re.compile(b"^[" + re.escape(ALPHA_NUM) + rb"]*\Z")

# The number of bits for numeric delimited data lengths.
NUMBER_LENGTH = {3: 10, 2: 7, 1: 4}

# Alignment-pattern center coordinates, indexed by version - 1.
PATTERN_POSITION_TABLE = [
    [],
    [6, 18],
    [6, 22],
    [6, 26],
    [6, 30],
    [6, 34],
    [6, 22, 38],
    [6, 24, 42],
    [6, 26, 46],
    [6, 28, 50],
    [6, 30, 54],
    [6, 32, 58],
    [6, 34, 62],
    [6, 26, 46, 66],
    [6, 26, 48, 70],
    [6, 26, 50, 74],
    [6, 30, 54, 78],
    [6, 30, 56, 82],
    [6, 30, 58, 86],
    [6, 34, 62, 90],
    [6, 28, 50, 72, 94],
    [6, 26, 50, 74, 98],
    [6, 30, 54, 78, 102],
    [6, 28, 54, 80, 106],
    [6, 32, 58, 84, 110],
    [6, 30, 58, 86, 114],
    [6, 34, 62, 90, 118],
    [6, 26, 50, 74, 98, 122],
    [6, 30, 54, 78, 102, 126],
    [6, 26, 52, 78, 104, 130],
    [6, 30, 56, 82, 108, 134],
    [6, 34, 60, 86, 112, 138],
    [6, 30, 58, 86, 114, 142],
    [6, 34, 62, 90, 118, 146],
    [6, 30, 54, 78, 102, 126, 150],
    [6, 24, 50, 76, 102, 128, 154],
    [6, 28, 54, 80, 106, 132, 158],
    [6, 32, 58, 84, 110, 136, 162],
    [6, 26, 54, 82, 110, 138, 166],
    [6, 30, 58, 86, 114, 142, 170],
]

# BCH generator polynomials for format (G15) and version (G18) information.
G15 = (1 << 10) | (1 << 8) | (1 << 5) | (1 << 4) | (1 << 2) | (1 << 1) | (1 << 0)
G18 = (
    (1 << 12)
    | (1 << 11)
    | (1 << 10)
    | (1 << 9)
    | (1 << 8)
    | (1 << 5)
    | (1 << 2)
    | (1 << 0)
)
G15_MASK = (1 << 14) | (1 << 12) | (1 << 10) | (1 << 4) | (1 << 1)

# Alternating pad codewords used to fill unused data capacity.
PAD0 = 0xEC
PAD1 = 0x11


# Precompute bit count limits, indexed by error correction level and code size
def _data_count(block):
    return block.data_count


BIT_LIMIT_TABLE = [
    [0]
    + [
        8 * sum(map(_data_count, base.rs_blocks(version, error_correction)))
        for version in range(1, 41)
    ]
    for error_correction in range(4)
]


def BCH_type_info(data):
    """Return the 15-bit BCH-protected, masked format information for *data*."""
    d = data << 10
    while BCH_digit(d) - BCH_digit(G15) >= 0:
        d ^= G15 << (BCH_digit(d) - BCH_digit(G15))
    return ((data << 10) | d) ^ G15_MASK


def BCH_type_number(data):
    """Return the 18-bit BCH-protected version information for *data*."""
    d = data << 12
    while BCH_digit(d) - BCH_digit(G18) >= 0:
        d ^= G18 << (BCH_digit(d) - BCH_digit(G18))
    return (data << 12) | d


def BCH_digit(data):
    """Return the bit length of *data* (position of its highest set bit)."""
    digit = 0
    while data != 0:
        digit += 1
        data >>= 1
    return digit


def pattern_position(version):
    """Return the alignment-pattern center coordinates for *version*."""
    return PATTERN_POSITION_TABLE[version - 1]


def mask_func(pattern):
    """
    Return the mask function for the given mask pattern.
    """
    if pattern == 0:  # 000
        return lambda i, j: (i + j) % 2 == 0
    if pattern == 1:  # 001
        return lambda i, j: i % 2 == 0
    if pattern == 2:  # 010
        return lambda i, j: j % 3 == 0
    if pattern == 3:  # 011
        return lambda i, j: (i + j) % 3 == 0
    if pattern == 4:  # 100
        return lambda i, j: (math.floor(i / 2) + math.floor(j / 3)) % 2 == 0
    if pattern == 5:  # 101
        return lambda i, j: (i * j) % 2 + (i * j) % 3 == 0
    if pattern == 6:  # 110
        return lambda i, j: ((i * j) % 2 + (i * j) % 3) % 2 == 0
    if pattern == 7:  # 111
        return lambda i, j: ((i * j) % 3 + (i + j) % 2) % 2 == 0
    # BUG FIX: the original concatenated a str with the int ``pattern``
    # ("Bad mask pattern: " + pattern), which itself raised an unrelated
    # TypeError and swallowed the intended message.
    raise TypeError(f"Bad mask pattern: {pattern}")  # pragma: no cover


def mode_sizes_for_version(version):
    """Return the character-count field widths for *version*'s band."""
    if version < 10:
        return MODE_SIZE_SMALL
    elif version < 27:
        return MODE_SIZE_MEDIUM
    else:
        return MODE_SIZE_LARGE


def length_in_bits(mode, version):
    """Return the bit width of the character-count field for *mode*/*version*."""
    if mode not in (MODE_NUMBER, MODE_ALPHA_NUM, MODE_8BIT_BYTE, MODE_KANJI):
        raise TypeError(f"Invalid mode ({mode})")  # pragma: no cover

    check_version(version)

    return mode_sizes_for_version(version)[mode]


def check_version(version):
    """Raise ValueError unless 1 <= version <= 40."""
    if version < 1 or version > 40:
        raise ValueError(f"Invalid version (was {version}, expected 1 to 40)")


def lost_point(modules):
    """Return the total mask penalty score for the module matrix.

    Sums the four penalty rules (runs, 2x2 blocks, finder-like patterns,
    dark-module balance) used to choose the best mask pattern.
    """
    modules_count = len(modules)

    lost_point = _lost_point_level1(modules, modules_count)
    lost_point += _lost_point_level2(modules, modules_count)
    lost_point += _lost_point_level3(modules, modules_count)
    lost_point += _lost_point_level4(modules, modules_count)

    return lost_point


def _lost_point_level1(modules, modules_count):
    # Penalty rule 1: runs of >= 5 same-colored modules in a row or column
    # score (run_length - 2) each.
    lost_point = 0

    modules_range = range(modules_count)
    container = [0] * (modules_count + 1)

    for row in modules_range:
        this_row = modules[row]
        previous_color = this_row[0]
        length = 0
        for col in modules_range:
            if this_row[col] == previous_color:
                length += 1
            else:
                if length >= 5:
                    container[length] += 1
                length = 1
                previous_color = this_row[col]
        if length >= 5:
            container[length] += 1

    for col in modules_range:
        previous_color = modules[0][col]
        length = 0
        for row in modules_range:
            if modules[row][col] == previous_color:
                length += 1
            else:
                if length >= 5:
                    container[length] += 1
                length = 1
                previous_color = modules[row][col]
        if length >= 5:
            container[length] += 1

    lost_point += sum(
        container[each_length] * (each_length - 2)
        for each_length in range(5, modules_count + 1)
    )

    return lost_point


def _lost_point_level2(modules, modules_count):
    # Penalty rule 2: each 2x2 block of same-colored modules scores 3.
    lost_point = 0

    modules_range = range(modules_count - 1)
    for row in modules_range:
        this_row = modules[row]
        next_row = modules[row + 1]
        # use iter() and next() to skip next four-block. e.g.
        # d a f   if top-right a != b bottom-right,
        # c b e   then both abcd and abef won't lost any point.
        modules_range_iter = iter(modules_range)
        for col in modules_range_iter:
            top_right = this_row[col + 1]
            if top_right != next_row[col + 1]:
                # reduce 33.3% of runtime via next().
                # None: raise nothing if there is no next item.
                next(modules_range_iter, None)
            elif top_right != this_row[col]:
                continue
            elif top_right != next_row[col]:
                continue
            else:
                lost_point += 3

    return lost_point


def _lost_point_level3(modules, modules_count):
    # Penalty rule 3:
    # 1 : 1 : 3 : 1 : 1 ratio (dark:light:dark:light:dark) pattern in
    # row/column, preceded or followed by light area 4 modules wide. From ISOIEC.
    # pattern1:     10111010000
    # pattern2: 00001011101
    modules_range = range(modules_count)
    modules_range_short = range(modules_count - 10)
    lost_point = 0

    for row in modules_range:
        this_row = modules[row]
        modules_range_short_iter = iter(modules_range_short)
        col = 0
        for col in modules_range_short_iter:
            if (
                not this_row[col + 1]
                and this_row[col + 4]
                and not this_row[col + 5]
                and this_row[col + 6]
                and not this_row[col + 9]
                and (
                    this_row[col + 0]
                    and this_row[col + 2]
                    and this_row[col + 3]
                    and not this_row[col + 7]
                    and not this_row[col + 8]
                    and not this_row[col + 10]
                    or not this_row[col + 0]
                    and not this_row[col + 2]
                    and not this_row[col + 3]
                    and this_row[col + 7]
                    and this_row[col + 8]
                    and this_row[col + 10]
                )
            ):
                lost_point += 40
            # horspool algorithm.
            # if this_row[col + 10]:
            #   pattern1 shift 4, pattern2 shift 2. So min=2.
            # else:
            #   pattern1 shift 1, pattern2 shift 1. So min=1.
            if this_row[col + 10]:
                next(modules_range_short_iter, None)

    for col in modules_range:
        modules_range_short_iter = iter(modules_range_short)
        row = 0
        for row in modules_range_short_iter:
            if (
                not modules[row + 1][col]
                and modules[row + 4][col]
                and not modules[row + 5][col]
                and modules[row + 6][col]
                and not modules[row + 9][col]
                and (
                    modules[row + 0][col]
                    and modules[row + 2][col]
                    and modules[row + 3][col]
                    and not modules[row + 7][col]
                    and not modules[row + 8][col]
                    and not modules[row + 10][col]
                    or not modules[row + 0][col]
                    and not modules[row + 2][col]
                    and not modules[row + 3][col]
                    and modules[row + 7][col]
                    and modules[row + 8][col]
                    and modules[row + 10][col]
                )
            ):
                lost_point += 40
            if modules[row + 10][col]:
                next(modules_range_short_iter, None)

    return lost_point


def _lost_point_level4(modules, modules_count):
    # Penalty rule 4: 10 points per 5% deviation of dark-module ratio from 50%.
    dark_count = sum(map(sum, modules))
    percent = float(dark_count) / (modules_count**2)
    # Every 5% departure from 50%, rating++
    rating = int(abs(percent * 100 - 50) / 5)
    return rating * 10


def optimal_data_chunks(data, minimum=4):
    """
    An iterator returning QRData chunks optimized to the data content.

    :param minimum: The minimum number of bytes in a row to split as a chunk.
    """
    data = to_bytestring(data)
    num_pattern = rb"\d"
    alpha_pattern = b"[" + re.escape(ALPHA_NUM) + b"]"
    if len(data) <= minimum:
        # Short data: only switch mode if the WHOLE payload fits the mode.
        num_pattern = re.compile(b"^" + num_pattern + b"+$")
        alpha_pattern = re.compile(b"^" + alpha_pattern + b"+$")
    else:
        re_repeat = b"{" + str(minimum).encode("ascii") + b",}"
        num_pattern = re.compile(num_pattern + re_repeat)
        alpha_pattern = re.compile(alpha_pattern + re_repeat)
    num_bits = _optimal_split(data, num_pattern)
    for is_num, chunk in num_bits:
        if is_num:
            yield QRData(chunk, mode=MODE_NUMBER, check_data=False)
        else:
            # Non-numeric remainder: try the alphanumeric mode next,
            # falling back to 8-bit bytes.
            for is_alpha, sub_chunk in _optimal_split(chunk, alpha_pattern):
                mode = MODE_ALPHA_NUM if is_alpha else MODE_8BIT_BYTE
                yield QRData(sub_chunk, mode=mode, check_data=False)


def _optimal_split(data, pattern):
    """Yield (matched, chunk) pairs splitting *data* around *pattern* hits."""
    while data:
        match = re.search(pattern, data)
        if not match:
            break
        start, end = match.start(), match.end()
        if start:
            yield False, data[:start]
        yield True, data[start:end]
        data = data[end:]
    if data:
        yield False, data


def to_bytestring(data):
    """
    Convert data to a (utf-8 encoded) byte-string if it isn't a byte-string
    already.
    """
    if not isinstance(data, bytes):
        data = str(data).encode("utf-8")
    return data


def optimal_mode(data):
    """
    Calculate the optimal mode for this chunk of data.
    """
    if data.isdigit():
        return MODE_NUMBER
    if RE_ALPHA_NUM.match(data):
        return MODE_ALPHA_NUM
    return MODE_8BIT_BYTE


class QRData:
    """
    Data held in a QR compatible format.

    Doesn't currently handle KANJI.
    """

    def __init__(self, data, mode=None, check_data=True):
        """
        If ``mode`` isn't provided, the most compact QR data type possible is
        chosen.

        :raises TypeError: if an explicit ``mode`` is not a supported mode.
        :raises ValueError: if ``check_data`` and the data does not fit the
            explicit ``mode``.
        """
        if check_data:
            data = to_bytestring(data)

        if mode is None:
            self.mode = optimal_mode(data)
        else:
            self.mode = mode
            if mode not in (MODE_NUMBER, MODE_ALPHA_NUM, MODE_8BIT_BYTE):
                raise TypeError(f"Invalid mode ({mode})")  # pragma: no cover
            if check_data and mode < optimal_mode(data):  # pragma: no cover
                raise ValueError(f"Provided data can not be represented in mode {mode}")

        self.data = data

    def __len__(self):
        return len(self.data)

    def write(self, buffer):
        """Append this chunk's payload bits to *buffer* (a BitBuffer)."""
        if self.mode == MODE_NUMBER:
            # Numeric mode: 3 digits -> 10 bits, 2 -> 7 bits, 1 -> 4 bits.
            for i in range(0, len(self.data), 3):
                chars = self.data[i : i + 3]
                bit_length = NUMBER_LENGTH[len(chars)]
                buffer.put(int(chars), bit_length)
        elif self.mode == MODE_ALPHA_NUM:
            # Alphanumeric mode: pairs pack into 11 bits, a trailing
            # single character into 6.
            for i in range(0, len(self.data), 2):
                chars = self.data[i : i + 2]
                if len(chars) > 1:
                    buffer.put(
                        ALPHA_NUM.find(chars[0]) * 45 + ALPHA_NUM.find(chars[1]), 11
                    )
                else:
                    buffer.put(ALPHA_NUM.find(chars), 6)
        else:
            # Iterating a bytestring in Python 3 returns an integer,
            # no need to ord().
            data = self.data
            for c in data:
                buffer.put(c, 8)

    def __repr__(self):
        return repr(self.data)


class BitBuffer:
    """Append-only MSB-first bit stream backed by a list of bytes."""

    def __init__(self):
        self.buffer: List[int] = []
        self.length = 0  # total number of bits written

    def __repr__(self):
        return ".".join([str(n) for n in self.buffer])

    def get(self, index):
        """Return True if the bit at *index* (MSB-first) is set."""
        buf_index = math.floor(index / 8)
        return ((self.buffer[buf_index] >> (7 - index % 8)) & 1) == 1

    def put(self, num, length):
        """Append the low *length* bits of *num*, most significant first."""
        for i in range(length):
            self.put_bit(((num >> (length - i - 1)) & 1) == 1)

    def __len__(self):
        return self.length

    def put_bit(self, bit):
        """Append a single bit, growing the byte list as needed."""
        buf_index = self.length // 8
        if len(self.buffer) <= buf_index:
            self.buffer.append(0)
        if bit:
            self.buffer[buf_index] |= 0x80 >> (self.length % 8)
        self.length += 1


def create_bytes(buffer: BitBuffer, rs_blocks: List[RSBlock]):
    """Interleave data codewords with Reed-Solomon error-correction codewords.

    Returns the final codeword sequence ready to be mapped into the matrix.
    """
    offset = 0

    maxDcCount = 0
    maxEcCount = 0

    dcdata: List[List[int]] = []
    ecdata: List[List[int]] = []

    for rs_block in rs_blocks:
        dcCount = rs_block.data_count
        ecCount = rs_block.total_count - dcCount

        maxDcCount = max(maxDcCount, dcCount)
        maxEcCount = max(maxEcCount, ecCount)

        current_dc = [0xFF & buffer.buffer[i + offset] for i in range(dcCount)]
        offset += dcCount

        # Get error correction polynomial.
        if ecCount in LUT.rsPoly_LUT:
            # Use a precomputed generator polynomial when available.
            rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
        else:
            rsPoly = base.Polynomial([1], 0)
            for i in range(ecCount):
                rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)

        rawPoly = base.Polynomial(current_dc, len(rsPoly) - 1)

        modPoly = rawPoly % rsPoly
        current_ec = []
        mod_offset = len(modPoly) - ecCount
        for i in range(ecCount):
            modIndex = i + mod_offset
            current_ec.append(modPoly[modIndex] if (modIndex >= 0) else 0)

        dcdata.append(current_dc)
        ecdata.append(current_ec)

    # Interleave: column-major over the per-block codeword lists.
    data = []
    for i in range(maxDcCount):
        for dc in dcdata:
            if i < len(dc):
                data.append(dc[i])
    for i in range(maxEcCount):
        for ec in ecdata:
            if i < len(ec):
                data.append(ec[i])

    return data


def create_data(version, error_correction, data_list):
    """Encode *data_list* chunks into the final interleaved codeword stream.

    :raises exceptions.DataOverflowError: if the encoded bits exceed the
        capacity of the given version/error-correction combination.
    """
    buffer = BitBuffer()
    for data in data_list:
        buffer.put(data.mode, 4)
        buffer.put(len(data), length_in_bits(data.mode, version))
        data.write(buffer)

    # Calculate the maximum number of bits for the given version.
    rs_blocks = base.rs_blocks(version, error_correction)
    bit_limit = sum(block.data_count * 8 for block in rs_blocks)
    if len(buffer) > bit_limit:
        raise exceptions.DataOverflowError(
            "Code length overflow. Data size (%s) > size available (%s)"
            % (len(buffer), bit_limit)
        )

    # Terminate the bits (add up to four 0s).
    for _ in range(min(bit_limit - len(buffer), 4)):
        buffer.put_bit(False)

    # Delimit the string into 8-bit words, padding with 0s if necessary.
    delimit = len(buffer) % 8
    if delimit:
        for _ in range(8 - delimit):
            buffer.put_bit(False)

    # Add special alternating padding bitstrings until buffer is full.
    bytes_to_fill = (bit_limit - len(buffer)) // 8
    for i in range(bytes_to_fill):
        if i % 2 == 0:
            buffer.put(PAD0, 8)
        else:
            buffer.put(PAD1, 8)

    return create_bytes(buffer, rs_blocks)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/main.py
"""High-level QR Code builder: sizing, matrix construction, and rendering."""
import sys
from bisect import bisect_left
from typing import (
    Dict,
    Generic,
    List,
    NamedTuple,
    Optional,
    Type,
    TypeVar,
    cast,
    overload,
)

from typing_extensions import Literal

from qrcode import constants, exceptions, util
from qrcode.image.base import BaseImage
from qrcode.image.pure import PyPNGImage

ModulesType = List[List[Optional[bool]]]
# Cache modules generated just based on the QR Code version
precomputed_qr_blanks: Dict[int, ModulesType] = {}


def make(data=None, **kwargs):
    """One-shot convenience: build a QRCode from *data* and return its image."""
    qr = QRCode(**kwargs)
    qr.add_data(data)
    return qr.make_image()


def _check_box_size(size):
    """Raise ValueError unless *size* is a positive integer-like value."""
    if int(size) <= 0:
        raise ValueError(f"Invalid box size (was {size}, expected larger than 0)")


def _check_border(size):
    """Raise ValueError unless *size* is a non-negative integer-like value."""
    if int(size) < 0:
        raise ValueError(
            "Invalid border value (was %s, expected 0 or larger than that)" % size
        )


def _check_mask_pattern(mask_pattern):
    """Validate that *mask_pattern* is None or an int in range(8)."""
    if mask_pattern is None:
        return
    if not isinstance(mask_pattern, int):
        raise TypeError(
            f"Invalid mask pattern (was {type(mask_pattern)}, expected int)"
        )
    if mask_pattern < 0 or mask_pattern > 7:
        raise ValueError(f"Mask pattern should be in range(8) (got {mask_pattern})")


def copy_2d_array(x):
    """Return a row-wise shallow copy of a 2D list (rows are new lists)."""
    return [row[:] for row in x]


class ActiveWithNeighbors(NamedTuple):
    # 3x3 neighborhood around a module; truthiness reflects the center.
    NW: bool
    N: bool
    NE: bool
    W: bool
    me: bool
    E: bool
    SW: bool
    S: bool
    SE: bool

    def __bool__(self) -> bool:
        return self.me


GenericImage = TypeVar("GenericImage", bound=BaseImage)
GenericImageLocal = TypeVar("GenericImageLocal", bound=BaseImage)


class QRCode(Generic[GenericImage]):
    """Builds a QR Code matrix incrementally and renders it to an image."""

    modules: ModulesType
    _version: Optional[int] = None

    def __init__(
        self,
        version=None,
        error_correction=constants.ERROR_CORRECT_M,
        box_size=10,
        border=4,
        image_factory: Optional[Type[GenericImage]] = None,
        mask_pattern=None,
    ):
        _check_box_size(box_size)
        _check_border(border)
        self.version = version
        self.error_correction = int(error_correction)
        self.box_size = int(box_size)
        # Spec says border should be at least four boxes wide, but allow for
        # any (e.g. for producing printable QR codes).
        self.border = int(border)
        self.mask_pattern = mask_pattern
        self.image_factory = image_factory
        if image_factory is not None:
            assert issubclass(image_factory, BaseImage)
        self.clear()

    @property
    def version(self) -> int:
        # Reading an unset version triggers auto-sizing from the data.
        if self._version is None:
            self.best_fit()
        return cast(int, self._version)

    @version.setter
    def version(self, value) -> None:
        if value is not None:
            value = int(value)
            util.check_version(value)
        self._version = value

    @property
    def mask_pattern(self):
        return self._mask_pattern

    @mask_pattern.setter
    def mask_pattern(self, pattern):
        _check_mask_pattern(pattern)
        self._mask_pattern = pattern

    def clear(self):
        """
        Reset the internal data.
        """
        self.modules = [[]]
        self.modules_count = 0
        self.data_cache = None
        self.data_list = []

    def add_data(self, data, optimize=20):
        """
        Add data to this QR Code.

        :param optimize: Data will be split into multiple chunks to optimize
            the QR size by finding to more compressed modes of at least this
            length. Set to ``0`` to avoid optimizing at all.
        """
        if isinstance(data, util.QRData):
            self.data_list.append(data)
        elif optimize:
            self.data_list.extend(util.optimal_data_chunks(data, minimum=optimize))
        else:
            self.data_list.append(util.QRData(data))
        # Any new data invalidates the previously encoded codewords.
        self.data_cache = None

    def make(self, fit=True):
        """
        Compile the data into a QR Code array.

        :param fit: If ``True`` (or if a size has not been provided), find the
            best fit for the data to avoid data overflow errors.
        """
        if fit or (self.version is None):
            self.best_fit(start=self.version)
        if self.mask_pattern is None:
            self.makeImpl(False, self.best_mask_pattern())
        else:
            self.makeImpl(False, self.mask_pattern)

    def makeImpl(self, test, mask_pattern):
        """Build the full module matrix for the current version/mask."""
        self.modules_count = self.version * 4 + 17

        if self.version in precomputed_qr_blanks:
            self.modules = copy_2d_array(precomputed_qr_blanks[self.version])
        else:
            self.modules = [
                [None] * self.modules_count for i in range(self.modules_count)
            ]
            self.setup_position_probe_pattern(0, 0)
            self.setup_position_probe_pattern(self.modules_count - 7, 0)
            self.setup_position_probe_pattern(0, self.modules_count - 7)
            self.setup_position_adjust_pattern()
            self.setup_timing_pattern()

            # Cache the data-independent skeleton for this version.
            precomputed_qr_blanks[self.version] = copy_2d_array(self.modules)

        self.setup_type_info(test, mask_pattern)

        if self.version >= 7:
            self.setup_type_number(test)

        if self.data_cache is None:
            self.data_cache = util.create_data(
                self.version, self.error_correction, self.data_list
            )
        self.map_data(self.data_cache, mask_pattern)

    def setup_position_probe_pattern(self, row, col):
        """Draw a 7x7 finder pattern (plus separator ring) at (row, col)."""
        for r in range(-1, 8):

            if row + r <= -1 or self.modules_count <= row + r:
                continue

            for c in range(-1, 8):

                if col + c <= -1 or self.modules_count <= col + c:
                    continue

                if (
                    (0 <= r <= 6 and c in {0, 6})
                    or (0 <= c <= 6 and r in {0, 6})
                    or (2 <= r <= 4 and 2 <= c <= 4)
                ):
                    self.modules[row + r][col + c] = True
                else:
                    self.modules[row + r][col + c] = False

    def best_fit(self, start=None):
        """
        Find the minimum size required to fit in the data.

        :raises exceptions.DataOverflowError: if the data does not fit in any
            version up to 40.
        """
        if start is None:
            start = 1
        util.check_version(start)

        # Corresponds to the code in util.create_data, except we don't yet know
        # version, so optimistically assume start and check later
        mode_sizes = util.mode_sizes_for_version(start)
        buffer = util.BitBuffer()
        for data in self.data_list:
            buffer.put(data.mode, 4)
            buffer.put(len(data), mode_sizes[data.mode])
            data.write(buffer)

        needed_bits = len(buffer)
        # BUG FIX: check for overflow BEFORE assigning, because the
        # ``version`` setter validates 1..40 and would raise a generic
        # ValueError for 41, making the intended DataOverflowError
        # unreachable.
        version = bisect_left(
            util.BIT_LIMIT_TABLE[self.error_correction], needed_bits, start
        )
        if version == 41:
            raise exceptions.DataOverflowError()
        self.version = version

        # Now check whether we need more bits for the mode sizes, recursing if
        # our guess was too low
        if mode_sizes is not util.mode_sizes_for_version(self.version):
            self.best_fit(start=self.version)
        return self.version

    def best_mask_pattern(self):
        """
        Find the most efficient mask pattern.
        """
        min_lost_point = 0
        pattern = 0

        for i in range(8):
            self.makeImpl(True, i)

            lost_point = util.lost_point(self.modules)

            if i == 0 or min_lost_point > lost_point:
                min_lost_point = lost_point
                pattern = i

        return pattern

    def print_tty(self, out=None):
        """
        Output the QR Code only using TTY colors.

        If the data has not been compiled yet, make it first.
        """
        # Note: ``sys`` is imported at module level; no local import needed.
        if out is None:
            out = sys.stdout

        if not out.isatty():
            raise OSError("Not a tty")

        if self.data_cache is None:
            self.make()

        modcount = self.modules_count
        out.write("\x1b[1;47m" + (" " * (modcount * 2 + 4)) + "\x1b[0m\n")
        for r in range(modcount):
            out.write("\x1b[1;47m  \x1b[40m")
            for c in range(modcount):
                if self.modules[r][c]:
                    out.write("  ")
                else:
                    out.write("\x1b[1;47m  \x1b[40m")
            out.write("\x1b[1;47m  \x1b[0m\n")
        out.write("\x1b[1;47m" + (" " * (modcount * 2 + 4)) + "\x1b[0m\n")
        out.flush()

    def print_ascii(self, out=None, tty=False, invert=False):
        """
        Output the QR Code using ASCII characters.

        :param tty: use fixed TTY color codes (forces invert=True)
        :param invert: invert the ASCII characters (solid <-> transparent)
        """
        if out is None:
            out = sys.stdout

        if tty and not out.isatty():
            raise OSError("Not a tty")

        if self.data_cache is None:
            self.make()

        modcount = self.modules_count
        # Half-block characters: each printed row covers two module rows.
        codes = [bytes((code,)).decode("cp437") for code in (255, 223, 220, 219)]
        if tty:
            invert = True
        if invert:
            codes.reverse()

        def get_module(x, y) -> int:
            if invert and self.border and max(x, y) >= modcount + self.border:
                return 1
            if min(x, y) < 0 or max(x, y) >= modcount:
                return 0
            return cast(int, self.modules[x][y])

        for r in range(-self.border, modcount + self.border, 2):
            if tty:
                if not invert or r < modcount + self.border - 1:
                    out.write("\x1b[48;5;232m")  # Background black
                out.write("\x1b[38;5;255m")  # Foreground white
            for c in range(-self.border, modcount + self.border):
                pos = get_module(r, c) + (get_module(r + 1, c) << 1)
                out.write(codes[pos])
            if tty:
                out.write("\x1b[0m")
            out.write("\n")
        out.flush()

    @overload
    def make_image(self, image_factory: Literal[None] = None, **kwargs) -> GenericImage:
        ...

    @overload
    def make_image(
        self, image_factory: Type[GenericImageLocal] = None, **kwargs
    ) -> GenericImageLocal:
        ...

    def make_image(self, image_factory=None, **kwargs):
        """
        Make an image from the QR Code data.

        If the data has not been compiled yet, make it first.
        """
        _check_box_size(self.box_size)
        if self.data_cache is None:
            self.make()

        if image_factory is not None:
            assert issubclass(image_factory, BaseImage)
        else:
            image_factory = self.image_factory
            if image_factory is None:
                from qrcode.image.pil import Image, PilImage

                # Use PIL by default if available, otherwise use PyPNG.
                image_factory = PilImage if Image else PyPNGImage

        im = image_factory(
            self.border,
            self.modules_count,
            self.box_size,
            qrcode_modules=self.modules,
            **kwargs,
        )

        if im.needs_drawrect:
            for r in range(self.modules_count):
                for c in range(self.modules_count):
                    if im.needs_context:
                        im.drawrect_context(r, c, qr=self)
                    elif self.modules[r][c]:
                        im.drawrect(r, c)
        if im.needs_processing:
            im.process()

        return im

    # return true if and only if (row, col) is in the module
    def is_constrained(self, row: int, col: int) -> bool:
        return (
            row >= 0
            and row < len(self.modules)
            and col >= 0
            and col < len(self.modules[row])
        )

    def setup_timing_pattern(self):
        """Draw the alternating timing lines along row 6 and column 6."""
        for r in range(8, self.modules_count - 8):
            if self.modules[r][6] is not None:
                continue
            self.modules[r][6] = r % 2 == 0

        for c in range(8, self.modules_count - 8):
            if self.modules[6][c] is not None:
                continue
            self.modules[6][c] = c % 2 == 0

    def setup_position_adjust_pattern(self):
        """Draw the 5x5 alignment patterns at this version's coordinates."""
        pos = util.pattern_position(self.version)

        for i in range(len(pos)):
            row = pos[i]

            for j in range(len(pos)):
                col = pos[j]

                if self.modules[row][col] is not None:
                    continue

                for r in range(-2, 3):

                    for c in range(-2, 3):

                        if (
                            r == -2
                            or r == 2
                            or c == -2
                            or c == 2
                            or (r == 0 and c == 0)
                        ):
                            self.modules[row + r][col + c] = True
                        else:
                            self.modules[row + r][col + c] = False

    def setup_type_number(self, test):
        """Write the 18-bit version information blocks (versions >= 7)."""
        bits = util.BCH_type_number(self.version)

        for i in range(18):
            mod = not test and ((bits >> i) & 1) == 1
            self.modules[i // 3][i % 3 + self.modules_count - 8 - 3] = mod

        for i in range(18):
            mod = not test and ((bits >> i) & 1) == 1
            self.modules[i % 3 + self.modules_count - 8 - 3][i // 3] = mod

    def setup_type_info(self, test, mask_pattern):
        """Write the 15-bit format information (EC level + mask pattern)."""
        data = (self.error_correction << 3) | mask_pattern
        bits = util.BCH_type_info(data)

        # vertical
        for i in range(15):
            mod = not test and ((bits >> i) & 1) == 1

            if i < 6:
                self.modules[i][8] = mod
            elif i < 8:
                self.modules[i + 1][8] = mod
            else:
                self.modules[self.modules_count - 15 + i][8] = mod

        # horizontal
        for i in range(15):
            mod = not test and ((bits >> i) & 1) == 1

            if i < 8:
                self.modules[8][self.modules_count - i - 1] = mod
            elif i < 9:
                self.modules[8][15 - i - 1 + 1] = mod
            else:
                self.modules[8][15 - i - 1] = mod

        # fixed module
        self.modules[self.modules_count - 8][8] = not test

    def map_data(self, data, mask_pattern):
        """Place the codeword bits into the matrix in the zigzag order,
        applying the chosen mask function."""
        inc = -1
        row = self.modules_count - 1
        bitIndex = 7
        byteIndex = 0

        mask_func = util.mask_func(mask_pattern)

        data_len = len(data)

        # Columns are walked in pairs, right to left, skipping the vertical
        # timing column (col 6).
        for col in range(self.modules_count - 1, 0, -2):

            if col <= 6:
                col -= 1

            col_range = (col, col - 1)

            while True:

                for c in col_range:

                    if self.modules[row][c] is None:

                        dark = False

                        if byteIndex < data_len:
                            dark = ((data[byteIndex] >> bitIndex) & 1) == 1

                        if mask_func(row, c):
                            dark = not dark

                        self.modules[row][c] = dark
                        bitIndex -= 1

                        if bitIndex == -1:
                            byteIndex += 1
                            bitIndex = 7

                row += inc

                if row < 0 or self.modules_count <= row:
                    row -= inc
                    inc = -inc
                    break

    def get_matrix(self):
        """
        Return the QR Code as a multidimensional array, including the border.

        To return the array without a border, set ``self.border`` to 0 first.
        """
        if self.data_cache is None:
            self.make()

        if not self.border:
            return self.modules

        width = len(self.modules) + self.border * 2
        # BUG FIX: build each border row independently. The original used
        # ``[[False] * width] * self.border``, which aliases ONE row object
        # n times, so a caller mutating one returned border row would
        # silently mutate them all.
        code = [[False] * width for _ in range(self.border)]
        x_border = [False] * self.border
        for module in self.modules:
            code.append(x_border + cast(List[bool], module) + x_border)
        code += [[False] * width for _ in range(self.border)]

        return code

    def active_with_neighbors(self, row: int, col: int) -> ActiveWithNeighbors:
        """Return the 3x3 neighborhood of (row, col) as booleans,
        treating out-of-range cells as inactive."""
        context: List[bool] = []
        for r in range(row - 1, row + 2):
            for c in range(col - 1, col + 2):
                context.append(self.is_constrained(r, c) and bool(self.modules[r][c]))
        return ActiveWithNeighbors(*context)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/exceptions.py
class DataOverflowError(Exception):
    """Raised when the supplied data will not fit in the target QR Code
    version/error-correction capacity."""

    pass
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/base.py
from typing import NamedTuple

from qrcode import constants

# GF(256) exponent and logarithm tables used by the Reed-Solomon encoder.
EXP_TABLE = list(range(256))

LOG_TABLE = list(range(256))

for i in range(8):
    EXP_TABLE[i] = 1 << i

for i in range(8, 256):
    # Generator polynomial recurrence for the field.
    EXP_TABLE[i] = (
        EXP_TABLE[i - 4] ^ EXP_TABLE[i - 5] ^ EXP_TABLE[i - 6] ^ EXP_TABLE[i - 8]
    )

for i in range(255):
    LOG_TABLE[EXP_TABLE[i]] = i

# Column offset into each RS_BLOCK_TABLE row for an error-correction level.
RS_BLOCK_OFFSET = {
    constants.ERROR_CORRECT_L: 0,
    constants.ERROR_CORRECT_M: 1,
    constants.ERROR_CORRECT_Q: 2,
    constants.ERROR_CORRECT_H: 3,
}

# Four entries per version (order: L, M, Q, H). Each entry is one or two
# (block count, total codewords, data codewords) triples, flattened.
RS_BLOCK_TABLE = (
    # L  # M  # Q  # H
    # 1
    (1, 26, 19), (1, 26, 16), (1, 26, 13), (1, 26, 9),
    # 2
    (1, 44, 34), (1, 44, 28), (1, 44, 22), (1, 44, 16),
    # 3
    (1, 70, 55), (1, 70, 44), (2, 35, 17), (2, 35, 13),
    # 4
    (1, 100, 80), (2, 50, 32), (2, 50, 24), (4, 25, 9),
    # 5
    (1, 134, 108), (2, 67, 43), (2, 33, 15, 2, 34, 16), (2, 33, 11, 2, 34, 12),
    # 6
    (2, 86, 68), (4, 43, 27), (4, 43, 19), (4, 43, 15),
    # 7
    (2, 98, 78), (4, 49, 31), (2, 32, 14, 4, 33, 15), (4, 39, 13, 1, 40, 14),
    # 8
    (2, 121, 97), (2, 60, 38, 2, 61, 39), (4, 40, 18, 2, 41, 19), (4, 40, 14, 2, 41, 15),
    # 9
    (2, 146, 116), (3, 58, 36, 2, 59, 37), (4, 36, 16, 4, 37, 17), (4, 36, 12, 4, 37, 13),
    # 10
    (2, 86, 68, 2, 87, 69), (4, 69, 43, 1, 70, 44), (6, 43, 19, 2, 44, 20), (6, 43, 15, 2, 44, 16),
    # 11
    (4, 101, 81), (1, 80, 50, 4, 81, 51), (4, 50, 22, 4, 51, 23), (3, 36, 12, 8, 37, 13),
    # 12
    (2, 116, 92, 2, 117, 93), (6, 58, 36, 2, 59, 37), (4, 46, 20, 6, 47, 21), (7, 42, 14, 4, 43, 15),
    # 13
    (4, 133, 107), (8, 59, 37, 1, 60, 38), (8, 44, 20, 4, 45, 21), (12, 33, 11, 4, 34, 12),
    # 14
    (3, 145, 115, 1, 146, 116), (4, 64, 40, 5, 65, 41), (11, 36, 16, 5, 37, 17), (11, 36, 12, 5, 37, 13),
    # 15
    (5, 109, 87, 1, 110, 88), (5, 65, 41, 5, 66, 42), (5, 54, 24, 7, 55, 25), (11, 36, 12, 7, 37, 13),
    # 16
    (5, 122, 98, 1, 123, 99), (7, 73, 45, 3, 74, 46), (15, 43, 19, 2, 44, 20), (3, 45, 15, 13, 46, 16),
    # 17
    (1, 135, 107, 5, 136, 108), (10, 74, 46, 1, 75, 47), (1, 50, 22, 15, 51, 23), (2, 42, 14, 17, 43, 15),
    # 18
    (5, 150, 120, 1, 151, 121), (9, 69, 43, 4, 70, 44), (17, 50, 22, 1, 51, 23), (2, 42, 14, 19, 43, 15),
    # 19
    (3, 141, 113, 4, 142, 114), (3, 70, 44, 11, 71, 45), (17, 47, 21, 4, 48, 22), (9, 39, 13, 16, 40, 14),
    # 20
    (3, 135, 107, 5, 136, 108), (3, 67, 41, 13, 68, 42), (15, 54, 24, 5, 55, 25), (15, 43, 15, 10, 44, 16),
    # 21
    (4, 144, 116, 4, 145, 117), (17, 68, 42), (17, 50, 22, 6, 51, 23), (19, 46, 16, 6, 47, 17),
    # 22
    (2, 139, 111, 7, 140, 112), (17, 74, 46), (7, 54, 24, 16, 55, 25), (34, 37, 13),
    # 23
    (4, 151, 121, 5, 152, 122), (4, 75, 47, 14, 76, 48), (11, 54, 24, 14, 55, 25), (16, 45, 15, 14, 46, 16),
    # 24
    (6, 147, 117, 4, 148, 118), (6, 73, 45, 14, 74, 46), (11, 54, 24, 16, 55, 25), (30, 46, 16, 2, 47, 17),
    # 25
    (8, 132, 106, 4, 133, 107), (8, 75, 47, 13, 76, 48), (7, 54, 24, 22, 55, 25), (22, 45, 15, 13, 46, 16),
    # 26
    (10, 142, 114, 2, 143, 115), (19, 74, 46, 4, 75, 47), (28, 50, 22, 6, 51, 23), (33, 46, 16, 4, 47, 17),
    # 27
    (8, 152, 122, 4, 153, 123), (22, 73, 45, 3, 74, 46), (8, 53, 23, 26, 54, 24), (12, 45, 15, 28, 46, 16),
    # 28
    (3, 147, 117, 10, 148, 118), (3, 73, 45, 23, 74, 46), (4, 54, 24, 31, 55, 25), (11, 45, 15, 31, 46, 16),
    # 29
    (7, 146, 116, 7, 147, 117), (21, 73, 45, 7, 74, 46), (1, 53, 23, 37, 54, 24), (19, 45, 15, 26, 46, 16),
    # 30
    (5, 145, 115, 10, 146, 116), (19, 75, 47, 10, 76, 48), (15, 54, 24, 25, 55, 25), (23, 45, 15, 25, 46, 16),
    # 31
    (13, 145, 115, 3, 146, 116), (2, 74, 46, 29, 75, 47), (42, 54, 24, 1, 55, 25), (23, 45, 15, 28, 46, 16),
    # 32
    (17, 145, 115), (10, 74, 46, 23, 75, 47), (10, 54, 24, 35, 55, 25), (19, 45, 15, 35, 46, 16),
    # 33
    (17, 145, 115, 1, 146, 116), (14, 74, 46, 21, 75, 47), (29, 54, 24, 19, 55, 25), (11, 45, 15, 46, 46, 16),
    # 34
    (13, 145, 115, 6, 146, 116), (14, 74, 46, 23, 75, 47), (44, 54, 24, 7, 55, 25), (59, 46, 16, 1, 47, 17),
    # 35
    (12, 151, 121, 7, 152, 122), (12, 75, 47, 26, 76, 48), (39, 54, 24, 14, 55, 25), (22, 45, 15, 41, 46, 16),
    # 36
    (6, 151, 121, 14, 152, 122), (6, 75, 47, 34, 76, 48), (46, 54, 24, 10, 55, 25), (2, 45, 15, 64, 46, 16),
    # 37
    (17, 152, 122, 4, 153, 123), (29, 74, 46, 14, 75, 47), (49, 54, 24, 10, 55, 25), (24, 45, 15, 46, 46, 16),
    # 38
    (4, 152, 122, 18, 153, 123), (13, 74, 46, 32, 75, 47), (48, 54, 24, 14, 55, 25), (42, 45, 15, 32, 46, 16),
    # 39
    (20, 147, 117, 4, 148, 118), (40, 75, 47, 7, 76, 48), (43, 54, 24, 22, 55, 25), (10, 45, 15, 67, 46, 16),
    # 40
    (19, 148, 118, 6, 149, 119), (18, 75, 47, 31, 76, 48), (34, 54, 24, 34, 55, 25), (20, 45, 15, 61, 46, 16),
)


def glog(n):
    """Return the GF(256) logarithm of ``n``; ``n`` must be >= 1."""
    if n < 1:  # pragma: no cover
        raise ValueError(f"glog({n})")
    return LOG_TABLE[n]


def gexp(n):
    """Return the GF(256) antilog of ``n`` (exponent wraps modulo 255)."""
    return EXP_TABLE[n % 255]


class Polynomial:
    """A polynomial over GF(256), used for Reed-Solomon encoding."""

    def __init__(self, num, shift):
        """Build from coefficient list ``num``, appending ``shift`` zeros."""
        if not num:  # pragma: no cover
            raise Exception(f"{len(num)}/{shift}")

        # Drop leading zero coefficients before storing.
        offset = 0
        for offset in range(len(num)):
            if num[offset] != 0:
                break

        self.num = num[offset:] + [0] * shift

    def __getitem__(self, index):
        return self.num[index]

    def __iter__(self):
        return iter(self.num)

    def __len__(self):
        return len(self.num)

    def __mul__(self, other):
        # Convolution with GF(256) multiply (add logs) and XOR accumulation.
        num = [0] * (len(self) + len(other) - 1)

        for i, item in enumerate(self):
            for j, other_item in enumerate(other):
                num[i + j] ^= gexp(glog(item) + glog(other_item))

        return Polynomial(num, 0)

    def __mod__(self, other):
        # Polynomial long division remainder in GF(256).
        difference = len(self) - len(other)
        if difference < 0:
            return self

        ratio = glog(self[0]) - glog(other[0])

        num = [
            item ^ gexp(glog(other_item) + ratio)
            for item, other_item in zip(self, other)
        ]
        if difference:
            num.extend(self[-difference:])

        # recursive call
        return Polynomial(num, 0) % other


class RSBlock(NamedTuple):
    # Total codewords in the block; data codewords (rest is EC).
    total_count: int
    data_count: int


def rs_blocks(version, error_correction):
    """Expand the RS_BLOCK_TABLE entry into a flat list of RSBlock."""
    if error_correction not in RS_BLOCK_OFFSET:  # pragma: no cover
        raise Exception(
            "bad rs block @ version: %s / error_correction: %s"
            % (version, error_correction)
        )
    offset = RS_BLOCK_OFFSET[error_correction]
    rs_block = RS_BLOCK_TABLE[(version - 1) * 4 + offset]

    blocks = []

    # Each triple (count, total, data) expands to ``count`` identical blocks.
    for i in range(0, len(rs_block), 3):
        count, total_count, data_count = rs_block[i : i + 3]
        for _ in range(count):
            blocks.append(RSBlock(total_count, data_count))

    return blocks
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/console_scripts.py
#!/usr/bin/env python
"""
qr - Convert stdin (or the first argument) to a QR Code.

When stdout is a tty the QR Code is printed to the terminal and when stdout is
a pipe to a file an image is written. The default image format is PNG.
"""
import optparse
import os
import sys
from typing import Dict, Iterable, NoReturn, Optional, Set, Type

import qrcode
from qrcode.image.base import BaseImage, DrawerAliases

# The next block is added to get the terminal to display properly on MS platforms
if sys.platform.startswith(("win", "cygwin")):  # pragma: no cover
    import colorama  # type: ignore

    colorama.init()

# Short aliases accepted by --factory, mapped to full dotted class paths.
default_factories = {
    "pil": "qrcode.image.pil.PilImage",
    "png": "qrcode.image.pure.PyPNGImage",
    "svg": "qrcode.image.svg.SvgImage",
    "svg-fragment": "qrcode.image.svg.SvgFragmentImage",
    "svg-path": "qrcode.image.svg.SvgPathImage",
    # Keeping for backwards compatibility:
    "pymaging": "qrcode.image.pure.PymagingImage",
}

# --error-correction letter -> qrcode constant.
error_correction = {
    "L": qrcode.ERROR_CORRECT_L,
    "M": qrcode.ERROR_CORRECT_M,
    "Q": qrcode.ERROR_CORRECT_Q,
    "H": qrcode.ERROR_CORRECT_H,
}


def main(args=None):
    """Command-line entry point: parse options, build and emit the QR code."""
    if args is None:
        args = sys.argv[1:]

    from pkg_resources import get_distribution

    version = get_distribution("qrcode").version
    parser = optparse.OptionParser(usage=(__doc__ or "").strip(), version=version)

    # Wrap parser.error in a typed NoReturn method for better typing.
    def raise_error(msg: str) -> NoReturn:
        parser.error(msg)
        raise  # pragma: no cover

    parser.add_option(
        "--factory",
        help="Full python path to the image factory class to "
        "create the image with. You can use the following shortcuts to the "
        f"built-in image factory classes: {commas(default_factories)}.",
    )
    parser.add_option(
        "--factory-drawer",
        help=f"Use an alternate drawer. {get_drawer_help()}.",
    )
    parser.add_option(
        "--optimize",
        type=int,
        help="Optimize the data by looking for chunks "
        "of at least this many characters that could use a more efficient "
        "encoding method. Use 0 to turn off chunk optimization.",
    )
    parser.add_option(
        "--error-correction",
        type="choice",
        choices=sorted(error_correction.keys()),
        default="M",
        help="The error correction level to use. Choices are L (7%), "
        "M (15%, default), Q (25%), and H (30%).",
    )
    parser.add_option(
        "--ascii", help="Print as ascii even if stdout is piped.", action="store_true"
    )
    parser.add_option(
        "--output",
        help="The output file. If not specified, the image is sent to "
        "the standard output.",
    )
    opts, args = parser.parse_args(args)

    if opts.factory:
        # Allow either a known alias or a full dotted path.
        module = default_factories.get(opts.factory, opts.factory)
        try:
            image_factory = get_factory(module)
        except ValueError as e:
            raise_error(str(e))
    else:
        image_factory = None

    qr = qrcode.QRCode(
        error_correction=error_correction[opts.error_correction],
        image_factory=image_factory,
    )

    if args:
        data = args[0]
        # surrogateescape round-trips undecodable bytes from the shell.
        data = data.encode(errors="surrogateescape")
    else:
        data = sys.stdin.buffer.read()
    if opts.optimize is None:
        qr.add_data(data)
    else:
        qr.add_data(data, optimize=opts.optimize)

    if opts.output:
        img = qr.make_image()
        with open(opts.output, "wb") as out:
            img.save(out)
    else:
        # Interactive terminal (or explicit --ascii): render as text.
        if image_factory is None and (os.isatty(sys.stdout.fileno()) or opts.ascii):
            qr.print_ascii(tty=not opts.ascii)
            return

        kwargs = {}
        aliases: Optional[DrawerAliases] = getattr(
            qr.image_factory, "drawer_aliases", None
        )
        if opts.factory_drawer:
            if not aliases:
                raise_error("The selected factory has no drawer aliases.")
            if opts.factory_drawer not in aliases:
                raise_error(
                    f"{opts.factory_drawer} factory drawer not found."
                    f" Expected {commas(aliases)}"
                )
            drawer_cls, drawer_kwargs = aliases[opts.factory_drawer]
            kwargs["module_drawer"] = drawer_cls(**drawer_kwargs)
        img = qr.make_image(**kwargs)

        sys.stdout.flush()
        img.save(sys.stdout.buffer)


def get_factory(module: str) -> Type[BaseImage]:
    """Import and return the image factory class from a dotted path."""
    if "." not in module:
        raise ValueError("The image factory is not a full python path")
    module, name = module.rsplit(".", 1)
    imp = __import__(module, {}, {}, [name])
    return getattr(imp, name)


def get_drawer_help() -> str:
    """Build the --factory-drawer help text from each factory's aliases."""
    help: Dict[str, Set] = {}
    for alias, module in default_factories.items():
        try:
            image = get_factory(module)
        except ImportError:  # pragma: no cover
            continue
        aliases: Optional[DrawerAliases] = getattr(image, "drawer_aliases", None)
        if not aliases:
            continue
        # Group factories that share the same drawer-alias set.
        factories = help.setdefault(commas(aliases), set())
        factories.add(alias)

    return ". ".join(
        f"For {commas(factories, 'and')}, use: {aliases}"
        for aliases, factories in help.items()
    )


def commas(items: Iterable[str], joiner="or") -> str:
    """Join items as an English list: 'a, b or c' (joiner configurable)."""
    items = tuple(items)
    if not items:
        return ""
    if len(items) == 1:
        return items[0]
    return f"{', '.join(items[:-1])} {joiner} {items[-1]}"


if __name__ == "__main__":  # pragma: no cover
    main()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/constants.py
# QR error correct levels ERROR_CORRECT_L = 1 ERROR_CORRECT_M = 0 ERROR_CORRECT_Q = 3 ERROR_CORRECT_H = 2
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/__init__.py
from qrcode.main import QRCode from qrcode.main import make # noqa from qrcode.constants import ( # noqa ERROR_CORRECT_L, ERROR_CORRECT_M, ERROR_CORRECT_Q, ERROR_CORRECT_H, ) from qrcode import image # noqa def run_example(data="http://www.lincolnloop.com", *args, **kwargs): """ Build an example QR Code and display it. There's an even easier way than the code here though: just use the ``make`` shortcut. """ qr = QRCode(*args, **kwargs) qr.add_data(data) im = qr.make_image() im.show() if __name__ == "__main__": # pragma: no cover import sys run_example(*sys.argv[1:])
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/LUT.py
# Store all kinds of lookup table.


# # generate rsPoly lookup table.

# from qrcode import base

# def create_bytes(rs_blocks):
#     for r in range(len(rs_blocks)):
#         dcCount = rs_blocks[r].data_count
#         ecCount = rs_blocks[r].total_count - dcCount
#         rsPoly = base.Polynomial([1], 0)
#         for i in range(ecCount):
#             rsPoly = rsPoly * base.Polynomial([1, base.gexp(i)], 0)
#     return ecCount, rsPoly

# rsPoly_LUT = {}
# for version in range(1, 41):
#     for error_correction in range(4):
#         rs_blocks_list = base.rs_blocks(version, error_correction)
#         ecCount, rsPoly = create_bytes(rs_blocks_list)
#         rsPoly_LUT[ecCount] = rsPoly.num
# print(rsPoly_LUT)

# Result. Usage: input: ecCount, output: Polynomial.num
# e.g. rsPoly = base.Polynomial(LUT.rsPoly_LUT[ecCount], 0)
#
# Precomputed Reed-Solomon generator polynomial coefficients, keyed by the
# error-correction codeword count, so the encoder can skip the polynomial
# multiplication loop above.
rsPoly_LUT = {
    7: [1, 127, 122, 154, 164, 11, 68, 117],
    10: [1, 216, 194, 159, 111, 199, 94, 95, 113, 157, 193],
    13: [1, 137, 73, 227, 17, 177, 17, 52, 13, 46, 43, 83, 132, 120],
    15: [1, 29, 196, 111, 163, 112, 74, 10, 105, 105, 139, 132, 151, 32, 134, 26],
    16: [1, 59, 13, 104, 189, 68, 209, 30, 8, 163, 65, 41, 229, 98, 50, 36, 59],
    17: [1, 119, 66, 83, 120, 119, 22, 197, 83, 249, 41, 143, 134, 85, 53, 125, 99, 79],
    18: [1, 239, 251, 183, 113, 149, 175, 199, 215, 240, 220, 73, 82, 173, 75, 32, 67, 217, 146],
    20: [1, 152, 185, 240, 5, 111, 99, 6, 220, 112, 150, 69, 36, 187, 22, 228, 198, 121, 121, 165, 174],
    22: [1, 89, 179, 131, 176, 182, 244, 19, 189, 69, 40, 28, 137, 29, 123, 67, 253, 86, 218, 230, 26, 145, 245],
    24: [1, 122, 118, 169, 70, 178, 237, 216, 102, 115, 150, 229, 73, 130, 72, 61, 43, 206, 1, 237, 247, 127, 217, 144, 117],
    26: [1, 246, 51, 183, 4, 136, 98, 199, 152, 77, 56, 206, 24, 145, 40, 209, 117, 233, 42, 135, 68, 70, 144, 146, 77, 43, 94],
    28: [1, 252, 9, 28, 13, 18, 251, 208, 150, 103, 174, 100, 41, 167, 12, 247, 56, 117, 119, 233, 127, 181, 100, 121, 147, 176, 74, 58, 197],
    30: [1, 212, 246, 77, 73, 195, 192, 75, 98, 5, 70, 103, 177, 22, 217, 138, 51, 181, 246, 72, 25, 18, 46, 228, 74, 216, 195, 11, 106, 130, 150],
}
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/release.py
""" This file provides zest.releaser entrypoints using when releasing new qrcode versions. """ import os import re import datetime def update_manpage(data): """ Update the version in the manpage document. """ if data["name"] != "qrcode": return base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) filename = os.path.join(base_dir, "doc", "qr.1") with open(filename) as f: lines = f.readlines() changed = False for i, line in enumerate(lines): if not line.startswith(".TH "): continue parts = re.split(r'"([^"]*)"', line) if len(parts) < 5: continue changed = parts[3] != data["new_version"] if changed: # Update version parts[3] = data["new_version"] # Update date parts[1] = datetime.datetime.now().strftime("%-d %b %Y") lines[i] = '"'.join(parts) break if changed: with open(filename, "w") as f: for line in lines: f.write(line)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/base.py
import abc
from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Type, Union

from qrcode.image.styles.moduledrawers.base import QRModuleDrawer

if TYPE_CHECKING:
    from qrcode.main import ActiveWithNeighbors, QRCode


# Maps a drawer alias name to (drawer class, constructor kwargs).
DrawerAliases = Dict[str, Tuple[Type[QRModuleDrawer], Dict[str, Any]]]


class BaseImage:
    """
    Base QRCode image output class.

    Subclasses implement ``new_image``, ``drawrect`` and ``save``; the
    feature flags below tell the QRCode renderer which hooks to call.
    """

    kind: Optional[str] = None
    allowed_kinds: Optional[Tuple[str]] = None
    # If True, the renderer calls drawrect()/drawrect_context() per module.
    needs_context = False
    # If True, the renderer calls process() after drawing all modules.
    needs_processing = False
    needs_drawrect = True

    def __init__(self, border, width, box_size, *args, **kwargs):
        # ``width`` is the module count; pixel_size includes both borders.
        self.border = border
        self.width = width
        self.box_size = box_size
        self.pixel_size = (self.width + self.border * 2) * self.box_size
        self.modules = kwargs.pop("qrcode_modules")
        self._img = self.new_image(**kwargs)
        self.init_new_image()

    @abc.abstractmethod
    def drawrect(self, row, col):
        """
        Draw a single rectangle of the QR code.
        """

    def drawrect_context(self, row: int, col: int, qr: "QRCode"):
        """
        Draw a single rectangle of the QR code given the surrounding context
        """
        raise NotImplementedError("BaseImage.drawrect_context")  # pragma: no cover

    def process(self):
        """
        Processes QR code after completion
        """
        raise NotImplementedError("BaseImage.drawimage")  # pragma: no cover

    @abc.abstractmethod
    def save(self, stream, kind=None):
        """
        Save the image file.
        """

    def pixel_box(self, row, col):
        """
        A helper method for pixel-based image generators that specifies the
        four pixel coordinates for a single rect.
        """
        x = (col + self.border) * self.box_size
        y = (row + self.border) * self.box_size
        return (
            (x, y),
            (x + self.box_size - 1, y + self.box_size - 1),
        )

    @abc.abstractmethod
    def new_image(self, **kwargs) -> Any:
        """
        Build the image class. Subclasses should return the class created.
        """

    def init_new_image(self):
        pass

    def get_image(self, **kwargs):
        """
        Return the image class for further processing.
        """
        return self._img

    def check_kind(self, kind, transform=None):
        """
        Get the image type, validating it against ``allowed_kinds``.

        ``transform`` may normalize the kind (e.g. case) before the second
        validation pass. Raises ValueError for a disallowed kind.
        """
        if kind is None:
            kind = self.kind
        allowed = not self.allowed_kinds or kind in self.allowed_kinds
        if transform:
            kind = transform(kind)
            if not allowed:
                allowed = kind in self.allowed_kinds
        if not allowed:
            raise ValueError(f"Cannot set {type(self).__name__} type to {kind}")
        return kind

    def is_eye(self, row: int, col: int):
        """
        Find whether the referenced module is in an eye.
        """
        return (
            (row < 7 and col < 7)  #Upper left eye
            or (row < 7 and self.width - col < 8)  # Upper right eye
            or (self.width - row < 8 and col < 7)  # Lower left eye
        )


class BaseImageWithDrawer(BaseImage):
    """BaseImage that delegates per-module drawing to QRModuleDrawer objects,
    with a separate drawer for the three eye (finder) patterns.
    """

    default_drawer_class: Type[QRModuleDrawer]
    drawer_aliases: DrawerAliases = {}

    def get_default_module_drawer(self) -> QRModuleDrawer:
        return self.default_drawer_class()

    def get_default_eye_drawer(self) -> QRModuleDrawer:
        return self.default_drawer_class()

    needs_context = True

    module_drawer: "QRModuleDrawer"
    eye_drawer: "QRModuleDrawer"

    def __init__(
        self,
        *args,
        module_drawer: Union[QRModuleDrawer, str, None] = None,
        eye_drawer: Union[QRModuleDrawer, str, None] = None,
        **kwargs,
    ):
        self.module_drawer = (
            self.get_drawer(module_drawer) or self.get_default_module_drawer()
        )
        # The eye drawer can be overridden by another module drawer as well,
        # but you have to be more careful with these in order to make the QR
        # code still parseable
        self.eye_drawer = self.get_drawer(eye_drawer) or self.get_default_eye_drawer()
        super().__init__(*args, **kwargs)

    def get_drawer(
        self, drawer: Union[QRModuleDrawer, str, None]
    ) -> Optional[QRModuleDrawer]:
        """Resolve a drawer instance, alias string, or None (passes through)."""
        if not isinstance(drawer, str):
            return drawer
        drawer_cls, kwargs = self.drawer_aliases[drawer]
        return drawer_cls(**kwargs)

    def init_new_image(self):
        self.module_drawer.initialize(img=self)
        self.eye_drawer.initialize(img=self)

        return super().init_new_image()

    def drawrect_context(self, row: int, col: int, qr: "QRCode"):
        box = self.pixel_box(row, col)
        # Eye modules use the dedicated eye drawer.
        drawer = self.eye_drawer if self.is_eye(row, col) else self.module_drawer
        is_active: Union[bool, ActiveWithNeighbors] = (
            qr.active_with_neighbors(row, col)
            if drawer.needs_neighbors
            else bool(qr.modules[row][col])
        )

        drawer.drawrect(box, is_active)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styledpil.py
# Needed on case-insensitive filesystems
from __future__ import absolute_import

import qrcode.image.base
from qrcode.compat.pil import Image
from qrcode.image.styles.colormasks import QRColorMask, SolidFillColorMask
from qrcode.image.styles.moduledrawers import SquareModuleDrawer


class StyledPilImage(qrcode.image.base.BaseImageWithDrawer):
    """
    Styled PIL image builder, default format is PNG.

    This differs from the PilImage in that there is a module_drawer, a
    color_mask, and an optional image

    The module_drawer should extend the QRModuleDrawer class and implement the
    drawrect_context(self, box, active, context), and probably also the
    initialize function. This will draw an individual "module" or square on
    the QR code.

    The color_mask will extend the QRColorMask class and will at very least
    implement the get_fg_pixel(image, x, y) function, calculating a color to
    put on the image at the pixel location (x,y) (more advanced functionality
    can be gotten by instead overriding other functions defined in the
    QRColorMask class)

    The Image can be specified either by path or with a Pillow Image, and if it
    is there will be placed in the middle of the QR code. No effort is done to
    ensure that the QR code is still legible after the image has been placed
    there; Q or H level error correction levels are recommended to maintain
    data integrity A resampling filter can be specified (defaulting to
    PIL.Image.Resampling.LANCZOS) for resizing; see PIL.Image.resize() for
    possible options for this parameter.
    """

    kind = "PNG"

    needs_processing = True
    color_mask: QRColorMask
    default_drawer_class = SquareModuleDrawer

    def __init__(self, *args, **kwargs):
        self.color_mask = kwargs.get("color_mask", SolidFillColorMask())
        embeded_image_path = kwargs.get("embeded_image_path", None)
        self.embeded_image = kwargs.get("embeded_image", None)
        self.embeded_image_resample = kwargs.get(
            "embeded_image_resample", Image.Resampling.LANCZOS
        )
        if not self.embeded_image and embeded_image_path:
            self.embeded_image = Image.open(embeded_image_path)

        # the paint_color is the color the module drawer will use to draw upon
        # a canvas During the color mask process, pixels that are paint_color
        # are replaced by a newly-calculated color
        self.paint_color = tuple(0 for i in self.color_mask.back_color)
        if self.color_mask.has_transparency:
            self.paint_color = tuple([*self.color_mask.back_color[:3], 255])

        super().__init__(*args, **kwargs)

    def new_image(self, **kwargs):
        # RGBA is needed whenever the mask or the embedded logo carries alpha.
        mode = (
            "RGBA"
            if (
                self.color_mask.has_transparency
                or (self.embeded_image and "A" in self.embeded_image.getbands())
            )
            else "RGB"
        )
        # This is the background color. Should be white or whiteish
        back_color = self.color_mask.back_color

        return Image.new(mode, (self.pixel_size, self.pixel_size), back_color)

    def init_new_image(self):
        self.color_mask.initialize(self, self._img)
        super().init_new_image()

    def process(self):
        """Recolor the drawn modules and overlay the embedded image, if any."""
        self.color_mask.apply_mask(self._img)
        if self.embeded_image:
            self.draw_embeded_image()

    def draw_embeded_image(self):
        """Paste the (resized) embedded logo centred on the QR code."""
        if not self.embeded_image:
            return
        total_width, _ = self._img.size
        total_width = int(total_width)
        logo_width_ish = int(total_width / 4)
        logo_offset = (
            int((int(total_width / 2) - int(logo_width_ish / 2)) / self.box_size)
            * self.box_size
        )  # round the offset to the nearest module
        logo_position = (logo_offset, logo_offset)
        logo_width = total_width - logo_offset * 2
        region = self.embeded_image
        region = region.resize((logo_width, logo_width), self.embeded_image_resample)
        if "A" in region.getbands():
            self._img.alpha_composite(region, logo_position)
        else:
            self._img.paste(region, logo_position)

    def save(self, stream, format=None, **kwargs):
        # ``kind`` is accepted as an alias for ``format`` for BaseImage parity.
        if format is None:
            format = kwargs.get("kind", self.kind)
        if "kind" in kwargs:
            del kwargs["kind"]
        self._img.save(stream, format=format, **kwargs)

    def __getattr__(self, name):
        # Delegate unknown attributes to the underlying PIL image.
        return getattr(self._img, name)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/__init__.py
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/pil.py
import qrcode.image.base from qrcode.compat.pil import Image, ImageDraw class PilImage(qrcode.image.base.BaseImage): """ PIL image builder, default format is PNG. """ kind = "PNG" def new_image(self, **kwargs): back_color = kwargs.get("back_color", "white") fill_color = kwargs.get("fill_color", "black") try: fill_color = fill_color.lower() except AttributeError: pass try: back_color = back_color.lower() except AttributeError: pass # L mode (1 mode) color = (r*299 + g*587 + b*114)//1000 if fill_color == "black" and back_color == "white": mode = "1" fill_color = 0 if back_color == "white": back_color = 255 elif back_color == "transparent": mode = "RGBA" back_color = None else: mode = "RGB" img = Image.new(mode, (self.pixel_size, self.pixel_size), back_color) self.fill_color = fill_color self._idr = ImageDraw.Draw(img) return img def drawrect(self, row, col): box = self.pixel_box(row, col) self._idr.rectangle(box, fill=self.fill_color) def save(self, stream, format=None, **kwargs): kind = kwargs.pop("kind", self.kind) if format is None: format = kind self._img.save(stream, format=format, **kwargs) def __getattr__(self, name): return getattr(self._img, name)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/svg.py
import decimal
from decimal import Decimal
from typing import List, Optional, Type, Union, overload

from typing_extensions import Literal

import qrcode.image.base
from qrcode.compat.etree import ET
from qrcode.image.styles.moduledrawers import svg as svg_drawers
from qrcode.image.styles.moduledrawers.base import QRModuleDrawer


class SvgFragmentImage(qrcode.image.base.BaseImageWithDrawer):
    """
    SVG image builder

    Creates a QR-code image as a SVG document fragment.
    """

    _SVG_namespace = "http://www.w3.org/2000/svg"
    kind = "SVG"
    allowed_kinds = ("SVG",)
    default_drawer_class: Type[QRModuleDrawer] = svg_drawers.SvgSquareDrawer

    def __init__(self, *args, **kwargs):
        ET.register_namespace("svg", self._SVG_namespace)
        super().__init__(*args, **kwargs)
        # Save the unit size, for example the default box_size of 10 is '1mm'.
        self.unit_size = self.units(self.box_size)

    @overload
    def units(self, pixels: Union[int, Decimal], text: Literal[False]) -> Decimal:
        ...

    @overload
    def units(self, pixels: Union[int, Decimal], text: Literal[True] = True) -> str:
        ...

    def units(self, pixels, text=True):
        """
        A box_size of 10 (default) equals 1mm.

        Returns a Decimal when ``text`` is False, otherwise a string such as
        ``"1mm"`` with trailing zeros trimmed.
        """
        units = Decimal(pixels) / 10
        if not text:
            return units
        units = units.quantize(Decimal("0.001"))
        context = decimal.Context(traps=[decimal.Inexact])
        try:
            # Progressively drop decimal places while the value stays exact.
            for d in (Decimal("0.01"), Decimal("0.1"), Decimal("0")):
                units = units.quantize(d, context=context)
        except decimal.Inexact:
            pass
        return f"{units}mm"

    def save(self, stream, kind=None):
        self.check_kind(kind=kind)
        self._write(stream)

    def to_string(self, **kwargs):
        return ET.tostring(self._img, **kwargs)

    def new_image(self, **kwargs):
        return self._svg(**kwargs)

    def _svg(self, tag=None, version="1.1", **kwargs):
        """Build the root <svg> element, sized in mm."""
        if tag is None:
            tag = ET.QName(self._SVG_namespace, "svg")
        dimension = self.units(self.pixel_size)
        return ET.Element(
            tag,  # type: ignore
            width=dimension,
            height=dimension,
            version=version,
            **kwargs,
        )

    def _write(self, stream):
        ET.ElementTree(self._img).write(stream, xml_declaration=False)


class SvgImage(SvgFragmentImage):
    """
    Standalone SVG image builder

    Creates a QR-code image as a standalone SVG document.
    """

    # Optional background fill color; None leaves the background out.
    background: Optional[str] = None
    drawer_aliases: qrcode.image.base.DrawerAliases = {
        "circle": (svg_drawers.SvgCircleDrawer, {}),
        "gapped-circle": (svg_drawers.SvgCircleDrawer, {"size_ratio": Decimal(0.8)}),
        "gapped-square": (svg_drawers.SvgSquareDrawer, {"size_ratio": Decimal(0.8)}),
    }

    def _svg(self, tag="svg", **kwargs):
        svg = super()._svg(tag=tag, **kwargs)
        svg.set("xmlns", self._SVG_namespace)
        if self.background:
            svg.append(
                ET.Element(
                    "rect",
                    fill=self.background,
                    x="0",
                    y="0",
                    width="100%",
                    height="100%",
                )
            )
        return svg

    def _write(self, stream):
        # Standalone document: include the XML declaration.
        ET.ElementTree(self._img).write(stream, encoding="UTF-8", xml_declaration=True)


class SvgPathImage(SvgImage):
    """
    SVG image builder with one single <path> element (removes white spaces
    between individual QR points).
    """

    QR_PATH_STYLE = {
        "fill": "#000000",
        "fill-opacity": "1",
        "fill-rule": "nonzero",
        "stroke": "none",
    }

    needs_processing = True
    path: Optional[ET.Element] = None
    default_drawer_class: Type[QRModuleDrawer] = svg_drawers.SvgPathSquareDrawer
    drawer_aliases = {
        "circle": (svg_drawers.SvgPathCircleDrawer, {}),
        "gapped-circle": (
            svg_drawers.SvgPathCircleDrawer,
            {"size_ratio": Decimal(0.8)},
        ),
        "gapped-square": (
            svg_drawers.SvgPathSquareDrawer,
            {"size_ratio": Decimal(0.8)},
        ),
    }

    def __init__(self, *args, **kwargs):
        # Subpath strings accumulated by the drawers, joined in process().
        self._subpaths: List[str] = []
        super().__init__(*args, **kwargs)

    def _svg(self, viewBox=None, **kwargs):
        if viewBox is None:
            dimension = self.units(self.pixel_size, text=False)
            viewBox = "0 0 {d} {d}".format(d=dimension)
        return super()._svg(viewBox=viewBox, **kwargs)

    def process(self):
        # Store the path just in case someone wants to use it again or in some
        # unique way.
        self.path = ET.Element(
            ET.QName("path"),  # type: ignore
            d="".join(self._subpaths),
            id="qr-path",
            **self.QR_PATH_STYLE,
        )
        self._subpaths = []
        self._img.append(self.path)


class SvgFillImage(SvgImage):
    """
    An SvgImage that fills the background to white.
    """

    background = "white"


class SvgPathFillImage(SvgPathImage):
    """
    An SvgPathImage that fills the background to white.
    """

    background = "white"
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/pure.py
from itertools import chain

import png

import qrcode.image.base


class PyPNGImage(qrcode.image.base.BaseImage):
    """
    pyPNG image builder.

    Renders the QR code as a 1-bit greyscale PNG via ``png.Writer``; rows
    (including the quiet-zone border) are generated lazily by rows_iter().
    """

    kind = "PNG"
    allowed_kinds = ("PNG",)
    needs_drawrect = False

    def new_image(self, **kwargs):
        # The "image" here is just a configured pyPNG writer; pixels are
        # supplied later, at save() time, by rows_iter().
        return png.Writer(self.pixel_size, self.pixel_size, greyscale=True, bitdepth=1)

    def drawrect(self, row, col):
        """
        Not used.
        """

    def save(self, stream, kind=None):
        """Write the PNG to *stream*; a str is treated as a file path.

        BUG FIX: the original opened the path itself and never closed the
        file handle, leaking the descriptor (and, on some platforms,
        delaying the flush). Use a context manager when we own the file.
        """
        if isinstance(stream, str):
            with open(stream, "wb") as opened:
                self._img.write(opened, self.rows_iter())
        else:
            self._img.write(stream, self.rows_iter())

    def rows_iter(self):
        """Yield one list of 0/1 pixels per output row, border included."""
        yield from self.border_rows_iter()
        border_col = [1] * (self.box_size * self.border)
        for module_row in self.modules:
            # 1 = white background, 0 = dark module, scaled up by box_size.
            row = (
                border_col
                + list(
                    chain.from_iterable(
                        ([not point] * self.box_size) for point in module_row
                    )
                )
                + border_col
            )
            for _ in range(self.box_size):
                yield row
        yield from self.border_rows_iter()

    def border_rows_iter(self):
        """Yield the all-white rows that form the top/bottom quiet zone."""
        border_row = [1] * (self.box_size * (self.width + self.border * 2))
        for _ in range(self.border * self.box_size):
            yield border_row


# Keeping this for backwards compatibility.
PymagingImage = PyPNGImage
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/colormasks.py
# Needed on case-insensitive filesystems
from __future__ import absolute_import

import math

from qrcode.compat.pil import Image


class QRColorMask:
    """
    QRColorMask is used to color in the QRCode.

    By the time apply_mask is called, the QRModuleDrawer of the StyledPilImage
    will have drawn all of the modules on the canvas (the color of these
    modules will be mostly black, although antialiasing may result in
    gradients) In the base class, apply_mask is implemented such that the
    background color will remain, but the foreground pixels will be replaced
    by a color determined by a call to get_fg_pixel. There is additional
    calculation done to preserve the gradient artifacts of antialiasing.

    All QRColorMask objects should be careful about RGB vs RGBA color spaces.

    For examples of what these look like, see doc/color_masks.png
    """

    back_color = (255, 255, 255)
    has_transparency = False
    # Color the module drawers painted with; refreshed in initialize().
    paint_color = back_color

    def initialize(self, styledPilImage, image):
        self.paint_color = styledPilImage.paint_color

    def apply_mask(self, image):
        width, height = image.size
        for x in range(width):
            for y in range(height):
                # norm is how "foreground" this pixel is (0..1), recovered
                # from where it sits between back_color and paint_color —
                # this preserves antialiasing gradients.
                norm = self.extrap_color(
                    self.back_color, self.paint_color, image.getpixel((x, y))
                )
                if norm is not None:
                    image.putpixel(
                        (x, y),
                        self.interp_color(
                            self.get_bg_pixel(image, x, y),
                            self.get_fg_pixel(image, x, y),
                            norm,
                        ),
                    )
                else:
                    image.putpixel((x, y), self.get_bg_pixel(image, x, y))

    def get_fg_pixel(self, image, x, y):
        raise NotImplementedError("QRModuleDrawer.paint_fg_pixel")

    def get_bg_pixel(self, image, x, y):
        return self.back_color

    # The following functions are helpful for color calculation:

    # interpolate a number between two numbers
    def interp_num(self, n1, n2, norm):
        return int(n2 * norm + n1 * (1 - norm))

    # interpolate a color between two colors
    def interp_color(self, col1, col2, norm):
        return tuple(self.interp_num(col1[i], col2[i], norm) for i in range(len(col1)))

    # find the interpolation coefficient between two numbers
    def extrap_num(self, n1, n2, interped_num):
        if n2 == n1:
            return None
        else:
            return (interped_num - n1) / (n2 - n1)

    # find the interpolation coefficient between two colors (averaged over
    # the channels that actually differ; None if no channel differs)
    def extrap_color(self, col1, col2, interped_color):
        normed = []
        for c1, c2, ci in zip(col1, col2, interped_color):
            extrap = self.extrap_num(c1, c2, ci)
            if extrap is not None:
                normed.append(extrap)
        if not normed:
            return None
        return sum(normed) / len(normed)


class SolidFillColorMask(QRColorMask):
    """
    Just fills in the background with one color and the foreground with
    another
    """

    def __init__(self, back_color=(255, 255, 255), front_color=(0, 0, 0)):
        self.back_color = back_color
        self.front_color = front_color
        self.has_transparency = len(self.back_color) == 4

    def apply_mask(self, image):
        if self.back_color == (255, 255, 255) and self.front_color == (0, 0, 0):
            # Optimization: the image is already drawn by QRModuleDrawer in
            # black and white, so if these are also our mask colors we don't
            # need to do anything. This is much faster than actually applying a
            # mask.
            pass
        else:
            # TODO there's probably a way to use PIL.ImageMath instead of doing
            # the individual pixel comparisons that the base class uses, which
            # would be a lot faster. (In fact doing this would probably remove
            # the need for the B&W optimization above.)
            QRColorMask.apply_mask(self, image)

    def get_fg_pixel(self, image, x, y):
        return self.front_color


class RadialGradiantColorMask(QRColorMask):
    """
    Fills in the foreground with a radial gradient from the center to the edge
    """

    def __init__(
        self, back_color=(255, 255, 255), center_color=(0, 0, 0), edge_color=(0, 0, 255)
    ):
        self.back_color = back_color
        self.center_color = center_color
        self.edge_color = edge_color
        self.has_transparency = len(self.back_color) == 4

    def get_fg_pixel(self, image, x, y):
        # NOTE: width is used for both axes (QR images are square), and the
        # distance is normalized by the half-diagonal so corners map to 1.
        width, _ = image.size
        normedDistanceToCenter = math.sqrt(
            (x - width / 2) ** 2 + (y - width / 2) ** 2
        ) / (math.sqrt(2) * width / 2)
        return self.interp_color(
            self.center_color, self.edge_color, normedDistanceToCenter
        )


class SquareGradiantColorMask(QRColorMask):
    """
    Fills in the foreground with a square gradient from the center to the edge
    """

    def __init__(
        self, back_color=(255, 255, 255), center_color=(0, 0, 0), edge_color=(0, 0, 255)
    ):
        self.back_color = back_color
        self.center_color = center_color
        self.edge_color = edge_color
        self.has_transparency = len(self.back_color) == 4

    def get_fg_pixel(self, image, x, y):
        # Chebyshev (max-axis) distance gives concentric squares.
        width, _ = image.size
        normedDistanceToCenter = max(abs(x - width / 2), abs(y - width / 2)) / (
            width / 2
        )
        return self.interp_color(
            self.center_color, self.edge_color, normedDistanceToCenter
        )


class HorizontalGradiantColorMask(QRColorMask):
    """
    Fills in the foreground with a gradient sweeping from the left to the right
    """

    def __init__(
        self, back_color=(255, 255, 255), left_color=(0, 0, 0), right_color=(0, 0, 255)
    ):
        self.back_color = back_color
        self.left_color = left_color
        self.right_color = right_color
        self.has_transparency = len(self.back_color) == 4

    def get_fg_pixel(self, image, x, y):
        width, _ = image.size
        return self.interp_color(self.left_color, self.right_color, x / width)


class VerticalGradiantColorMask(QRColorMask):
    """
    Fills in the foreground with a gradient sweeping from the top to the bottom
    """

    def __init__(
        self, back_color=(255, 255, 255), top_color=(0, 0, 0), bottom_color=(0, 0, 255)
    ):
        self.back_color = back_color
        self.top_color = top_color
        self.bottom_color = bottom_color
        self.has_transparency = len(self.back_color) == 4

    def get_fg_pixel(self, image, x, y):
        # Normalizes y by width — valid because QR images are square.
        width, _ = image.size
        return self.interp_color(self.top_color, self.bottom_color, y / width)


class ImageColorMask(QRColorMask):
    """
    Fills in the foreground with pixels from another image, either passed by
    path or passed by image object.
    """

    def __init__(
        self, back_color=(255, 255, 255), color_mask_path=None, color_mask_image=None
    ):
        self.back_color = back_color
        if color_mask_image:
            self.color_img = color_mask_image
        else:
            self.color_img = Image.open(color_mask_path)

        self.has_transparency = len(self.back_color) == 4

    def initialize(self, styledPilImage, image):
        self.paint_color = styledPilImage.paint_color
        # Stretch the mask image to the QR canvas so pixels line up 1:1.
        self.color_img = self.color_img.resize(image.size)

    def get_fg_pixel(self, image, x, y):
        width, _ = image.size
        return self.color_img.getpixel((x, y))
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/__init__.py
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/moduledrawers/base.py
from __future__ import absolute_import

import abc
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from qrcode.image.base import BaseImage


class QRModuleDrawer(abc.ABC):
    """
    Abstract base class for objects that draw the modules of a QR Code onto
    an image.

    Subclasses must implement ``drawrect(self, box, is_active)``, which draws
    one module into ``box``; ``is_active`` says whether a module exists at
    that position.  Subclasses that set ``needs_neighbors`` to True are also
    passed a ``neighbors`` kwarg describing the surrounding modules.

    ``initialize`` is the hook through which the owning image class hands
    itself to the drawer; override it to pre-compute anything that depends on
    values only the image knows.

    For examples of what these look like, see doc/module_drawers.png
    """

    # Flipped to True by drawers whose drawrect() needs neighbor info.
    needs_neighbors = False

    def __init__(self, **kwargs):
        # Accept (and ignore) arbitrary keyword arguments so all drawers can
        # share a uniform construction signature.
        pass

    def initialize(self, img: "BaseImage") -> None:
        # Remember the image we will be drawing into.
        self.img = img

    @abc.abstractmethod
    def drawrect(self, box, is_active) -> None:
        """Draw one module into *box*; implemented by concrete drawers."""
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/moduledrawers/__init__.py
# For backwards compatibility, importing the PIL drawers here. try: from .pil import CircleModuleDrawer # noqa: F401 from .pil import GappedSquareModuleDrawer # noqa: F401 from .pil import HorizontalBarsDrawer # noqa: F401 from .pil import RoundedModuleDrawer # noqa: F401 from .pil import SquareModuleDrawer # noqa: F401 from .pil import VerticalBarsDrawer # noqa: F401 except ImportError: pass
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/moduledrawers/pil.py
# Needed on case-insensitive filesystems
from __future__ import absolute_import

from typing import TYPE_CHECKING, List

from qrcode.compat.pil import Image, ImageDraw
from qrcode.image.styles.moduledrawers.base import QRModuleDrawer

if TYPE_CHECKING:
    from qrcode.image.styledpil import StyledPilImage
    from qrcode.main import ActiveWithNeighbors

# When drawing antialiased things, make them bigger and then shrink them down
# to size after the geometry has been drawn.
ANTIALIASING_FACTOR = 4


class StyledPilQRModuleDrawer(QRModuleDrawer):
    """
    A base class for StyledPilImage module drawers.

    NOTE: the color that this draws in should be whatever is equivalent to
    black in the color space, and the specified QRColorMask will handle adding
    colors as necessary to the image
    """

    img: "StyledPilImage"


class SquareModuleDrawer(StyledPilQRModuleDrawer):
    """
    Draws the modules as simple squares
    """

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.imgDraw = ImageDraw.Draw(self.img._img)

    def drawrect(self, box, is_active: bool):
        if is_active:
            self.imgDraw.rectangle(box, fill=self.img.paint_color)


class GappedSquareModuleDrawer(StyledPilQRModuleDrawer):
    """
    Draws the modules as simple squares that are not contiguous.

    The size_ratio determines how wide the squares are relative to the width
    of the space they are printed in
    """

    def __init__(self, size_ratio=0.8):
        self.size_ratio = size_ratio

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.imgDraw = ImageDraw.Draw(self.img._img)
        # Margin left on each side of the square when size_ratio < 1.
        self.delta = (1 - self.size_ratio) * self.img.box_size / 2

    def drawrect(self, box, is_active: bool):
        if is_active:
            # Shrink the cell by delta on every side before filling.
            smaller_box = (
                box[0][0] + self.delta,
                box[0][1] + self.delta,
                box[1][0] - self.delta,
                box[1][1] - self.delta,
            )
            self.imgDraw.rectangle(smaller_box, fill=self.img.paint_color)


class CircleModuleDrawer(StyledPilQRModuleDrawer):
    """
    Draws the modules as circles
    """

    # The circle sprite is rendered once in initialize() and pasted per module.
    circle = None

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        box_size = self.img.box_size
        # Draw oversized, then shrink for antialiasing.
        fake_size = box_size * ANTIALIASING_FACTOR
        self.circle = Image.new(
            self.img.mode,
            (fake_size, fake_size),
            self.img.color_mask.back_color,
        )
        ImageDraw.Draw(self.circle).ellipse(
            (0, 0, fake_size, fake_size), fill=self.img.paint_color
        )
        self.circle = self.circle.resize((box_size, box_size), Image.Resampling.LANCZOS)

    def drawrect(self, box, is_active: bool):
        if is_active:
            self.img._img.paste(self.circle, (box[0][0], box[0][1]))


class RoundedModuleDrawer(StyledPilQRModuleDrawer):
    """
    Draws the modules with all 90 degree corners replaced with rounded edges.

    radius_ratio determines the radius of the rounded edges - a value of 1
    means that an isolated module will be drawn as a circle, while a value of 0
    means that the radius of the rounded edge will be 0 (and thus back to 90
    degrees again).
    """

    needs_neighbors = True

    def __init__(self, radius_ratio=1):
        self.radius_ratio = radius_ratio

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        # Each module is assembled from four quarter-size corner sprites.
        self.corner_width = int(self.img.box_size / 2)
        self.setup_corners()

    def setup_corners(self):
        """Pre-render the square corner and the four rounded-corner sprites."""
        mode = self.img.mode
        back_color = self.img.color_mask.back_color
        front_color = self.img.paint_color
        self.SQUARE = Image.new(
            mode, (self.corner_width, self.corner_width), front_color
        )

        fake_width = self.corner_width * ANTIALIASING_FACTOR
        radius = self.radius_ratio * fake_width
        diameter = radius * 2
        base = Image.new(
            mode, (fake_width, fake_width), back_color
        )  # make something 4x bigger for antialiasing
        base_draw = ImageDraw.Draw(base)
        # Quarter-circle in the NW corner, squared off on the other sides.
        base_draw.ellipse((0, 0, diameter, diameter), fill=front_color)
        base_draw.rectangle((radius, 0, fake_width, fake_width), fill=front_color)
        base_draw.rectangle((0, radius, fake_width, fake_width), fill=front_color)
        self.NW_ROUND = base.resize(
            (self.corner_width, self.corner_width), Image.Resampling.LANCZOS
        )
        # The other three corners are flips/rotations of the NW sprite.
        self.SW_ROUND = self.NW_ROUND.transpose(Image.Transpose.FLIP_TOP_BOTTOM)
        self.SE_ROUND = self.NW_ROUND.transpose(Image.Transpose.ROTATE_180)
        self.NE_ROUND = self.NW_ROUND.transpose(Image.Transpose.FLIP_LEFT_RIGHT)

    def drawrect(self, box: List[List[int]], is_active: "ActiveWithNeighbors"):
        if not is_active:
            return
        # find rounded edges: a corner is rounded only when both adjacent
        # neighboring modules are empty
        nw_rounded = not is_active.W and not is_active.N
        ne_rounded = not is_active.N and not is_active.E
        se_rounded = not is_active.E and not is_active.S
        sw_rounded = not is_active.S and not is_active.W

        nw = self.NW_ROUND if nw_rounded else self.SQUARE
        ne = self.NE_ROUND if ne_rounded else self.SQUARE
        se = self.SE_ROUND if se_rounded else self.SQUARE
        sw = self.SW_ROUND if sw_rounded else self.SQUARE

        self.img._img.paste(nw, (box[0][0], box[0][1]))
        self.img._img.paste(ne, (box[0][0] + self.corner_width, box[0][1]))
        self.img._img.paste(
            se, (box[0][0] + self.corner_width, box[0][1] + self.corner_width)
        )
        self.img._img.paste(sw, (box[0][0], box[0][1] + self.corner_width))


class VerticalBarsDrawer(StyledPilQRModuleDrawer):
    """
    Draws vertically contiguous groups of modules as long rounded rectangles,
    with gaps between neighboring bands (the size of these gaps is inversely
    proportional to the horizontal_shrink).
    """

    needs_neighbors = True

    def __init__(self, horizontal_shrink=0.8):
        self.horizontal_shrink = horizontal_shrink

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        # Each module is drawn as a top half + bottom half sprite pair.
        self.half_height = int(self.img.box_size / 2)
        self.delta = int((1 - self.horizontal_shrink) * self.half_height)
        self.setup_edges()

    def setup_edges(self):
        """Pre-render the straight segment and the rounded band end caps."""
        mode = self.img.mode
        back_color = self.img.color_mask.back_color
        front_color = self.img.paint_color

        height = self.half_height
        width = height * 2
        shrunken_width = int(width * self.horizontal_shrink)
        self.SQUARE = Image.new(mode, (shrunken_width, height), front_color)

        fake_width = width * ANTIALIASING_FACTOR
        fake_height = height * ANTIALIASING_FACTOR
        base = Image.new(
            mode, (fake_width, fake_height), back_color
        )  # make something 4x bigger for antialiasing
        base_draw = ImageDraw.Draw(base)
        # Top half of an ellipse forms the rounded cap.
        base_draw.ellipse((0, 0, fake_width, fake_height * 2), fill=front_color)

        self.ROUND_TOP = base.resize((shrunken_width, height), Image.Resampling.LANCZOS)
        self.ROUND_BOTTOM = self.ROUND_TOP.transpose(Image.Transpose.FLIP_TOP_BOTTOM)

    def drawrect(self, box, is_active: "ActiveWithNeighbors"):
        if is_active:
            # find rounded edges: cap the band only where it starts/ends
            top_rounded = not is_active.N
            bottom_rounded = not is_active.S

            top = self.ROUND_TOP if top_rounded else self.SQUARE
            bottom = self.ROUND_BOTTOM if bottom_rounded else self.SQUARE
            self.img._img.paste(top, (box[0][0] + self.delta, box[0][1]))
            self.img._img.paste(
                bottom, (box[0][0] + self.delta, box[0][1] + self.half_height)
            )


class HorizontalBarsDrawer(StyledPilQRModuleDrawer):
    """
    Draws horizontally contiguous groups of modules as long rounded
    rectangles, with gaps between neighboring bands (the size of these gaps is
    inversely proportional to the vertical_shrink).
    """

    needs_neighbors = True

    def __init__(self, vertical_shrink=0.8):
        self.vertical_shrink = vertical_shrink

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        # Each module is drawn as a left half + right half sprite pair.
        self.half_width = int(self.img.box_size / 2)
        self.delta = int((1 - self.vertical_shrink) * self.half_width)
        self.setup_edges()

    def setup_edges(self):
        """Pre-render the straight segment and the rounded band end caps."""
        mode = self.img.mode
        back_color = self.img.color_mask.back_color
        front_color = self.img.paint_color

        width = self.half_width
        height = width * 2
        shrunken_height = int(height * self.vertical_shrink)
        self.SQUARE = Image.new(mode, (width, shrunken_height), front_color)

        fake_width = width * ANTIALIASING_FACTOR
        fake_height = height * ANTIALIASING_FACTOR
        base = Image.new(
            mode, (fake_width, fake_height), back_color
        )  # make something 4x bigger for antialiasing
        base_draw = ImageDraw.Draw(base)
        # Left half of an ellipse forms the rounded cap.
        base_draw.ellipse((0, 0, fake_width * 2, fake_height), fill=front_color)

        self.ROUND_LEFT = base.resize(
            (width, shrunken_height), Image.Resampling.LANCZOS
        )
        self.ROUND_RIGHT = self.ROUND_LEFT.transpose(Image.Transpose.FLIP_LEFT_RIGHT)

    def drawrect(self, box, is_active: "ActiveWithNeighbors"):
        if is_active:
            # find rounded edges: cap the band only where it starts/ends
            left_rounded = not is_active.W
            right_rounded = not is_active.E

            left = self.ROUND_LEFT if left_rounded else self.SQUARE
            right = self.ROUND_RIGHT if right_rounded else self.SQUARE
            self.img._img.paste(left, (box[0][0], box[0][1] + self.delta))
            self.img._img.paste(
                right, (box[0][0] + self.half_width, box[0][1] + self.delta)
            )
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/image/styles/moduledrawers/svg.py
import abc
from decimal import Decimal
from typing import TYPE_CHECKING, NamedTuple

from qrcode.image.styles.moduledrawers.base import QRModuleDrawer
from qrcode.compat.etree import ET

if TYPE_CHECKING:
    from qrcode.image.svg import SvgFragmentImage, SvgPathImage

# Antialiased geometry is drawn this many times oversized, then scaled down.
ANTIALIASING_FACTOR = 4


class Coords(NamedTuple):
    """Corner (x0,y0)/(x1,y1) and center (xh,yh) coordinates of one module."""

    x0: Decimal
    y0: Decimal
    x1: Decimal
    y1: Decimal
    xh: Decimal
    yh: Decimal


class BaseSvgQRModuleDrawer(QRModuleDrawer):
    """Shared geometry bookkeeping for every SVG module drawer."""

    img: "SvgFragmentImage"

    def __init__(self, *, size_ratio: Decimal = Decimal(1), **kwargs):
        # A size_ratio below 1 shrinks each module inside its cell, which is
        # how the "gapped" drawer aliases are produced.
        self.size_ratio = size_ratio

    def initialize(self, *args, **kwargs) -> None:
        super().initialize(*args, **kwargs)
        # Margin left on each side of the module when size_ratio < 1.
        self.box_delta = (1 - self.size_ratio) * self.img.box_size / 2
        # Effective (possibly shrunken) module size, and its half.
        self.box_size = Decimal(self.img.box_size) * self.size_ratio
        self.box_half = self.box_size / 2

    def coords(self, box) -> Coords:
        """Compute corner and center coordinates for the module at *box*."""
        origin_x, origin_y = box[0]
        left = origin_x + self.box_delta
        top = origin_y + self.box_delta
        return Coords(
            left,
            top,
            left + self.box_size,
            top + self.box_size,
            left + self.box_half,
            top + self.box_half,
        )


class SvgQRModuleDrawer(BaseSvgQRModuleDrawer):
    """Drawer that appends one SVG element (``self.tag``) per active module."""

    tag = "rect"

    def initialize(self, *args, **kwargs) -> None:
        super().initialize(*args, **kwargs)
        # Resolve the tag into the image's SVG namespace once, up front.
        self.tag_qname = ET.QName(self.img._SVG_namespace, self.tag)

    def drawrect(self, box, is_active: bool):
        if is_active:
            self.img._img.append(self.el(box))

    @abc.abstractmethod
    def el(self, box):
        """Build and return the SVG element for the module at *box*."""
class SvgSquareDrawer(SvgQRModuleDrawer):
    """Draws each active module as an SVG <rect> element."""

    def initialize(self, *args, **kwargs) -> None:
        super().initialize(*args, **kwargs)
        # The side length is identical for every module; compute it once.
        self.unit_size = self.img.units(self.box_size)

    def el(self, box):
        coords = self.coords(box)
        return ET.Element(
            self.tag_qname,  # type: ignore
            x=self.img.units(coords.x0),
            y=self.img.units(coords.y0),
            width=self.unit_size,
            height=self.unit_size,
        )


class SvgCircleDrawer(SvgQRModuleDrawer):
    """Draws each active module as an SVG <circle> element."""

    tag = "circle"

    def initialize(self, *args, **kwargs) -> None:
        super().initialize(*args, **kwargs)
        # The radius is identical for every module; compute it once.
        self.radius = self.img.units(self.box_half)

    def el(self, box):
        coords = self.coords(box)
        return ET.Element(
            self.tag_qname,  # type: ignore
            cx=self.img.units(coords.xh),
            cy=self.img.units(coords.yh),
            r=self.radius,
        )


class SvgPathQRModuleDrawer(BaseSvgQRModuleDrawer):
    """Base for drawers that accumulate subpath strings on an SvgPathImage;
    the image's process() step later joins them into one <path> element."""

    img: "SvgPathImage"

    def drawrect(self, box, is_active: bool):
        if not is_active:
            return
        self.img._subpaths.append(self.subpath(box))

    @abc.abstractmethod
    def subpath(self, box) -> str:
        """Return the SVG path-data fragment for the module at *box*."""
        ...


class SvgPathSquareDrawer(SvgPathQRModuleDrawer):
    """Square module rendered as a move + three line commands."""

    def subpath(self, box) -> str:
        coords = self.coords(box)
        x0 = self.img.units(coords.x0, text=False)
        y0 = self.img.units(coords.y0, text=False)
        x1 = self.img.units(coords.x1, text=False)
        y1 = self.img.units(coords.y1, text=False)

        # Move to the NW corner, then horizontal/vertical/horizontal lines
        # around the square; "z" closes the subpath.
        return f"M{x0},{y0}H{x1}V{y1}H{x0}z"


class SvgPathCircleDrawer(SvgPathQRModuleDrawer):
    """Circular module rendered as two semicircular arc commands."""

    def initialize(self, *args, **kwargs) -> None:
        super().initialize(*args, **kwargs)

    def subpath(self, box) -> str:
        coords = self.coords(box)
        x0 = self.img.units(coords.x0, text=False)
        yh = self.img.units(coords.yh, text=False)
        h = self.img.units(self.box_half - self.box_delta, text=False)
        x1 = self.img.units(coords.x1, text=False)

        # Each "A" command is: A rx,ry x-axis-rotation large-arc-flag
        # sweep-flag x,y — here rx=ry=h (the module radius), no rotation,
        # small arc, counter-clockwise sweep, ending at the opposite side of
        # the circle at mid-height.  Two arcs complete the circle; "z" closes.
        return f"M{x0},{yh}A{h},{h} 0 0 0 {x1},{yh}A{h},{h} 0 0 0 {x0},{yh}z"
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/compat/etree.py
# Prefer lxml's faster etree implementation when it is installed; otherwise
# fall back to the standard library's ElementTree under the same ``ET`` alias
# so callers are unaware of which backend is in use.
try:
    import lxml.etree as ET  # type: ignore  # noqa: F401
except ImportError:
    import xml.etree.ElementTree as ET  # type: ignore  # noqa: F401
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/compat/__init__.py
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/compat/pil.py
# Try to import PIL in either of the two ways it can be installed. Image = None ImageDraw = None try: from PIL import Image, ImageDraw # type: ignore # noqa: F401 except ImportError: # pragma: no cover try: import Image # type: ignore # noqa: F401 import ImageDraw # type: ignore # noqa: F401 except ImportError: pass
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_release.py
import re
import builtins
import datetime
import unittest
from unittest import mock

from qrcode.release import update_manpage

# Patch target for the builtin open() used by update_manpage.
OPEN = f"{builtins.__name__}.open"
DATA = 'test\n.TH "date" "version" "description"\nthis'


class UpdateManpageTests(unittest.TestCase):
    """Tests for qrcode.release.update_manpage, which rewrites the manpage's
    .TH header (date + version) during a release."""

    @mock.patch(OPEN, new_callable=mock.mock_open, read_data=".TH invalid")
    def test_invalid_data(self, mock_file):
        # A malformed .TH line is read but must never be rewritten.
        update_manpage({"name": "qrcode", "new_version": "1.23"})
        mock_file.assert_called()
        mock_file().write.assert_not_called()

    @mock.patch(OPEN, new_callable=mock.mock_open, read_data=DATA)
    def test_not_qrcode(self, mock_file):
        # Releasing a different package must not touch the manpage at all.
        update_manpage({"name": "not-qrcode"})
        mock_file.assert_not_called()

    @mock.patch(OPEN, new_callable=mock.mock_open, read_data=DATA)
    def test_no_change(self, mock_file):
        # Identical version: file is read but not rewritten.
        update_manpage({"name": "qrcode", "new_version": "version"})
        mock_file.assert_called()
        mock_file().write.assert_not_called()

    @mock.patch(OPEN, new_callable=mock.mock_open, read_data=DATA)
    def test_change(self, mock_file):
        update_manpage({"name": "qrcode", "new_version": "3.11"})
        # Split DATA into lines that keep their trailing newline.
        expected = re.split(r"([^\n]*(?:\n|$))", DATA)[1::2]
        expected[1] = (
            expected[1]
            .replace("version", "3.11")
            # NOTE(review): "%-d" (unpadded day) is a glibc strftime
            # extension and fails on Windows — mirrors update_manpage's own
            # formatting, so keep them in sync.
            .replace("date", datetime.datetime.now().strftime("%-d %b %Y"))
        )
        # BUG FIX: the original called ``write.has_calls(...)`` — that is just
        # an auto-created Mock attribute, so the test asserted nothing.
        # ``assert_has_calls`` actually verifies the written lines.
        mock_file().write.assert_has_calls([mock.call(line) for line in expected])
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/__init__.py
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_util.py
import unittest

from qrcode import util


class UtilTests(unittest.TestCase):
    """Sanity checks for qrcode.util helpers."""

    def test_check_wrong_version(self):
        # Versions just outside the valid range must both be rejected.
        for bad_version in (0, 41):
            with self.assertRaises(ValueError):
                util.check_version(bad_version)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_script.py
import io
import os
import sys
import unittest
from tempfile import mkdtemp
from unittest import mock

from qrcode.compat.pil import Image
from qrcode.console_scripts import commas, main


def bad_read():
    # Simulates a text-mode stdin whose read() fails on undecodable bytes.
    raise UnicodeDecodeError("utf-8", b"0x80", 0, 1, "invalid start byte")


class ScriptTest(unittest.TestCase):
    """Command-line interface tests for the ``qr`` console script."""

    def setUp(self):
        self.tmpdir = mkdtemp()

    def tearDown(self):
        # tests that write into tmpdir remove their own files first
        os.rmdir(self.tmpdir)

    @mock.patch("os.isatty", lambda *args: True)
    @mock.patch("qrcode.main.QRCode.print_ascii")
    def test_isatty(self, mock_print_ascii):
        # On a TTY the script renders ASCII art instead of binary output.
        main(["testtext"])
        mock_print_ascii.assert_called_with(tty=True)

    @mock.patch("os.isatty", lambda *args: False)
    @mock.patch("sys.stdout")
    @unittest.skipIf(not Image, "Requires PIL")
    def test_piped(self, mock_stdout):
        # Non-TTY stdout: image bytes are written to the (mocked) stream.
        main(["testtext"])

    @mock.patch("os.isatty", lambda *args: True)
    @mock.patch("qrcode.main.QRCode.print_ascii")
    @mock.patch("sys.stdin")
    def test_stdin(self, mock_stdin, mock_print_ascii):
        # No positional data: the script falls back to reading stdin.
        mock_stdin.buffer.read.return_value = "testtext"
        main([])
        self.assertTrue(mock_stdin.buffer.read.called)
        mock_print_ascii.assert_called_with(tty=True)

    @mock.patch("os.isatty", lambda *args: True)
    @mock.patch("qrcode.main.QRCode.print_ascii")
    def test_stdin_py3_unicodedecodeerror(self, mock_print_ascii):
        # The script must read stdin.buffer (bytes), not stdin (text), so a
        # decoding error in the text layer must not break it.
        mock_stdin = mock.Mock(sys.stdin)
        mock_stdin.buffer.read.return_value = "testtext"
        mock_stdin.read.side_effect = bad_read
        with mock.patch("sys.stdin", mock_stdin):
            # sys.stdin.read() will raise an error...
            self.assertRaises(UnicodeDecodeError, sys.stdin.read)
            # ... but it won't be used now.
            main([])
        mock_print_ascii.assert_called_with(tty=True)

    @mock.patch("os.isatty", lambda *args: True)
    @mock.patch("qrcode.main.QRCode.print_ascii")
    def test_optimize(self, mock_print_ascii):
        main("testtext --optimize 0".split())

    @mock.patch("sys.stdout")
    def test_factory(self, mock_stdout):
        main("testtext --factory svg".split())

    @mock.patch("sys.stderr")
    def test_bad_factory(self, mock_stderr):
        # Unknown factory name exits via argparse error handling.
        self.assertRaises(SystemExit, main, "testtext --factory fish".split())

    @mock.patch.object(sys, "argv", "qr testtext output".split())
    @unittest.skipIf(not Image, "Requires PIL")
    def test_sys_argv(self):
        # main() with no argument list falls back to sys.argv.
        main()

    @unittest.skipIf(not Image, "Requires PIL")
    def test_output(self):
        tmpfile = os.path.join(self.tmpdir, "test.png")
        main(["testtext", "--output", tmpfile])
        os.remove(tmpfile)

    @mock.patch("sys.stderr", new_callable=io.StringIO)
    @unittest.skipIf(not Image, "Requires PIL")
    def test_factory_drawer_none(self, mock_stderr):
        # The pil factory defines no drawer aliases at all.
        with self.assertRaises(SystemExit):
            main("testtext --factory pil --factory-drawer nope".split())
        self.assertIn(
            "The selected factory has no drawer aliases", mock_stderr.getvalue()
        )

    @mock.patch("sys.stderr", new_callable=io.StringIO)
    def test_factory_drawer_bad(self, mock_stderr):
        # The svg factory has aliases, but not this one.
        with self.assertRaises(SystemExit):
            main("testtext --factory svg --factory-drawer sobad".split())
        self.assertIn("sobad factory drawer not found", mock_stderr.getvalue())

    @mock.patch("sys.stderr", new_callable=io.StringIO)
    def test_factory_drawer(self, mock_stderr):
        main("testtext --factory svg --factory-drawer circle".split())

    def test_commas(self):
        self.assertEqual(commas([]), "")
        self.assertEqual(commas(["A"]), "A")
        self.assertEqual(commas("AB"), "A or B")
        self.assertEqual(commas("ABC"), "A, B or C")
        self.assertEqual(commas("ABC", joiner="and"), "A, B and C")
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_qrcode_svg.py
import io
import os
import unittest
from tempfile import mkdtemp

import qrcode
from qrcode.image import svg

UNICODE_TEXT = "\u03b1\u03b2\u03b3"


class SvgImageWhite(svg.SvgImage):
    """SvgImage variant that paints a white background rectangle."""

    background = "white"


class QRCodeSvgTests(unittest.TestCase):
    """Rendering smoke tests for each of the SVG image factories."""

    def setUp(self):
        self.tmpdir = mkdtemp()

    def tearDown(self):
        os.rmdir(self.tmpdir)

    def _render(self, factory, **make_kwargs):
        # Build a QR image of the shared unicode sample with *factory*.
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        return qr.make_image(image_factory=factory, **make_kwargs)

    def test_render_svg(self):
        self._render(svg.SvgImage).save(io.BytesIO())

    def test_render_svg_path(self):
        self._render(svg.SvgPathImage).save(io.BytesIO())

    def test_render_svg_fragment(self):
        self._render(svg.SvgFragmentImage).save(io.BytesIO())

    def test_svg_string(self):
        img = self._render(svg.SvgFragmentImage)
        buffer = io.BytesIO()
        img.save(buffer)
        buffer.seek(0)
        # to_string() must contain exactly what save() serialized.
        assert buffer.read() in img.to_string()

    def test_render_svg_with_background(self):
        self._render(SvgImageWhite).save(io.BytesIO())

    def test_svg_circle_drawer(self):
        self._render(svg.SvgPathImage, module_drawer="circle").save(io.BytesIO())
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_qrcode.py
"""Test suite for the core ``qrcode`` API: data modes, image factories,
styled rendering, optimization, and terminal output."""
import io
import os
import unittest
import warnings
from tempfile import mkdtemp
from unittest import mock

import png

import qrcode
import qrcode.util
from qrcode.compat.pil import Image as pil_Image
from qrcode.exceptions import DataOverflowError
from qrcode.image.base import BaseImage
from qrcode.image.pure import PyPNGImage
from qrcode.image.styledpil import StyledPilImage
from qrcode.image.styles import colormasks, moduledrawers
from qrcode.util import MODE_8BIT_BYTE, MODE_ALPHA_NUM, MODE_NUMBER, QRData

# Sample multi-byte payload (Greek alpha/beta/gamma) to exercise 8-bit mode.
UNICODE_TEXT = "\u03b1\u03b2\u03b3"
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)


class QRCodeTests(unittest.TestCase):
    def setUp(self):
        self.tmpdir = mkdtemp()

    def tearDown(self):
        # os.rmdir only removes an empty directory, so each test that
        # writes into tmpdir must clean up its own files.
        os.rmdir(self.tmpdir)

    def test_basic(self):
        qr = qrcode.QRCode(version=1)
        qr.add_data("a")
        qr.make(fit=False)

    def test_large(self):
        qr = qrcode.QRCode(version=27)
        qr.add_data("a")
        qr.make(fit=False)

    def test_invalid_version(self):
        self.assertRaises(ValueError, qrcode.QRCode, version=41)

    def test_invalid_border(self):
        self.assertRaises(ValueError, qrcode.QRCode, border=-1)

    def test_overflow(self):
        qr = qrcode.QRCode(version=1)
        qr.add_data("abcdefghijklmno")
        self.assertRaises(DataOverflowError, qr.make, fit=False)

    def test_add_qrdata(self):
        qr = qrcode.QRCode(version=1)
        data = QRData("a")
        qr.add_data(data)
        qr.make(fit=False)

    def test_fit(self):
        qr = qrcode.QRCode()
        qr.add_data("a")
        qr.make()
        self.assertEqual(qr.version, 1)
        qr.add_data("bcdefghijklmno")
        qr.make()
        self.assertEqual(qr.version, 2)

    # --- data-mode selection (optimize=0 disables segment optimization) ---

    def test_mode_number(self):
        qr = qrcode.QRCode()
        qr.add_data("1234567890123456789012345678901234", optimize=0)
        qr.make()
        self.assertEqual(qr.version, 1)
        self.assertEqual(qr.data_list[0].mode, MODE_NUMBER)

    def test_mode_alpha(self):
        qr = qrcode.QRCode()
        qr.add_data("ABCDEFGHIJ1234567890", optimize=0)
        qr.make()
        self.assertEqual(qr.version, 1)
        self.assertEqual(qr.data_list[0].mode, MODE_ALPHA_NUM)

    def test_regression_mode_comma(self):
        # "," is not in the alphanumeric charset and must fall back to 8-bit.
        qr = qrcode.QRCode()
        qr.add_data(",", optimize=0)
        qr.make()
        self.assertEqual(qr.data_list[0].mode, MODE_8BIT_BYTE)

    def test_mode_8bit(self):
        qr = qrcode.QRCode()
        qr.add_data("abcABC" + UNICODE_TEXT, optimize=0)
        qr.make()
        self.assertEqual(qr.version, 1)
        self.assertEqual(qr.data_list[0].mode, MODE_8BIT_BYTE)

    def test_mode_8bit_newline(self):
        qr = qrcode.QRCode()
        qr.add_data("ABCDEFGHIJ1234567890\n", optimize=0)
        qr.make()
        self.assertEqual(qr.data_list[0].mode, MODE_8BIT_BYTE)

    # --- PIL rendering ---

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_pil(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image()
        img.save(io.BytesIO())
        self.assertIsInstance(img.get_image(), pil_Image.Image)

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_pil_with_transparent_background(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(back_color="TransParent")
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_pil_with_red_background(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(back_color="red")
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_pil_with_rgb_color_tuples(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(back_color=(255, 195, 235), fill_color=(55, 95, 35))
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_with_pattern(self):
        qr = qrcode.QRCode(mask_pattern=3)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image()
        img.save(io.BytesIO())

    def test_make_image_with_wrong_pattern(self):
        with self.assertRaises(TypeError):
            qrcode.QRCode(mask_pattern="string pattern")
        with self.assertRaises(ValueError):
            qrcode.QRCode(mask_pattern=-1)
        with self.assertRaises(ValueError):
            qrcode.QRCode(mask_pattern=42)

    def test_mask_pattern_setter(self):
        qr = qrcode.QRCode()
        with self.assertRaises(TypeError):
            qr.mask_pattern = "string pattern"
        with self.assertRaises(ValueError):
            qr.mask_pattern = -1
        with self.assertRaises(ValueError):
            qr.mask_pattern = 8

    def test_qrcode_bad_factory(self):
        with self.assertRaises(TypeError):
            qrcode.QRCode(image_factory="not_BaseImage")  # type: ignore
        with self.assertRaises(AssertionError):
            qrcode.QRCode(image_factory=dict)  # type: ignore

    def test_qrcode_factory(self):
        class MockFactory(BaseImage):
            drawrect = mock.Mock()
            new_image = mock.Mock()

        qr = qrcode.QRCode(image_factory=MockFactory)
        qr.add_data(UNICODE_TEXT)
        qr.make_image()
        self.assertTrue(MockFactory.new_image.called)
        self.assertTrue(MockFactory.drawrect.called)

    # --- pure-Python PNG rendering (no PIL needed) ---

    def test_render_pypng(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(image_factory=PyPNGImage)
        self.assertIsInstance(img.get_image(), png.Writer)
        print(img.width, img.box_size, img.border)
        img.save(io.BytesIO())

    def test_render_pypng_to_str(self):
        qr = qrcode.QRCode()
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(image_factory=PyPNGImage)
        self.assertIsInstance(img.get_image(), png.Writer)

        mock_open = mock.mock_open()
        with mock.patch("qrcode.image.pure.open", mock_open, create=True):
            img.save("test_file.png")
        mock_open.assert_called_once_with("test_file.png", "wb")
        mock_open("test_file.png", "wb").write.assert_called()

    # --- styled PIL rendering: module drawers and color masks ---
    # NOTE(review): "embeded" below follows the qrcode API's own spelling
    # of these keyword arguments; do not "correct" it.

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_Image(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(image_factory=StyledPilImage)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_embeded_image(self):
        embeded_img = pil_Image.new("RGB", (10, 10), color="red")
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(image_factory=StyledPilImage, embeded_image=embeded_img)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_embeded_image_path(self):
        tmpfile = os.path.join(self.tmpdir, "test.png")
        embeded_img = pil_Image.new("RGB", (10, 10), color="red")
        embeded_img.save(tmpfile)
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(image_factory=StyledPilImage, embeded_image_path=tmpfile)
        img.save(io.BytesIO())
        os.remove(tmpfile)

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_square_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.SquareModuleDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_gapped_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.GappedSquareModuleDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_circle_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.CircleModuleDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_rounded_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.RoundedModuleDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_vertical_bars_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.VerticalBarsDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_horizontal_bars_module_drawer(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        img = qr.make_image(
            image_factory=StyledPilImage,
            module_drawer=moduledrawers.HorizontalBarsDrawer(),
        )
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_default_solid_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.SolidFillColorMask()
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_solid_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.SolidFillColorMask(back_color=WHITE, front_color=RED)
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_color_mask_with_transparency(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.SolidFillColorMask(
            back_color=(255, 0, 255, 255), front_color=RED
        )
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())
        # An RGBA back color must propagate to the output image mode.
        assert img.mode == "RGBA"

    # NOTE(review): "Gradiant" is the qrcode library's own (misspelled)
    # class-name spelling; it must match the API.

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_radial_gradient_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.RadialGradiantColorMask(
            back_color=WHITE, center_color=BLACK, edge_color=RED
        )
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_square_gradient_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.SquareGradiantColorMask(
            back_color=WHITE, center_color=BLACK, edge_color=RED
        )
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_horizontal_gradient_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.HorizontalGradiantColorMask(
            back_color=WHITE, left_color=RED, right_color=BLACK
        )
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_vertical_gradient_color_mask(self):
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.VerticalGradiantColorMask(
            back_color=WHITE, top_color=RED, bottom_color=BLACK
        )
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    @unittest.skipIf(not pil_Image, "Requires PIL")
    def test_render_styled_with_image_color_mask(self):
        img_mask = pil_Image.new("RGB", (10, 10), color="red")
        qr = qrcode.QRCode(error_correction=qrcode.ERROR_CORRECT_L)
        qr.add_data(UNICODE_TEXT)
        mask = colormasks.ImageColorMask(back_color=WHITE, color_mask_image=img_mask)
        img = qr.make_image(image_factory=StyledPilImage, color_mask=mask)
        img.save(io.BytesIO())

    # --- segment optimization ---

    def test_optimize(self):
        qr = qrcode.QRCode()
        text = "A1abc12345def1HELLOa"
        qr.add_data(text, optimize=4)
        qr.make()
        self.assertEqual(
            [d.mode for d in qr.data_list],
            [
                MODE_8BIT_BYTE,
                MODE_NUMBER,
                MODE_8BIT_BYTE,
                MODE_ALPHA_NUM,
                MODE_8BIT_BYTE,
            ],
        )
        self.assertEqual(qr.version, 2)

    def test_optimize_short(self):
        qr = qrcode.QRCode()
        text = "A1abc1234567def1HELLOa"
        qr.add_data(text, optimize=7)
        qr.make()
        self.assertEqual(len(qr.data_list), 3)
        self.assertEqual(
            [d.mode for d in qr.data_list],
            [MODE_8BIT_BYTE, MODE_NUMBER, MODE_8BIT_BYTE],
        )
        self.assertEqual(qr.version, 2)

    def test_optimize_longer_than_data(self):
        qr = qrcode.QRCode()
        text = "ABCDEFGHIJK"
        qr.add_data(text, optimize=12)
        self.assertEqual(len(qr.data_list), 1)
        self.assertEqual(qr.data_list[0].mode, MODE_ALPHA_NUM)

    def test_optimize_size(self):
        text = "A1abc12345123451234512345def1HELLOHELLOHELLOHELLOa" * 5

        qr = qrcode.QRCode()
        qr.add_data(text)
        qr.make()
        self.assertEqual(qr.version, 10)

        qr = qrcode.QRCode()
        qr.add_data(text, optimize=0)
        qr.make()
        self.assertEqual(qr.version, 11)

    def test_qrdata_repr(self):
        data = b"hello"
        data_obj = qrcode.util.QRData(data)
        self.assertEqual(repr(data_obj), repr(data))

    # --- terminal (ASCII / tty) output ---

    def test_print_ascii_stdout(self):
        qr = qrcode.QRCode()
        with mock.patch("sys.stdout") as fake_stdout:
            fake_stdout.isatty.return_value = None
            self.assertRaises(OSError, qr.print_ascii, tty=True)
            self.assertTrue(fake_stdout.isatty.called)

    def test_print_ascii(self):
        qr = qrcode.QRCode(border=0)
        f = io.StringIO()
        qr.print_ascii(out=f)
        printed = f.getvalue()
        f.close()
        expected = "\u2588\u2580\u2580\u2580\u2580\u2580\u2588"
        self.assertEqual(printed[: len(expected)], expected)

        f = io.StringIO()
        f.isatty = lambda: True
        qr.print_ascii(out=f, tty=True)
        printed = f.getvalue()
        f.close()
        expected = (
            "\x1b[48;5;232m\x1b[38;5;255m" + "\xa0\u2584\u2584\u2584\u2584\u2584\xa0"
        )
        self.assertEqual(printed[: len(expected)], expected)

    def test_print_tty_stdout(self):
        qr = qrcode.QRCode()
        with mock.patch("sys.stdout") as fake_stdout:
            fake_stdout.isatty.return_value = None
            self.assertRaises(OSError, qr.print_tty)
            self.assertTrue(fake_stdout.isatty.called)

    def test_print_tty(self):
        qr = qrcode.QRCode()
        f = io.StringIO()
        f.isatty = lambda: True
        qr.print_tty(out=f)
        printed = f.getvalue()
        f.close()
        BOLD_WHITE_BG = "\x1b[1;47m"
        BLACK_BG = "\x1b[40m"
        WHITE_BLOCK = BOLD_WHITE_BG + " " + BLACK_BG
        EOL = "\x1b[0m\n"
        expected = (
            BOLD_WHITE_BG + " " * 23 + EOL + WHITE_BLOCK + " " * 7 + WHITE_BLOCK
        )
        self.assertEqual(printed[: len(expected)], expected)

    def test_get_matrix(self):
        qr = qrcode.QRCode(border=0)
        qr.add_data("1")
        self.assertEqual(qr.get_matrix(), qr.modules)

    def test_get_matrix_border(self):
        qr = qrcode.QRCode(border=1)
        qr.add_data("1")
        # Stripping one cell of border on each side must recover the raw
        # module matrix.
        matrix = [row[1:-1] for row in qr.get_matrix()[1:-1]]
        self.assertEqual(matrix, qr.modules)

    def test_negative_size_at_construction(self):
        self.assertRaises(ValueError, qrcode.QRCode, box_size=-1)

    def test_negative_size_at_usage(self):
        qr = qrcode.QRCode()
        qr.box_size = -1
        self.assertRaises(ValueError, qr.make_image)


class ShortcutTest(unittest.TestCase):
    @unittest.skipIf(not pil_Image, "Requires PIL")
    def runTest(self):
        # Module-level convenience shortcut; should build an image directly.
        qrcode.make("image")
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/qrcode/tests/test_example.py
"""Smoke test for the package-level ``run_example`` helper."""
import unittest
from unittest import mock

from qrcode import run_example
from qrcode.compat.pil import Image


class ExampleTest(unittest.TestCase):
    @unittest.skipIf(not Image, "Requires PIL")
    @mock.patch("PIL.Image.Image.show")
    def runTest(self, show_patch):
        # Patching ``show`` keeps the test headless while still verifying
        # that the example would have displayed its image.
        run_example()
        show_patch.assert_called_with()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/anyio-3.7.1.dist-info/top_level.txt
anyio
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/anyio-3.7.1.dist-info/entry_points.txt
[pytest11] anyio = anyio.pytest_plugin
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pathtools/patterns.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# patterns.py: Common wildcard searching/filtering functionality for files.
#
# Copyright (C) 2010 Yesudeep Mangalapilly <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""
:module: pathtools.patterns
:synopsis: Wildcard pattern matching and filtering functions for paths.
:author: Yesudeep Mangalapilly <[email protected]>

Functions
---------
.. autofunction:: match_path
.. autofunction:: match_path_against
.. autofunction:: filter_paths
"""

from fnmatch import fnmatch, fnmatchcase

__all__ = ['match_path',
           'match_path_against',
           'match_any_paths',
           'filter_paths']


def _string_lower(s):
    """
    Convenience function to lowercase a string (the :mod:`string` module is
    deprecated/removed in Python 3.0).

    :param s:
        The string which will be lowercased.
    :returns:
        Lowercased copy of string s.
    """
    return s.lower()


def match_path_against(pathname, patterns, case_sensitive=True):
    """
    Determines whether the pathname matches any of the given wildcard patterns,
    optionally ignoring the case of the pathname and patterns.

    :param pathname:
        A path name that will be matched against a wildcard pattern.
    :param patterns:
        A list of wildcard patterns to match_path the filename against.
    :param case_sensitive:
        ``True`` if the matching should be case-sensitive; ``False`` otherwise.
    :returns:
        ``True`` if the pattern matches; ``False`` otherwise.

    Doctests::
        >>> match_path_against("/home/username/foobar/blah.py", ["*.py", "*.txt"], False)
        True
        >>> match_path_against("/home/username/foobar/blah.py", ["*.PY", "*.txt"], True)
        False
        >>> match_path_against("/home/username/foobar/blah.py", ["*.PY", "*.txt"], False)
        True
        >>> match_path_against("C:\\windows\\blah\\BLAH.PY", ["*.py", "*.txt"], True)
        False
        >>> match_path_against("C:\\windows\\blah\\BLAH.PY", ["*.py", "*.txt"], False)
        True
    """
    if case_sensitive:
        match_func = fnmatchcase
        pattern_transform_func = (lambda w: w)
    else:
        match_func = fnmatch
        # Normalize the pathname once; each pattern is normalized in the
        # loop below.
        pathname = pathname.lower()
        pattern_transform_func = _string_lower
    # set() drops duplicate patterns before testing.
    for pattern in set(patterns):
        pattern = pattern_transform_func(pattern)
        if match_func(pathname, pattern):
            return True
    return False


def _match_path(pathname,
                included_patterns,
                excluded_patterns,
                case_sensitive=True):
    """Internal function same as :func:`match_path` but does not check arguments.

    Doctests::
        >>> _match_path("/users/gorakhargosh/foobar.py", ["*.py"], ["*.PY"], True)
        True
        >>> _match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], True)
        False
        >>> _match_path("/users/gorakhargosh/foobar/", ["*.py"], ["*.txt"], False)
        False
        >>> _match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], False)
        Traceback (most recent call last):
            ...
        ValueError: conflicting patterns `{'*.py'}` included and excluded
    """
    if not case_sensitive:
        included_patterns = set(map(_string_lower, included_patterns))
        excluded_patterns = set(map(_string_lower, excluded_patterns))
    else:
        included_patterns = set(included_patterns)
        excluded_patterns = set(excluded_patterns)
    # A pattern that is simultaneously included and excluded is ambiguous;
    # refuse rather than guess.
    common_patterns = included_patterns & excluded_patterns
    if common_patterns:
        raise ValueError('conflicting patterns `%s` included and excluded'
                         % common_patterns)
    return (match_path_against(pathname, included_patterns, case_sensitive)
            and not match_path_against(pathname, excluded_patterns,
                                       case_sensitive))


def match_path(pathname,
               included_patterns=None,
               excluded_patterns=None,
               case_sensitive=True):
    """
    Matches a pathname against a set of acceptable and ignored patterns.

    :param pathname:
        A pathname which will be matched against a pattern.
    :param included_patterns:
        Allow filenames matching wildcard patterns specified in this list.
        If no pattern is specified, the function treats the pathname as
        a match_path.
    :param excluded_patterns:
        Ignores filenames matching wildcard patterns specified in this list.
        If no pattern is specified, the function treats the pathname as
        a match_path.
    :param case_sensitive:
        ``True`` if matching should be case-sensitive; ``False`` otherwise.
    :returns:
        ``True`` if the pathname matches; ``False`` otherwise.
    :raises:
        ValueError if included patterns and excluded patterns contain the
        same pattern.

    Doctests::
        >>> match_path("/Users/gorakhargosh/foobar.py")
        True
        >>> match_path("/Users/gorakhargosh/foobar.py", case_sensitive=False)
        True
        >>> match_path("/users/gorakhargosh/foobar.py", ["*.py"], ["*.PY"], True)
        True
        >>> match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], True)
        False
        >>> match_path("/users/gorakhargosh/foobar/", ["*.py"], ["*.txt"], False)
        False
        >>> match_path("/users/gorakhargosh/FOOBAR.PY", ["*.py"], ["*.PY"], False)
        Traceback (most recent call last):
            ...
        ValueError: conflicting patterns `{'*.py'}` included and excluded
    """
    included = ["*"] if included_patterns is None else included_patterns
    excluded = [] if excluded_patterns is None else excluded_patterns
    return _match_path(pathname, included, excluded, case_sensitive)


def filter_paths(pathnames,
                 included_patterns=None,
                 excluded_patterns=None,
                 case_sensitive=True):
    """
    Filters from a set of paths based on acceptable patterns and
    ignorable patterns.

    :param pathnames:
        A list of path names that will be filtered based on matching and
        ignored patterns.
    :param included_patterns:
        Allow filenames matching wildcard patterns specified in this list.
        If no pattern list is specified, ["*"] is used as the default pattern,
        which matches all files.
    :param excluded_patterns:
        Ignores filenames matching wildcard patterns specified in this list.
        If no pattern list is specified, no files are ignored.
    :param case_sensitive:
        ``True`` if matching should be case-sensitive; ``False`` otherwise.
    :returns:
        A list of pathnames that matched the allowable patterns and passed
        through the ignored patterns.

    Doctests::
        >>> pathnames = set(["/users/gorakhargosh/foobar.py", "/var/cache/pdnsd.status", "/etc/pdnsd.conf", "/usr/local/bin/python"])
        >>> set(filter_paths(pathnames)) == pathnames
        True
        >>> set(filter_paths(pathnames, case_sensitive=False)) == pathnames
        True
        >>> set(filter_paths(pathnames, ["*.py", "*.conf"], ["*.status"], case_sensitive=True)) == set(["/users/gorakhargosh/foobar.py", "/etc/pdnsd.conf"])
        True
    """
    included = ["*"] if included_patterns is None else included_patterns
    excluded = [] if excluded_patterns is None else excluded_patterns

    for pathname in pathnames:
        # We don't call the public match_path because it checks arguments
        # and sets default values if none are found. We're already doing that
        # above.
        if _match_path(pathname, included, excluded, case_sensitive):
            yield pathname


def match_any_paths(pathnames,
                    included_patterns=None,
                    excluded_patterns=None,
                    case_sensitive=True):
    """
    Matches from a set of paths based on acceptable patterns and
    ignorable patterns.

    :param pathnames:
        A list of path names that will be filtered based on matching and
        ignored patterns.
    :param included_patterns:
        Allow filenames matching wildcard patterns specified in this list.
        If no pattern list is specified, ["*"] is used as the default pattern,
        which matches all files.
    :param excluded_patterns:
        Ignores filenames matching wildcard patterns specified in this list.
        If no pattern list is specified, no files are ignored.
    :param case_sensitive:
        ``True`` if matching should be case-sensitive; ``False`` otherwise.
    :returns:
        ``True`` if any of the paths matches; ``False`` otherwise.

    Doctests::
        >>> pathnames = set(["/users/gorakhargosh/foobar.py", "/var/cache/pdnsd.status", "/etc/pdnsd.conf", "/usr/local/bin/python"])
        >>> match_any_paths(pathnames)
        True
        >>> match_any_paths(pathnames, case_sensitive=False)
        True
        >>> match_any_paths(pathnames, ["*.py", "*.conf"], ["*.status"], case_sensitive=True)
        True
        >>> match_any_paths(pathnames, ["*.txt"], case_sensitive=False)
        False
        >>> match_any_paths(pathnames, ["*.txt"], case_sensitive=True)
        False
    """
    included = ["*"] if included_patterns is None else included_patterns
    excluded = [] if excluded_patterns is None else excluded_patterns

    for pathname in pathnames:
        # We don't call the public match_path because it checks arguments
        # and sets default values if none are found. We're already doing that
        # above.
        if _match_path(pathname, included, excluded, case_sensitive):
            return True
    return False
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pathtools/__init__.py
# -*- coding: utf-8 -*- # pathtools: File system path tools. # Copyright (C) 2010 Yesudeep Mangalapilly <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pathtools/version.py
# -*- coding: utf-8 -*- # version.py: Version information. # Copyright (C) 2010 Yesudeep Mangalapilly <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # When updating this version number, please update the # ``docs/source/global.rst.inc`` file as well. VERSION_MAJOR = 0 VERSION_MINOR = 1 VERSION_BUILD = 2 VERSION_INFO = (VERSION_MAJOR, VERSION_MINOR, VERSION_BUILD) VERSION_STRING = "%d.%d.%d" % VERSION_INFO __version__ = VERSION_INFO
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/pathtools/path.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# path.py: Path functions.
#
# Copyright (C) 2010 Yesudeep Mangalapilly <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

"""
:module: pathtools.path
:synopsis: Directory walking, listing, and path sanitizing functions.
:author: Yesudeep Mangalapilly <[email protected]>

Functions
---------
.. autofunction:: get_dir_walker
.. autofunction:: walk
.. autofunction:: listdir
.. autofunction:: list_directories
.. autofunction:: list_files
.. autofunction:: absolute_path
.. autofunction:: real_absolute_path
.. autofunction:: parent_dir_path
"""

# The original imported ``os.path`` twice; once is enough (importing
# ``os.path`` also binds the ``os`` name used below).
import os.path
from functools import partial


__all__ = [
    'get_dir_walker',
    'walk',
    'listdir',
    'list_directories',
    'list_files',
    'absolute_path',
    'real_absolute_path',
    'parent_dir_path',
]


def get_dir_walker(recursive, topdown=True, followlinks=False):
    """
    Returns a recursive or a non-recursive directory walker.

    :param recursive:
        ``True`` produces a recursive walker (plain :func:`os.walk`);
        ``False`` produces a walker that reports only the top directory.
    :param topdown:
        Please see the documentation for :func:`os.walk`
    :param followlinks:
        Please see the documentation for :func:`os.walk`
    :returns:
        A walker function with the same calling convention as
        :func:`os.walk`.
    """
    if recursive:
        walk = partial(os.walk, topdown=topdown, followlinks=followlinks)
    else:
        def walk(path, topdown=topdown, followlinks=followlinks):
            # Yield only the first (top-level) triple of the os.walk()
            # generator.  os.walk() yields nothing at all for a missing or
            # unreadable path; guard the next() call so the resulting
            # StopIteration does not escape this generator (PEP 479 turns
            # an escaping StopIteration into a RuntimeError).  The previous
            # ``except NameError: ... .next()`` Python 2 fallback was dead
            # code on Python 3 and has been removed.
            try:
                yield next(os.walk(path, topdown=topdown,
                                   followlinks=followlinks))
            except StopIteration:
                return
    return walk


def walk(dir_pathname, recursive=True, topdown=True, followlinks=False):
    """
    Walks a directory tree optionally recursively. Works exactly like
    :func:`os.walk` only adding the `recursive` argument.

    :param dir_pathname:
        The directory to traverse.
    :param recursive:
        ``True`` for walking recursively through the directory tree;
        ``False`` otherwise.
    :param topdown:
        Please see the documentation for :func:`os.walk`
    :param followlinks:
        Please see the documentation for :func:`os.walk`
    """
    walk_func = get_dir_walker(recursive, topdown, followlinks)
    for root, dirnames, filenames in walk_func(dir_pathname):
        yield (root, dirnames, filenames)


def listdir(dir_pathname,
            recursive=True,
            topdown=True,
            followlinks=False):
    """
    Enlists all items using their absolute paths in a directory, optionally
    recursively.

    :param dir_pathname:
        The directory to traverse.
    :param recursive:
        ``True`` for walking recursively through the directory tree;
        ``False`` otherwise.
    :param topdown:
        Please see the documentation for :func:`os.walk`
    :param followlinks:
        Please see the documentation for :func:`os.walk`
    """
    for root, dirnames, filenames \
            in walk(dir_pathname, recursive, topdown, followlinks):
        for dirname in dirnames:
            yield absolute_path(os.path.join(root, dirname))
        for filename in filenames:
            yield absolute_path(os.path.join(root, filename))


def list_directories(dir_pathname,
                     recursive=True,
                     topdown=True,
                     followlinks=False):
    """
    Enlists all the directories using their absolute paths within the
    specified directory, optionally recursively.

    :param dir_pathname:
        The directory to traverse.
    :param recursive:
        ``True`` for walking recursively through the directory tree;
        ``False`` otherwise.
    :param topdown:
        Please see the documentation for :func:`os.walk`
    :param followlinks:
        Please see the documentation for :func:`os.walk`
    """
    for root, dirnames, filenames \
            in walk(dir_pathname, recursive, topdown, followlinks):
        for dirname in dirnames:
            yield absolute_path(os.path.join(root, dirname))


def list_files(dir_pathname,
               recursive=True,
               topdown=True,
               followlinks=False):
    """
    Enlists all the files using their absolute paths within the
    specified directory, optionally recursively.

    :param dir_pathname:
        The directory to traverse.
    :param recursive:
        ``True`` for walking recursively through the directory tree;
        ``False`` otherwise.
    :param topdown:
        Please see the documentation for :func:`os.walk`
    :param followlinks:
        Please see the documentation for :func:`os.walk`
    """
    for root, dirnames, filenames \
            in walk(dir_pathname, recursive, topdown, followlinks):
        for filename in filenames:
            yield absolute_path(os.path.join(root, filename))


def absolute_path(path):
    """
    Returns the absolute path for the given path and normalizes the path.

    :param path:
        Path for which the absolute normalized path will be found.
    :returns:
        Absolute normalized path.
    """
    return os.path.abspath(os.path.normpath(path))


def real_absolute_path(path):
    """
    Returns the real absolute normalized path for the given path.

    :param path:
        Path for which the real absolute normalized path will be found.
    :returns:
        Real absolute normalized path.
    """
    return os.path.realpath(absolute_path(path))


def parent_dir_path(path):
    """
    Returns the parent directory path.

    :param path:
        Path for which the parent directory will be obtained.
    :returns:
        Parent directory path.
    """
    return absolute_path(os.path.dirname(path))
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiofiles/os.py
"""Async executor versions of file functions from the os module.""" import asyncio from functools import partial, wraps import os def wrap(func): @asyncio.coroutine @wraps(func) def run(*args, loop=None, executor=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() pfunc = partial(func, *args, **kwargs) return loop.run_in_executor(executor, pfunc) return run stat = wrap(os.stat) if hasattr(os, "sendfile"): sendfile = wrap(os.sendfile)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiofiles/base.py
"""Various base classes.""" import asyncio from collections.abc import Coroutine class AsyncBase: def __init__(self, file, loop, executor): self._file = file self._loop = loop self._executor = executor def __aiter__(self): """We are our own iterator.""" return self @asyncio.coroutine def __anext__(self): """Simulate normal file iteration.""" line = yield from self.readline() if line: return line else: raise StopAsyncIteration class _ContextManager(Coroutine): __slots__ = ('_coro', '_obj') def __init__(self, coro): self._coro = coro self._obj = None def send(self, value): return self._coro.send(value) def throw(self, typ, val=None, tb=None): if val is None: return self._coro.throw(typ) elif tb is None: return self._coro.throw(typ, val) else: return self._coro.throw(typ, val, tb) def close(self): return self._coro.close() @property def gi_frame(self): return self._coro.gi_frame @property def gi_running(self): return self._coro.gi_running @property def gi_code(self): return self._coro.gi_code def __next__(self): return self.send(None) @asyncio.coroutine def __iter__(self): resp = yield from self._coro return resp def __await__(self): resp = yield from self._coro return resp @asyncio.coroutine def __anext__(self): resp = yield from self._coro return resp @asyncio.coroutine def __aenter__(self): self._obj = yield from self._coro return self._obj @asyncio.coroutine def __aexit__(self, exc_type, exc, tb): self._obj.close() self._obj = None class AiofilesContextManager(_ContextManager): """An adjusted async context manager for aiofiles.""" @asyncio.coroutine def __aexit__(self, exc_type, exc_val, exc_tb): yield from self._obj.close() self._obj = None
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiofiles/_compat.py
"""Compatibility shims for older Python runtimes."""
import sys

# Prefer the stdlib implementation (Python 3.4+); fall back to the
# backport package on older interpreters.
try:
    from functools import singledispatch
except ImportError:  # pragma: nocover
    from singledispatch import singledispatch

# True when running on Python 3.5 or newer (async/await syntax available).
PY_35 = (sys.version_info.major, sys.version_info.minor) >= (3, 5)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiofiles/__init__.py
"""Utilities for asyncio-friendly file handling.""" from .threadpool import open __version__ = "0.4.0" __all__ = (open,)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/wrapt/importer.py
"""This module implements a post import hook mechanism styled after what is described in PEP-369. Note that it doesn't cope with modules being reloaded. """ import sys import threading PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY3: import importlib string_types = str, else: string_types = basestring, from .decorators import synchronized # The dictionary registering any post import hooks to be triggered once # the target module has been imported. Once a module has been imported # and the hooks fired, the list of hooks recorded against the target # module will be truncacted but the list left in the dictionary. This # acts as a flag to indicate that the module had already been imported. _post_import_hooks = {} _post_import_hooks_init = False _post_import_hooks_lock = threading.RLock() # Register a new post import hook for the target module name. This # differs from the PEP-369 implementation in that it also allows the # hook function to be specified as a string consisting of the name of # the callback in the form 'module:function'. This will result in a # proxy callback being registered which will defer loading of the # specified module containing the callback function until required. def _create_import_hook_from_string(name): def import_hook(module): module_name, function = name.split(':') attrs = function.split('.') __import__(module_name) callback = sys.modules[module_name] for attr in attrs: callback = getattr(callback, attr) return callback(module) return import_hook @synchronized(_post_import_hooks_lock) def register_post_import_hook(hook, name): # Create a deferred import hook if hook is a string name rather than # a callable function. if isinstance(hook, string_types): hook = _create_import_hook_from_string(hook) # Automatically install the import hook finder if it has not already # been installed. 
global _post_import_hooks_init if not _post_import_hooks_init: _post_import_hooks_init = True sys.meta_path.insert(0, ImportHookFinder()) # Determine if any prior registration of a post import hook for # the target modules has occurred and act appropriately. hooks = _post_import_hooks.get(name, None) if hooks is None: # No prior registration of post import hooks for the target # module. We need to check whether the module has already been # imported. If it has we fire the hook immediately and add an # empty list to the registry to indicate that the module has # already been imported and hooks have fired. Otherwise add # the post import hook to the registry. module = sys.modules.get(name, None) if module is not None: _post_import_hooks[name] = [] hook(module) else: _post_import_hooks[name] = [hook] elif hooks == []: # A prior registration of port import hooks for the target # module was done and the hooks already fired. Fire the hook # immediately. module = sys.modules[name] hook(module) else: # A prior registration of port import hooks for the target # module was done but the module has not yet been imported. _post_import_hooks[name].append(hook) # Register post import hooks defined as package entry points. def _create_import_hook_from_entrypoint(entrypoint): def import_hook(module): __import__(entrypoint.module_name) callback = sys.modules[entrypoint.module_name] for attr in entrypoint.attrs: callback = getattr(callback, attr) return callback(module) return import_hook def discover_post_import_hooks(group): try: import pkg_resources except ImportError: return for entrypoint in pkg_resources.iter_entry_points(group=group): callback = _create_import_hook_from_entrypoint(entrypoint) register_post_import_hook(callback, entrypoint.name) # Indicate that a module has been loaded. Any post import hooks which # were registered against the target module will be invoked. 
If an # exception is raised in any of the post import hooks, that will cause # the import of the target module to fail. @synchronized(_post_import_hooks_lock) def notify_module_loaded(module): name = getattr(module, '__name__', None) hooks = _post_import_hooks.get(name, None) if hooks: _post_import_hooks[name] = [] for hook in hooks: hook(module) # A custom module import finder. This intercepts attempts to import # modules and watches out for attempts to import target modules of # interest. When a module of interest is imported, then any post import # hooks which are registered will be invoked. class _ImportHookLoader: def load_module(self, fullname): module = sys.modules[fullname] notify_module_loaded(module) return module class _ImportHookChainedLoader: def __init__(self, loader): self.loader = loader def load_module(self, fullname): module = self.loader.load_module(fullname) notify_module_loaded(module) return module class ImportHookFinder: def __init__(self): self.in_progress = {} @synchronized(_post_import_hooks_lock) def find_module(self, fullname, path=None): # If the module being imported is not one we have registered # post import hooks for, we can return immediately. We will # take no further part in the importing of this module. if not fullname in _post_import_hooks: return None # When we are interested in a specific module, we will call back # into the import system a second time to defer to the import # finder that is supposed to handle the importing of the module. # We set an in progress flag for the target module so that on # the second time through we don't trigger another call back # into the import system and cause a infinite loop. if fullname in self.in_progress: return None self.in_progress[fullname] = True # Now call back into the import system again. try: if PY3: # For Python 3 we need to use find_loader() from # the importlib module. It doesn't actually # import the target module and only finds the # loader. 
If a loader is found, we need to return # our own loader which will then in turn call the # real loader to import the module and invoke the # post import hooks. loader = importlib.find_loader(fullname, path) if loader: return _ImportHookChainedLoader(loader) else: # For Python 2 we don't have much choice but to # call back in to __import__(). This will # actually cause the module to be imported. If no # module could be found then ImportError will be # raised. Otherwise we return a loader which # returns the already loaded module and invokes # the post import hooks. __import__(fullname) return _ImportHookLoader() finally: del self.in_progress[fullname] # Decorator for marking that a function should be called as a post # import hook when the target module is imported. def when_imported(name): def register(hook): register_post_import_hook(hook, name) return hook return register
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/wrapt/wrappers.py
import sys
import functools
import operator
import weakref
import inspect

PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

if PY3:
    string_types = str,
else:
    string_types = basestring,


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    return meta("NewBase", bases, {})


class _ObjectProxyMethods(object):

    # We use properties to override the values of __module__ and
    # __doc__. If we add these in ObjectProxy, the derived class
    # __dict__ will still be setup to have string variants of these
    # attributes and the rules of descriptors means that they appear to
    # take precedence over the properties in the base class. To avoid
    # that, we copy the properties into the derived class type itself
    # via a meta class. In that way the properties will always take
    # precedence.

    @property
    def __module__(self):
        return self.__wrapped__.__module__

    @__module__.setter
    def __module__(self, value):
        self.__wrapped__.__module__ = value

    @property
    def __doc__(self):
        return self.__wrapped__.__doc__

    @__doc__.setter
    def __doc__(self, value):
        self.__wrapped__.__doc__ = value

    # We similarly use a property for __dict__. We need __dict__ to be
    # explicit to ensure that vars() works as expected.

    @property
    def __dict__(self):
        return self.__wrapped__.__dict__

    # Need to also propagate the special __weakref__ attribute for case
    # where decorating classes which will define this. If do not define
    # it and use a function like inspect.getmembers() on a decorator
    # class it will fail. This can't be in the derived classes.

    @property
    def __weakref__(self):
        return self.__wrapped__.__weakref__


class _ObjectProxyMetaType(type):

    def __new__(cls, name, bases, dictionary):
        # Copy our special properties into the class so that they
        # always take precedence over attributes of the same name added
        # during construction of a derived class. This is to save
        # duplicating the implementation for them in all derived classes.

        dictionary.update(vars(_ObjectProxyMethods))

        return type.__new__(cls, name, bases, dictionary)


class ObjectProxy(with_metaclass(_ObjectProxyMetaType)):
    """Transparent proxy delegating nearly all operations to __wrapped__."""

    __slots__ = '__wrapped__'

    def __init__(self, wrapped):
        object.__setattr__(self, '__wrapped__', wrapped)

        # Python 3.2+ has the __qualname__ attribute, but it does not
        # allow it to be overridden using a property and it must instead
        # be an actual string object instead.

        try:
            object.__setattr__(self, '__qualname__', wrapped.__qualname__)
        except AttributeError:
            pass

    @property
    def __name__(self):
        return self.__wrapped__.__name__

    @__name__.setter
    def __name__(self, value):
        self.__wrapped__.__name__ = value

    @property
    def __class__(self):
        return self.__wrapped__.__class__

    @__class__.setter
    def __class__(self, value):
        self.__wrapped__.__class__ = value

    @property
    def __annotations__(self):
        # FIX: the original read ``self.__wrapped__.__anotations__``
        # (missing "n"), raising AttributeError for every wrapped
        # function that had annotations. Corrected upstream in wrapt.
        return self.__wrapped__.__annotations__

    @__annotations__.setter
    def __annotations__(self, value):
        self.__wrapped__.__annotations__ = value

    def __dir__(self):
        return dir(self.__wrapped__)

    def __str__(self):
        return str(self.__wrapped__)

    if PY3:
        def __bytes__(self):
            return bytes(self.__wrapped__)

    def __repr__(self):
        # Deliberately does NOT delegate: shows both proxy and target
        # identities, which is essential when debugging proxy layers.
        return '<%s at 0x%x for %s at 0x%x>' % (
                type(self).__name__, id(self),
                type(self.__wrapped__).__name__,
                id(self.__wrapped__))

    def __reversed__(self):
        return reversed(self.__wrapped__)

    if PY3:
        def __round__(self):
            return round(self.__wrapped__)

    def __lt__(self, other):
        return self.__wrapped__ < other

    def __le__(self, other):
        return self.__wrapped__ <= other

    def __eq__(self, other):
        return self.__wrapped__ == other

    def __ne__(self, other):
        return self.__wrapped__ != other

    def __gt__(self, other):
        return self.__wrapped__ > other

    def __ge__(self, other):
        return self.__wrapped__ >= other

    def __hash__(self):
        return hash(self.__wrapped__)

    def __nonzero__(self):
        return bool(self.__wrapped__)

    def __bool__(self):
        return bool(self.__wrapped__)

    def __setattr__(self, name, value):
        # '_self_' prefixed attributes belong to the proxy itself and
        # are never forwarded to the wrapped object.
        if name.startswith('_self_'):
            object.__setattr__(self, name, value)

        elif name == '__wrapped__':
            object.__setattr__(self, name, value)
            # Keep the cached __qualname__ in sync with the new target.
            try:
                object.__delattr__(self, '__qualname__')
            except AttributeError:
                pass
            try:
                object.__setattr__(self, '__qualname__', value.__qualname__)
            except AttributeError:
                pass

        elif name == '__qualname__':
            # Stored on both the target and the proxy (see __init__).
            setattr(self.__wrapped__, name, value)
            object.__setattr__(self, name, value)

        elif hasattr(type(self), name):
            object.__setattr__(self, name, value)

        else:
            setattr(self.__wrapped__, name, value)

    def __getattr__(self, name):
        # If we are being asked to lookup '__wrapped__' then the
        # '__init__()' method cannot have been called.

        if name == '__wrapped__':
            raise ValueError('wrapper has not been initialised')

        return getattr(self.__wrapped__, name)

    def __delattr__(self, name):
        if name.startswith('_self_'):
            object.__delattr__(self, name)

        elif name == '__wrapped__':
            raise TypeError('__wrapped__ must be an object')

        elif name == '__qualname__':
            object.__delattr__(self, name)
            delattr(self.__wrapped__, name)

        elif hasattr(type(self), name):
            object.__delattr__(self, name)

        else:
            delattr(self.__wrapped__, name)

    def __add__(self, other):
        return self.__wrapped__ + other

    def __sub__(self, other):
        return self.__wrapped__ - other

    def __mul__(self, other):
        return self.__wrapped__ * other

    def __div__(self, other):
        # Only reachable on Python 2 (classic division protocol).
        return operator.div(self.__wrapped__, other)

    def __truediv__(self, other):
        return operator.truediv(self.__wrapped__, other)

    def __floordiv__(self, other):
        return self.__wrapped__ // other

    def __mod__(self, other):
        return self.__wrapped__ % other

    def __divmod__(self, other):
        return divmod(self.__wrapped__, other)

    def __pow__(self, other, *args):
        return pow(self.__wrapped__, other, *args)

    def __lshift__(self, other):
        return self.__wrapped__ << other

    def __rshift__(self, other):
        return self.__wrapped__ >> other

    def __and__(self, other):
        return self.__wrapped__ & other

    def __xor__(self, other):
        return self.__wrapped__ ^ other

    def __or__(self, other):
        return self.__wrapped__ | other

    def __radd__(self, other):
        return other + self.__wrapped__

    def __rsub__(self, other):
        return other - self.__wrapped__

    def __rmul__(self, other):
        return other * self.__wrapped__

    def __rdiv__(self, other):
        # Only reachable on Python 2 (classic division protocol).
        return operator.div(other, self.__wrapped__)

    def __rtruediv__(self, other):
        return operator.truediv(other, self.__wrapped__)

    def __rfloordiv__(self, other):
        return other // self.__wrapped__

    def __rmod__(self, other):
        return other % self.__wrapped__

    def __rdivmod__(self, other):
        return divmod(other, self.__wrapped__)

    def __rpow__(self, other, *args):
        return pow(other, self.__wrapped__, *args)

    def __rlshift__(self, other):
        return other << self.__wrapped__

    def __rrshift__(self, other):
        return other >> self.__wrapped__

    def __rand__(self, other):
        return other & self.__wrapped__

    def __rxor__(self, other):
        return other ^ self.__wrapped__

    def __ror__(self, other):
        return other | self.__wrapped__

    # In-place operators mutate/rebind __wrapped__ and return the proxy
    # itself so the proxy identity survives augmented assignment.

    def __iadd__(self, other):
        self.__wrapped__ += other
        return self

    def __isub__(self, other):
        self.__wrapped__ -= other
        return self

    def __imul__(self, other):
        self.__wrapped__ *= other
        return self

    def __idiv__(self, other):
        # Only reachable on Python 2 (classic division protocol).
        self.__wrapped__ = operator.idiv(self.__wrapped__, other)
        return self

    def __itruediv__(self, other):
        self.__wrapped__ = operator.itruediv(self.__wrapped__, other)
        return self

    def __ifloordiv__(self, other):
        self.__wrapped__ //= other
        return self

    def __imod__(self, other):
        self.__wrapped__ %= other
        return self

    def __ipow__(self, other):
        self.__wrapped__ **= other
        return self

    def __ilshift__(self, other):
        self.__wrapped__ <<= other
        return self

    def __irshift__(self, other):
        self.__wrapped__ >>= other
        return self

    def __iand__(self, other):
        self.__wrapped__ &= other
        return self

    def __ixor__(self, other):
        self.__wrapped__ ^= other
        return self

    def __ior__(self, other):
        self.__wrapped__ |= other
        return self

    def __neg__(self):
        return -self.__wrapped__

    def __pos__(self):
        return +self.__wrapped__

    def __abs__(self):
        return abs(self.__wrapped__)

    def __invert__(self):
        return ~self.__wrapped__

    def __int__(self):
        return int(self.__wrapped__)

    def __long__(self):
        # Only reachable on Python 2 ('long' does not exist on 3).
        return long(self.__wrapped__)

    def __float__(self):
        return float(self.__wrapped__)

    def __oct__(self):
        return oct(self.__wrapped__)

    def __hex__(self):
        return hex(self.__wrapped__)

    def __index__(self):
        return operator.index(self.__wrapped__)

    def __len__(self):
        return len(self.__wrapped__)

    def __contains__(self, value):
        return value in self.__wrapped__

    def __getitem__(self, key):
        return self.__wrapped__[key]

    def __setitem__(self, key, value):
        self.__wrapped__[key] = value

    def __delitem__(self, key):
        del self.__wrapped__[key]

    # Slice dunders are Python 2 only; harmless (never called) on 3.

    def __getslice__(self, i, j):
        return self.__wrapped__[i:j]

    def __setslice__(self, i, j, value):
        self.__wrapped__[i:j] = value

    def __delslice__(self, i, j):
        del self.__wrapped__[i:j]

    def __enter__(self):
        return self.__wrapped__.__enter__()

    def __exit__(self, *args, **kwargs):
        return self.__wrapped__.__exit__(*args, **kwargs)

    def __iter__(self):
        return iter(self.__wrapped__)


class CallableObjectProxy(ObjectProxy):
    """ObjectProxy that additionally forwards calls to the wrapped object."""

    def __call__(self, *args, **kwargs):
        return self.__wrapped__(*args, **kwargs)


class _FunctionWrapperBase(ObjectProxy):
    """Shared machinery for (un)bound function wrappers.

    Holds the user wrapper callable, the instance the target is bound to
    (if any), an optional enabled flag/predicate, a binding kind
    ('function', 'classmethod' or 'staticmethod') and, for bound
    wrappers, the parent unbound wrapper.
    """

    __slots__ = ('_self_instance', '_self_wrapper', '_self_enabled',
            '_self_binding', '_self_parent')

    def __init__(self, wrapped, instance, wrapper, enabled=None,
            binding='function', parent=None):
        super(_FunctionWrapperBase, self).__init__(wrapped)

        object.__setattr__(self, '_self_instance', instance)
        object.__setattr__(self, '_self_wrapper', wrapper)
        object.__setattr__(self, '_self_enabled', enabled)
        object.__setattr__(self, '_self_binding', binding)
        object.__setattr__(self, '_self_parent', parent)

    def __get__(self, instance, owner):
        # Serves both unbound and bound wrappers; which one we are is
        # determined by _self_parent (None means unbound).

        if self._self_parent is None:
            # Unbound: perform the binding by delegating to the wrapped
            # object's own descriptor protocol. A nested class being
            # decorated has no __get__(), so simply return self. Note
            # binding still occurs when instance is None (access via the
            # class) so the bound wrapper can later extract the instance
            # from the first positional argument.

            if not inspect.isclass(self.__wrapped__):
                descriptor = self.__wrapped__.__get__(instance, owner)

                return self.__bound_function_wrapper__(descriptor, instance,
                        self._self_wrapper, self._self_enabled,
                        self._self_binding, self)

            return self

        # Already bound: binding a second time normally returns self
        # (mirroring Python). The exception is when we were originally
        # bound with instance None (likely an instance method accessed
        # via the class); then rebind against the parent's wrapped
        # function for the new instance.

        if self._self_instance is None and self._self_binding == 'function':
            descriptor = self._self_parent.__wrapped__.__get__(
                    instance, owner)

            return self._self_parent.__bound_function_wrapper__(
                    descriptor, instance, self._self_wrapper,
                    self._self_enabled, self._self_binding,
                    self._self_parent)

        return self

    def __call__(self, *args, **kwargs):
        # If enabled has been specified, then evaluate it at this point
        # and if the wrapper is not to be executed, then simply call the
        # wrapped function directly. When evaluating enabled, if it is
        # callable we call it, otherwise we evaluate it as a boolean.

        if self._self_enabled is not None:
            if callable(self._self_enabled):
                if not self._self_enabled():
                    return self.__wrapped__(*args, **kwargs)
            elif not self._self_enabled:
                return self.__wrapped__(*args, **kwargs)

        # This can occur where initial function wrapper was applied to
        # a function that was already bound to an instance. In that case
        # we want to extract the instance from the function and use it.

        if self._self_binding == 'function':
            if self._self_instance is None:
                instance = getattr(self.__wrapped__, '__self__', None)
                if instance is not None:
                    return self._self_wrapper(self.__wrapped__, instance,
                            args, kwargs)

        # Normal case: plain function call, or a method wrapped in turn
        # by the staticmethod decorator.

        return self._self_wrapper(self.__wrapped__, self._self_instance,
                args, kwargs)


class BoundFunctionWrapper(_FunctionWrapperBase):
    """Wrapper returned when a FunctionWrapper is bound to an instance."""

    def __call__(self, *args, **kwargs):
        # Same 'enabled' short-circuit as in the base class.

        if self._self_enabled is not None:
            if callable(self._self_enabled):
                if not self._self_enabled():
                    return self.__wrapped__(*args, **kwargs)
            elif not self._self_enabled:
                return self.__wrapped__(*args, **kwargs)

        # Instance methods need different handling than class/static
        # methods.

        if self._self_binding == 'function':
            if self._self_instance is None:
                # Instance method called via the class type with the
                # instance passed explicitly as first argument: shift
                # args and pre-bind the instance with a partial so the
                # user wrapper sees the usual calling convention.

                if not args:
                    raise TypeError('missing 1 required positional argument')

                instance, args = args[0], args[1:]
                wrapped = functools.partial(self.__wrapped__, instance)
                return self._self_wrapper(wrapped, instance, args, kwargs)

            return self._self_wrapper(self.__wrapped__, self._self_instance,
                    args, kwargs)

        else:
            # classmethod/staticmethod: _self_instance only records how
            # the attribute was accessed, so use __self__ of the bound
            # function instead. For a classmethod that is the class; for
            # a staticmethod it is None — the most useful value we can
            # pass, reflecting what the decorated function itself sees.

            instance = getattr(self.__wrapped__, '__self__', None)

            return self._self_wrapper(self.__wrapped__, instance,
                    args, kwargs)


class FunctionWrapper(_FunctionWrapperBase):
    """Entry-point wrapper applied to a plain function, method or class.

    Classifies what is being wrapped so that later binding can behave
    correctly. Detection is best-effort: raw functions seen at class
    definition time are indistinguishable from module-level functions,
    and methods already bound (monkey patching after class creation)
    hide whether they were static. Anything uncertain is labelled
    'function'; if such a wrapper is later rebound it is assumed to be
    an instance method and the instance is extracted from the first
    positional argument. classmethod/staticmethod detection relies on
    isinstance(), so intermediate decorators must propagate __class__
    for it to work.
    """

    __bound_function_wrapper__ = BoundFunctionWrapper

    def __init__(self, wrapped, wrapper, enabled=None):
        if isinstance(wrapped, classmethod):
            binding = 'classmethod'

        elif isinstance(wrapped, staticmethod):
            binding = 'staticmethod'

        elif hasattr(wrapped, '__self__'):
            if inspect.isclass(wrapped.__self__):
                binding = 'classmethod'
            else:
                binding = 'function'

        else:
            binding = 'function'

        super(FunctionWrapper, self).__init__(wrapped, None, wrapper,
                enabled, binding)

try:
    # Prefer the C-extension implementations when available.
    from ._wrappers import (ObjectProxy, CallableObjectProxy,
        FunctionWrapper, BoundFunctionWrapper, _FunctionWrapperBase)
except ImportError:
    pass

# Helper functions for applying wrappers to existing functions.


def resolve_path(module, name):
    """Resolve dotted *name* under *module* to (parent, attribute, original).

    *module* may be a module object or an importable module name. The
    final attribute is looked up via the defining class's __dict__ when
    possible so that classmethod/staticmethod objects are returned raw,
    not as bound descriptors (binding would complicate patching later).
    """
    if isinstance(module, string_types):
        __import__(module)
        module = sys.modules[module]

    parent = module

    path = name.split('.')
    attribute = path[0]

    original = getattr(parent, attribute)
    for attribute in path[1:]:
        parent = original

        # We can't just always use getattr() because on a class it
        # causes binding to occur. Walk the MRO to find the __dict__
        # the attribute was actually defined in (copes with the method
        # living on a base class); fall back to getattr() if not found.
        #
        # FIX: the original tested ``attribute in vars(original)`` on
        # every MRO iteration — never consulting ``cls`` — so the walk
        # never looked at base classes and inherited classmethod/
        # staticmethod objects came back as bound descriptors instead.
        # Corrected upstream in wrapt to use vars(cls).

        if inspect.isclass(original):
            for cls in inspect.getmro(original):
                if attribute in vars(cls):
                    original = vars(cls)[attribute]
                    break
            else:
                original = getattr(original, attribute)

        else:
            original = getattr(original, attribute)

    return (parent, attribute, original)


def apply_patch(parent, attribute, replacement):
    """Install *replacement* as *attribute* on *parent*."""
    setattr(parent, attribute, replacement)


def wrap_object(module, name, factory, args=(), kwargs={}):
    """Replace the object at module:name with factory(original, *args, **kwargs)."""
    (parent, attribute, original) = resolve_path(module, name)
    wrapper = factory(original, *args, **kwargs)
    apply_patch(parent, attribute, wrapper)
    return wrapper

# Function for applying a proxy object to an attribute of a class
# instance. The wrapper works by defining an attribute of the same name
# on the class which is a descriptor and which intercepts access to the
# instance attribute. Note that this cannot be used on attributes which
# are themselves defined by a property object.


class AttributeWrapper(object):
    """Data descriptor that proxies an instance attribute through *factory*."""

    def __init__(self, attribute, factory, args, kwargs):
        self.attribute = attribute
        self.factory = factory
        self.args = args
        self.kwargs = kwargs

    def __get__(self, instance, owner):
        value = instance.__dict__[self.attribute]
        return self.factory(value, *self.args, **self.kwargs)

    def __set__(self, instance, value):
        instance.__dict__[self.attribute] = value

    def __delete__(self, instance):
        del instance.__dict__[self.attribute]


def wrap_object_attribute(module, name, factory, args=(), kwargs={}):
    """Intercept instance-attribute access at module:name via AttributeWrapper."""
    path, attribute = name.rsplit('.', 1)
    parent = resolve_path(module, path)[2]
    wrapper = AttributeWrapper(attribute, factory, args, kwargs)
    apply_patch(parent, attribute, wrapper)
    return wrapper

# Functions for creating a simple decorator using a FunctionWrapper,
# plus short cut functions for applying wrappers to functions. These are
# for use when doing monkey patching. For a more featured way of
# creating decorators see the decorator decorator instead.
def _rebind_wrapper(wrapper, instance):
    """Bind *wrapper* the same way the wrapped function was bound.

    Mirrors the descriptor binding already performed on the wrapped
    function: unbound when there is no instance, bound to the class when
    accessed via the class, otherwise bound to the instance.
    """
    if instance is None:
        return wrapper
    if inspect.isclass(instance):
        return wrapper.__get__(None, instance)
    return wrapper.__get__(instance, type(instance))


def function_wrapper(wrapper):
    """Decorator turning *wrapper* into a simple function wrapper factory."""
    def _wrapper(wrapped, instance, args, kwargs):
        target_wrapped = args[0]
        target_wrapper = _rebind_wrapper(wrapper, instance)
        return FunctionWrapper(target_wrapped, target_wrapper)
    return FunctionWrapper(wrapper, _wrapper)


def wrap_function_wrapper(module, name, wrapper):
    """Monkey patch module:name with a FunctionWrapper around *wrapper*."""
    return wrap_object(module, name, FunctionWrapper, (wrapper,))


def patch_function_wrapper(module, name):
    """Decorator form of wrap_function_wrapper() for module:name."""
    def _wrapper(wrapper):
        return wrap_object(module, name, FunctionWrapper, (wrapper,))
    return _wrapper


def transient_function_wrapper(module, name):
    """Apply a wrapper to module:name only for the duration of each call.

    The returned decorator patches the target before invoking the
    decorated function and unconditionally restores the original in a
    finally block afterwards.
    """
    def _decorator(wrapper):
        def _wrapper(wrapped, instance, args, kwargs):
            target_wrapped = args[0]
            target_wrapper = _rebind_wrapper(wrapper, instance)

            def _execute(wrapped, instance, args, kwargs):
                (parent, attribute, original) = resolve_path(module, name)
                replacement = FunctionWrapper(original, target_wrapper)
                setattr(parent, attribute, replacement)
                try:
                    return wrapped(*args, **kwargs)
                finally:
                    # Always restore, even if the call raised.
                    setattr(parent, attribute, original)

            return FunctionWrapper(target_wrapped, _execute)
        return FunctionWrapper(wrapper, _wrapper)
    return _decorator

# A weak function proxy. This will work on instance methods, class
# methods, static methods and regular functions. Special treatment is
# needed for the method types because the bound method is effectively a
# transient object and applying a weak reference to one will immediately
# result in it being destroyed and the weakref callback called. The weak
# reference is therefore applied to the instance the method is bound to
# and the original function. The function is then rebound at the point
# of a call via the weak function proxy.
def _weak_function_proxy_callback(ref, proxy, callback):
    """Internal weakref callback: mark *proxy* expired and forward to the
    user supplied *callback* exactly once."""
    if proxy._self_expired:
        return

    proxy._self_expired = True

    # This could raise an exception. We let it propagate back and let
    # the weakref.proxy() deal with it, at which point it generally
    # prints out a short error message direct to stderr and keeps going.
    if callback is not None:
        callback(proxy)

class WeakFunctionProxy(ObjectProxy):
    """A proxy holding only weak references to the wrapped callable.

    Works for regular functions, static methods, class methods and bound
    instance methods.
    """

    __slots__ = ('_self_expired', '_self_instance')

    def __init__(self, wrapped, callback=None):
        # A bound method is a transient object: taking a weakref to it
        # would expire immediately. So for bound methods we instead keep
        # weak references to the instance and to the underlying function,
        # and rebind at call time. functools.partial (rather than a
        # nested function) is used for the callback to avoid creating
        # odd reference cycles.

        _callback = callback and functools.partial(
                _weak_function_proxy_callback, proxy=self,
                callback=callback)

        self._self_expired = False

        if isinstance(wrapped, _FunctionWrapperBase):
            # One of our own wrappers: track the instance it was bound
            # to, and proxy the parent wrapper when there is one.
            self._self_instance = weakref.ref(wrapped._self_instance,
                    _callback)

            if wrapped._self_parent is not None:
                super(WeakFunctionProxy, self).__init__(
                        weakref.proxy(wrapped._self_parent, _callback))
            else:
                super(WeakFunctionProxy, self).__init__(
                        weakref.proxy(wrapped, _callback))

            return

        try:
            # Ordinary bound method: weakly reference instance + function.
            self._self_instance = weakref.ref(wrapped.__self__, _callback)

            super(WeakFunctionProxy, self).__init__(
                    weakref.proxy(wrapped.__func__, _callback))

        except AttributeError:
            # Plain function / static method: proxy it directly.
            self._self_instance = None

            super(WeakFunctionProxy, self).__init__(
                    weakref.proxy(wrapped, _callback))

    def __call__(self, *args, **kwargs):
        # We perform a boolean check here on the instance and wrapped
        # function as that will trigger the reference error prior to
        # calling if the reference had expired.

        instance = self._self_instance and self._self_instance()
        function = self.__wrapped__ and self.__wrapped__

        # If the wrapped function was originally a bound method, rebind
        # the function to the (still live) instance before calling;
        # otherwise just call the wrapped function directly.

        if instance is None:
            return self.__wrapped__(*args, **kwargs)

        return function.__get__(instance, type(instance))(*args, **kwargs)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/wrapt/arguments.py
# This is a copy of the inspect.getcallargs() function from Python 2.7
# so we can provide it for use under Python 2.6. As the code in this
# file derives from the Python distribution, it falls under the version
# of the PSF license used for Python 2.7.
#
# NOTE(review): this is a Python 2 compatibility shim. It relies on
# inspect.getargspec (removed in Python 3.11), ``func.im_self`` and the
# ``unicode`` builtin, all Python 2 only — it is only imported as a
# fallback when inspect.getcallargs is unavailable.

from inspect import getargspec, ismethod

import sys

def getcallargs(func, *positional, **named):
    """Get the mapping of arguments to values.

    A dict is returned, with keys the function argument names (including
    the names of the * and ** arguments, if any), and values the respective
    bound values from 'positional' and 'named'."""
    args, varargs, varkw, defaults = getargspec(func)
    f_name = func.__name__
    arg2value = {}

    # The following closures are basically because of tuple
    # parameter unpacking (a Python 2 only feature).
    assigned_tuple_params = []

    def assign(arg, value):
        if isinstance(arg, str):
            arg2value[arg] = value
        else:
            # Tuple parameter: recursively unpack value into its parts.
            assigned_tuple_params.append(arg)
            value = iter(value)
            for i, subarg in enumerate(arg):
                try:
                    subvalue = next(value)
                except StopIteration:
                    raise ValueError('need more than %d %s to unpack' %
                                     (i, 'values' if i > 1 else 'value'))
                assign(subarg, subvalue)
            try:
                next(value)
            except StopIteration:
                pass
            else:
                raise ValueError('too many values to unpack')

    def is_assigned(arg):
        if isinstance(arg, str):
            return arg in arg2value
        return arg in assigned_tuple_params

    if ismethod(func) and func.im_self is not None:
        # implicit 'self' (or 'cls' for classmethods) argument
        positional = (func.im_self,) + positional

    num_pos = len(positional)
    num_total = num_pos + len(named)
    num_args = len(args)
    num_defaults = len(defaults) if defaults else 0

    # Bind positionals to named parameters first.
    for arg, value in zip(args, positional):
        assign(arg, value)

    if varargs:
        if num_pos > num_args:
            assign(varargs, positional[-(num_pos - num_args):])
        else:
            assign(varargs, ())
    elif 0 < num_args < num_pos:
        raise TypeError('%s() takes %s %d %s (%d given)' % (
            f_name, 'at most' if defaults else 'exactly', num_args,
            'arguments' if num_args > 1 else 'argument', num_total))
    elif num_args == 0 and num_total:
        if varkw:
            if num_pos:
                # XXX: We should use num_pos, but Python also uses num_total:
                raise TypeError('%s() takes exactly 0 arguments '
                                '(%d given)' % (f_name, num_total))
        else:
            raise TypeError('%s() takes no arguments (%d given)' %
                            (f_name, num_total))

    # Bind keyword arguments, detecting duplicates with positionals.
    for arg in args:
        if isinstance(arg, str) and arg in named:
            if is_assigned(arg):
                raise TypeError("%s() got multiple values for keyword "
                                "argument '%s'" % (f_name, arg))
            else:
                assign(arg, named.pop(arg))

    if defaults:    # fill in any missing values with the defaults
        for arg, value in zip(args[-num_defaults:], defaults):
            if not is_assigned(arg):
                assign(arg, value)

    if varkw:
        assign(varkw, named)
    elif named:
        unexpected = next(iter(named))
        if isinstance(unexpected, unicode):
            unexpected = unexpected.encode(sys.getdefaultencoding(), 'replace')
        raise TypeError("%s() got an unexpected keyword argument '%s'" %
                        (f_name, unexpected))

    unassigned = num_args - len([arg for arg in args if is_assigned(arg)])
    if unassigned:
        num_required = num_args - num_defaults
        raise TypeError('%s() takes %s %d %s (%d given)' % (
            f_name, 'at least' if defaults else 'exactly', num_required,
            'arguments' if num_required > 1 else 'argument', num_total))

    return arg2value
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/wrapt/__init__.py
__version_info__ = ('1', '10', '10') __version__ = '.'.join(__version_info__) from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper, BoundFunctionWrapper, WeakFunctionProxy, resolve_path, apply_patch, wrap_object, wrap_object_attribute, function_wrapper, wrap_function_wrapper, patch_function_wrapper, transient_function_wrapper) from .decorators import (adapter_factory, AdapterFactory, decorator, synchronized) from .importer import (register_post_import_hook, when_imported, notify_module_loaded, discover_post_import_hooks) try: from inspect import getcallargs except ImportError: from .arguments import getcallargs
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/wrapt/decorators.py
"""This module implements decorators for implementing other decorators as well as some commonly used decorators. """ import sys PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY3: string_types = str, import builtins exec_ = getattr(builtins, "exec") del builtins else: string_types = basestring, def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" if _globs_ is None: frame = sys._getframe(1) _globs_ = frame.f_globals if _locs_ is None: _locs_ = frame.f_locals del frame elif _locs_ is None: _locs_ = _globs_ exec("""exec _code_ in _globs_, _locs_""") from functools import partial from inspect import ismethod, isclass, formatargspec from collections import namedtuple from threading import Lock, RLock try: from inspect import signature except ImportError: pass from .wrappers import (FunctionWrapper, BoundFunctionWrapper, ObjectProxy, CallableObjectProxy) # Adapter wrapper for the wrapped function which will overlay certain # properties from the adapter function onto the wrapped function so that # functions such as inspect.getargspec(), inspect.getfullargspec(), # inspect.signature() and inspect.getsource() return the correct results # one would expect. 
class _AdapterFunctionCode(CallableObjectProxy):
    """Proxy for a code object which reports the adapter's argument
    related attributes while otherwise delegating to the wrapped code."""

    def __init__(self, wrapped_code, adapter_code):
        super(_AdapterFunctionCode, self).__init__(wrapped_code)
        self._self_adapter_code = adapter_code

    @property
    def co_argcount(self):
        return self._self_adapter_code.co_argcount

    @property
    def co_code(self):
        return self._self_adapter_code.co_code

    @property
    def co_flags(self):
        return self._self_adapter_code.co_flags

    @property
    def co_kwonlyargcount(self):
        return self._self_adapter_code.co_kwonlyargcount

    @property
    def co_varnames(self):
        return self._self_adapter_code.co_varnames

class _AdapterFunctionSurrogate(CallableObjectProxy):
    """Proxy for the wrapped function which overlays the adapter's
    signature related attributes (__code__, __defaults__, ...)."""

    def __init__(self, wrapped, adapter):
        super(_AdapterFunctionSurrogate, self).__init__(wrapped)
        self._self_adapter = adapter

    @property
    def __code__(self):
        return _AdapterFunctionCode(self.__wrapped__.__code__,
                self._self_adapter.__code__)

    @property
    def __defaults__(self):
        return self._self_adapter.__defaults__

    @property
    def __kwdefaults__(self):
        return self._self_adapter.__kwdefaults__

    @property
    def __signature__(self):
        if 'signature' not in globals():
            return self._self_adapter.__signature__
        else:
            # Can't allow this to fail on Python 3 else it falls
            # through to using __wrapped__, but that will be the
            # wrong function we want to derive the signature
            # from. Thus generate the signature ourselves.
            return signature(self._self_adapter)

    if PY2:
        func_code = __code__
        func_defaults = __defaults__

class _BoundAdapterWrapper(BoundFunctionWrapper):
    """Bound variant of AdapterWrapper: exposes a surrogate __func__."""

    @property
    def __func__(self):
        return _AdapterFunctionSurrogate(self.__wrapped__.__func__,
                self._self_parent._self_adapter)

    if PY2:
        im_func = __func__

class AdapterWrapper(FunctionWrapper):
    """FunctionWrapper that additionally reports the signature of a
    separate 'adapter' function for introspection purposes."""

    __bound_function_wrapper__ = _BoundAdapterWrapper

    def __init__(self, *args, **kwargs):
        adapter = kwargs.pop('adapter')
        super(AdapterWrapper, self).__init__(*args, **kwargs)
        self._self_surrogate = _AdapterFunctionSurrogate(
                self.__wrapped__, adapter)
        self._self_adapter = adapter

    @property
    def __code__(self):
        return self._self_surrogate.__code__

    @property
    def __defaults__(self):
        return self._self_surrogate.__defaults__

    @property
    def __kwdefaults__(self):
        return self._self_surrogate.__kwdefaults__

    if PY2:
        func_code = __code__
        func_defaults = __defaults__

    @property
    def __signature__(self):
        return self._self_surrogate.__signature__

class AdapterFactory(object):
    """Interface for factories producing an adapter for a wrapped function."""

    def __call__(self, wrapped):
        raise NotImplementedError()

class DelegatedAdapterFactory(AdapterFactory):
    """AdapterFactory delegating to a user supplied callable."""

    def __init__(self, factory):
        super(DelegatedAdapterFactory, self).__init__()
        self.factory = factory

    def __call__(self, wrapped):
        return self.factory(wrapped)

adapter_factory = DelegatedAdapterFactory

# Decorator for creating other decorators. This decorator and the
# wrappers which they use are designed to properly preserve any name
# attributes, function signatures etc, in addition to the wrappers
# themselves acting like a transparent proxy for the original wrapped
# function so the wrapper is effectively indistinguishable from the
# original wrapped function.

def decorator(wrapper=None, enabled=None, adapter=None):
    # The decorator should be supplied with a single positional argument
    # which is the wrapper function to be used to implement the
    # decorator. This may be preceded by a step whereby the keyword
    # arguments are supplied to customise the behaviour of the
    # decorator.
    #
    # The 'adapter' argument is used to optionally denote a separate
    # function which is notionally used by an adapter decorator. In that
    # case parts of the function '__code__' and '__defaults__'
    # attributes are used from the adapter function rather than those of
    # the wrapped function. This allows for the argument specification
    # from inspect.getargspec() and similar functions to be overridden
    # with a prototype for a different function than what was wrapped.
    #
    # The 'enabled' argument provides a way to enable/disable the use of
    # the decorator. If the type of 'enabled' is a boolean, then it is
    # evaluated immediately and the wrapper not even applied if it is
    # False. If not a boolean, it will be evaluated when the wrapper is
    # called for an unbound wrapper, and when binding occurs for a bound
    # wrapper. When being evaluated, if 'enabled' is callable it will be
    # called to obtain the value to be checked. If False, the wrapper
    # will not be called and instead the original wrapped function will
    # be called directly instead.

    if wrapper is not None:
        # Helper function for creating wrapper of the appropriate
        # type when we need it down below.

        def _build(wrapped, wrapper, enabled=None, adapter=None):
            if adapter:
                if isinstance(adapter, AdapterFactory):
                    adapter = adapter(wrapped)

                if not callable(adapter):
                    # Adapter given as an argspec tuple or string; turn
                    # it into a dummy function with that signature.
                    ns = {}
                    if not isinstance(adapter, string_types):
                        adapter = formatargspec(*adapter)
                    exec_('def adapter{0}: pass'.format(adapter), ns, ns)
                    adapter = ns['adapter']

                return AdapterWrapper(wrapped=wrapped, wrapper=wrapper,
                        enabled=enabled, adapter=adapter)

            return FunctionWrapper(wrapped=wrapped, wrapper=wrapper,
                    enabled=enabled)

        # The wrapper has been provided so return the final decorator.
        # The decorator is itself one of our function wrappers so we
        # can determine when it is applied to functions, instance methods
        # or class methods. This allows us to bind the instance or class
        # method so the appropriate self or cls attribute is supplied
        # when it is finally called.

        def _wrapper(wrapped, instance, args, kwargs):
            # We first check for the case where the decorator was applied
            # to a class type.
            #
            #     @decorator
            #     class mydecoratorclass(object):
            #         def __init__(self, arg=None):
            #             self.arg = arg
            #         def __call__(self, wrapped, instance, args, kwargs):
            #             return wrapped(*args, **kwargs)
            #
            #     @mydecoratorclass(arg=1)
            #     def function():
            #         pass
            #
            # In this case an instance of the class is to be used as the
            # decorator wrapper function. If args was empty at this point,
            # then it means that there were optional keyword arguments
            # supplied to be used when creating an instance of the class
            # to be used as the wrapper function.

            if instance is None and isclass(wrapped) and not args:
                # We still need to be passed the target function to be
                # wrapped as yet, so we need to return a further function
                # to be able to capture it.

                def _capture(target_wrapped):
                    # Before creating the instance of the class to act as
                    # the decorator wrapper function, check that use of
                    # the decorator hadn't been disabled by a simple
                    # boolean. If it was, the target function to be
                    # wrapped is returned instead.

                    _enabled = enabled
                    if type(_enabled) is bool:
                        if not _enabled:
                            return target_wrapped
                        _enabled = None

                    # Now create an instance of the class which is to act
                    # as the decorator wrapper function. Any arguments had
                    # to be supplied as keyword only arguments so that is
                    # all we pass when creating it.

                    target_wrapper = wrapped(**kwargs)

                    # Finally build the wrapper itself and return it.

                    return _build(target_wrapped, target_wrapper,
                            _enabled, adapter)

                return _capture

            # We should always have the target function to be wrapped at
            # this point as the first (and only) value in args.

            target_wrapped = args[0]

            # Need to now check that use of the decorator hadn't been
            # disabled by a simple boolean. If it was, then target
            # function to be wrapped is returned instead.

            _enabled = enabled
            if type(_enabled) is bool:
                if not _enabled:
                    return target_wrapped
                _enabled = None

            # We now need to build the wrapper, but there are a couple of
            # different cases we need to consider.

            if instance is None:
                if isclass(wrapped):
                    # Decorator applied to a class type without optional
                    # keyword arguments: create an instance of the class
                    # to act as the decorator wrapper function, with no
                    # arguments passed.

                    target_wrapper = wrapped()

                else:
                    # Decorator applied to a normal function, or possibly
                    # a static method of a class. That normal function
                    # becomes the decorator wrapper function.

                    target_wrapper = wrapper

            else:
                if isclass(instance):
                    # Decorator applied to a class method. Binding was
                    # actually performed on the wrapper created by our
                    # decorator factory; apply that binding to the
                    # decorator wrapper function the factory was applied
                    # to.

                    target_wrapper = wrapper.__get__(None, instance)

                else:
                    # Decorator applied to an instance method. Same
                    # rebinding dance as the class method case, but bound
                    # against the instance.

                    target_wrapper = wrapper.__get__(instance,
                            type(instance))

            # Finally build the wrapper itself and return it.

            return _build(target_wrapped, target_wrapper, _enabled, adapter)

        # We first return our magic function wrapper here so we can
        # determine in what context the decorator factory was used. In
        # other words, it is itself a universal decorator.

        return _build(wrapper, _wrapper)

    else:
        # The wrapper still has not been provided, so we are just
        # collecting the optional keyword arguments. Return the
        # decorator again wrapped in a partial using the collected
        # arguments.

        return partial(decorator, enabled=enabled, adapter=adapter)

# Decorator for implementing thread synchronization. It can be used as a
# decorator, in which case the synchronization context is determined by
# what type of function is wrapped, or it can also be used as a context
# manager, where the user needs to supply the correct synchronization
# context. It is also possible to supply an object which appears to be a
# synchronization primitive of some sort, by virtue of having release()
# and acquire() methods. In that case that will be used directly as the
# synchronization primitive without creating a separate lock against the
# derived or supplied context.

def synchronized(wrapped):
    # Determine if being passed an object which is a synchronization
    # primitive. We can't check by type for Lock, RLock, Semaphore etc,
    # as the means of creating them isn't the type. Therefore use the
    # existence of acquire() and release() methods. This is more
    # extensible anyway as it allows custom synchronization mechanisms.

    if hasattr(wrapped, 'acquire') and hasattr(wrapped, 'release'):
        # We remember what the original lock is and then return a new
        # decorator which accesses and locks it. When returning the new
        # decorator we wrap it with an object proxy so we can override
        # the context manager methods in case it is being used to wrap
        # synchronized statements with a 'with' statement.

        lock = wrapped

        @decorator
        def _synchronized(wrapped, instance, args, kwargs):
            # Execute the wrapped function while the original supplied
            # lock is held.

            with lock:
                return wrapped(*args, **kwargs)

        class _PartialDecorator(CallableObjectProxy):

            def __enter__(self):
                lock.acquire()
                return lock

            def __exit__(self, *args):
                lock.release()

        return _PartialDecorator(wrapped=_synchronized)

    # Following only apply when the lock is being created automatically
    # based on the context of what was supplied. In this case we supply
    # a final decorator, but need to use FunctionWrapper directly as we
    # want to derive from it to add context manager methods in case it is
    # being used to wrap synchronized statements with a 'with' statement.

    def _synchronized_lock(context):
        # Attempt to retrieve the lock for the specific context.

        lock = vars(context).get('_synchronized_lock', None)

        if lock is None:
            # There is no existing lock defined for the context we are
            # dealing with so we need to create one. This needs to be
            # done in a way to guarantee there is only one created, even
            # if multiple threads try and create it at the same time. We
            # can't always use the setdefault() method on the __dict__
            # for the context (a class __dict__ is a dictproxy), so we
            # use a meta lock on this wrapper itself to control the
            # creation and assignment of the lock attribute against the
            # context.

            meta_lock = vars(synchronized).setdefault(
                    '_synchronized_meta_lock', Lock())

            with meta_lock:
                # We need to check again for whether the lock we want
                # exists in case two threads were trying to create it at
                # the same time and were competing to create the meta
                # lock.

                lock = vars(context).get('_synchronized_lock', None)

                if lock is None:
                    lock = RLock()
                    setattr(context, '_synchronized_lock', lock)

        return lock

    def _synchronized_wrapper(wrapped, instance, args, kwargs):
        # Execute the wrapped function while the lock for the desired
        # context is held. If instance is None then the wrapped function
        # is used as the context.

        with _synchronized_lock(instance or wrapped):
            return wrapped(*args, **kwargs)

    class _FinalDecorator(FunctionWrapper):

        def __enter__(self):
            self._self_lock = _synchronized_lock(self.__wrapped__)
            self._self_lock.acquire()
            return self._self_lock

        def __exit__(self, *args):
            self._self_lock.release()

    return _FinalDecorator(wrapped=wrapped, wrapper=_synchronized_wrapper)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/multidict-6.0.4.dist-info/top_level.txt
multidict
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/exceptions.py
from jmespath.compat import with_str_method

class JMESPathError(ValueError):
    """Base class for all jmespath errors."""
    pass

@with_str_method
class ParseError(JMESPathError):
    _ERROR_MESSAGE = 'Invalid jmespath expression'

    def __init__(self, lex_position, token_value, token_type,
                 msg=_ERROR_MESSAGE):
        super(ParseError, self).__init__(lex_position, token_value,
                                         token_type)
        self.lex_position = lex_position
        self.token_value = token_value
        self.token_type = token_type.upper()
        self.msg = msg
        # Whatever catches the ParseError can fill in the full expression
        self.expression = None

    def __str__(self):
        # self.lex_position +1 to account for the starting double quote char.
        underline = ' ' * (self.lex_position + 1) + '^'
        return (
            '%s: Parse error at column %s, '
            'token "%s" (%s), for expression:\n"%s"\n%s' % (
                self.msg, self.lex_position, self.token_value,
                self.token_type, self.expression, underline))

@with_str_method
class IncompleteExpressionError(ParseError):
    def set_expression(self, expression):
        self.expression = expression
        self.lex_position = len(expression)
        self.token_type = None
        self.token_value = None

    def __str__(self):
        # self.lex_position +1 to account for the starting double quote char.
        underline = ' ' * (self.lex_position + 1) + '^'
        return (
            'Invalid jmespath expression: Incomplete expression:\n'
            '"%s"\n%s' % (self.expression, underline))

@with_str_method
class LexerError(ParseError):
    def __init__(self, lexer_position, lexer_value, message, expression=None):
        self.lexer_position = lexer_position
        self.lexer_value = lexer_value
        self.message = message
        super(LexerError, self).__init__(lexer_position,
                                         lexer_value,
                                         message)
        # Whatever catches LexerError can set this.
        self.expression = expression

    def __str__(self):
        underline = ' ' * self.lexer_position + '^'
        return 'Bad jmespath expression: %s:\n%s\n%s' % (
            self.message, self.expression, underline)

@with_str_method
class ArityError(ParseError):
    # NOTE(review): does not call super().__init__(); relies on the
    # attributes set here plus __str__ only — preserved as-is.
    def __init__(self, expected, actual, name):
        self.expected_arity = expected
        self.actual_arity = actual
        self.function_name = name
        self.expression = None

    def __str__(self):
        return ("Expected %s %s for function %s(), "
                "received %s" % (
                    self.expected_arity,
                    self._pluralize('argument', self.expected_arity),
                    self.function_name,
                    self.actual_arity))

    def _pluralize(self, word, count):
        if count == 1:
            return word
        else:
            return word + 's'

@with_str_method
class VariadictArityError(ArityError):
    def __str__(self):
        return ("Expected at least %s %s for function %s(), "
                "received %s" % (
                    self.expected_arity,
                    self._pluralize('argument', self.expected_arity),
                    self.function_name,
                    self.actual_arity))

@with_str_method
class JMESPathTypeError(JMESPathError):
    def __init__(self, function_name, current_value, actual_type,
                 expected_types):
        self.function_name = function_name
        self.current_value = current_value
        self.actual_type = actual_type
        self.expected_types = expected_types

    def __str__(self):
        return ('In function %s(), invalid type for value: %s, '
                'expected one of: %s, received: "%s"' % (
                    self.function_name, self.current_value,
                    self.expected_types, self.actual_type))

class EmptyExpressionError(JMESPathError):
    def __init__(self):
        super(EmptyExpressionError, self).__init__(
            "Invalid JMESPath expression: cannot be empty.")

class UnknownFunctionError(JMESPathError):
    pass
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/parser.py
"""Top down operator precedence parser. This is an implementation of Vaughan R. Pratt's "Top Down Operator Precedence" parser. (http://dl.acm.org/citation.cfm?doid=512927.512931). These are some additional resources that help explain the general idea behind a Pratt parser: * http://effbot.org/zone/simple-top-down-parsing.htm * http://javascript.crockford.com/tdop/tdop.html A few notes on the implementation. * All the nud/led tokens are on the Parser class itself, and are dispatched using getattr(). This keeps all the parsing logic contained to a single class. * We use two passes through the data. One to create a list of token, then one pass through the tokens to create the AST. While the lexer actually yields tokens, we convert it to a list so we can easily implement two tokens of lookahead. A previous implementation used a fixed circular buffer, but it was significantly slower. Also, the average jmespath expression typically does not have a large amount of token so this is not an issue. And interestingly enough, creating a token list first is actually faster than consuming from the token iterator one token at a time. """ import random from jmespath import lexer from jmespath.compat import with_repr_method from jmespath import ast from jmespath import exceptions from jmespath import visitor class Parser(object): BINDING_POWER = { 'eof': 0, 'unquoted_identifier': 0, 'quoted_identifier': 0, 'literal': 0, 'rbracket': 0, 'rparen': 0, 'comma': 0, 'rbrace': 0, 'number': 0, 'current': 0, 'expref': 0, 'colon': 0, 'pipe': 1, 'or': 2, 'and': 3, 'eq': 5, 'gt': 5, 'lt': 5, 'gte': 5, 'lte': 5, 'ne': 5, 'flatten': 9, # Everything above stops a projection. 'star': 20, 'filter': 21, 'dot': 40, 'not': 45, 'lbrace': 50, 'lbracket': 55, 'lparen': 60, } # The maximum binding power for a token that can stop # a projection. _PROJECTION_STOP = 10 # The _MAX_SIZE most recent expressions are cached in # _CACHE dict. 
_CACHE = {} _MAX_SIZE = 128 def __init__(self, lookahead=2): self.tokenizer = None self._tokens = [None] * lookahead self._buffer_size = lookahead self._index = 0 def parse(self, expression): cached = self._CACHE.get(expression) if cached is not None: return cached parsed_result = self._do_parse(expression) self._CACHE[expression] = parsed_result if len(self._CACHE) > self._MAX_SIZE: self._free_cache_entries() return parsed_result def _do_parse(self, expression): try: return self._parse(expression) except exceptions.LexerError as e: e.expression = expression raise except exceptions.IncompleteExpressionError as e: e.set_expression(expression) raise except exceptions.ParseError as e: e.expression = expression raise def _parse(self, expression): self.tokenizer = lexer.Lexer().tokenize(expression) self._tokens = list(self.tokenizer) self._index = 0 parsed = self._expression(binding_power=0) if not self._current_token() == 'eof': t = self._lookahead_token(0) raise exceptions.ParseError(t['start'], t['value'], t['type'], "Unexpected token: %s" % t['value']) return ParsedResult(expression, parsed) def _expression(self, binding_power=0): left_token = self._lookahead_token(0) self._advance() nud_function = getattr( self, '_token_nud_%s' % left_token['type'], self._error_nud_token) left = nud_function(left_token) current_token = self._current_token() while binding_power < self.BINDING_POWER[current_token]: led = getattr(self, '_token_led_%s' % current_token, None) if led is None: error_token = self._lookahead_token(0) self._error_led_token(error_token) else: self._advance() left = led(left) current_token = self._current_token() return left def _token_nud_literal(self, token): return ast.literal(token['value']) def _token_nud_unquoted_identifier(self, token): return ast.field(token['value']) def _token_nud_quoted_identifier(self, token): field = ast.field(token['value']) # You can't have a quoted identifier as a function # name. 
if self._current_token() == 'lparen': t = self._lookahead_token(0) raise exceptions.ParseError( 0, t['value'], t['type'], 'Quoted identifier not allowed for function names.') return field def _token_nud_star(self, token): left = ast.identity() if self._current_token() == 'rbracket': right = ast.identity() else: right = self._parse_projection_rhs(self.BINDING_POWER['star']) return ast.value_projection(left, right) def _token_nud_filter(self, token): return self._token_led_filter(ast.identity()) def _token_nud_lbrace(self, token): return self._parse_multi_select_hash() def _token_nud_lparen(self, token): expression = self._expression() self._match('rparen') return expression def _token_nud_flatten(self, token): left = ast.flatten(ast.identity()) right = self._parse_projection_rhs( self.BINDING_POWER['flatten']) return ast.projection(left, right) def _token_nud_not(self, token): expr = self._expression(self.BINDING_POWER['not']) return ast.not_expression(expr) def _token_nud_lbracket(self, token): if self._current_token() in ['number', 'colon']: right = self._parse_index_expression() # We could optimize this and remove the identity() node. # We don't really need an index_expression node, we can # just use emit an index node here if we're not dealing # with a slice. 
return self._project_if_slice(ast.identity(), right) elif self._current_token() == 'star' and \ self._lookahead(1) == 'rbracket': self._advance() self._advance() right = self._parse_projection_rhs(self.BINDING_POWER['star']) return ast.projection(ast.identity(), right) else: return self._parse_multi_select_list() def _parse_index_expression(self): # We're here: # [<current> # ^ # | current token if (self._lookahead(0) == 'colon' or self._lookahead(1) == 'colon'): return self._parse_slice_expression() else: # Parse the syntax [number] node = ast.index(self._lookahead_token(0)['value']) self._advance() self._match('rbracket') return node def _parse_slice_expression(self): # [start:end:step] # Where start, end, and step are optional. # The last colon is optional as well. parts = [None, None, None] index = 0 current_token = self._current_token() while not current_token == 'rbracket' and index < 3: if current_token == 'colon': index += 1 if index == 3: self._raise_parse_error_for_token( self._lookahead_token(0), 'syntax error') self._advance() elif current_token == 'number': parts[index] = self._lookahead_token(0)['value'] self._advance() else: self._raise_parse_error_for_token( self._lookahead_token(0), 'syntax error') current_token = self._current_token() self._match('rbracket') return ast.slice(*parts) def _token_nud_current(self, token): return ast.current_node() def _token_nud_expref(self, token): expression = self._expression(self.BINDING_POWER['expref']) return ast.expref(expression) def _token_led_dot(self, left): if not self._current_token() == 'star': right = self._parse_dot_rhs(self.BINDING_POWER['dot']) if left['type'] == 'subexpression': left['children'].append(right) return left else: return ast.subexpression([left, right]) else: # We're creating a projection. 
self._advance() right = self._parse_projection_rhs( self.BINDING_POWER['dot']) return ast.value_projection(left, right) def _token_led_pipe(self, left): right = self._expression(self.BINDING_POWER['pipe']) return ast.pipe(left, right) def _token_led_or(self, left): right = self._expression(self.BINDING_POWER['or']) return ast.or_expression(left, right) def _token_led_and(self, left): right = self._expression(self.BINDING_POWER['and']) return ast.and_expression(left, right) def _token_led_lparen(self, left): if left['type'] != 'field': # 0 - first func arg or closing paren. # -1 - '(' token # -2 - invalid function "name". prev_t = self._lookahead_token(-2) raise exceptions.ParseError( prev_t['start'], prev_t['value'], prev_t['type'], "Invalid function name '%s'" % prev_t['value']) name = left['value'] args = [] while not self._current_token() == 'rparen': expression = self._expression() if self._current_token() == 'comma': self._match('comma') args.append(expression) self._match('rparen') function_node = ast.function_expression(name, args) return function_node def _token_led_filter(self, left): # Filters are projections. 
condition = self._expression(0) self._match('rbracket') if self._current_token() == 'flatten': right = ast.identity() else: right = self._parse_projection_rhs(self.BINDING_POWER['filter']) return ast.filter_projection(left, right, condition) def _token_led_eq(self, left): return self._parse_comparator(left, 'eq') def _token_led_ne(self, left): return self._parse_comparator(left, 'ne') def _token_led_gt(self, left): return self._parse_comparator(left, 'gt') def _token_led_gte(self, left): return self._parse_comparator(left, 'gte') def _token_led_lt(self, left): return self._parse_comparator(left, 'lt') def _token_led_lte(self, left): return self._parse_comparator(left, 'lte') def _token_led_flatten(self, left): left = ast.flatten(left) right = self._parse_projection_rhs( self.BINDING_POWER['flatten']) return ast.projection(left, right) def _token_led_lbracket(self, left): token = self._lookahead_token(0) if token['type'] in ['number', 'colon']: right = self._parse_index_expression() if left['type'] == 'index_expression': # Optimization: if the left node is an index expr, # we can avoid creating another node and instead just add # the right node as a child of the left. 
left['children'].append(right) return left else: return self._project_if_slice(left, right) else: # We have a projection self._match('star') self._match('rbracket') right = self._parse_projection_rhs(self.BINDING_POWER['star']) return ast.projection(left, right) def _project_if_slice(self, left, right): index_expr = ast.index_expression([left, right]) if right['type'] == 'slice': return ast.projection( index_expr, self._parse_projection_rhs(self.BINDING_POWER['star'])) else: return index_expr def _parse_comparator(self, left, comparator): right = self._expression(self.BINDING_POWER[comparator]) return ast.comparator(comparator, left, right) def _parse_multi_select_list(self): expressions = [] while True: expression = self._expression() expressions.append(expression) if self._current_token() == 'rbracket': break else: self._match('comma') self._match('rbracket') return ast.multi_select_list(expressions) def _parse_multi_select_hash(self): pairs = [] while True: key_token = self._lookahead_token(0) # Before getting the token value, verify it's # an identifier. self._match_multiple_tokens( token_types=['quoted_identifier', 'unquoted_identifier']) key_name = key_token['value'] self._match('colon') value = self._expression(0) node = ast.key_val_pair(key_name=key_name, node=value) pairs.append(node) if self._current_token() == 'comma': self._match('comma') elif self._current_token() == 'rbrace': self._match('rbrace') break return ast.multi_select_dict(nodes=pairs) def _parse_projection_rhs(self, binding_power): # Parse the right hand side of the projection. if self.BINDING_POWER[self._current_token()] < self._PROJECTION_STOP: # BP of 10 are all the tokens that stop a projection. 
right = ast.identity() elif self._current_token() == 'lbracket': right = self._expression(binding_power) elif self._current_token() == 'filter': right = self._expression(binding_power) elif self._current_token() == 'dot': self._match('dot') right = self._parse_dot_rhs(binding_power) else: self._raise_parse_error_for_token(self._lookahead_token(0), 'syntax error') return right def _parse_dot_rhs(self, binding_power): # From the grammar: # expression '.' ( identifier / # multi-select-list / # multi-select-hash / # function-expression / # * # In terms of tokens that means that after a '.', # you can have: lookahead = self._current_token() # Common case "foo.bar", so first check for an identifier. if lookahead in ['quoted_identifier', 'unquoted_identifier', 'star']: return self._expression(binding_power) elif lookahead == 'lbracket': self._match('lbracket') return self._parse_multi_select_list() elif lookahead == 'lbrace': self._match('lbrace') return self._parse_multi_select_hash() else: t = self._lookahead_token(0) allowed = ['quoted_identifier', 'unquoted_identifier', 'lbracket', 'lbrace'] msg = ( "Expecting: %s, got: %s" % (allowed, t['type']) ) self._raise_parse_error_for_token(t, msg) def _error_nud_token(self, token): if token['type'] == 'eof': raise exceptions.IncompleteExpressionError( token['start'], token['value'], token['type']) self._raise_parse_error_for_token(token, 'invalid token') def _error_led_token(self, token): self._raise_parse_error_for_token(token, 'invalid token') def _match(self, token_type=None): # inline'd self._current_token() if self._current_token() == token_type: # inline'd self._advance() self._advance() else: self._raise_parse_error_maybe_eof( token_type, self._lookahead_token(0)) def _match_multiple_tokens(self, token_types): if self._current_token() not in token_types: self._raise_parse_error_maybe_eof( token_types, self._lookahead_token(0)) self._advance() def _advance(self): self._index += 1 def _current_token(self): return 
self._tokens[self._index]['type'] def _lookahead(self, number): return self._tokens[self._index + number]['type'] def _lookahead_token(self, number): return self._tokens[self._index + number] def _raise_parse_error_for_token(self, token, reason): lex_position = token['start'] actual_value = token['value'] actual_type = token['type'] raise exceptions.ParseError(lex_position, actual_value, actual_type, reason) def _raise_parse_error_maybe_eof(self, expected_type, token): lex_position = token['start'] actual_value = token['value'] actual_type = token['type'] if actual_type == 'eof': raise exceptions.IncompleteExpressionError( lex_position, actual_value, actual_type) message = 'Expecting: %s, got: %s' % (expected_type, actual_type) raise exceptions.ParseError( lex_position, actual_value, actual_type, message) def _free_cache_entries(self): for key in random.sample(self._CACHE.keys(), int(self._MAX_SIZE / 2)): self._CACHE.pop(key, None) @classmethod def purge(cls): """Clear the expression compilation cache.""" cls._CACHE.clear() @with_repr_method class ParsedResult(object): def __init__(self, expression, parsed): self.expression = expression self.parsed = parsed def search(self, value, options=None): interpreter = visitor.TreeInterpreter(options) result = interpreter.visit(self.parsed, value) return result def _render_dot_file(self): """Render the parsed AST as a dot file. Note that this is marked as an internal method because the AST is an implementation detail and is subject to change. This method can be used to help troubleshoot or for development purposes, but is not considered part of the public supported API. Use at your own risk. """ renderer = visitor.GraphvizVisitor() contents = renderer.visit(self.parsed) return contents def __repr__(self): return repr(self.parsed)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/ast.py
# AST nodes have this structure: # {"type": <node type>", children: [], "value": ""} def comparator(name, first, second): return {'type': 'comparator', 'children': [first, second], 'value': name} def current_node(): return {'type': 'current', 'children': []} def expref(expression): return {'type': 'expref', 'children': [expression]} def function_expression(name, args): return {'type': 'function_expression', 'children': args, 'value': name} def field(name): return {"type": "field", "children": [], "value": name} def filter_projection(left, right, comparator): return {'type': 'filter_projection', 'children': [left, right, comparator]} def flatten(node): return {'type': 'flatten', 'children': [node]} def identity(): return {"type": "identity", 'children': []} def index(index): return {"type": "index", "value": index, "children": []} def index_expression(children): return {"type": "index_expression", 'children': children} def key_val_pair(key_name, node): return {"type": "key_val_pair", 'children': [node], "value": key_name} def literal(literal_value): return {'type': 'literal', 'value': literal_value, 'children': []} def multi_select_dict(nodes): return {"type": "multi_select_dict", "children": nodes} def multi_select_list(nodes): return {"type": "multi_select_list", "children": nodes} def or_expression(left, right): return {"type": "or_expression", "children": [left, right]} def and_expression(left, right): return {"type": "and_expression", "children": [left, right]} def not_expression(expr): return {"type": "not_expression", "children": [expr]} def pipe(left, right): return {'type': 'pipe', 'children': [left, right]} def projection(left, right): return {'type': 'projection', 'children': [left, right]} def subexpression(children): return {"type": "subexpression", 'children': children} def slice(start, end, step): return {"type": "slice", "children": [start, end, step]} def value_projection(left, right): return {'type': 'value_projection', 'children': [left, right]}
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/lexer.py
import string
import warnings
from json import loads

from jmespath.exceptions import LexerError, EmptyExpressionError


class Lexer(object):
    """Hand-written single-pass tokenizer for JMESPath expressions.

    ``tokenize`` is a generator yielding token dicts of the form
    ``{'type': ..., 'value': ..., 'start': ..., 'end': ...}`` and always
    terminates with an ``eof`` token.  State lives in ``_position``
    (current index), ``_current`` (current char, ``None`` at EOF),
    ``_chars``/``_expression``/``_length``.
    """

    # Character classes used for dispatch in tokenize().
    START_IDENTIFIER = set(string.ascii_letters + '_')
    VALID_IDENTIFIER = set(string.ascii_letters + string.digits + '_')
    VALID_NUMBER = set(string.digits)
    WHITESPACE = set(" \t\n\r")
    # Single characters that map 1:1 to a token type.
    SIMPLE_TOKENS = {
        '.': 'dot',
        '*': 'star',
        ']': 'rbracket',
        ',': 'comma',
        ':': 'colon',
        '@': 'current',
        '(': 'lparen',
        ')': 'rparen',
        '{': 'lbrace',
        '}': 'rbrace',
    }

    def tokenize(self, expression):
        """Yield token dicts for ``expression``; raises LexerError on
        unknown or malformed input, EmptyExpressionError if empty."""
        self._initialize_for_expression(expression)
        while self._current is not None:
            if self._current in self.SIMPLE_TOKENS:
                yield {'type': self.SIMPLE_TOKENS[self._current],
                       'value': self._current,
                       'start': self._position, 'end': self._position + 1}
                self._next()
            elif self._current in self.START_IDENTIFIER:
                start = self._position
                buff = self._current
                while self._next() in self.VALID_IDENTIFIER:
                    buff += self._current
                yield {'type': 'unquoted_identifier', 'value': buff,
                       'start': start, 'end': start + len(buff)}
            elif self._current in self.WHITESPACE:
                self._next()
            elif self._current == '[':
                # '[' begins one of three tokens: '[]', '[?' or plain '['.
                start = self._position
                next_char = self._next()
                if next_char == ']':
                    self._next()
                    yield {'type': 'flatten', 'value': '[]',
                           'start': start, 'end': start + 2}
                elif next_char == '?':
                    self._next()
                    yield {'type': 'filter', 'value': '[?',
                           'start': start, 'end': start + 2}
                else:
                    yield {'type': 'lbracket', 'value': '[',
                           'start': start, 'end': start + 1}
            elif self._current == "'":
                yield self._consume_raw_string_literal()
            elif self._current == '|':
                yield self._match_or_else('|', 'or', 'pipe')
            elif self._current == '&':
                yield self._match_or_else('&', 'and', 'expref')
            elif self._current == '`':
                yield self._consume_literal()
            elif self._current in self.VALID_NUMBER:
                start = self._position
                buff = self._consume_number()
                yield {'type': 'number', 'value': int(buff),
                       'start': start, 'end': start + len(buff)}
            elif self._current == '-':
                # Negative number.
                start = self._position
                buff = self._consume_number()
                if len(buff) > 1:
                    yield {'type': 'number', 'value': int(buff),
                           'start': start, 'end': start + len(buff)}
                else:
                    # A lone '-' with no digits following it.
                    raise LexerError(lexer_position=start,
                                     lexer_value=buff,
                                     message="Unknown token '%s'" % buff)
            elif self._current == '"':
                yield self._consume_quoted_identifier()
            elif self._current == '<':
                yield self._match_or_else('=', 'lte', 'lt')
            elif self._current == '>':
                yield self._match_or_else('=', 'gte', 'gt')
            elif self._current == '!':
                yield self._match_or_else('=', 'ne', 'not')
            elif self._current == '=':
                # '=' is only valid as part of '=='.
                if self._next() == '=':
                    yield {'type': 'eq', 'value': '==',
                           'start': self._position - 1,
                           'end': self._position}
                    self._next()
                else:
                    if self._current is None:
                        # If we're at the EOF, we never advanced
                        # the position so we don't need to rewind
                        # it back one location.
                        position = self._position
                    else:
                        position = self._position - 1
                    raise LexerError(
                        lexer_position=position,
                        lexer_value='=',
                        message="Unknown token '='")
            else:
                raise LexerError(lexer_position=self._position,
                                 lexer_value=self._current,
                                 message="Unknown token %s" % self._current)
        # Always terminate the stream with a zero-width eof token.
        yield {'type': 'eof', 'value': '',
               'start': self._length, 'end': self._length}

    def _consume_number(self):
        """Consume the current char plus any following digits."""
        start = self._position
        buff = self._current
        while self._next() in self.VALID_NUMBER:
            buff += self._current
        return buff

    def _initialize_for_expression(self, expression):
        """Reset lexer state for a new expression."""
        if not expression:
            raise EmptyExpressionError()
        self._position = 0
        self._expression = expression
        self._chars = list(self._expression)
        self._current = self._chars[self._position]
        self._length = len(self._expression)

    def _next(self):
        """Advance one char; sets and returns None at end of input."""
        if self._position == self._length - 1:
            self._current = None
        else:
            self._position += 1
            self._current = self._chars[self._position]
        return self._current

    def _consume_until(self, delimiter):
        # Consume until the delimiter is reached,
        # allowing for the delimiter to be escaped with "\".
        start = self._position
        buff = ''
        self._next()
        while self._current != delimiter:
            if self._current == '\\':
                buff += '\\'
                self._next()
            if self._current is None:
                # We're at the EOF.
                raise LexerError(lexer_position=start,
                                 lexer_value=self._expression[start:],
                                 message="Unclosed %s delimiter" % delimiter)
            buff += self._current
            self._next()
        # Skip the closing delimiter.
        self._next()
        return buff

    def _consume_literal(self):
        """Consume a backtick-delimited JSON literal token."""
        start = self._position
        lexeme = self._consume_until('`').replace('\\`', '`')
        try:
            # Assume it is valid JSON and attempt to parse.
            parsed_json = loads(lexeme)
        except ValueError:
            try:
                # Invalid JSON values should be converted to quoted
                # JSON strings during the JEP-12 deprecation period.
                parsed_json = loads('"%s"' % lexeme.lstrip())
                warnings.warn("deprecated string literal syntax",
                              PendingDeprecationWarning)
            except ValueError:
                raise LexerError(lexer_position=start,
                                 lexer_value=self._expression[start:],
                                 message="Bad token %s" % lexeme)
        token_len = self._position - start
        return {'type': 'literal', 'value': parsed_json,
                'start': start, 'end': token_len}

    def _consume_quoted_identifier(self):
        """Consume a double-quoted identifier (decoded as a JSON string)."""
        start = self._position
        lexeme = '"' + self._consume_until('"') + '"'
        try:
            token_len = self._position - start
            return {'type': 'quoted_identifier', 'value': loads(lexeme),
                    'start': start, 'end': token_len}
        except ValueError as e:
            error_message = str(e).split(':')[0]
            raise LexerError(lexer_position=start,
                             lexer_value=lexeme,
                             message=error_message)

    def _consume_raw_string_literal(self):
        """Consume a single-quoted raw string literal token."""
        start = self._position
        lexeme = self._consume_until("'").replace("\\'", "'")
        token_len = self._position - start
        return {'type': 'literal', 'value': lexeme,
                'start': start, 'end': token_len}

    def _match_or_else(self, expected, match_type, else_type):
        """Two-char lookahead: emit ``match_type`` if the next char is
        ``expected`` (e.g. '==' vs '='), otherwise ``else_type``."""
        start = self._position
        current = self._current
        next_char = self._next()
        if next_char == expected:
            self._next()
            return {'type': match_type, 'value': current + next_char,
                    'start': start, 'end': start + 1}
        return {'type': else_type, 'value': current,
                'start': start, 'end': start}
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/__init__.py
import warnings import sys from jmespath import parser from jmespath.visitor import Options __version__ = '0.10.0' if sys.version_info[:2] <= (2, 6) or ((3, 0) <= sys.version_info[:2] <= (3, 3)): python_ver = '.'.join(str(x) for x in sys.version_info[:3]) warnings.warn( 'You are using Python {0}, which will no longer be supported in ' 'version 0.11.0'.format(python_ver), DeprecationWarning) def compile(expression): return parser.Parser().parse(expression) def search(expression, data, options=None): return parser.Parser().parse(expression).search(data, options=options)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/visitor.py
import operator

from jmespath import functions
from jmespath.compat import string_type
from numbers import Number


def _equals(x, y):
    """JMESPath equality: like ``==`` but 0/1 never equal True/False."""
    if _is_special_integer_case(x, y):
        return False
    else:
        return x == y


def _is_special_integer_case(x, y):
    # We need to special case comparing 0 or 1 to
    # True/False.  While normally comparing any
    # integer other than 0/1 to True/False will always
    # return False.  However 0/1 have this:
    # >>> 0 == True
    # False
    # >>> 0 == False
    # True
    # >>> 1 == True
    # True
    # >>> 1 == False
    # False
    #
    # Also need to consider that:
    # >>> 0 in [True, False]
    # True
    if type(x) is int and (x == 0 or x == 1):
        return y is True or y is False
    elif type(y) is int and (y == 0 or y == 1):
        return x is True or x is False


def _is_comparable(x):
    # The spec doesn't officially support string types yet,
    # but enough people are relying on this behavior that
    # it's been added back.  This should eventually become
    # part of the official spec.
    return _is_actual_number(x) or isinstance(x, string_type)


def _is_actual_number(x):
    # We need to handle python's quirkiness with booleans,
    # specifically:
    #
    # >>> isinstance(False, int)
    # True
    # >>> isinstance(True, int)
    # True
    if x is True or x is False:
        return False
    return isinstance(x, Number)


class Options(object):
    """Options to control how a JMESPath function is evaluated."""

    def __init__(self, dict_cls=None, custom_functions=None):
        #: The class to use when creating a dict.  The interpreter
        #  may create dictionaries during the evaluation of a JMESPath
        #  expression.  For example, a multi-select hash will
        #  create a dictionary.  By default we use a dict() type.
        #  You can set this value to change what dict type is used.
        #  The most common reason you would change this is if you
        #  want to set a collections.OrderedDict so that you can
        #  have predictable key ordering.
        self.dict_cls = dict_cls
        self.custom_functions = custom_functions


class _Expression(object):
    """Lazily evaluated expression reference (the ``&expr`` construct)."""

    def __init__(self, expression, interpreter):
        self.expression = expression
        self.interpreter = interpreter

    def visit(self, node, *args, **kwargs):
        return self.interpreter.visit(node, *args, **kwargs)


class Visitor(object):
    """Base AST visitor; dispatches on node['type'] with a method cache."""

    def __init__(self):
        self._method_cache = {}

    def visit(self, node, *args, **kwargs):
        node_type = node['type']
        method = self._method_cache.get(node_type)
        if method is None:
            # Resolve visit_<type> once per node type, then cache it.
            method = getattr(
                self, 'visit_%s' % node['type'], self.default_visit)
            self._method_cache[node_type] = method
        return method(node, *args, **kwargs)

    def default_visit(self, node, *args, **kwargs):
        raise NotImplementedError("default_visit")


class TreeInterpreter(Visitor):
    """Evaluates a parsed JMESPath AST against a Python data structure."""

    COMPARATOR_FUNC = {
        'eq': _equals,
        'ne': lambda x, y: not _equals(x, y),
        'lt': operator.lt,
        'gt': operator.gt,
        'lte': operator.le,
        'gte': operator.ge
    }
    _EQUALITY_OPS = ['eq', 'ne']
    MAP_TYPE = dict

    def __init__(self, options=None):
        super(TreeInterpreter, self).__init__()
        self._dict_cls = self.MAP_TYPE
        if options is None:
            options = Options()
        self._options = options
        if options.dict_cls is not None:
            self._dict_cls = self._options.dict_cls
        if options.custom_functions is not None:
            self._functions = self._options.custom_functions
        else:
            self._functions = functions.Functions()

    def default_visit(self, node, *args, **kwargs):
        raise NotImplementedError(node['type'])

    def visit_subexpression(self, node, value):
        # Evaluate each child left-to-right, feeding results forward.
        result = value
        for child in node['children']:
            result = self.visit(child, result)
        return result

    def visit_field(self, node, value):
        try:
            return value.get(node['value'])
        except AttributeError:
            # Non-dict values yield null for a field lookup.
            return None

    def visit_comparator(self, node, value):
        # Common case: comparator is == or !=
        comparator_func = self.COMPARATOR_FUNC[node['value']]
        if node['value'] in self._EQUALITY_OPS:
            return comparator_func(
                self.visit(node['children'][0], value),
                self.visit(node['children'][1], value)
            )
        else:
            # Ordering operators are only valid for numbers.
            # Evaluating any other type with a comparison operator
            # will yield a None value.
            # (Removed an unused local ``num_types = (int, float)``;
            # comparability is decided by _is_comparable.)
            left = self.visit(node['children'][0], value)
            right = self.visit(node['children'][1], value)
            if not (_is_comparable(left) and
                    _is_comparable(right)):
                return None
        return comparator_func(left, right)

    def visit_current(self, node, value):
        return value

    def visit_expref(self, node, value):
        return _Expression(node['children'][0], self)

    def visit_function_expression(self, node, value):
        resolved_args = []
        for child in node['children']:
            current = self.visit(child, value)
            resolved_args.append(current)
        return self._functions.call_function(node['value'], resolved_args)

    def visit_filter_projection(self, node, value):
        base = self.visit(node['children'][0], value)
        if not isinstance(base, list):
            return None
        comparator_node = node['children'][2]
        collected = []
        for element in base:
            if self._is_true(self.visit(comparator_node, element)):
                current = self.visit(node['children'][1], element)
                if current is not None:
                    collected.append(current)
        return collected

    def visit_flatten(self, node, value):
        base = self.visit(node['children'][0], value)
        if not isinstance(base, list):
            # Can't flatten the object if it's not a list.
            return None
        merged_list = []
        for element in base:
            if isinstance(element, list):
                merged_list.extend(element)
            else:
                merged_list.append(element)
        return merged_list

    def visit_identity(self, node, value):
        return value

    def visit_index(self, node, value):
        # Even though we can index strings, we don't
        # want to support that.
        if not isinstance(value, list):
            return None
        try:
            return value[node['value']]
        except IndexError:
            return None

    def visit_index_expression(self, node, value):
        result = value
        for child in node['children']:
            result = self.visit(child, result)
        return result

    def visit_slice(self, node, value):
        if not isinstance(value, list):
            return None
        s = slice(*node['children'])
        return value[s]

    def visit_key_val_pair(self, node, value):
        return self.visit(node['children'][0], value)

    def visit_literal(self, node, value):
        return node['value']

    def visit_multi_select_dict(self, node, value):
        if value is None:
            return None
        collected = self._dict_cls()
        for child in node['children']:
            collected[child['value']] = self.visit(child, value)
        return collected

    def visit_multi_select_list(self, node, value):
        if value is None:
            return None
        collected = []
        for child in node['children']:
            collected.append(self.visit(child, value))
        return collected

    def visit_or_expression(self, node, value):
        matched = self.visit(node['children'][0], value)
        if self._is_false(matched):
            matched = self.visit(node['children'][1], value)
        return matched

    def visit_and_expression(self, node, value):
        matched = self.visit(node['children'][0], value)
        if self._is_false(matched):
            return matched
        return self.visit(node['children'][1], value)

    def visit_not_expression(self, node, value):
        original_result = self.visit(node['children'][0], value)
        if type(original_result) is int and original_result == 0:
            # Special case for 0, !0 should be false, not true.
            # 0 is not a special cased integer in jmespath.
            return False
        return not original_result

    def visit_pipe(self, node, value):
        result = value
        for child in node['children']:
            result = self.visit(child, result)
        return result

    def visit_projection(self, node, value):
        base = self.visit(node['children'][0], value)
        if not isinstance(base, list):
            return None
        collected = []
        for element in base:
            current = self.visit(node['children'][1], element)
            if current is not None:
                collected.append(current)
        return collected

    def visit_value_projection(self, node, value):
        base = self.visit(node['children'][0], value)
        try:
            base = base.values()
        except AttributeError:
            return None
        collected = []
        for element in base:
            current = self.visit(node['children'][1], element)
            if current is not None:
                collected.append(current)
        return collected

    def _is_false(self, value):
        # This looks weird, but we're explicitly using equality checks
        # because the truth/false values are different between
        # python and jmespath.
        return (value == '' or value == [] or value == {} or value is None or
                value is False)

    def _is_true(self, value):
        return not self._is_false(value)


class GraphvizVisitor(Visitor):
    """Renders an AST as Graphviz dot text (debugging aid)."""

    def __init__(self):
        super(GraphvizVisitor, self).__init__()
        self._lines = []
        self._count = 1

    def visit(self, node, *args, **kwargs):
        self._lines.append('digraph AST {')
        current = '%s%s' % (node['type'], self._count)
        self._count += 1
        self._visit(node, current)
        self._lines.append('}')
        return '\n'.join(self._lines)

    def _visit(self, node, current):
        self._lines.append('%s [label="%s(%s)"]' % (
            current, node['type'], node.get('value', '')))
        for child in node.get('children', []):
            child_name = '%s%s' % (child['type'], self._count)
            self._count += 1
            self._lines.append('  %s -> %s' % (current, child_name))
            self._visit(child, child_name)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/compat.py
import sys import inspect PY2 = sys.version_info[0] == 2 def with_metaclass(meta, *bases): # Taken from flask/six. class metaclass(meta): def __new__(cls, name, this_bases, d): return meta(name, bases, d) return type.__new__(metaclass, 'temporary_class', (), {}) if PY2: text_type = unicode string_type = basestring from itertools import izip_longest as zip_longest def with_str_method(cls): """Class decorator that handles __str__ compat between py2 and py3.""" # In python2, the __str__ should be __unicode__ # and __str__ should return bytes. cls.__unicode__ = cls.__str__ def __str__(self): return self.__unicode__().encode('utf-8') cls.__str__ = __str__ return cls def with_repr_method(cls): """Class decorator that handle __repr__ with py2 and py3.""" # This is almost the same thing as with_str_method *except* # it uses the unicode_escape encoding. This also means we need to be # careful encoding the input multiple times, so we only encode # if we get a unicode type. original_repr_method = cls.__repr__ def __repr__(self): original_repr = original_repr_method(self) if isinstance(original_repr, text_type): original_repr = original_repr.encode('unicode_escape') return original_repr cls.__repr__ = __repr__ return cls def get_methods(cls): for name, method in inspect.getmembers(cls, predicate=inspect.ismethod): yield name, method else: text_type = str string_type = str from itertools import zip_longest def with_str_method(cls): # In python3, we don't need to do anything, we return a str type. return cls def with_repr_method(cls): return cls def get_methods(cls): for name, method in inspect.getmembers(cls, predicate=inspect.isfunction): yield name, method
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jmespath/functions.py
"""JMESPath built-in function implementations.

Methods named ``_func_<name>`` and decorated with ``@signature`` are
collected into ``FUNCTION_TABLE`` by the ``FunctionRegistry`` metaclass,
so e.g. ``_func_max_by`` becomes the JMESPath function ``max_by()``.
"""
import math
import json

from jmespath import exceptions
from jmespath.compat import string_type as STRING_TYPE
from jmespath.compat import get_methods, with_metaclass


# python types -> jmespath types
TYPES_MAP = {
    'bool': 'boolean',
    'list': 'array',
    'dict': 'object',
    'NoneType': 'null',
    'unicode': 'string',
    'str': 'string',
    'float': 'number',
    'int': 'number',
    'long': 'number',
    'OrderedDict': 'object',
    '_Projection': 'array',
    '_Expression': 'expref',
}


# jmespath types -> python types
REVERSE_TYPES_MAP = {
    'boolean': ('bool',),
    'array': ('list', '_Projection'),
    'object': ('dict', 'OrderedDict',),
    'null': ('NoneType',),
    'string': ('unicode', 'str'),
    'number': ('float', 'int', 'long'),
    'expref': ('_Expression',),
}


def signature(*arguments):
    # Decorator that attaches the declared argument spec to the
    # function object; the metaclass reads it during registration.
    def _record_signature(func):
        func.signature = arguments
        return func
    return _record_signature


class FunctionRegistry(type):
    def __init__(cls, name, bases, attrs):
        cls._populate_function_table()
        super(FunctionRegistry, cls).__init__(name, bases, attrs)

    def _populate_function_table(cls):
        function_table = {}
        # Any method with a @signature decorator that also
        # starts with "_func_" is registered as a function.
        # _func_max_by -> max_by function.
        for name, method in get_methods(cls):
            if not name.startswith('_func_'):
                continue
            signature = getattr(method, 'signature', None)
            if signature is not None:
                # name[6:] strips the "_func_" prefix.
                function_table[name[6:]] = {
                    'function': method,
                    'signature': signature,
                }
        cls.FUNCTION_TABLE = function_table


class Functions(with_metaclass(FunctionRegistry, object)):

    FUNCTION_TABLE = {
    }

    def call_function(self, function_name, resolved_args):
        """Look up *function_name*, validate *resolved_args*, and invoke it.

        :raises exceptions.UnknownFunctionError: for unregistered names.
        """
        try:
            spec = self.FUNCTION_TABLE[function_name]
        except KeyError:
            raise exceptions.UnknownFunctionError(
                "Unknown function: %s()" % function_name)
        function = spec['function']
        signature = spec['signature']
        self._validate_arguments(resolved_args, signature, function_name)
        return function(self, *resolved_args)

    def _validate_arguments(self, args, signature, function_name):
        # A trailing variadic spec only sets a *minimum* arity.
        if signature and signature[-1].get('variadic'):
            if len(args) < len(signature):
                raise exceptions.VariadictArityError(
                    len(signature), len(args), function_name)
        elif len(args) != len(signature):
            raise exceptions.ArityError(
                len(signature), len(args), function_name)
        return self._type_check(args, signature, function_name)

    def _type_check(self, actual, signature, function_name):
        for i in range(len(signature)):
            allowed_types = signature[i]['types']
            if allowed_types:
                self._type_check_single(actual[i], allowed_types,
                                        function_name)

    def _type_check_single(self, current, types, function_name):
        # Type checking involves checking the top level type,
        # and in the case of arrays, potentially checking the types
        # of each element.
        allowed_types, allowed_subtypes = self._get_allowed_pytypes(types)
        # We're not using isinstance() on purpose.
        # The type model for jmespath does not map
        # 1-1 with python types (booleans are considered
        # integers in python for example).
        actual_typename = type(current).__name__
        if actual_typename not in allowed_types:
            raise exceptions.JMESPathTypeError(
                function_name, current,
                self._convert_to_jmespath_type(actual_typename), types)
        # If we're dealing with a list type, we can have
        # additional restrictions on the type of the list
        # elements (for example a function can require a
        # list of numbers or a list of strings).
        # Arrays are the only types that can have subtypes.
        if allowed_subtypes:
            self._subtype_check(current, allowed_subtypes,
                                types, function_name)

    def _get_allowed_pytypes(self, types):
        # Specs like 'array-number' mean "array whose elements are
        # numbers": the part after '-' constrains the element type.
        allowed_types = []
        allowed_subtypes = []
        for t in types:
            type_ = t.split('-', 1)
            if len(type_) == 2:
                type_, subtype = type_
                allowed_subtypes.append(REVERSE_TYPES_MAP[subtype])
            else:
                type_ = type_[0]
            allowed_types.extend(REVERSE_TYPES_MAP[type_])
        return allowed_types, allowed_subtypes

    def _subtype_check(self, current, allowed_subtypes, types, function_name):
        if len(allowed_subtypes) == 1:
            # The easy case, we know up front what type
            # we need to validate.
            allowed_subtypes = allowed_subtypes[0]
            for element in current:
                actual_typename = type(element).__name__
                if actual_typename not in allowed_subtypes:
                    raise exceptions.JMESPathTypeError(
                        function_name, element, actual_typename, types)
        elif len(allowed_subtypes) > 1 and current:
            # Dynamic type validation.  Based on the first
            # type we see, we validate that the remaining types
            # match.
            first = type(current[0]).__name__
            for subtypes in allowed_subtypes:
                if first in subtypes:
                    allowed = subtypes
                    break
            else:
                raise exceptions.JMESPathTypeError(
                    function_name, current[0], first, types)
            for element in current:
                actual_typename = type(element).__name__
                if actual_typename not in allowed:
                    raise exceptions.JMESPathTypeError(
                        function_name, element, actual_typename, types)

    @signature({'types': ['number']})
    def _func_abs(self, arg):
        return abs(arg)

    @signature({'types': ['array-number']})
    def _func_avg(self, arg):
        # avg([]) is null, not a division-by-zero error.
        if arg:
            return sum(arg) / float(len(arg))
        else:
            return None

    @signature({'types': [], 'variadic': True})
    def _func_not_null(self, *arguments):
        # Returns the first non-null argument (or null implicitly).
        for argument in arguments:
            if argument is not None:
                return argument

    @signature({'types': []})
    def _func_to_array(self, arg):
        if isinstance(arg, list):
            return arg
        else:
            return [arg]

    @signature({'types': []})
    def _func_to_string(self, arg):
        # Strings pass through; everything else is JSON-encoded.
        if isinstance(arg, STRING_TYPE):
            return arg
        else:
            return json.dumps(arg, separators=(',', ':'),
                              default=str)

    @signature({'types': []})
    def _func_to_number(self, arg):
        # Non-numeric, non-string values coerce to null; strings are
        # tried as int first, then float.
        if isinstance(arg, (list, dict, bool)):
            return None
        elif arg is None:
            return None
        elif isinstance(arg, (int, float)):
            return arg
        else:
            try:
                return int(arg)
            except ValueError:
                try:
                    return float(arg)
                except ValueError:
                    return None

    @signature({'types': ['array', 'string']}, {'types': []})
    def _func_contains(self, subject, search):
        return search in subject

    @signature({'types': ['string', 'array', 'object']})
    def _func_length(self, arg):
        return len(arg)

    @signature({'types': ['string']}, {'types': ['string']})
    def _func_ends_with(self, search, suffix):
        return search.endswith(suffix)

    @signature({'types': ['string']}, {'types': ['string']})
    def _func_starts_with(self, search, suffix):
        return search.startswith(suffix)

    @signature({'types': ['array', 'string']})
    def _func_reverse(self, arg):
        if isinstance(arg, STRING_TYPE):
            return arg[::-1]
        else:
            return list(reversed(arg))

    @signature({"types": ['number']})
    def _func_ceil(self, arg):
        return math.ceil(arg)

    @signature({"types": ['number']})
    def _func_floor(self, arg):
        return math.floor(arg)

    @signature({"types": ['string']}, {"types": ['array-string']})
    def _func_join(self, separator, array):
        return separator.join(array)

    @signature({'types': ['expref']}, {'types': ['array']})
    def _func_map(self, expref, arg):
        # Apply the expression-reference to every element.
        result = []
        for element in arg:
            result.append(expref.visit(expref.expression, element))
        return result

    @signature({"types": ['array-number', 'array-string']})
    def _func_max(self, arg):
        if arg:
            return max(arg)
        else:
            return None

    @signature({"types": ["object"], "variadic": True})
    def _func_merge(self, *arguments):
        # Later arguments win on key conflicts.
        merged = {}
        for arg in arguments:
            merged.update(arg)
        return merged

    @signature({"types": ['array-number', 'array-string']})
    def _func_min(self, arg):
        if arg:
            return min(arg)
        else:
            return None

    @signature({"types": ['array-string', 'array-number']})
    def _func_sort(self, arg):
        return list(sorted(arg))

    @signature({"types": ['array-number']})
    def _func_sum(self, arg):
        return sum(arg)

    @signature({"types": ['object']})
    def _func_keys(self, arg):
        # To be consistent with .values()
        # should we also return the indices of a list?
        return list(arg.keys())

    @signature({"types": ['object']})
    def _func_values(self, arg):
        return list(arg.values())

    @signature({'types': []})
    def _func_type(self, arg):
        # Check bool before int: bool is an int subclass in Python.
        if isinstance(arg, STRING_TYPE):
            return "string"
        elif isinstance(arg, bool):
            return "boolean"
        elif isinstance(arg, list):
            return "array"
        elif isinstance(arg, dict):
            return "object"
        elif isinstance(arg, (float, int)):
            return "number"
        elif arg is None:
            return "null"

    @signature({'types': ['array']}, {'types': ['expref']})
    def _func_sort_by(self, array, expref):
        if not array:
            return array
        # sort_by allows for the expref to be either a number or
        # a string, so we have some special logic to handle this.
        # We evaluate the first array element and verify that it's
        # either a string or a number.  We then create a key function
        # that validates that type, which requires that remaining array
        # elements resolve to the same type as the first element.
        required_type = self._convert_to_jmespath_type(
            type(expref.visit(expref.expression, array[0])).__name__)
        if required_type not in ['number', 'string']:
            raise exceptions.JMESPathTypeError(
                'sort_by', array[0], required_type, ['string', 'number'])
        keyfunc = self._create_key_func(expref,
                                        [required_type],
                                        'sort_by')
        return list(sorted(array, key=keyfunc))

    @signature({'types': ['array']}, {'types': ['expref']})
    def _func_min_by(self, array, expref):
        keyfunc = self._create_key_func(expref,
                                        ['number', 'string'],
                                        'min_by')
        if array:
            return min(array, key=keyfunc)
        else:
            return None

    @signature({'types': ['array']}, {'types': ['expref']})
    def _func_max_by(self, array, expref):
        keyfunc = self._create_key_func(expref,
                                        ['number', 'string'],
                                        'max_by')
        if array:
            return max(array, key=keyfunc)
        else:
            return None

    def _create_key_func(self, expref, allowed_types, function_name):
        # Build a sort/min/max key function that also type-checks the
        # value each element's expression resolves to.
        def keyfunc(x):
            result = expref.visit(expref.expression, x)
            actual_typename = type(result).__name__
            jmespath_type = self._convert_to_jmespath_type(actual_typename)
            # allowed_types is in term of jmespath types, not python types.
            if jmespath_type not in allowed_types:
                raise exceptions.JMESPathTypeError(
                    function_name, result, jmespath_type, allowed_types)
            return result
        return keyfunc

    def _convert_to_jmespath_type(self, pyobject):
        return TYPES_MAP.get(pyobject, 'unknown')
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3-1.26.16.dist-info/top_level.txt
urllib3
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/urllib3-1.26.16.dist-info/LICENSE.txt
MIT License Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/setters.pyi
# Type stubs for attr.setters; the runtime implementations live in the
# corresponding setters module (not visible in this chunk).
from . import _OnSetAttrType, Attribute
from typing import TypeVar, Any, NewType, NoReturn, cast

_T = TypeVar("_T")

# NoReturn: per this signature the hook never allows the assignment.
def frozen(
    instance: Any, attribute: Attribute, new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

# Distinct NewType so NO_OP can be told apart from any real hook.
_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_funcs.py
from __future__ import absolute_import, division, print_function

import copy

from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError


def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a dict.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``).  Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from.  For
        example, to produce ordered dictionaries instead of normal Python
        dictionaries, pass in ``collections.OrderedDict``.
    :param bool retain_collection_types: Do not convert to ``list`` when
        encountering an attribute whose type is ``tuple`` or ``set``.  Only
        meaningful if ``recurse`` is ``True``.

    :rtype: return type of *dict_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    ..  versionadded:: 16.0.0 *dict_factory*
    ..  versionadded:: 16.1.0 *retain_collection_types*
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        # The filter sees the Attribute and the value; a falsy return
        # drops the attribute entirely.
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv[a.name] = asdict(
                    v, True, filter, dict_factory, retain_collection_types
                )
            elif isinstance(v, (tuple, list, set)):
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf(
                    [
                        _asdict_anything(
                            i, filter, dict_factory, retain_collection_types
                        )
                        for i in v
                    ]
                )
            elif isinstance(v, dict):
                df = dict_factory
                # Both keys and values are recursively serialized.
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk, filter, df, retain_collection_types
                        ),
                        _asdict_anything(
                            vv, filter, df, retain_collection_types
                        ),
                    )
                    for kk, vv in iteritems(v)
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv


def _asdict_anything(val, filter, dict_factory, retain_collection_types):
    """
    ``asdict`` only works on attrs instances, this works on anything.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(val, True, filter, dict_factory, retain_collection_types)
    elif isinstance(val, (tuple, list, set)):
        cf = val.__class__ if retain_collection_types is True else list
        rv = cf(
            [
                _asdict_anything(
                    i, filter, dict_factory, retain_collection_types
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(kk, filter, df, retain_collection_types),
                _asdict_anything(vv, filter, df, retain_collection_types),
            )
            for kk, vv in iteritems(val)
        )
    else:
        rv = val
    return rv


def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a tuple.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``).  Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from.  For
        example, to produce lists instead of tuples.
    :param bool retain_collection_types: Do not convert to ``list``
        or ``dict`` when encountering an attribute which type is
        ``tuple``, ``dict`` or ``set``.  Only meaningful if ``recurse`` is
        ``True``.

    :rtype: return type of *tuple_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    ..  versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set)):
                cf = v.__class__ if retain is True else list
                rv.append(
                    cf(
                        [
                            astuple(
                                j,
                                recurse=True,
                                filter=filter,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(j.__class__)
                            else j
                            for j in v
                        ]
                    )
                )
            elif isinstance(v, dict):
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            astuple(
                                kk,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(kk.__class__)
                            else kk,
                            astuple(
                                vv,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in iteritems(v)
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    # tuple_factory=list gets the list back directly instead of a
    # round-trip through tuple().
    return rv if tuple_factory is list else tuple_factory(rv)


def has(cls):
    """
    Check whether *cls* is a class with ``attrs`` attributes.

    :param type cls: Class to introspect.
    :raise TypeError: If *cls* is not a class.

    :rtype: bool
    """
    return getattr(cls, "__attrs_attrs__", None) is not None


def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name*
        couldn't be found on *cls*.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    ..  deprecated:: 17.1.0
        Use `evolve` instead.
    """
    import warnings

    warnings.warn(
        "assoc is deprecated and will be removed after 2018/01.",
        DeprecationWarning,
        stacklevel=2,
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
                "{k} is not an attrs attribute on {cl}.".format(
                    k=k, cl=new.__class__
                )
            )
        # _obj_setattr bypasses any frozen-instance protection.
        _obj_setattr(new, k, v)
    return new


def evolve(inst, **changes):
    """
    Create a new instance, based on *inst* with *changes* applied.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise TypeError: If *attr_name* couldn't be found in the class
        ``__init__``.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    ..  versionadded:: 17.1.0
    """
    cls = inst.__class__
    attrs = fields(cls)
    for a in attrs:
        if not a.init:
            continue
        attr_name = a.name  # To deal with private attributes.
        init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
        if init_name not in changes:
            # Unchanged attributes are carried over from *inst*.
            changes[init_name] = getattr(inst, attr_name)

    return cls(**changes)


def resolve_types(cls, globalns=None, localns=None):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in `Attribute`'s *type*
    field. In other words, you don't need to resolve your types if you only
    use them for static type checking.

    With no arguments, names will be looked up in the module in which the
    class was created. If this is not what you want, e.g. if the name only
    exists inside a method, you may pass *globalns* or *localns* to specify
    other dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    :param type cls: Class to resolve.
    :param Optional[dict] globalns: Dictionary containing global variables.
    :param Optional[dict] localns: Dictionary containing local variables.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.
    :raise NameError: If types cannot be resolved because of missing variables.

    :returns: *cls* so you can use this function also as a class decorator.
        Please note that you have to apply it **after** `attr.s`. That means
        the decorator has to come in the line **before** `attr.s`.

    ..  versionadded:: 20.1.0
    """
    try:
        # Since calling get_type_hints is expensive we cache whether we've
        # done it already.
        cls.__attrs_types_resolved__
    except AttributeError:
        import typing

        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
        for field in fields(cls):
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _obj_setattr(field, "type", hints[field.name])
        cls.__attrs_types_resolved__ = True

    # Return the class so you can use it as a decorator too.
    return cls
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/validators.py
""" Commonly useful validators. """ from __future__ import absolute_import, division, print_function import re from ._make import _AndValidator, and_, attrib, attrs from .exceptions import NotCallableError __all__ = [ "and_", "deep_iterable", "deep_mapping", "in_", "instance_of", "is_callable", "matches_re", "optional", "provides", ] @attrs(repr=False, slots=True, hash=True) class _InstanceOfValidator(object): type = attrib() def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. """ if not isinstance(value, self.type): raise TypeError( "'{name}' must be {type!r} (got {value!r} that is a " "{actual!r}).".format( name=attr.name, type=self.type, actual=value.__class__, value=value, ), attr, self.type, value, ) def __repr__(self): return "<instance_of validator for type {type!r}>".format( type=self.type ) def instance_of(type): """ A validator that raises a `TypeError` if the initializer is called with a wrong type for this particular attribute (checks are performed using `isinstance` therefore it's also valid to pass a tuple of types). :param type: The type to check for. :type type: type or tuple of types :raises TypeError: With a human readable error message, the attribute (of type `attr.Attribute`), the expected type, and the value it got. """ return _InstanceOfValidator(type) @attrs(repr=False, frozen=True, slots=True) class _MatchesReValidator(object): regex = attrib() flags = attrib() match_func = attrib() def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. 
""" if not self.match_func(value): raise ValueError( "'{name}' must match regex {regex!r}" " ({value!r} doesn't)".format( name=attr.name, regex=self.regex.pattern, value=value ), attr, self.regex, value, ) def __repr__(self): return "<matches_re validator for pattern {regex!r}>".format( regex=self.regex ) def matches_re(regex, flags=0, func=None): r""" A validator that raises `ValueError` if the initializer is called with a string that doesn't match *regex*. :param str regex: a regex string to match against :param int flags: flags that will be passed to the underlying re function (default 0) :param callable func: which underlying `re` function to call (options are `re.fullmatch`, `re.search`, `re.match`, default is ``None`` which means either `re.fullmatch` or an emulation of it on Python 2). For performance reasons, they won't be used directly but on a pre-`re.compile`\ ed pattern. .. versionadded:: 19.2.0 """ fullmatch = getattr(re, "fullmatch", None) valid_funcs = (fullmatch, None, re.search, re.match) if func not in valid_funcs: raise ValueError( "'func' must be one of %s." % ( ", ".join( sorted( e and e.__name__ or "None" for e in set(valid_funcs) ) ), ) ) pattern = re.compile(regex, flags) if func is re.match: match_func = pattern.match elif func is re.search: match_func = pattern.search else: if fullmatch: match_func = pattern.fullmatch else: pattern = re.compile(r"(?:{})\Z".format(regex), flags) match_func = pattern.match return _MatchesReValidator(pattern, flags, match_func) @attrs(repr=False, slots=True, hash=True) class _ProvidesValidator(object): interface = attrib() def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. 
""" if not self.interface.providedBy(value): raise TypeError( "'{name}' must provide {interface!r} which {value!r} " "doesn't.".format( name=attr.name, interface=self.interface, value=value ), attr, self.interface, value, ) def __repr__(self): return "<provides validator for interface {interface!r}>".format( interface=self.interface ) def provides(interface): """ A validator that raises a `TypeError` if the initializer is called with an object that does not provide the requested *interface* (checks are performed using ``interface.providedBy(value)`` (see `zope.interface <https://zopeinterface.readthedocs.io/en/latest/>`_). :param interface: The interface to check for. :type interface: ``zope.interface.Interface`` :raises TypeError: With a human readable error message, the attribute (of type `attr.Attribute`), the expected interface, and the value it got. """ return _ProvidesValidator(interface) @attrs(repr=False, slots=True, hash=True) class _OptionalValidator(object): validator = attrib() def __call__(self, inst, attr, value): if value is None: return self.validator(inst, attr, value) def __repr__(self): return "<optional validator for {what} or None>".format( what=repr(self.validator) ) def optional(validator): """ A validator that makes an attribute optional. An optional attribute is one which can be set to ``None`` in addition to satisfying the requirements of the sub-validator. :param validator: A validator (or a list of validators) that is used for non-``None`` values. :type validator: callable or `list` of callables. .. versionadded:: 15.1.0 .. versionchanged:: 17.1.0 *validator* can be a list of validators. """ if isinstance(validator, list): return _OptionalValidator(_AndValidator(validator)) return _OptionalValidator(validator) @attrs(repr=False, slots=True, hash=True) class _InValidator(object): options = attrib() def __call__(self, inst, attr, value): try: in_options = value in self.options except TypeError: # e.g. 
`1 in "abc"` in_options = False if not in_options: raise ValueError( "'{name}' must be in {options!r} (got {value!r})".format( name=attr.name, options=self.options, value=value ) ) def __repr__(self): return "<in_ validator with options {options!r}>".format( options=self.options ) def in_(options): """ A validator that raises a `ValueError` if the initializer is called with a value that does not belong in the options provided. The check is performed using ``value in options``. :param options: Allowed options. :type options: list, tuple, `enum.Enum`, ... :raises ValueError: With a human readable error message, the attribute (of type `attr.Attribute`), the expected options, and the value it got. .. versionadded:: 17.1.0 """ return _InValidator(options) @attrs(repr=False, slots=False, hash=True) class _IsCallableValidator(object): def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. """ if not callable(value): message = ( "'{name}' must be callable " "(got {value!r} that is a {actual!r})." ) raise NotCallableError( msg=message.format( name=attr.name, value=value, actual=value.__class__ ), value=value, ) def __repr__(self): return "<is_callable validator>" def is_callable(): """ A validator that raises a `attr.exceptions.NotCallableError` if the initializer is called with a value for this particular attribute that is not callable. .. versionadded:: 19.1.0 :raises `attr.exceptions.NotCallableError`: With a human readable error message containing the attribute (`attr.Attribute`) name, and the value it got. """ return _IsCallableValidator() @attrs(repr=False, slots=True, hash=True) class _DeepIterable(object): member_validator = attrib(validator=is_callable()) iterable_validator = attrib( default=None, validator=optional(is_callable()) ) def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. 
""" if self.iterable_validator is not None: self.iterable_validator(inst, attr, value) for member in value: self.member_validator(inst, attr, member) def __repr__(self): iterable_identifier = ( "" if self.iterable_validator is None else " {iterable!r}".format(iterable=self.iterable_validator) ) return ( "<deep_iterable validator for{iterable_identifier}" " iterables of {member!r}>" ).format( iterable_identifier=iterable_identifier, member=self.member_validator, ) def deep_iterable(member_validator, iterable_validator=None): """ A validator that performs deep validation of an iterable. :param member_validator: Validator to apply to iterable members :param iterable_validator: Validator to apply to iterable itself (optional) .. versionadded:: 19.1.0 :raises TypeError: if any sub-validators fail """ return _DeepIterable(member_validator, iterable_validator) @attrs(repr=False, slots=True, hash=True) class _DeepMapping(object): key_validator = attrib(validator=is_callable()) value_validator = attrib(validator=is_callable()) mapping_validator = attrib(default=None, validator=optional(is_callable())) def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. """ if self.mapping_validator is not None: self.mapping_validator(inst, attr, value) for key in value: self.key_validator(inst, attr, key) self.value_validator(inst, attr, value[key]) def __repr__(self): return ( "<deep_mapping validator for objects mapping {key!r} to {value!r}>" ).format(key=self.key_validator, value=self.value_validator) def deep_mapping(key_validator, value_validator, mapping_validator=None): """ A validator that performs deep validation of a dictionary. :param key_validator: Validator to apply to dictionary keys :param value_validator: Validator to apply to dictionary values :param mapping_validator: Validator to apply to top-level mapping attribute (optional) .. 
versionadded:: 19.1.0 :raises TypeError: if any sub-validators fail """ return _DeepMapping(key_validator, value_validator, mapping_validator)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/filters.py
""" Commonly useful filters for `attr.asdict`. """ from __future__ import absolute_import, division, print_function from ._compat import isclass from ._make import Attribute def _split_what(what): """ Returns a tuple of `frozenset`s of classes and attributes. """ return ( frozenset(cls for cls in what if isclass(cls)), frozenset(cls for cls in what if isinstance(cls, Attribute)), ) def include(*what): """ Whitelist *what*. :param what: What to whitelist. :type what: `list` of `type` or `attr.Attribute`\\ s :rtype: `callable` """ cls, attrs = _split_what(what) def include_(attribute, value): return value.__class__ in cls or attribute in attrs return include_ def exclude(*what): """ Blacklist *what*. :param what: What to blacklist. :type what: `list` of classes or `attr.Attribute`\\ s. :rtype: `callable` """ cls, attrs = _split_what(what) def exclude_(attribute, value): return value.__class__ not in cls and attribute not in attrs return exclude_
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/exceptions.pyi
# Type stubs for the attr exceptions module; docstrings and runtime
# behavior live in the corresponding .py file.
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_version_info.py
from __future__ import absolute_import, division, print_function

from functools import total_ordering

from ._funcs import astuple
from ._make import attrib, attrs


# total_ordering derives __le__/__gt__/__ge__ from __eq__ and __lt__ below;
# eq=False/order=False stops attrs from generating competing comparison
# methods, and frozen/slots make instances immutable and compact.
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final")  <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a _VersionInfo.
        """
        v = s.split(".")
        # A bare "X.Y.Z" string implies a final release.
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.

        :raises NotImplementedError: used as an internal signal (caught by
            ``__eq__``/``__lt__`` to return `NotImplemented`) when *other*
            is not comparable.
        """
        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        # Truncate ourselves so comparison against shorter tuples works.
        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/exceptions.py
from __future__ import absolute_import, division, print_function


class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute has been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    msg = "can't set attribute"
    args = [msg]


class FrozenInstanceError(FrozenError):
    """
    A frozen instance has been attempted to be modified.

    .. versionadded:: 16.1.0
    """


class FrozenAttributeError(FrozenError):
    """
    A frozen attribute has been attempted to be modified.

    .. versionadded:: 20.1.0
    """


class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and is attempted to be reset
    using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    It was attempted to use an ``attrs`` feature that requires a newer Python
    version.

    .. versionadded:: 18.2.0
    """


class NotCallableError(TypeError):
    """
    A ``attr.ib()`` requiring a callable has been set with a value
    that is not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        # FIX: previously named the wrong class (``super(TypeError, self)``),
        # which silently skipped TypeError in the MRO.  Naming this class is
        # the correct cooperative super() call; runtime behavior is the same.
        super(NotCallableError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        # Show only the message, not the (msg, value) args tuple.
        return str(self.msg)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_compat.py
from __future__ import absolute_import, division, print_function

import platform
import sys
import types
import warnings


# Interpreter feature flags used throughout attrs.
PY2 = sys.version_info[0] == 2
PYPY = platform.python_implementation() == "PyPy"


# Plain dicts preserve insertion order on PyPy and CPython >= 3.6;
# elsewhere fall back to OrderedDict.
if PYPY or sys.version_info[:2] >= (3, 6):
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


if PY2:
    from UserDict import IterableUserDict
    from collections import Mapping, Sequence

    # We 'bundle' isclass instead of using inspect as importing inspect is
    # fairly expensive (order of 10-15 ms for a modern machine in 2016)
    def isclass(klass):
        return isinstance(klass, (type, types.ClassType))

    # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
    TYPE = "type"

    def iteritems(d):
        return d.iteritems()

    # Python 2 is bereft of a read-only dict proxy, so we make one!
    class ReadOnlyDict(IterableUserDict):
        """
        Best-effort read-only dict wrapper.
        """

        def __setitem__(self, key, val):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item assignment"
            )

        def update(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'update'"
            )

        def __delitem__(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item deletion"
            )

        def clear(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'clear'"
            )

        def pop(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'pop'"
            )

        def popitem(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'popitem'"
            )

        def setdefault(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'setdefault'"
            )

        def __repr__(self):
            # Override to be identical to the Python 3 version.
            return "mappingproxy(" + repr(self.data) + ")"

    def metadata_proxy(d):
        res = ReadOnlyDict()
        res.data.update(d)  # We blocked update, so we have to do it like this.
        return res

    # Intentionally a no-op on Python 2 -- see the docstring.
    def just_warn(*args, **kw):  # pragma: nocover
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """


else:  # Python 3 and later.
    from collections.abc import Mapping, Sequence  # noqa

    def just_warn(*args, **kw):
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """
        warnings.warn(
            "Running interpreter doesn't sufficiently support code object "
            "introspection.  Some features like bare super() or accessing "
            "__class__ will not work with slotted classes.",
            RuntimeWarning,
            stacklevel=2,
        )

    def isclass(klass):
        return isinstance(klass, type)

    TYPE = "class"

    def iteritems(d):
        return d.items()

    def metadata_proxy(d):
        return types.MappingProxyType(dict(d))


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:  # pragma: no cover

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

        # This function will be eliminated as dead code, but
        # not before its reference to `x` forces `x` to be
        # represented as a closure cell rather than a local.
        def force_x_to_be_a_cell():  # pragma: no cover
            return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        if PY2:
            co = set_first_cellvar_to.func_code
        else:
            co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):
            # CPython 3.8+ has an incompatible CodeType signature
            # (added a posonlyargcount argument) but also added
            # CodeType.replace() to do this without counting parameters.
            set_first_freevar_code = co.replace(
                co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
            )
        else:
            args = [co.co_argcount]
            if not PY2:
                args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

        def set_closure_cell(cell, value):
            # Create a function using the set_first_freevar_code,
            # whose first closure cell is `cell`. Calling it will
            # change the value of that cell.
            setter = types.FunctionType(
                set_first_freevar_code, {}, "setter", (), (cell,)
            )
            # And call it to set the cell.
            setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        if PY2:
            cell = make_func_with_cell().func_closure[0]
        else:
            cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        # Fall back to a warning stub if any of the above assumptions
        # about the interpreter's code-object layout failed.
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/__init__.py
from __future__ import absolute_import, division, print_function

import sys

from functools import partial

from . import converters, exceptions, filters, setters, validators
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
    NOTHING,
    Attribute,
    Factory,
    attrib,
    attrs,
    fields,
    fields_dict,
    make_class,
    validate,
)
from ._version_info import VersionInfo


__version__ = "20.1.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"

__author__ = "Hynek Schlawack"
__email__ = "[email protected]"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"


# Short aliases for the main decorators.
s = attributes = attrs
ib = attr = attrib

dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]

if sys.version_info[:2] >= (3, 6):
    # The next-generation APIs are keyword-only and Python 3.6+.
    from ._next_gen import define, field, frozen, mutable

    # BUG FIX: ``__all__`` must contain *names* (strings).  Extending it
    # with the function objects themselves made ``from attr import *``
    # raise a TypeError on Python 3.6+.
    __all__.extend(("define", "field", "frozen", "mutable"))
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_next_gen.py
""" This is a Python 3.6 and later-only, keyword-only, and **provisional** API that calls `attr.s` with different default values. Provisional APIs that shall become "import attrs" one glorious day. """ from functools import partial from attr.exceptions import UnannotatedAttributeError from . import setters from ._make import NOTHING, attrib, attrs def define( maybe_cls=None, *, these=None, repr=None, hash=None, init=None, slots=True, frozen=False, weakref_slot=True, str=False, auto_attribs=None, kw_only=False, cache_hash=False, auto_exc=True, eq=True, order=False, auto_detect=True, getstate_setstate=None, on_setattr=setters.validate, ): r""" The only behavioral difference is the handling of the *auto_attribs* option: :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves exactly like `attr.s`. If left `None`, `attr.s` will try to guess: 1. If all attributes are annotated and no `attr.ib` is found, it assumes *auto_attribs=True*. 2. Otherwise it assumes *auto_attribs=False* and tries to collect `attr.ib`\ s. .. versionadded:: 20.1.0 """ def do_it(auto_attribs): return attrs( maybe_cls=maybe_cls, these=these, repr=repr, hash=hash, init=init, slots=slots, frozen=frozen, weakref_slot=weakref_slot, str=str, auto_attribs=auto_attribs, kw_only=kw_only, cache_hash=cache_hash, auto_exc=auto_exc, eq=eq, order=order, auto_detect=auto_detect, collect_by_mro=True, getstate_setstate=getstate_setstate, on_setattr=on_setattr, ) if auto_attribs is not None: return do_it(auto_attribs) try: return do_it(True) except UnannotatedAttributeError: return do_it(False) mutable = define frozen = partial(define, frozen=True, on_setattr=None) def field( *, default=NOTHING, validator=None, repr=True, hash=None, init=True, metadata=None, converter=None, factory=None, kw_only=False, eq=None, order=None, on_setattr=None, ): """ Identical to `attr.ib`, except keyword-only and with some arguments removed. .. 
versionadded:: 20.1.0 """ return attrib( default=default, validator=validator, repr=repr, hash=hash, init=init, metadata=metadata, converter=converter, factory=factory, kw_only=kw_only, eq=eq, order=order, on_setattr=on_setattr, )
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_config.py
"""
Module-level switch that globally enables or disables running validators.
"""

from __future__ import absolute_import, division, print_function


__all__ = ["set_run_validators", "get_run_validators"]

# Validators run by default; toggled via set_run_validators().
_run_validators = True


def set_run_validators(run):
    """
    Set whether or not validators are run.  By default, they are run.

    :param bool run: ``True`` to run validators, ``False`` to skip them.
    :raises TypeError: if *run* is not a `bool`.
    """
    if isinstance(run, bool):
        global _run_validators
        _run_validators = run
    else:
        raise TypeError("'run' must be bool.")


def get_run_validators():
    """
    Return whether or not validators are run.
    """
    return _run_validators
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/validators.pyi
# Type stubs for attr.validators.  The instance_of() overloads give
# precise union types for 1-3 element type tuples and fall back to Any
# for longer tuples.
from typing import (
    Container,
    List,
    Union,
    TypeVar,
    Type,
    Any,
    Optional,
    Tuple,
    Iterable,
    Mapping,
    Callable,
    Match,
    AnyStr,
    overload,
)
from . import _ValidatorType

_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2]]
) -> _ValidatorType[Union[_T1, _T2]]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
@overload
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
def provides(interface: Any) -> _ValidatorType[Any]: ...
def optional(
    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
) -> _ValidatorType[Optional[_T]]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: AnyStr,
    flags: int = ...,
    func: Optional[
        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
    ] = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
    member_validator: _ValidatorType[_T],
    iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/converters.py
""" Commonly useful converters. """ from __future__ import absolute_import, division, print_function from ._make import NOTHING, Factory, pipe __all__ = [ "pipe", "optional", "default_if_none", ] def optional(converter): """ A converter that allows an attribute to be optional. An optional attribute is one which can be set to ``None``. :param callable converter: the converter that is used for non-``None`` values. .. versionadded:: 17.1.0 """ def optional_converter(val): if val is None: return None return converter(val) return optional_converter def default_if_none(default=NOTHING, factory=None): """ A converter that allows to replace ``None`` values by *default* or the result of *factory*. :param default: Value to be used if ``None`` is passed. Passing an instance of `attr.Factory` is supported, however the ``takes_self`` option is *not*. :param callable factory: A callable that takes not parameters whose result is used if ``None`` is passed. :raises TypeError: If **neither** *default* or *factory* is passed. :raises TypeError: If **both** *default* and *factory* are passed. :raises ValueError: If an instance of `attr.Factory` is passed with ``takes_self=True``. .. versionadded:: 18.2.0 """ if default is NOTHING and factory is None: raise TypeError("Must pass either `default` or `factory`.") if default is not NOTHING and factory is not None: raise TypeError( "Must pass either `default` or `factory` but not both." ) if factory is not None: default = Factory(factory) if isinstance(default, Factory): if default.takes_self: raise ValueError( "`takes_self` is not supported by default_if_none." ) def default_if_none_converter(val): if val is not None: return val return default.factory() else: def default_if_none_converter(val): if val is not None: return val return default return default_if_none_converter
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_make.py
from __future__ import absolute_import, division, print_function import copy import linecache import sys import threading import uuid import warnings from operator import itemgetter from . import _config, setters from ._compat import ( PY2, isclass, iteritems, metadata_proxy, ordered_dict, set_closure_cell, ) from .exceptions import ( DefaultAlreadySetError, FrozenInstanceError, NotAnAttrsClassError, PythonTooOldError, UnannotatedAttributeError, ) # This is used at least twice, so cache it here. _obj_setattr = object.__setattr__ _init_converter_pat = "__attr_converter_%s" _init_factory_pat = "__attr_factory_{}" _tuple_property_pat = ( " {attr_name} = _attrs_property(_attrs_itemgetter({index}))" ) _classvar_prefixes = ("typing.ClassVar", "t.ClassVar", "ClassVar") # we don't use a double-underscore prefix because that triggers # name mangling when trying to create a slot for the field # (when slots=True) _hash_cache_field = "_attrs_cached_hash" _empty_metadata_singleton = metadata_proxy({}) # Unique object for unequivocal getattr() defaults. _sentinel = object() class _Nothing(object): """ Sentinel class to indicate the lack of a value when ``None`` is ambiguous. ``_Nothing`` is a singleton. There is only ever one of it. """ _singleton = None def __new__(cls): if _Nothing._singleton is None: _Nothing._singleton = super(_Nothing, cls).__new__(cls) return _Nothing._singleton def __repr__(self): return "NOTHING" NOTHING = _Nothing() """ Sentinel to indicate the lack of a value when ``None`` is ambiguous. """ class _CacheHashWrapper(int): """ An integer subclass that pickles / copies as None This is used for non-slots classes with ``cache_hash=True``, to avoid serializing a potentially (even likely) invalid hash value. Since ``None`` is the default value for uncalculated hashes, whenever this is copied, the copy's value for the hash should automatically reset. See GH #613 for more details. 
""" if PY2: # For some reason `type(None)` isn't callable in Python 2, but we don't # actually need a constructor for None objects, we just need any # available function that returns None. def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): return _none_constructor, _args else: def __reduce__(self, _none_constructor=type(None), _args=()): return _none_constructor, _args def attrib( default=NOTHING, validator=None, repr=True, cmp=None, hash=None, init=True, metadata=None, type=None, converter=None, factory=None, kw_only=False, eq=None, order=None, on_setattr=None, ): """ Create a new attribute on a class. .. warning:: Does *not* do anything unless the class is also decorated with `attr.s`! :param default: A value that is used if an ``attrs``-generated ``__init__`` is used and no value is passed while instantiating or the attribute is excluded using ``init=False``. If the value is an instance of `Factory`, its callable will be used to construct a new value (useful for mutable data types like lists or dicts). If a default is not set (or set manually to `attr.NOTHING`), a value *must* be supplied when instantiating; otherwise a `TypeError` will be raised. The default can also be set using decorator notation as shown below. :type default: Any value :param callable factory: Syntactic sugar for ``default=attr.Factory(factory)``. :param validator: `callable` that is called by ``attrs``-generated ``__init__`` methods after the instance has been initialized. They receive the initialized instance, the `Attribute`, and the passed value. The return value is *not* inspected so the validator has to throw an exception itself. If a `list` is passed, its items are treated as validators and must all pass. Validators can be globally disabled and re-enabled using `get_run_validators`. The validator can also be set using decorator notation as shown below. :type validator: `callable` or a `list` of `callable`\\ s. 
:param repr: Include this attribute in the generated ``__repr__`` method. If ``True``, include the attribute; if ``False``, omit it. By default, the built-in ``repr()`` function is used. To override how the attribute value is formatted, pass a ``callable`` that takes a single value and returns a string. Note that the resulting string is used as-is, i.e. it will be used directly *instead* of calling ``repr()`` (the default). :type repr: a `bool` or a `callable` to use a custom function. :param bool eq: If ``True`` (default), include this attribute in the generated ``__eq__`` and ``__ne__`` methods that check two instances for equality. :param bool order: If ``True`` (default), include this attributes in the generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. :param bool cmp: Setting to ``True`` is equivalent to setting ``eq=True, order=True``. Deprecated in favor of *eq* and *order*. :param Optional[bool] hash: Include this attribute in the generated ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. This is the correct behavior according the Python spec. Setting this value to anything else than ``None`` is *discouraged*. :param bool init: Include this attribute in the generated ``__init__`` method. It is possible to set this to ``False`` and set a default value. In that case this attributed is unconditionally initialized with the specified default value or factory. :param callable converter: `callable` that is called by ``attrs``-generated ``__init__`` methods to convert attribute's value to the desired format. It is given the passed-in value, and the returned value will be used as the new value of the attribute. The value is converted before being passed to the validator, if any. :param metadata: An arbitrary mapping, to be used by third-party components. See `extending_metadata`. :param type: The type of the attribute. 
In Python 3.6 or greater, the preferred method to specify the type is using a variable annotation (see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_). This argument is provided for backward compatibility. Regardless of the approach used, the type will be stored on ``Attribute.type``. Please note that ``attrs`` doesn't do anything with this metadata by itself. You can use it as part of your own code or for `static type checking <types>`. :param kw_only: Make this attribute keyword-only (Python 3+) in the generated ``__init__`` (if ``init`` is ``False``, this parameter is ignored). :param on_setattr: Allows to overwrite the *on_setattr* setting from `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this attribute -- regardless of the setting in `attr.s`. :type on_setattr: `callable`, or a list of callables, or `None`, or `attr.setters.NO_OP` .. versionadded:: 15.2.0 *convert* .. versionadded:: 16.3.0 *metadata* .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. .. versionchanged:: 17.1.0 *hash* is ``None`` and therefore mirrors *eq* by default. .. versionadded:: 17.3.0 *type* .. deprecated:: 17.4.0 *convert* .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated *convert* to achieve consistency with other noun-based arguments. .. versionadded:: 18.1.0 ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. .. versionadded:: 18.2.0 *kw_only* .. versionchanged:: 19.2.0 *convert* keyword argument removed .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. .. versionadded:: 19.2.0 *eq* and *order* .. versionadded:: 20.1.0 *on_setattr* """ eq, order = _determine_eq_order(cmp, eq, order, True) if hash is not None and hash is not True and hash is not False: raise TypeError( "Invalid value for hash. Must be True, False, or None." 
) if factory is not None: if default is not NOTHING: raise ValueError( "The `default` and `factory` arguments are mutually " "exclusive." ) if not callable(factory): raise ValueError("The `factory` argument must be a callable.") default = Factory(factory) if metadata is None: metadata = {} # Apply syntactic sugar by auto-wrapping. if isinstance(on_setattr, (list, tuple)): on_setattr = setters.pipe(*on_setattr) if validator and isinstance(validator, (list, tuple)): validator = and_(*validator) if converter and isinstance(converter, (list, tuple)): converter = pipe(*converter) return _CountingAttr( default=default, validator=validator, repr=repr, cmp=None, hash=hash, init=init, converter=converter, metadata=metadata, type=type, kw_only=kw_only, eq=eq, order=order, on_setattr=on_setattr, ) def _make_attr_tuple_class(cls_name, attr_names): """ Create a tuple subclass to hold `Attribute`s for an `attrs` class. The subclass is a bare tuple with properties for names. class MyClassAttributes(tuple): __slots__ = () x = property(itemgetter(0)) """ attr_class_name = "{}Attributes".format(cls_name) attr_class_template = [ "class {}(tuple):".format(attr_class_name), " __slots__ = ()", ] if attr_names: for i, attr_name in enumerate(attr_names): attr_class_template.append( _tuple_property_pat.format(index=i, attr_name=attr_name) ) else: attr_class_template.append(" pass") globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property} eval(compile("\n".join(attr_class_template), "", "exec"), globs) return globs[attr_class_name] # Tuple class for extracted attributes from a class definition. # `base_attrs` is a subset of `attrs`. _Attributes = _make_attr_tuple_class( "_Attributes", [ # all attributes to build dunder methods for "attrs", # attributes that have been inherited "base_attrs", # map inherited attributes to their originating classes "base_attrs_map", ], ) def _is_class_var(annot): """ Check whether *annot* is a typing.ClassVar. 
The string comparison hack is used to avoid evaluating all string annotations which would put attrs-based classes at a performance disadvantage compared to plain old classes. """ return str(annot).startswith(_classvar_prefixes) def _has_own_attribute(cls, attrib_name): """ Check whether *cls* defines *attrib_name* (and doesn't just inherit it). Requires Python 3. """ attr = getattr(cls, attrib_name, _sentinel) if attr is _sentinel: return False for base_cls in cls.__mro__[1:]: a = getattr(base_cls, attrib_name, None) if attr is a: return False return True def _get_annotations(cls): """ Get annotations for *cls*. """ if _has_own_attribute(cls, "__annotations__"): return cls.__annotations__ return {} def _counter_getter(e): """ Key function for sorting to avoid re-creating a lambda for every class. """ return e[1].counter def _collect_base_attrs(cls, taken_attr_names): """ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. """ base_attrs = [] base_attr_map = {} # A dictionary of base attrs to their classes. # Traverse the MRO and collect attributes. for base_cls in reversed(cls.__mro__[1:-1]): for a in getattr(base_cls, "__attrs_attrs__", []): if a.inherited or a.name in taken_attr_names: continue a = a._assoc(inherited=True) base_attrs.append(a) base_attr_map[a.name] = base_cls # For each name, only keep the freshest definition i.e. the furthest at the # back. base_attr_map is fine because it gets overwritten with every new # instance. filtered = [] seen = set() for a in reversed(base_attrs): if a.name in seen: continue filtered.insert(0, a) seen.add(a.name) return filtered, base_attr_map def _collect_base_attrs_broken(cls, taken_attr_names): """ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. N.B. *taken_attr_names* will be mutated. Adhere to the old incorrect behavior. Notably it collects from the front and considers inherited attributes which leads to the buggy behavior reported in #428. 
""" base_attrs = [] base_attr_map = {} # A dictionary of base attrs to their classes. # Traverse the MRO and collect attributes. for base_cls in cls.__mro__[1:-1]: for a in getattr(base_cls, "__attrs_attrs__", []): if a.name in taken_attr_names: continue a = a._assoc(inherited=True) taken_attr_names.add(a.name) base_attrs.append(a) base_attr_map[a.name] = base_cls return base_attrs, base_attr_map def _transform_attrs(cls, these, auto_attribs, kw_only, collect_by_mro): """ Transform all `_CountingAttr`s on a class into `Attribute`s. If *these* is passed, use that and don't look for them on the class. *collect_by_mro* is True, collect them in the correct MRO order, otherwise use the old -- incorrect -- order. See #428. Return an `_Attributes`. """ cd = cls.__dict__ anns = _get_annotations(cls) if these is not None: ca_list = [(name, ca) for name, ca in iteritems(these)] if not isinstance(these, ordered_dict): ca_list.sort(key=_counter_getter) elif auto_attribs is True: ca_names = { name for name, attr in cd.items() if isinstance(attr, _CountingAttr) } ca_list = [] annot_names = set() for attr_name, type in anns.items(): if _is_class_var(type): continue annot_names.add(attr_name) a = cd.get(attr_name, NOTHING) if not isinstance(a, _CountingAttr): if a is NOTHING: a = attrib() else: a = attrib(default=a) ca_list.append((attr_name, a)) unannotated = ca_names - annot_names if len(unannotated) > 0: raise UnannotatedAttributeError( "The following `attr.ib`s lack a type annotation: " + ", ".join( sorted(unannotated, key=lambda n: cd.get(n).counter) ) + "." 
) else: ca_list = sorted( ( (name, attr) for name, attr in cd.items() if isinstance(attr, _CountingAttr) ), key=lambda e: e[1].counter, ) own_attrs = [ Attribute.from_counting_attr( name=attr_name, ca=ca, type=anns.get(attr_name) ) for attr_name, ca in ca_list ] if collect_by_mro: base_attrs, base_attr_map = _collect_base_attrs( cls, {a.name for a in own_attrs} ) else: base_attrs, base_attr_map = _collect_base_attrs_broken( cls, {a.name for a in own_attrs} ) attr_names = [a.name for a in base_attrs + own_attrs] AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) if kw_only: own_attrs = [a._assoc(kw_only=True) for a in own_attrs] base_attrs = [a._assoc(kw_only=True) for a in base_attrs] attrs = AttrsClass(base_attrs + own_attrs) # Mandatory vs non-mandatory attr order only matters when they are part of # the __init__ signature and when they aren't kw_only (which are moved to # the end and can be mandatory or non-mandatory in any order, as they will # be specified as keyword args anyway). Check the order of those attrs: had_default = False for a in (a for a in attrs if a.init is not False and a.kw_only is False): if had_default is True and a.default is NOTHING: raise ValueError( "No mandatory attributes allowed after an attribute with a " "default value or factory. Attribute in question: %r" % (a,) ) if had_default is False and a.default is not NOTHING: had_default = True return _Attributes((attrs, base_attrs, base_attr_map)) def _frozen_setattrs(self, name, value): """ Attached to frozen classes as __setattr__. """ raise FrozenInstanceError() def _frozen_delattrs(self, name): """ Attached to frozen classes as __delattr__. """ raise FrozenInstanceError() class _ClassBuilder(object): """ Iteratively build *one* class. 
""" __slots__ = ( "_attr_names", "_attrs", "_base_attr_map", "_base_names", "_cache_hash", "_cls", "_cls_dict", "_delete_attribs", "_frozen", "_has_post_init", "_is_exc", "_on_setattr", "_slots", "_weakref_slot", "_has_own_setattr", ) def __init__( self, cls, these, slots, frozen, weakref_slot, getstate_setstate, auto_attribs, kw_only, cache_hash, is_exc, collect_by_mro, on_setattr, has_custom_setattr, ): attrs, base_attrs, base_map = _transform_attrs( cls, these, auto_attribs, kw_only, collect_by_mro, ) self._cls = cls self._cls_dict = dict(cls.__dict__) if slots else {} self._attrs = attrs self._base_names = set(a.name for a in base_attrs) self._base_attr_map = base_map self._attr_names = tuple(a.name for a in attrs) self._slots = slots self._frozen = frozen self._weakref_slot = weakref_slot self._cache_hash = cache_hash self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) self._delete_attribs = not bool(these) self._is_exc = is_exc self._on_setattr = on_setattr self._has_own_setattr = has_custom_setattr self._cls_dict["__attrs_attrs__"] = self._attrs if frozen: self._cls_dict["__setattr__"] = _frozen_setattrs self._cls_dict["__delattr__"] = _frozen_delattrs self._has_own_setattr = True if getstate_setstate: ( self._cls_dict["__getstate__"], self._cls_dict["__setstate__"], ) = self._make_getstate_setstate() def __repr__(self): return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__) def build_class(self): """ Finalize class based on the accumulated configuration. Builder cannot be used after calling this method. """ if self._slots is True: return self._create_slots_class() else: return self._patch_original_class() def _patch_original_class(self): """ Apply accumulated methods and return the class. """ cls = self._cls base_names = self._base_names # Clean class of attribute definitions (`attr.ib()`s). 
if self._delete_attribs: for name in self._attr_names: if ( name not in base_names and getattr(cls, name, _sentinel) is not _sentinel ): try: delattr(cls, name) except AttributeError: # This can happen if a base class defines a class # variable and we want to set an attribute with the # same name by using only a type annotation. pass # Attach our dunder methods. for name, value in self._cls_dict.items(): setattr(cls, name, value) # If we've inherited an attrs __setattr__ and don't write our own, # reset it to object's. if not self._has_own_setattr and getattr( cls, "__attrs_own_setattr__", False ): cls.__setattr__ = object.__setattr__ return cls def _create_slots_class(self): """ Build and return a new class with a `__slots__` attribute. """ base_names = self._base_names cd = { k: v for k, v in iteritems(self._cls_dict) if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") } # Traverse the MRO to check for an existing __weakref__ and # __setattr__. custom_setattr_inherited = False weakref_inherited = False for base_cls in self._cls.__mro__[1:-1]: d = getattr(base_cls, "__dict__", {}) weakref_inherited = weakref_inherited or "__weakref__" in d custom_setattr_inherited = custom_setattr_inherited or not ( d.get("__attrs_own_setattr__", False) ) if weakref_inherited and custom_setattr_inherited: break if not self._has_own_setattr and not custom_setattr_inherited: cd["__setattr__"] = object.__setattr__ names = self._attr_names if ( self._weakref_slot and "__weakref__" not in getattr(self._cls, "__slots__", ()) and "__weakref__" not in names and not weakref_inherited ): names += ("__weakref__",) # We only add the names of attributes that aren't inherited. # Settings __slots__ to inherited attributes wastes memory. 
slot_names = [name for name in names if name not in base_names] if self._cache_hash: slot_names.append(_hash_cache_field) cd["__slots__"] = tuple(slot_names) qualname = getattr(self._cls, "__qualname__", None) if qualname is not None: cd["__qualname__"] = qualname # Create new class based on old class and our methods. cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) # The following is a fix for # https://github.com/python-attrs/attrs/issues/102. On Python 3, # if a method mentions `__class__` or uses the no-arg super(), the # compiler will bake a reference to the class in the method itself # as `method.__closure__`. Since we replace the class with a # clone, we rewrite these references so it keeps working. for item in cls.__dict__.values(): if isinstance(item, (classmethod, staticmethod)): # Class- and staticmethods hide their functions inside. # These might need to be rewritten as well. closure_cells = getattr(item.__func__, "__closure__", None) else: closure_cells = getattr(item, "__closure__", None) if not closure_cells: # Catch None or the empty list. continue for cell in closure_cells: try: match = cell.cell_contents is self._cls except ValueError: # ValueError: Cell is empty pass else: if match: set_closure_cell(cell, cls) return cls def add_repr(self, ns): self._cls_dict["__repr__"] = self._add_method_dunders( _make_repr(self._attrs, ns=ns) ) return self def add_str(self): repr = self._cls_dict.get("__repr__") if repr is None: raise ValueError( "__str__ can only be generated if a __repr__ exists." ) def __str__(self): return self.__repr__() self._cls_dict["__str__"] = self._add_method_dunders(__str__) return self def _make_getstate_setstate(self): """ Create custom __setstate__ and __getstate__ methods. """ # __weakref__ is not writable. state_attr_names = tuple( an for an in self._attr_names if an != "__weakref__" ) def slots_getstate(self): """ Automatically created by attrs. 
""" return tuple(getattr(self, name) for name in state_attr_names) hash_caching_enabled = self._cache_hash def slots_setstate(self, state): """ Automatically created by attrs. """ __bound_setattr = _obj_setattr.__get__(self, Attribute) for name, value in zip(state_attr_names, state): __bound_setattr(name, value) # The hash code cache is not included when the object is # serialized, but it still needs to be initialized to None to # indicate that the first call to __hash__ should be a cache # miss. if hash_caching_enabled: __bound_setattr(_hash_cache_field, None) return slots_getstate, slots_setstate def make_unhashable(self): self._cls_dict["__hash__"] = None return self def add_hash(self): self._cls_dict["__hash__"] = self._add_method_dunders( _make_hash( self._cls, self._attrs, frozen=self._frozen, cache_hash=self._cache_hash, ) ) return self def add_init(self): self._cls_dict["__init__"] = self._add_method_dunders( _make_init( self._cls, self._attrs, self._has_post_init, self._frozen, self._slots, self._cache_hash, self._base_attr_map, self._is_exc, self._on_setattr is not None and self._on_setattr is not setters.NO_OP, ) ) return self def add_eq(self): cd = self._cls_dict cd["__eq__"] = self._add_method_dunders( _make_eq(self._cls, self._attrs) ) cd["__ne__"] = self._add_method_dunders(_make_ne()) return self def add_order(self): cd = self._cls_dict cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( self._add_method_dunders(meth) for meth in _make_order(self._cls, self._attrs) ) return self def add_setattr(self): if self._frozen: return self sa_attrs = {} for a in self._attrs: on_setattr = a.on_setattr or self._on_setattr if on_setattr and on_setattr is not setters.NO_OP: sa_attrs[a.name] = a, on_setattr if not sa_attrs: return self if self._has_own_setattr: # We need to write a __setattr__ but there already is one! raise ValueError( "Can't combine custom __setattr__ with on_setattr hooks." 
) cls = self._cls def __setattr__(self, name, val): """ Method generated by attrs for class %s. """ % ( cls.__name__, ) try: a, hook = sa_attrs[name] except KeyError: nval = val else: nval = hook(self, a, val) _obj_setattr(self, name, nval) self._cls_dict["__attrs_own_setattr__"] = True self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) self._has_own_setattr = True return self def _add_method_dunders(self, method): """ Add __module__ and __qualname__ to a *method* if possible. """ try: method.__module__ = self._cls.__module__ except AttributeError: pass try: method.__qualname__ = ".".join( (self._cls.__qualname__, method.__name__) ) except AttributeError: pass try: method.__doc__ = "Method generated by attrs for class %s." % ( self._cls.__qualname__, ) except AttributeError: pass return method _CMP_DEPRECATION = ( "The usage of `cmp` is deprecated and will be removed on or after " "2021-06-01. Please use `eq` and `order` instead." ) def _determine_eq_order(cmp, eq, order, default_eq): """ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective values of eq and order. If *eq* is None, set it to *default_eq*. """ if cmp is not None and any((eq is not None, order is not None)): raise ValueError("Don't mix `cmp` with `eq' and `order`.") # cmp takes precedence due to bw-compatibility. if cmp is not None: warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=3) return cmp, cmp # If left None, equality is set to the specified default and ordering # mirrors equality. if eq is None: eq = default_eq if order is None: order = eq if eq is False and order is True: raise ValueError("`order` can only be True if `eq` is True too.") return eq, order def _determine_whether_to_implement( cls, flag, auto_detect, dunders, default=True ): """ Check whether we should implement a set of methods for *cls*. 
*flag* is the argument passed into @attr.s like 'init', *auto_detect* the same as passed into @attr.s and *dunders* is a tuple of attribute names whose presence signal that the user has implemented it themselves. Return *default* if no reason for either for or against is found. auto_detect must be False on Python 2. """ if flag is True or flag is False: return flag if flag is None and auto_detect is False: return default # Logically, flag is None and auto_detect is True here. for dunder in dunders: if _has_own_attribute(cls, dunder): return False return default def attrs( maybe_cls=None, these=None, repr_ns=None, repr=None, cmp=None, hash=None, init=None, slots=False, frozen=False, weakref_slot=True, str=False, auto_attribs=False, kw_only=False, cache_hash=False, auto_exc=False, eq=None, order=None, auto_detect=False, collect_by_mro=False, getstate_setstate=None, on_setattr=None, ): r""" A class decorator that adds `dunder <https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the specified attributes using `attr.ib` or the *these* argument. :param these: A dictionary of name to `attr.ib` mappings. This is useful to avoid the definition of your attributes within the class body because you can't (e.g. if you want to add ``__repr__`` methods to Django models) or don't want to. If *these* is not ``None``, ``attrs`` will *not* search the class body for attributes and will *not* remove any attributes from it. If *these* is an ordered dict (`dict` on Python 3.6+, `collections.OrderedDict` otherwise), the order is deduced from the order of the attributes inside *these*. Otherwise the order of the definition of the attributes is used. :type these: `dict` of `str` to `attr.ib` :param str repr_ns: When using nested classes, there's no way in Python 2 to automatically detect that. Therefore it's possible to set the namespace explicitly for a more meaningful ``repr`` output. 
:param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, *order*, and *hash* arguments explicitly, assume they are set to ``True`` **unless any** of the involved methods for one of the arguments is implemented in the *current* class (i.e. it is *not* inherited from some base class). So for example by implementing ``__eq__`` on a class yourself, ``attrs`` will deduce ``eq=False`` and won't create *neither* ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible ``__ne__`` by default, so it *should* be enough to only implement ``__eq__`` in most cases). .. warning:: If you prevent ``attrs`` from creating the ordering methods for you (``order=False``, e.g. by implementing ``__le__``), it becomes *your* responsibility to make sure its ordering is sound. The best way is to use the `functools.total_ordering` decorator. Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, *cmp*, or *hash* overrides whatever *auto_detect* would determine. *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises a `PythonTooOldError`. :param bool repr: Create a ``__repr__`` method with a human readable representation of ``attrs`` attributes.. :param bool str: Create a ``__str__`` method that is identical to ``__repr__``. This is usually not necessary except for `Exception`\ s. :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` and ``__ne__`` methods that check two instances for equality. They compare the instances as if they were tuples of their ``attrs`` attributes if and only if the types of both classes are *identical*! :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` methods that behave like *eq* above and allow instances to be ordered. If ``None`` (default) mirror value of *eq*. :param Optional[bool] cmp: Setting to ``True`` is equivalent to setting ``eq=True, order=True``. 
Deprecated in favor of *eq* and *order*, has precedence over them for backward-compatibility though. Must not be mixed with *eq* or *order*. :param Optional[bool] hash: If ``None`` (default), the ``__hash__`` method is generated according how *eq* and *frozen* are set. 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to None, marking it unhashable (which it is). 3. If *eq* is False, ``__hash__`` will be left untouched meaning the ``__hash__`` method of the base class will be used (if base class is ``object``, this means it will fall back to id-based hashing.). Although not recommended, you can decide for yourself and force ``attrs`` to create one (e.g. if the class is immutable even though you didn't freeze it programmatically) by passing ``True`` or not. Both of these cases are rather special and should be used carefully. See our documentation on `hashing`, Python's documentation on `object.__hash__`, and the `GitHub issue that led to the default \ behavior <https://github.com/python-attrs/attrs/issues/136>`_ for more details. :param bool init: Create a ``__init__`` method that initializes the ``attrs`` attributes. Leading underscores are stripped for the argument name. If a ``__attrs_post_init__`` method exists on the class, it will be called after the class is fully initialized. :param bool slots: Create a `slotted class <slotted classes>` that's more memory-efficient. :param bool frozen: Make instances immutable after initialization. If someone attempts to modify a frozen instance, `attr.exceptions.FrozenInstanceError` is raised. Please note: 1. This is achieved by installing a custom ``__setattr__`` method on your class, so you can't implement your own. 2. True immutability is impossible in Python. 3. This *does* have a minor a runtime performance `impact <how-frozen>` when initializing new instances. In other words: ``__init__`` is slightly slower with ``frozen=True``. 4. 
If a class is frozen, you cannot modify ``self`` in ``__attrs_post_init__`` or a self-written ``__init__``. You can circumvent that limitation by using ``object.__setattr__(self, "attribute_name", value)``. 5. Subclasses of a frozen class are frozen too. :param bool weakref_slot: Make instances weak-referenceable. This has no effect unless ``slots`` is also enabled. :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated attributes (Python 3.6 and later only) from the class body. In this case, you **must** annotate every field. If ``attrs`` encounters a field that is set to an `attr.ib` but lacks a type annotation, an `attr.exceptions.UnannotatedAttributeError` is raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't want to set a type. If you assign a value to those attributes (e.g. ``x: int = 42``), that value becomes the default value like if it were passed using ``attr.ib(default=42)``. Passing an instance of `Factory` also works as expected. Attributes annotated as `typing.ClassVar`, and attributes that are neither annotated nor set to an `attr.ib` are **ignored**. .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ :param bool kw_only: Make all attributes keyword-only (Python 3+) in the generated ``__init__`` (if ``init`` is ``False``, this parameter is ignored). :param bool cache_hash: Ensure that the object's hash code is computed only once and stored on the object. If this is set to ``True``, hashing must be either explicitly or implicitly enabled for this class. If the hash code is cached, avoid any reassignments of fields involved in hash code computation or mutations of the objects those fields point to after object creation. If such changes occur, the behavior of the object's hash code is undefined. 
    :param bool auto_exc: If the class subclasses `BaseException`
        (which implicitly includes any subclass of any exception), the
        following happens to behave like a well-behaved Python exceptions
        class:

        - the values for *eq*, *order*, and *hash* are ignored and the
          instances compare and hash by the instance's ids (N.B. ``attrs``
          will *not* remove existing implementations of ``__hash__`` or the
          equality methods. It just won't add own ones.),
        - all attributes that are either passed into ``__init__`` or have a
          default value are additionally available as a tuple in the
          ``args`` attribute,
        - the value of *str* is ignored leaving ``__str__`` to base classes.
    :param bool collect_by_mro: Setting this to `True` fixes the way
        ``attrs`` collects attributes from base classes.  The default
        behavior is incorrect in certain cases of multiple inheritance.
        It should be on by default but is kept off for
        backward-compatibility.

        See issue `#428 <https://github.com/python-attrs/attrs/issues/428>`_
        for more details.

    :param Optional[bool] getstate_setstate:
        .. note::
            This is usually only interesting for slotted classes and you
            should probably just set *auto_detect* to `True`.

        If `True`, ``__getstate__`` and ``__setstate__`` are generated and
        attached to the class.  This is necessary for slotted classes to be
        pickleable.  If left `None`, it's `True` by default for slotted
        classes and ``False`` for dict classes.

        If *auto_detect* is `True`, and *getstate_setstate* is left `None`,
        and **either** ``__getstate__`` or ``__setstate__`` is detected
        directly on the class (i.e. not inherited), it is set to `False`
        (this is usually what you want).

    :param on_setattr: A callable that is run whenever the user attempts to
        set an attribute (either by assignment like ``i.x = 42`` or by using
        `setattr` like ``setattr(i, "x", 42)``).  It receives the same
        arguments as validators: the instance, the attribute that is being
        modified, and the new value.
If no exception is raised, the attribute is set to the return value of the callable. If a list of callables is passed, they're automatically wrapped in an `attr.setters.pipe`. .. versionadded:: 16.0.0 *slots* .. versionadded:: 16.1.0 *frozen* .. versionadded:: 16.3.0 *str* .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. .. versionchanged:: 17.1.0 *hash* supports ``None`` as value which is also the default now. .. versionadded:: 17.3.0 *auto_attribs* .. versionchanged:: 18.1.0 If *these* is passed, no attributes are deleted from the class body. .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. .. versionadded:: 18.2.0 *weakref_slot* .. deprecated:: 18.2.0 ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a `DeprecationWarning` if the classes compared are subclasses of each other. ``__eq`` and ``__ne__`` never tried to compared subclasses to each other. .. versionchanged:: 19.2.0 ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider subclasses comparable anymore. .. versionadded:: 18.2.0 *kw_only* .. versionadded:: 18.2.0 *cache_hash* .. versionadded:: 19.1.0 *auto_exc* .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. .. versionadded:: 19.2.0 *eq* and *order* .. versionadded:: 20.1.0 *auto_detect* .. versionadded:: 20.1.0 *collect_by_mro* .. versionadded:: 20.1.0 *getstate_setstate* .. versionadded:: 20.1.0 *on_setattr* """ if auto_detect and PY2: raise PythonTooOldError( "auto_detect only works on Python 3 and later." 
) eq_, order_ = _determine_eq_order(cmp, eq, order, None) hash_ = hash # work around the lack of nonlocal if isinstance(on_setattr, (list, tuple)): on_setattr = setters.pipe(*on_setattr) def wrap(cls): if getattr(cls, "__class__", None) is None: raise TypeError("attrs only works with new-style classes.") is_frozen = frozen or _has_frozen_base_class(cls) is_exc = auto_exc is True and issubclass(cls, BaseException) has_own_setattr = auto_detect and _has_own_attribute( cls, "__setattr__" ) if has_own_setattr and is_frozen: raise ValueError("Can't freeze a class with a custom __setattr__.") builder = _ClassBuilder( cls, these, slots, is_frozen, weakref_slot, _determine_whether_to_implement( cls, getstate_setstate, auto_detect, ("__getstate__", "__setstate__"), default=slots, ), auto_attribs, kw_only, cache_hash, is_exc, collect_by_mro, on_setattr, has_own_setattr, ) if _determine_whether_to_implement( cls, repr, auto_detect, ("__repr__",) ): builder.add_repr(repr_ns) if str is True: builder.add_str() eq = _determine_whether_to_implement( cls, eq_, auto_detect, ("__eq__", "__ne__") ) if not is_exc and eq is True: builder.add_eq() if not is_exc and _determine_whether_to_implement( cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__") ): builder.add_order() builder.add_setattr() if ( hash_ is None and auto_detect is True and _has_own_attribute(cls, "__hash__") ): hash = False else: hash = hash_ if hash is not True and hash is not False and hash is not None: # Can't use `hash in` because 1 == True for example. raise TypeError( "Invalid value for hash. Must be True, False, or None." ) elif hash is False or (hash is None and eq is False) or is_exc: # Don't do anything. Should fall back to __object__'s __hash__ # which is by id. if cache_hash: raise TypeError( "Invalid value for cache_hash. To use hash caching," " hashing must be either explicitly or implicitly " "enabled." 
) elif hash is True or ( hash is None and eq is True and is_frozen is True ): # Build a __hash__ if told so, or if it's safe. builder.add_hash() else: # Raise TypeError on attempts to hash. if cache_hash: raise TypeError( "Invalid value for cache_hash. To use hash caching," " hashing must be either explicitly or implicitly " "enabled." ) builder.make_unhashable() if _determine_whether_to_implement( cls, init, auto_detect, ("__init__",) ): builder.add_init() else: if cache_hash: raise TypeError( "Invalid value for cache_hash. To use hash caching," " init must be True." ) return builder.build_class() # maybe_cls's type depends on the usage of the decorator. It's a class # if it's used as `@attrs` but ``None`` if used as `@attrs()`. if maybe_cls is None: return wrap else: return wrap(maybe_cls) _attrs = attrs """ Internal alias so we can use it in functions that take an argument called *attrs*. """ if PY2: def _has_frozen_base_class(cls): """ Check whether *cls* has a frozen ancestor by looking at its __setattr__. """ return ( getattr(cls.__setattr__, "__module__", None) == _frozen_setattrs.__module__ and cls.__setattr__.__name__ == _frozen_setattrs.__name__ ) else: def _has_frozen_base_class(cls): """ Check whether *cls* has a frozen ancestor by looking at its __setattr__. """ return cls.__setattr__ == _frozen_setattrs def _attrs_to_tuple(obj, attrs): """ Create a tuple of all values of *obj*'s *attrs*. """ return tuple(getattr(obj, a.name) for a in attrs) def _generate_unique_filename(cls, func_name): """ Create a "filename" suitable for a function being generated. """ unique_id = uuid.uuid4() extra = "" count = 1 while True: unique_filename = "<attrs generated {0} {1}.{2}{3}>".format( func_name, cls.__module__, getattr(cls, "__qualname__", cls.__name__), extra, ) # To handle concurrency we essentially "reserve" our spot in # the linecache with a dummy line. The caller can then # set this value correctly. 
cache_line = (1, None, (str(unique_id),), unique_filename) if ( linecache.cache.setdefault(unique_filename, cache_line) == cache_line ): return unique_filename # Looks like this spot is taken. Try again. count += 1 extra = "-{0}".format(count) def _make_hash(cls, attrs, frozen, cache_hash): attrs = tuple( a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) ) tab = " " unique_filename = _generate_unique_filename(cls, "hash") type_hash = hash(unique_filename) hash_def = "def __hash__(self" hash_func = "hash((" closing_braces = "))" if not cache_hash: hash_def += "):" else: if not PY2: hash_def += ", *" hash_def += ( ", _cache_wrapper=" + "__import__('attr._make')._make._CacheHashWrapper):" ) hash_func = "_cache_wrapper(" + hash_func closing_braces += ")" method_lines = [hash_def] def append_hash_computation_lines(prefix, indent): """ Generate the code for actually computing the hash code. Below this will either be returned directly or used to compute a value which is then cached, depending on the value of cache_hash """ method_lines.extend( [ indent + prefix + hash_func, indent + " %d," % (type_hash,), ] ) for a in attrs: method_lines.append(indent + " self.%s," % a.name) method_lines.append(indent + " " + closing_braces) if cache_hash: method_lines.append(tab + "if self.%s is None:" % _hash_cache_field) if frozen: append_hash_computation_lines( "object.__setattr__(self, '%s', " % _hash_cache_field, tab * 2 ) method_lines.append(tab * 2 + ")") # close __setattr__ else: append_hash_computation_lines( "self.%s = " % _hash_cache_field, tab * 2 ) method_lines.append(tab + "return self.%s" % _hash_cache_field) else: append_hash_computation_lines("return ", tab) script = "\n".join(method_lines) globs = {} locs = {} bytecode = compile(script, unique_filename, "exec") eval(bytecode, globs, locs) # In order of debuggers like PDB being able to step through the code, # we add a fake linecache entry. 
    # (tail of _make_hash) Register the generated source in linecache so
    # debuggers like pdb can step through the exec'd __hash__.
    linecache.cache[unique_filename] = (
        len(script),
        None,
        script.splitlines(True),
        unique_filename,
    )

    return locs["__hash__"]


def _add_hash(cls, attrs):
    """
    Add a hash method to *cls*.

    :param cls: Class whose ``__hash__`` is replaced in place.
    :param attrs: Iterable of attributes to include in the hash.
    :return: *cls* (mutated).
    """
    cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
    return cls


def _make_ne():
    """
    Create __ne__ method.
    """

    def __ne__(self, other):
        """
        Check equality and either forward a NotImplemented or
        return the result negated.
        """
        result = self.__eq__(other)
        # Forward NotImplemented untouched so Python can try the
        # reflected operation on *other*.
        if result is NotImplemented:
            return NotImplemented

        return not result

    return __ne__


def _make_eq(cls, attrs):
    """
    Create __eq__ method for *cls* with *attrs*.

    The method is built as source text and compiled under a unique fake
    filename so tracebacks and debuggers can display its body.
    """
    # Only attributes with eq=True participate in equality.
    attrs = [a for a in attrs if a.eq]

    unique_filename = _generate_unique_filename(cls, "eq")
    lines = [
        "def __eq__(self, other):",
        "    if other.__class__ is not self.__class__:",
        "        return NotImplemented",
    ]

    # We can't just do a big self.x = other.x and... clause due to
    # irregularities like nan == nan is false but (nan,) == (nan,) is true.
    if attrs:
        lines.append("    return (")
        others = ["    ) == ("]
        for a in attrs:
            lines.append("        self.%s," % (a.name,))
            others.append("        other.%s," % (a.name,))

        lines += others + ["    )"]
    else:
        # No eq-relevant attributes: all instances of the class are equal.
        lines.append("    return True")

    script = "\n".join(lines)
    globs = {}
    locs = {}
    bytecode = compile(script, unique_filename, "exec")
    eval(bytecode, globs, locs)

    # So that debuggers like PDB can step through the generated code,
    # we add a fake linecache entry.
    linecache.cache[unique_filename] = (
        len(script),
        None,
        script.splitlines(True),
        unique_filename,
    )

    return locs["__eq__"]


def _make_order(cls, attrs):
    """
    Create ordering methods for *cls* with *attrs*.
    """
    # Only attributes with order=True participate in ordering.
    attrs = [a for a in attrs if a.order]

    def attrs_to_tuple(obj):
        """
        Save us some typing.
        """
        return _attrs_to_tuple(obj, attrs)

    def __lt__(self, other):
        """
        Automatically created by attrs.
        """
        # Strict type match: subclasses are deliberately not comparable.
        if other.__class__ is self.__class__:
            return attrs_to_tuple(self) < attrs_to_tuple(other)

        return NotImplemented

    def __le__(self, other):
        """
        Automatically created by attrs.
""" if other.__class__ is self.__class__: return attrs_to_tuple(self) <= attrs_to_tuple(other) return NotImplemented def __gt__(self, other): """ Automatically created by attrs. """ if other.__class__ is self.__class__: return attrs_to_tuple(self) > attrs_to_tuple(other) return NotImplemented def __ge__(self, other): """ Automatically created by attrs. """ if other.__class__ is self.__class__: return attrs_to_tuple(self) >= attrs_to_tuple(other) return NotImplemented return __lt__, __le__, __gt__, __ge__ def _add_eq(cls, attrs=None): """ Add equality methods to *cls* with *attrs*. """ if attrs is None: attrs = cls.__attrs_attrs__ cls.__eq__ = _make_eq(cls, attrs) cls.__ne__ = _make_ne() return cls _already_repring = threading.local() def _make_repr(attrs, ns): """ Make a repr method that includes relevant *attrs*, adding *ns* to the full name. """ # Figure out which attributes to include, and which function to use to # format them. The a.repr value can be either bool or a custom callable. attr_names_with_reprs = tuple( (a.name, repr if a.repr is True else a.repr) for a in attrs if a.repr is not False ) def __repr__(self): """ Automatically created by attrs. """ try: working_set = _already_repring.working_set except AttributeError: working_set = set() _already_repring.working_set = working_set if id(self) in working_set: return "..." real_cls = self.__class__ if ns is None: qualname = getattr(real_cls, "__qualname__", None) if qualname is not None: class_name = qualname.rsplit(">.", 1)[-1] else: class_name = real_cls.__name__ else: class_name = ns + "." + real_cls.__name__ # Since 'self' remains on the stack (i.e.: strongly referenced) for the # duration of this call, it's safe to depend on id(...) stability, and # not need to track the instance and therefore worry about properties # like weakref- or hash-ability. 
working_set.add(id(self)) try: result = [class_name, "("] first = True for name, attr_repr in attr_names_with_reprs: if first: first = False else: result.append(", ") result.extend( (name, "=", attr_repr(getattr(self, name, NOTHING))) ) return "".join(result) + ")" finally: working_set.remove(id(self)) return __repr__ def _add_repr(cls, ns=None, attrs=None): """ Add a repr method to *cls*. """ if attrs is None: attrs = cls.__attrs_attrs__ cls.__repr__ = _make_repr(attrs, ns) return cls def fields(cls): """ Return the tuple of ``attrs`` attributes for a class. The tuple also allows accessing the fields by their names (see below for examples). :param type cls: Class to introspect. :raise TypeError: If *cls* is not a class. :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` class. :rtype: tuple (with name accessors) of `attr.Attribute` .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields by name. """ if not isclass(cls): raise TypeError("Passed object must be a class.") attrs = getattr(cls, "__attrs_attrs__", None) if attrs is None: raise NotAnAttrsClassError( "{cls!r} is not an attrs-decorated class.".format(cls=cls) ) return attrs def fields_dict(cls): """ Return an ordered dictionary of ``attrs`` attributes for a class, whose keys are the attribute names. :param type cls: Class to introspect. :raise TypeError: If *cls* is not a class. :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` class. :rtype: an ordered dict where keys are attribute names and values are `attr.Attribute`\\ s. This will be a `dict` if it's naturally ordered like on Python 3.6+ or an :class:`~collections.OrderedDict` otherwise. .. 
    versionadded:: 18.1.0
    """
    if not isclass(cls):
        raise TypeError("Passed object must be a class.")
    attrs = getattr(cls, "__attrs_attrs__", None)
    if attrs is None:
        raise NotAnAttrsClassError(
            "{cls!r} is not an attrs-decorated class.".format(cls=cls)
        )
    # Preserve attribute-definition order in the returned mapping.
    return ordered_dict(((a.name, a) for a in attrs))


def validate(inst):
    """
    Validate all attributes on *inst* that have a validator.

    Lets all exceptions through.

    :param inst: Instance of a class with ``attrs`` attributes.
    """
    # Global kill switch: validators can be disabled process-wide
    # (see attr._config).
    if _config._run_validators is False:
        return

    for a in fields(inst.__class__):
        v = a.validator
        if v is not None:
            v(inst, a, getattr(inst, a.name))


def _is_slot_cls(cls):
    # __slots__ must be in the class' own __dict__ -- inherited slots
    # don't make *cls* itself a slot class.
    return "__slots__" in cls.__dict__


def _is_slot_attr(a_name, base_attr_map):
    """
    Check if the attribute name comes from a slot class.
    """
    return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name])


def _make_init(
    cls,
    attrs,
    post_init,
    frozen,
    slots,
    cache_hash,
    base_attr_map,
    is_exc,
    has_global_on_setattr,
):
    # Build a compiled __init__ for *cls* from the generated script.
    if frozen and has_global_on_setattr:
        raise ValueError("Frozen classes can't use on_setattr.")

    # Frozen or hash-caching classes cannot use plain attribute
    # assignment; they go through a cached object.__setattr__.
    needs_cached_setattr = cache_hash or frozen
    filtered_attrs = []
    attr_dict = {}
    for a in attrs:
        # Attributes with init=False and no default never appear in
        # __init__ at all.
        if not a.init and a.default is NOTHING:
            continue

        filtered_attrs.append(a)
        attr_dict[a.name] = a

        if a.on_setattr is not None:
            if frozen is True:
                raise ValueError("Frozen classes can't use on_setattr.")

            needs_cached_setattr = True
        elif (
            has_global_on_setattr and a.on_setattr is not setters.NO_OP
        ) or _is_slot_attr(a.name, base_attr_map):
            needs_cached_setattr = True

    unique_filename = _generate_unique_filename(cls, "init")

    script, globs, annotations = _attrs_to_init_script(
        filtered_attrs,
        frozen,
        slots,
        post_init,
        cache_hash,
        base_attr_map,
        is_exc,
        needs_cached_setattr,
        has_global_on_setattr,
    )
    locs = {}
    bytecode = compile(script, unique_filename, "exec")
    globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})

    if needs_cached_setattr:
        # Save the lookup overhead in __init__ if we need to circumvent
        # setattr hooks.
globs["_cached_setattr"] = _obj_setattr eval(bytecode, globs, locs) # In order of debuggers like PDB being able to step through the code, # we add a fake linecache entry. linecache.cache[unique_filename] = ( len(script), None, script.splitlines(True), unique_filename, ) __init__ = locs["__init__"] __init__.__annotations__ = annotations return __init__ def _setattr(attr_name, value_var, has_on_setattr): """ Use the cached object.setattr to set *attr_name* to *value_var*. """ return "_setattr('%s', %s)" % (attr_name, value_var,) def _setattr_with_converter(attr_name, value_var, has_on_setattr): """ Use the cached object.setattr to set *attr_name* to *value_var*, but run its converter first. """ return "_setattr('%s', %s(%s))" % ( attr_name, _init_converter_pat % (attr_name,), value_var, ) def _assign(attr_name, value, has_on_setattr): """ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise relegate to _setattr. """ if has_on_setattr: return _setattr(attr_name, value, True) return "self.%s = %s" % (attr_name, value,) def _assign_with_converter(attr_name, value_var, has_on_setattr): """ Unless *attr_name* has an on_setattr hook, use normal assignment after conversion. Otherwise relegate to _setattr_with_converter. """ if has_on_setattr: return _setattr_with_converter(attr_name, value_var, True) return "self.%s = %s(%s)" % ( attr_name, _init_converter_pat % (attr_name,), value_var, ) def _attrs_to_init_script( attrs, frozen, slots, post_init, cache_hash, base_attr_map, is_exc, needs_cached_setattr, has_global_on_setattr, ): """ Return a script of an initializer for *attrs* and a dict of globals. The globals are expected by the generated script. If *frozen* is True, we cannot set the attributes directly so we use a cached ``object.__setattr__``. """ lines = [] if needs_cached_setattr: lines.append( # Circumvent the __setattr__ descriptor to save one lookup per # assignment. 
# Note _setattr will be used again below if cache_hash is True "_setattr = _cached_setattr.__get__(self, self.__class__)" ) if frozen is True: if slots is True: fmt_setter = _setattr fmt_setter_with_converter = _setattr_with_converter else: # Dict frozen classes assign directly to __dict__. # But only if the attribute doesn't come from an ancestor slot # class. # Note _inst_dict will be used again below if cache_hash is True lines.append("_inst_dict = self.__dict__") def fmt_setter(attr_name, value_var, has_on_setattr): if _is_slot_attr(attr_name, base_attr_map): return _setattr(attr_name, value_var, has_on_setattr) return "_inst_dict['%s'] = %s" % (attr_name, value_var,) def fmt_setter_with_converter( attr_name, value_var, has_on_setattr ): if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): return _setattr_with_converter( attr_name, value_var, has_on_setattr ) return "_inst_dict['%s'] = %s(%s)" % ( attr_name, _init_converter_pat % (attr_name,), value_var, ) else: # Not frozen. fmt_setter = _assign fmt_setter_with_converter = _assign_with_converter args = [] kw_only_args = [] attrs_to_validate = [] # This is a dictionary of names to validator and converter callables. # Injecting this into __init__ globals lets us avoid lookups. 
names_for_globals = {} annotations = {"return": None} for a in attrs: if a.validator: attrs_to_validate.append(a) attr_name = a.name has_on_setattr = a.on_setattr is not None or ( a.on_setattr is not setters.NO_OP and has_global_on_setattr ) arg_name = a.name.lstrip("_") has_factory = isinstance(a.default, Factory) if has_factory and a.default.takes_self: maybe_self = "self" else: maybe_self = "" if a.init is False: if has_factory: init_factory_name = _init_factory_pat.format(a.name) if a.converter is not None: lines.append( fmt_setter_with_converter( attr_name, init_factory_name + "(%s)" % (maybe_self,), has_on_setattr, ) ) conv_name = _init_converter_pat % (a.name,) names_for_globals[conv_name] = a.converter else: lines.append( fmt_setter( attr_name, init_factory_name + "(%s)" % (maybe_self,), has_on_setattr, ) ) names_for_globals[init_factory_name] = a.default.factory else: if a.converter is not None: lines.append( fmt_setter_with_converter( attr_name, "attr_dict['%s'].default" % (attr_name,), has_on_setattr, ) ) conv_name = _init_converter_pat % (a.name,) names_for_globals[conv_name] = a.converter else: lines.append( fmt_setter( attr_name, "attr_dict['%s'].default" % (attr_name,), has_on_setattr, ) ) elif a.default is not NOTHING and not has_factory: arg = "%s=attr_dict['%s'].default" % (arg_name, attr_name,) if a.kw_only: kw_only_args.append(arg) else: args.append(arg) if a.converter is not None: lines.append( fmt_setter_with_converter( attr_name, arg_name, has_on_setattr, ) ) names_for_globals[ _init_converter_pat % (a.name,) ] = a.converter else: lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) elif has_factory: arg = "%s=NOTHING" % (arg_name,) if a.kw_only: kw_only_args.append(arg) else: args.append(arg) lines.append("if %s is not NOTHING:" % (arg_name,)) init_factory_name = _init_factory_pat.format(a.name) if a.converter is not None: lines.append( " " + fmt_setter_with_converter( attr_name, arg_name, has_on_setattr ) ) lines.append("else:") 
lines.append( " " + fmt_setter_with_converter( attr_name, init_factory_name + "(" + maybe_self + ")", has_on_setattr, ) ) names_for_globals[ _init_converter_pat % (a.name,) ] = a.converter else: lines.append( " " + fmt_setter(attr_name, arg_name, has_on_setattr) ) lines.append("else:") lines.append( " " + fmt_setter( attr_name, init_factory_name + "(" + maybe_self + ")", has_on_setattr, ) ) names_for_globals[init_factory_name] = a.default.factory else: if a.kw_only: kw_only_args.append(arg_name) else: args.append(arg_name) if a.converter is not None: lines.append( fmt_setter_with_converter( attr_name, arg_name, has_on_setattr ) ) names_for_globals[ _init_converter_pat % (a.name,) ] = a.converter else: lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) if a.init is True and a.converter is None and a.type is not None: annotations[arg_name] = a.type if attrs_to_validate: # we can skip this if there are no validators. names_for_globals["_config"] = _config lines.append("if _config._run_validators is True:") for a in attrs_to_validate: val_name = "__attr_validator_" + a.name attr_name = "__attr_" + a.name lines.append( " %s(self, %s, self.%s)" % (val_name, attr_name, a.name) ) names_for_globals[val_name] = a.validator names_for_globals[attr_name] = a if post_init: lines.append("self.__attrs_post_init__()") # because this is set only after __attrs_post_init is called, a crash # will result if post-init tries to access the hash code. This seemed # preferable to setting this beforehand, in which case alteration to # field values during post-init combined with post-init accessing the # hash code would result in silent bugs. 
if cache_hash: if frozen: if slots: # if frozen and slots, then _setattr defined above init_hash_cache = "_setattr('%s', %s)" else: # if frozen and not slots, then _inst_dict defined above init_hash_cache = "_inst_dict['%s'] = %s" else: init_hash_cache = "self.%s = %s" lines.append(init_hash_cache % (_hash_cache_field, "None")) # For exceptions we rely on BaseException.__init__ for proper # initialization. if is_exc: vals = ",".join("self." + a.name for a in attrs if a.init) lines.append("BaseException.__init__(self, %s)" % (vals,)) args = ", ".join(args) if kw_only_args: if PY2: raise PythonTooOldError( "Keyword-only arguments only work on Python 3 and later." ) args += "{leading_comma}*, {kw_only_args}".format( leading_comma=", " if args else "", kw_only_args=", ".join(kw_only_args), ) return ( """\ def __init__(self, {args}): {lines} """.format( args=args, lines="\n ".join(lines) if lines else "pass" ), names_for_globals, annotations, ) class Attribute(object): """ *Read-only* representation of an attribute. :attribute name: The name of the attribute. :attribute inherited: Whether or not that attribute has been inherited from a base class. Plus *all* arguments of `attr.ib` (except for ``factory`` which is only syntactic sugar for ``default=Factory(...)``. .. versionadded:: 20.1.0 *inherited* .. versionadded:: 20.1.0 *on_setattr* For the full version history of the fields, see `attr.ib`. """ __slots__ = ( "name", "default", "validator", "repr", "eq", "order", "hash", "init", "metadata", "type", "converter", "kw_only", "inherited", "on_setattr", ) def __init__( self, name, default, validator, repr, cmp, # XXX: unused, remove along with other cmp code. hash, init, inherited, metadata=None, type=None, converter=None, kw_only=False, eq=None, order=None, on_setattr=None, ): eq, order = _determine_eq_order(cmp, eq, order, True) # Cache this descriptor here to speed things up later. 
bound_setattr = _obj_setattr.__get__(self, Attribute) # Despite the big red warning, people *do* instantiate `Attribute` # themselves. bound_setattr("name", name) bound_setattr("default", default) bound_setattr("validator", validator) bound_setattr("repr", repr) bound_setattr("eq", eq) bound_setattr("order", order) bound_setattr("hash", hash) bound_setattr("init", init) bound_setattr("converter", converter) bound_setattr( "metadata", ( metadata_proxy(metadata) if metadata else _empty_metadata_singleton ), ) bound_setattr("type", type) bound_setattr("kw_only", kw_only) bound_setattr("inherited", inherited) bound_setattr("on_setattr", on_setattr) def __setattr__(self, name, value): raise FrozenInstanceError() @classmethod def from_counting_attr(cls, name, ca, type=None): # type holds the annotated value. deal with conflicts: if type is None: type = ca.type elif ca.type is not None: raise ValueError( "Type annotation and type argument cannot both be present" ) inst_dict = { k: getattr(ca, k) for k in Attribute.__slots__ if k not in ( "name", "validator", "default", "type", "inherited", ) # exclude methods and deprecated alias } return cls( name=name, validator=ca._validator, default=ca._default, type=type, cmp=None, inherited=False, **inst_dict ) @property def cmp(self): """ Simulate the presence of a cmp attribute and warn. """ warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) return self.eq and self.order # Don't use attr.assoc since fields(Attribute) doesn't work def _assoc(self, **changes): """ Copy *self* and apply *changes*. """ new = copy.copy(self) new._setattrs(changes.items()) return new # Don't use _add_pickle since fields(Attribute) doesn't work def __getstate__(self): """ Play nice with pickle. """ return tuple( getattr(self, name) if name != "metadata" else dict(self.metadata) for name in self.__slots__ ) def __setstate__(self, state): """ Play nice with pickle. 
""" self._setattrs(zip(self.__slots__, state)) def _setattrs(self, name_values_pairs): bound_setattr = _obj_setattr.__get__(self, Attribute) for name, value in name_values_pairs: if name != "metadata": bound_setattr(name, value) else: bound_setattr( name, metadata_proxy(value) if value else _empty_metadata_singleton, ) _a = [ Attribute( name=name, default=NOTHING, validator=None, repr=True, cmp=None, eq=True, order=False, hash=(name != "metadata"), init=True, inherited=False, ) for name in Attribute.__slots__ ] Attribute = _add_hash( _add_eq(_add_repr(Attribute, attrs=_a), attrs=_a), attrs=[a for a in _a if a.hash], ) class _CountingAttr(object): """ Intermediate representation of attributes that uses a counter to preserve the order in which the attributes have been defined. *Internal* data structure of the attrs library. Running into is most likely the result of a bug like a forgotten `@attr.s` decorator. """ __slots__ = ( "counter", "_default", "repr", "eq", "order", "hash", "init", "metadata", "_validator", "converter", "type", "kw_only", "on_setattr", ) __attrs_attrs__ = tuple( Attribute( name=name, default=NOTHING, validator=None, repr=True, cmp=None, hash=True, init=True, kw_only=False, eq=True, order=False, inherited=False, on_setattr=None, ) for name in ( "counter", "_default", "repr", "eq", "order", "hash", "init", "on_setattr", ) ) + ( Attribute( name="metadata", default=None, validator=None, repr=True, cmp=None, hash=False, init=True, kw_only=False, eq=True, order=False, inherited=False, on_setattr=None, ), ) cls_counter = 0 def __init__( self, default, validator, repr, cmp, # XXX: unused, remove along with cmp hash, init, converter, metadata, type, kw_only, eq, order, on_setattr, ): _CountingAttr.cls_counter += 1 self.counter = _CountingAttr.cls_counter self._default = default self._validator = validator self.converter = converter self.repr = repr self.eq = eq self.order = order self.hash = hash self.init = init self.metadata = metadata self.type = type 
self.kw_only = kw_only self.on_setattr = on_setattr def validator(self, meth): """ Decorator that adds *meth* to the list of validators. Returns *meth* unchanged. .. versionadded:: 17.1.0 """ if self._validator is None: self._validator = meth else: self._validator = and_(self._validator, meth) return meth def default(self, meth): """ Decorator that allows to set the default for an attribute. Returns *meth* unchanged. :raises DefaultAlreadySetError: If default has been set before. .. versionadded:: 17.1.0 """ if self._default is not NOTHING: raise DefaultAlreadySetError() self._default = Factory(meth, takes_self=True) return meth _CountingAttr = _add_eq(_add_repr(_CountingAttr)) @attrs(slots=True, init=False, hash=True) class Factory(object): """ Stores a factory callable. If passed as the default value to `attr.ib`, the factory is used to generate a new value. :param callable factory: A callable that takes either none or exactly one mandatory positional argument depending on *takes_self*. :param bool takes_self: Pass the partially initialized instance that is being initialized as a positional argument. .. versionadded:: 17.1.0 *takes_self* """ factory = attrib() takes_self = attrib() def __init__(self, factory, takes_self=False): """ `Factory` is part of the default machinery so if we want a default value here, we have to implement it ourselves. """ self.factory = factory self.takes_self = takes_self def make_class(name, attrs, bases=(object,), **attributes_arguments): """ A quick way to create a new class called *name* with *attrs*. :param str name: The name for the new class. :param attrs: A list of names or a dictionary of mappings of names to attributes. If *attrs* is a list or an ordered dict (`dict` on Python 3.6+, `collections.OrderedDict` otherwise), the order is deduced from the order of the names or attributes inside *attrs*. Otherwise the order of the definition of the attributes is used. 
:type attrs: `list` or `dict` :param tuple bases: Classes that the new class will subclass. :param attributes_arguments: Passed unmodified to `attr.s`. :return: A new class with *attrs*. :rtype: type .. versionadded:: 17.1.0 *bases* .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. """ if isinstance(attrs, dict): cls_dict = attrs elif isinstance(attrs, (list, tuple)): cls_dict = dict((a, attrib()) for a in attrs) else: raise TypeError("attrs argument must be a dict or a list.") post_init = cls_dict.pop("__attrs_post_init__", None) type_ = type( name, bases, {} if post_init is None else {"__attrs_post_init__": post_init}, ) # For pickling to work, the __module__ variable needs to be set to the # frame where the class is created. Bypass this step in environments where # sys._getframe is not defined (Jython for example) or sys._getframe is not # defined for arguments greater than 0 (IronPython). try: type_.__module__ = sys._getframe(1).f_globals.get( "__name__", "__main__" ) except (AttributeError, ValueError): pass # We do it here for proper warnings with meaningful stacklevel. cmp = attributes_arguments.pop("cmp", None) ( attributes_arguments["eq"], attributes_arguments["order"], ) = _determine_eq_order( cmp, attributes_arguments.get("eq"), attributes_arguments.get("order"), True, ) return _attrs(these=cls_dict, **attributes_arguments)(type_) # These are required by within this module so we define them here and merely # import into .validators / .converters. @attrs(slots=True, hash=True) class _AndValidator(object): """ Compose many validators to a single one. """ _validators = attrib() def __call__(self, inst, attr, value): for v in self._validators: v(inst, attr, value) def and_(*validators): """ A validator that composes multiple validators into one. When called on a value, it runs all wrapped validators. :param callables validators: Arbitrary number of validators. .. 
versionadded:: 17.1.0 """ vals = [] for validator in validators: vals.extend( validator._validators if isinstance(validator, _AndValidator) else [validator] ) return _AndValidator(tuple(vals)) def pipe(*converters): """ A converter that composes multiple converters into one. When called on a value, it runs all wrapped converters, returning the *last* value. :param callables converters: Arbitrary number of converters. .. versionadded:: 20.1.0 """ def pipe_converter(val): for converter in converters: val = converter(val) return val return pipe_converter
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/_version_info.pyi
class VersionInfo: @property def year(self) -> int: ... @property def minor(self) -> int: ... @property def micro(self) -> int: ... @property def releaselevel(self) -> str: ...
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/converters.pyi
from typing import TypeVar, Optional, Callable, overload from . import _ConverterType _T = TypeVar("_T") def pipe(*validators: _ConverterType) -> _ConverterType: ... def optional(converter: _ConverterType) -> _ConverterType: ... @overload def default_if_none(default: _T) -> _ConverterType: ... @overload def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/__init__.pyi
from typing import ( Any, Callable, Dict, Generic, List, Optional, Sequence, Mapping, Tuple, Type, TypeVar, Union, overload, ) # `import X as X` is required to make these public from . import exceptions as exceptions from . import filters as filters from . import converters as converters from . import setters as setters from . import validators as validators from ._version_info import VersionInfo __version__: str __version_info__: VersionInfo __title__: str __description__: str __url__: str __uri__: str __author__: str __email__: str __license__: str __copyright__: str _T = TypeVar("_T") _C = TypeVar("_C", bound=type) _ValidatorType = Callable[[Any, Attribute[_T], _T], Any] _ConverterType = Callable[[Any], Any] _FilterType = Callable[[Attribute[_T], _T], bool] _ReprType = Callable[[Any], str] _ReprArgType = Union[bool, _ReprType] _OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] _OnSetAttrArgType = Union[ _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType ] # FIXME: in reality, if multiple validators are passed they must be in a list # or tuple, but those are invariant and so would prevent subtypes of # _ValidatorType from working when passed in a list or tuple. _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] # _make -- NOTHING: object # NOTE: Factory lies about its return type to make this possible: # `x: List[int] # = Factory(list)` # Work around mypy issue #4554 in the common case by using an overload. @overload def Factory(factory: Callable[[], _T]) -> _T: ... @overload def Factory( factory: Union[Callable[[Any], _T], Callable[[], _T]], takes_self: bool = ..., ) -> _T: ... 
class Attribute(Generic[_T]): name: str default: Optional[_T] validator: Optional[_ValidatorType[_T]] repr: _ReprArgType cmp: bool eq: bool order: bool hash: Optional[bool] init: bool converter: Optional[_ConverterType] metadata: Dict[Any, Any] type: Optional[Type[_T]] kw_only: bool on_setattr: _OnSetAttrType # NOTE: We had several choices for the annotation to use for type arg: # 1) Type[_T] # - Pros: Handles simple cases correctly # - Cons: Might produce less informative errors in the case of conflicting # TypeVars e.g. `attr.ib(default='bad', type=int)` # 2) Callable[..., _T] # - Pros: Better error messages than #1 for conflicting TypeVars # - Cons: Terrible error messages for validator checks. # e.g. attr.ib(type=int, validator=validate_str) # -> error: Cannot infer function type argument # 3) type (and do all of the work in the mypy plugin) # - Pros: Simple here, and we could customize the plugin with our own errors. # - Cons: Would need to write mypy plugin code to handle all the cases. # We chose option #1. # `attr` lies about its return type to make the following possible: # attr() -> Any # attr(8) -> int # attr(validator=<some callable>) -> Whatever the callable expects. # This makes this type of assignments possible: # x: int = attr(8) # # This form catches explicit None or no default but with no other arguments # returns Any. @overload def attrib( default: None = ..., validator: None = ..., repr: _ReprArgType = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: None = ..., converter: None = ..., factory: None = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... # This form catches an explicit None or no default and infers the type from the # other arguments. 
@overload def attrib( default: None = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form catches an explicit default argument. @overload def attrib( default: _T, validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: Optional[Type[_T]] = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form covers type=non-Type: e.g. forward references (str), Any @overload def attrib( default: Optional[_T] = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., type: object = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... @overload def field( *, default: None = ..., validator: None = ..., repr: _ReprArgType = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., converter: None = ..., factory: None = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... 
# This form catches an explicit None or no default and infers the type from the # other arguments. @overload def field( *, default: None = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form catches an explicit default argument. @overload def field( *, default: _T, validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _T: ... # This form covers type=non-Type: e.g. forward references (str), Any @overload def field( *, default: Optional[_T] = ..., validator: Optional[_ValidatorArgType[_T]] = ..., repr: _ReprArgType = ..., hash: Optional[bool] = ..., init: bool = ..., metadata: Optional[Mapping[Any, Any]] = ..., converter: Optional[_ConverterType] = ..., factory: Optional[Callable[[], _T]] = ..., kw_only: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Any: ... 
@overload def attrs( maybe_cls: _C, these: Optional[Dict[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., auto_detect: bool = ..., getstate_setstate: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _C: ... @overload def attrs( maybe_cls: None = ..., these: Optional[Dict[str, Any]] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., auto_detect: bool = ..., getstate_setstate: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Callable[[_C], _C]: ... @overload def define( maybe_cls: _C, *, these: Optional[Dict[str, Any]] = ..., repr: bool = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., auto_detect: bool = ..., getstate_setstate: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> _C: ... 
@overload def define( maybe_cls: None = ..., *, these: Optional[Dict[str, Any]] = ..., repr: bool = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., auto_detect: bool = ..., getstate_setstate: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> Callable[[_C], _C]: ... mutable = define frozen = define # they differ only in their defaults # TODO: add support for returning NamedTuple from the mypy plugin class _Fields(Tuple[Attribute[Any], ...]): def __getattr__(self, name: str) -> Attribute[Any]: ... def fields(cls: type) -> _Fields: ... def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ... def validate(inst: Any) -> None: ... def resolve_types( cls: _C, globalns: Optional[Dict[str, Any]] = ..., localns: Optional[Dict[str, Any]] = ..., ) -> _C: ... # TODO: add support for returning a proper attrs class from the mypy plugin # we use Any instead of _CountingAttr so that e.g. `make_class('Foo', # [attr.ib()])` is valid def make_class( name: str, attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], bases: Tuple[type, ...] = ..., repr_ns: Optional[str] = ..., repr: bool = ..., cmp: Optional[bool] = ..., hash: Optional[bool] = ..., init: bool = ..., slots: bool = ..., frozen: bool = ..., weakref_slot: bool = ..., str: bool = ..., auto_attribs: bool = ..., kw_only: bool = ..., cache_hash: bool = ..., auto_exc: bool = ..., eq: Optional[bool] = ..., order: Optional[bool] = ..., on_setattr: Optional[_OnSetAttrArgType] = ..., ) -> type: ... # _funcs -- # TODO: add support for returning TypedDict from the mypy plugin # FIXME: asdict/astuple do not honor their factory args. 
Waiting on one of # these: # https://github.com/python/mypy/issues/4236 # https://github.com/python/typing/issues/253 def asdict( inst: Any, recurse: bool = ..., filter: Optional[_FilterType[Any]] = ..., dict_factory: Type[Mapping[Any, Any]] = ..., retain_collection_types: bool = ..., ) -> Dict[str, Any]: ... # TODO: add support for returning NamedTuple from the mypy plugin def astuple( inst: Any, recurse: bool = ..., filter: Optional[_FilterType[Any]] = ..., tuple_factory: Type[Sequence[Any]] = ..., retain_collection_types: bool = ..., ) -> Tuple[Any, ...]: ... def has(cls: type) -> bool: ... def assoc(inst: _T, **changes: Any) -> _T: ... def evolve(inst: _T, **changes: Any) -> _T: ... # _config -- def set_run_validators(run: bool) -> None: ... def get_run_validators() -> bool: ... # aliases -- s = attributes = attrs ib = attr = attrib dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/filters.pyi
from typing import Union, Any from . import Attribute, _FilterType def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/attr/setters.py
""" Commonly used hooks for on_setattr. """ from __future__ import absolute_import, division, print_function from . import _config from .exceptions import FrozenAttributeError def pipe(*setters): """ Run all *setters* and return the return value of the last one. .. versionadded:: 20.1.0 """ def wrapped_pipe(instance, attrib, new_value): rv = new_value for setter in setters: rv = setter(instance, attrib, rv) return rv return wrapped_pipe def frozen(_, __, ___): """ Prevent an attribute to be modified. .. versionadded:: 20.1.0 """ raise FrozenAttributeError() def validate(instance, attrib, new_value): """ Run *attrib*'s validator on *new_value* if it has one. .. versionadded:: 20.1.0 """ if _config._run_validators is False: return new_value v = attrib.validator if not v: return new_value v(instance, attrib, new_value) return new_value def convert(instance, attrib, new_value): """ Run *attrib*'s converter -- if it has one -- on *new_value* and return the result. .. versionadded:: 20.1.0 """ c = attrib.converter if c: return c(new_value) return new_value NO_OP = object() """ Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. Does not work in `pipe` or within lists. .. versionadded:: 20.1.0 """
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/build_meta.py
"""A PEP 517 interface to setuptools Previously, when a user or a command line tool (let's call it a "frontend") needed to make a request of setuptools to take a certain action, for example, generating a list of installation requirements, the frontend would would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line. PEP 517 defines a different method of interfacing with setuptools. Rather than calling "setup.py" directly, the frontend should: 1. Set the current directory to the directory with a setup.py file 2. Import this module into a safe python interpreter (one in which setuptools can potentially set global variables or crash hard). 3. Call one of the functions defined in PEP 517. What each function does is defined in PEP 517. However, here is a "casual" definition of the functions (this definition should not be relied on for bug reports or API stability): - `build_wheel`: build a wheel in the folder and return the basename - `get_requires_for_build_wheel`: get the `setup_requires` to build - `prepare_metadata_for_build_wheel`: get the `install_requires` - `build_sdist`: build an sdist in the folder and return the basename - `get_requires_for_build_sdist`: get the `setup_requires` to build Again, this is not a formal definition! Just a "taste" of the module. """ import io import os import shlex import sys import tokenize import shutil import contextlib import tempfile import warnings from pathlib import Path from typing import Dict, Iterator, List, Optional, Union import setuptools import distutils from . 
import errors from ._path import same_path from ._reqs import parse_strings from .warnings import SetuptoolsDeprecationWarning from distutils.util import strtobool __all__ = ['get_requires_for_build_sdist', 'get_requires_for_build_wheel', 'prepare_metadata_for_build_wheel', 'build_wheel', 'build_sdist', 'get_requires_for_build_editable', 'prepare_metadata_for_build_editable', 'build_editable', '__legacy__', 'SetupRequirementsError'] SETUPTOOLS_ENABLE_FEATURES = os.getenv("SETUPTOOLS_ENABLE_FEATURES", "").lower() LEGACY_EDITABLE = "legacy-editable" in SETUPTOOLS_ENABLE_FEATURES.replace("_", "-") class SetupRequirementsError(BaseException): def __init__(self, specifiers): self.specifiers = specifiers class Distribution(setuptools.dist.Distribution): def fetch_build_eggs(self, specifiers): specifier_list = list(parse_strings(specifiers)) raise SetupRequirementsError(specifier_list) @classmethod @contextlib.contextmanager def patch(cls): """ Replace distutils.dist.Distribution with this class for the duration of this context. """ orig = distutils.core.Distribution distutils.core.Distribution = cls try: yield finally: distutils.core.Distribution = orig @contextlib.contextmanager def no_install_setup_requires(): """Temporarily disable installing setup_requires Under PEP 517, the backend reports build dependencies to the frontend, and the frontend is responsible for ensuring they're installed. So setuptools (acting as a backend) should not try to install them. """ orig = setuptools._install_setup_requires setuptools._install_setup_requires = lambda attrs: None try: yield finally: setuptools._install_setup_requires = orig def _get_immediate_subdirectories(a_dir): return [name for name in os.listdir(a_dir) if os.path.isdir(os.path.join(a_dir, name))] def _file_with_extension(directory, extension): matching = ( f for f in os.listdir(directory) if f.endswith(extension) ) try: file, = matching except ValueError: raise ValueError( 'No distribution was found. 
Ensure that `setup.py` ' 'is not empty and that it calls `setup()`.') return file def _open_setup_script(setup_script): if not os.path.exists(setup_script): # Supply a default setup.py return io.StringIO(u"from setuptools import setup; setup()") return getattr(tokenize, 'open', open)(setup_script) @contextlib.contextmanager def suppress_known_deprecation(): with warnings.catch_warnings(): warnings.filterwarnings('ignore', 'setup.py install is deprecated') yield _ConfigSettings = Optional[Dict[str, Union[str, List[str], None]]] """ Currently the user can run:: pip install -e . --config-settings key=value python -m build -C--key=value -C key=value - pip will pass both key and value as strings and overwriting repeated keys (pypa/pip#11059). - build will accumulate values associated with repeated keys in a list. It will also accept keys with no associated value. This means that an option passed by build can be ``str | list[str] | None``. - PEP 517 specifies that ``config_settings`` is an optional dict. """ class _ConfigSettingsTranslator: """Translate ``config_settings`` into distutils-style command arguments. Only a limited number of options is currently supported. """ # See pypa/setuptools#1928 pypa/setuptools#2491 def _get_config(self, key: str, config_settings: _ConfigSettings) -> List[str]: """ Get the value of a specific key in ``config_settings`` as a list of strings. >>> fn = _ConfigSettingsTranslator()._get_config >>> fn("--global-option", None) [] >>> fn("--global-option", {}) [] >>> fn("--global-option", {'--global-option': 'foo'}) ['foo'] >>> fn("--global-option", {'--global-option': ['foo']}) ['foo'] >>> fn("--global-option", {'--global-option': 'foo'}) ['foo'] >>> fn("--global-option", {'--global-option': 'foo bar'}) ['foo', 'bar'] """ cfg = config_settings or {} opts = cfg.get(key) or [] return shlex.split(opts) if isinstance(opts, str) else opts def _valid_global_options(self): """Global options accepted by setuptools (e.g. 
quiet or verbose).""" options = (opt[:2] for opt in setuptools.dist.Distribution.global_options) return {flag for long_and_short in options for flag in long_and_short if flag} def _global_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ Let the user specify ``verbose`` or ``quiet`` + escape hatch via ``--global-option``. Note: ``-v``, ``-vv``, ``-vvv`` have similar effects in setuptools, so we just have to cover the basic scenario ``-v``. >>> fn = _ConfigSettingsTranslator()._global_args >>> list(fn(None)) [] >>> list(fn({"verbose": "False"})) ['-q'] >>> list(fn({"verbose": "1"})) ['-v'] >>> list(fn({"--verbose": None})) ['-v'] >>> list(fn({"verbose": "true", "--global-option": "-q --no-user-cfg"})) ['-v', '-q', '--no-user-cfg'] >>> list(fn({"--quiet": None})) ['-q'] """ cfg = config_settings or {} falsey = {"false", "no", "0", "off"} if "verbose" in cfg or "--verbose" in cfg: level = str(cfg.get("verbose") or cfg.get("--verbose") or "1") yield ("-q" if level.lower() in falsey else "-v") if "quiet" in cfg or "--quiet" in cfg: level = str(cfg.get("quiet") or cfg.get("--quiet") or "1") yield ("-v" if level.lower() in falsey else "-q") valid = self._valid_global_options() args = self._get_config("--global-option", config_settings) yield from (arg for arg in args if arg.strip("-") in valid) def __dist_info_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ The ``dist_info`` command accepts ``tag-date`` and ``tag-build``. .. warning:: We cannot use this yet as it requires the ``sdist`` and ``bdist_wheel`` commands run in ``build_sdist`` and ``build_wheel`` to re-use the egg-info directory created in ``prepare_metadata_for_build_wheel``. 
>>> fn = _ConfigSettingsTranslator()._ConfigSettingsTranslator__dist_info_args >>> list(fn(None)) [] >>> list(fn({"tag-date": "False"})) ['--no-date'] >>> list(fn({"tag-date": None})) ['--no-date'] >>> list(fn({"tag-date": "true", "tag-build": ".a"})) ['--tag-date', '--tag-build', '.a'] """ cfg = config_settings or {} if "tag-date" in cfg: val = strtobool(str(cfg["tag-date"] or "false")) yield ("--tag-date" if val else "--no-date") if "tag-build" in cfg: yield from ["--tag-build", str(cfg["tag-build"])] def _editable_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ The ``editable_wheel`` command accepts ``editable-mode=strict``. >>> fn = _ConfigSettingsTranslator()._editable_args >>> list(fn(None)) [] >>> list(fn({"editable-mode": "strict"})) ['--mode', 'strict'] """ cfg = config_settings or {} mode = cfg.get("editable-mode") or cfg.get("editable_mode") if not mode: return yield from ["--mode", str(mode)] def _arbitrary_args(self, config_settings: _ConfigSettings) -> Iterator[str]: """ Users may expect to pass arbitrary lists of arguments to a command via "--global-option" (example provided in PEP 517 of a "escape hatch"). >>> fn = _ConfigSettingsTranslator()._arbitrary_args >>> list(fn(None)) [] >>> list(fn({})) [] >>> list(fn({'--build-option': 'foo'})) ['foo'] >>> list(fn({'--build-option': ['foo']})) ['foo'] >>> list(fn({'--build-option': 'foo'})) ['foo'] >>> list(fn({'--build-option': 'foo bar'})) ['foo', 'bar'] >>> warnings.simplefilter('error', SetuptoolsDeprecationWarning) >>> list(fn({'--global-option': 'foo'})) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): SetuptoolsDeprecationWarning: ...arguments given via `--global-option`... 
""" args = self._get_config("--global-option", config_settings) global_opts = self._valid_global_options() bad_args = [] for arg in args: if arg.strip("-") not in global_opts: bad_args.append(arg) yield arg yield from self._get_config("--build-option", config_settings) if bad_args: SetuptoolsDeprecationWarning.emit( "Incompatible `config_settings` passed to build backend.", f""" The arguments {bad_args!r} were given via `--global-option`. Please use `--build-option` instead, `--global-option` is reserved for flags like `--verbose` or `--quiet`. """, due_date=(2023, 9, 26), # Warning introduced in v64.0.1, 11/Aug/2022. ) class _BuildMetaBackend(_ConfigSettingsTranslator): def _get_build_requires(self, config_settings, requirements): sys.argv = [ *sys.argv[:1], *self._global_args(config_settings), "egg_info", *self._arbitrary_args(config_settings), ] try: with Distribution.patch(): self.run_setup() except SetupRequirementsError as e: requirements += e.specifiers return requirements def run_setup(self, setup_script='setup.py'): # Note that we can reuse our build directory between calls # Correctness comes first, then optimization later __file__ = os.path.abspath(setup_script) __name__ = '__main__' with _open_setup_script(__file__) as f: code = f.read().replace(r'\r\n', r'\n') exec(code, locals()) def get_requires_for_build_wheel(self, config_settings=None): return self._get_build_requires(config_settings, requirements=['wheel']) def get_requires_for_build_sdist(self, config_settings=None): return self._get_build_requires(config_settings, requirements=[]) def _bubble_up_info_directory(self, metadata_directory: str, suffix: str) -> str: """ PEP 517 requires that the .dist-info directory be placed in the metadata_directory. To comply, we MUST copy the directory to the root. Returns the basename of the info directory, e.g. `proj-0.0.0.dist-info`. 
""" info_dir = self._find_info_directory(metadata_directory, suffix) if not same_path(info_dir.parent, metadata_directory): shutil.move(str(info_dir), metadata_directory) # PEP 517 allow other files and dirs to exist in metadata_directory return info_dir.name def _find_info_directory(self, metadata_directory: str, suffix: str) -> Path: for parent, dirs, _ in os.walk(metadata_directory): candidates = [f for f in dirs if f.endswith(suffix)] if len(candidates) != 0 or len(dirs) != 1: assert len(candidates) == 1, f"Multiple {suffix} directories found" return Path(parent, candidates[0]) msg = f"No {suffix} directory found in {metadata_directory}" raise errors.InternalError(msg) def prepare_metadata_for_build_wheel(self, metadata_directory, config_settings=None): sys.argv = [ *sys.argv[:1], *self._global_args(config_settings), "dist_info", "--output-dir", metadata_directory, "--keep-egg-info", ] with no_install_setup_requires(): self.run_setup() self._bubble_up_info_directory(metadata_directory, ".egg-info") return self._bubble_up_info_directory(metadata_directory, ".dist-info") def _build_with_temp_dir(self, setup_command, result_extension, result_directory, config_settings): result_directory = os.path.abspath(result_directory) # Build in a temporary directory, then copy to the target. os.makedirs(result_directory, exist_ok=True) temp_opts = {"prefix": ".tmp-", "dir": result_directory} with tempfile.TemporaryDirectory(**temp_opts) as tmp_dist_dir: sys.argv = [ *sys.argv[:1], *self._global_args(config_settings), *setup_command, "--dist-dir", tmp_dist_dir, *self._arbitrary_args(config_settings), ] with no_install_setup_requires(): self.run_setup() result_basename = _file_with_extension( tmp_dist_dir, result_extension) result_path = os.path.join(result_directory, result_basename) if os.path.exists(result_path): # os.rename will fail overwriting on non-Unix. 
os.remove(result_path) os.rename(os.path.join(tmp_dist_dir, result_basename), result_path) return result_basename def build_wheel(self, wheel_directory, config_settings=None, metadata_directory=None): with suppress_known_deprecation(): return self._build_with_temp_dir(['bdist_wheel'], '.whl', wheel_directory, config_settings) def build_sdist(self, sdist_directory, config_settings=None): return self._build_with_temp_dir(['sdist', '--formats', 'gztar'], '.tar.gz', sdist_directory, config_settings) def _get_dist_info_dir(self, metadata_directory: Optional[str]) -> Optional[str]: if not metadata_directory: return None dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info")) assert len(dist_info_candidates) <= 1 return str(dist_info_candidates[0]) if dist_info_candidates else None if not LEGACY_EDITABLE: # PEP660 hooks: # build_editable # get_requires_for_build_editable # prepare_metadata_for_build_editable def build_editable( self, wheel_directory, config_settings=None, metadata_directory=None ): # XXX can or should we hide our editable_wheel command normally? info_dir = self._get_dist_info_dir(metadata_directory) opts = ["--dist-info-dir", info_dir] if info_dir else [] cmd = ["editable_wheel", *opts, *self._editable_args(config_settings)] with suppress_known_deprecation(): return self._build_with_temp_dir( cmd, ".whl", wheel_directory, config_settings ) def get_requires_for_build_editable(self, config_settings=None): return self.get_requires_for_build_wheel(config_settings) def prepare_metadata_for_build_editable(self, metadata_directory, config_settings=None): return self.prepare_metadata_for_build_wheel( metadata_directory, config_settings ) class _BuildMetaLegacyBackend(_BuildMetaBackend): """Compatibility backend for setuptools This is a version of setuptools.build_meta that endeavors to maintain backwards compatibility with pre-PEP 517 modes of invocation. 
It exists as a temporary bridge between the old packaging mechanism and the new packaging mechanism, and will eventually be removed. """ def run_setup(self, setup_script='setup.py'): # In order to maintain compatibility with scripts assuming that # the setup.py script is in a directory on the PYTHONPATH, inject # '' into sys.path. (pypa/setuptools#1642) sys_path = list(sys.path) # Save the original path script_dir = os.path.dirname(os.path.abspath(setup_script)) if script_dir not in sys.path: sys.path.insert(0, script_dir) # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to # get the directory of the source code. They expect it to refer to the # setup.py script. sys_argv_0 = sys.argv[0] sys.argv[0] = setup_script try: super(_BuildMetaLegacyBackend, self).run_setup(setup_script=setup_script) finally: # While PEP 517 frontends should be calling each hook in a fresh # subprocess according to the standard (and thus it should not be # strictly necessary to restore the old sys.path), we'll restore # the original path so that the path manipulation does not persist # within the hook after run_setup is called. sys.path[:] = sys_path sys.argv[0] = sys_argv_0 # The primary backend _BACKEND = _BuildMetaBackend() get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel build_wheel = _BACKEND.build_wheel build_sdist = _BACKEND.build_sdist if not LEGACY_EDITABLE: get_requires_for_build_editable = _BACKEND.get_requires_for_build_editable prepare_metadata_for_build_editable = _BACKEND.prepare_metadata_for_build_editable build_editable = _BACKEND.build_editable # The legacy backend __legacy__ = _BuildMetaLegacyBackend()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/discovery.py
"""Automatic discovery of Python modules and packages (for inclusion in the distribution) and other config values. For the purposes of this module, the following nomenclature is used: - "src-layout": a directory representing a Python project that contains a "src" folder. Everything under the "src" folder is meant to be included in the distribution when packaging the project. Example:: . ├── tox.ini ├── pyproject.toml └── src/ └── mypkg/ ├── __init__.py ├── mymodule.py └── my_data_file.txt - "flat-layout": a Python project that does not use "src-layout" but instead have a directory under the project root for each package:: . ├── tox.ini ├── pyproject.toml └── mypkg/ ├── __init__.py ├── mymodule.py └── my_data_file.txt - "single-module": a project that contains a single Python script direct under the project root (no directory used):: . ├── tox.ini ├── pyproject.toml └── mymodule.py """ import itertools import os from fnmatch import fnmatchcase from glob import glob from pathlib import Path from typing import ( TYPE_CHECKING, Dict, Iterable, Iterator, List, Mapping, Optional, Tuple, Union ) import _distutils_hack.override # noqa: F401 from distutils import log from distutils.util import convert_path _Path = Union[str, os.PathLike] StrIter = Iterator[str] chain_iter = itertools.chain.from_iterable if TYPE_CHECKING: from setuptools import Distribution # noqa def _valid_name(path: _Path) -> bool: # Ignore invalid names that cannot be imported directly return os.path.basename(path).isidentifier() class _Filter: """ Given a list of patterns, create a callable that will be true only if the input matches at least one of the patterns. 
""" def __init__(self, *patterns: str): self._patterns = dict.fromkeys(patterns) def __call__(self, item: str) -> bool: return any(fnmatchcase(item, pat) for pat in self._patterns) def __contains__(self, item: str) -> bool: return item in self._patterns class _Finder: """Base class that exposes functionality for module/package finders""" ALWAYS_EXCLUDE: Tuple[str, ...] = () DEFAULT_EXCLUDE: Tuple[str, ...] = () @classmethod def find( cls, where: _Path = '.', exclude: Iterable[str] = (), include: Iterable[str] = ('*',) ) -> List[str]: """Return a list of all Python items (packages or modules, depending on the finder implementation) found within directory 'where'. 'where' is the root directory which will be searched. It should be supplied as a "cross-platform" (i.e. URL-style) path; it will be converted to the appropriate local path syntax. 'exclude' is a sequence of names to exclude; '*' can be used as a wildcard in the names. When finding packages, 'foo.*' will exclude all subpackages of 'foo' (but not 'foo' itself). 'include' is a sequence of names to include. If it's specified, only the named items will be included. If it's not specified, all found items will be included. 'include' can contain shell style wildcard patterns just like 'exclude'. """ exclude = exclude or cls.DEFAULT_EXCLUDE return list( cls._find_iter( convert_path(str(where)), _Filter(*cls.ALWAYS_EXCLUDE, *exclude), _Filter(*include), ) ) @classmethod def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: raise NotImplementedError class PackageFinder(_Finder): """ Generate a list of all Python packages found within a directory """ ALWAYS_EXCLUDE = ("ez_setup", "*__pycache__") @classmethod def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: """ All the packages found in 'where' that pass the 'include' filter, but not the 'exclude' filter. 
""" for root, dirs, files in os.walk(str(where), followlinks=True): # Copy dirs to iterate over it, then empty dirs. all_dirs = dirs[:] dirs[:] = [] for dir in all_dirs: full_path = os.path.join(root, dir) rel_path = os.path.relpath(full_path, where) package = rel_path.replace(os.path.sep, '.') # Skip directory trees that are not valid packages if '.' in dir or not cls._looks_like_package(full_path, package): continue # Should this package be included? if include(package) and not exclude(package): yield package # Early pruning if there is nothing else to be scanned if f"{package}*" in exclude or f"{package}.*" in exclude: continue # Keep searching subdirectories, as there may be more packages # down there, even if the parent was excluded. dirs.append(dir) @staticmethod def _looks_like_package(path: _Path, _package_name: str) -> bool: """Does a directory look like a package?""" return os.path.isfile(os.path.join(path, '__init__.py')) class PEP420PackageFinder(PackageFinder): @staticmethod def _looks_like_package(_path: _Path, _package_name: str) -> bool: return True class ModuleFinder(_Finder): """Find isolated Python modules. This function will **not** recurse subdirectories. """ @classmethod def _find_iter(cls, where: _Path, exclude: _Filter, include: _Filter) -> StrIter: for file in glob(os.path.join(where, "*.py")): module, _ext = os.path.splitext(os.path.basename(file)) if not cls._looks_like_module(module): continue if include(module) and not exclude(module): yield module _looks_like_module = staticmethod(_valid_name) # We have to be extra careful in the case of flat layout to not include files # and directories not meant for distribution (e.g. 
tool-related) class FlatLayoutPackageFinder(PEP420PackageFinder): _EXCLUDE = ( "ci", "bin", "doc", "docs", "documentation", "manpages", "news", "changelog", "test", "tests", "unit_test", "unit_tests", "example", "examples", "scripts", "tools", "util", "utils", "python", "build", "dist", "venv", "env", "requirements", # ---- Task runners / Build tools ---- "tasks", # invoke "fabfile", # fabric "site_scons", # SCons # ---- Other tools ---- "benchmark", "benchmarks", "exercise", "exercises", "htmlcov", # Coverage.py # ---- Hidden directories/Private packages ---- "[._]*", ) DEFAULT_EXCLUDE = tuple(chain_iter((p, f"{p}.*") for p in _EXCLUDE)) """Reserved package names""" @staticmethod def _looks_like_package(_path: _Path, package_name: str) -> bool: names = package_name.split('.') # Consider PEP 561 root_pkg_is_valid = names[0].isidentifier() or names[0].endswith("-stubs") return root_pkg_is_valid and all(name.isidentifier() for name in names[1:]) class FlatLayoutModuleFinder(ModuleFinder): DEFAULT_EXCLUDE = ( "setup", "conftest", "test", "tests", "example", "examples", "build", # ---- Task runners ---- "toxfile", "noxfile", "pavement", "dodo", "tasks", "fabfile", # ---- Other tools ---- "[Ss][Cc]onstruct", # SCons "conanfile", # Connan: C/C++ build tool "manage", # Django "benchmark", "benchmarks", "exercise", "exercises", # ---- Hidden files/Private modules ---- "[._]*", ) """Reserved top-level module names""" def _find_packages_within(root_pkg: str, pkg_dir: _Path) -> List[str]: nested = PEP420PackageFinder.find(pkg_dir) return [root_pkg] + [".".join((root_pkg, n)) for n in nested] class ConfigDiscovery: """Fill-in metadata and options that can be automatically derived (from other metadata/options, the file system or conventions) """ def __init__(self, distribution: "Distribution"): self.dist = distribution self._called = False self._disabled = False self._skip_ext_modules = False def _disable(self): """Internal API to disable automatic discovery""" self._disabled = 
True def _ignore_ext_modules(self): """Internal API to disregard ext_modules. Normally auto-discovery would not be triggered if ``ext_modules`` are set (this is done for backward compatibility with existing packages relying on ``setup.py`` or ``setup.cfg``). However, ``setuptools`` can call this function to ignore given ``ext_modules`` and proceed with the auto-discovery if ``packages`` and ``py_modules`` are not given (e.g. when using pyproject.toml metadata). """ self._skip_ext_modules = True @property def _root_dir(self) -> _Path: # The best is to wait until `src_root` is set in dist, before using _root_dir. return self.dist.src_root or os.curdir @property def _package_dir(self) -> Dict[str, str]: if self.dist.package_dir is None: return {} return self.dist.package_dir def __call__(self, force=False, name=True, ignore_ext_modules=False): """Automatically discover missing configuration fields and modifies the given ``distribution`` object in-place. Note that by default this will only have an effect the first time the ``ConfigDiscovery`` object is called. To repeatedly invoke automatic discovery (e.g. when the project directory changes), please use ``force=True`` (or create a new ``ConfigDiscovery`` instance). 
""" if force is False and (self._called or self._disabled): # Avoid overhead of multiple calls return self._analyse_package_layout(ignore_ext_modules) if name: self.analyse_name() # depends on ``packages`` and ``py_modules`` self._called = True def _explicitly_specified(self, ignore_ext_modules: bool) -> bool: """``True`` if the user has specified some form of package/module listing""" ignore_ext_modules = ignore_ext_modules or self._skip_ext_modules ext_modules = not (self.dist.ext_modules is None or ignore_ext_modules) return ( self.dist.packages is not None or self.dist.py_modules is not None or ext_modules or hasattr(self.dist, "configuration") and self.dist.configuration # ^ Some projects use numpy.distutils.misc_util.Configuration ) def _analyse_package_layout(self, ignore_ext_modules: bool) -> bool: if self._explicitly_specified(ignore_ext_modules): # For backward compatibility, just try to find modules/packages # when nothing is given return True log.debug( "No `packages` or `py_modules` configuration, performing " "automatic discovery." ) return ( self._analyse_explicit_layout() or self._analyse_src_layout() # flat-layout is the trickiest for discovery so it should be last or self._analyse_flat_layout() ) def _analyse_explicit_layout(self) -> bool: """The user can explicitly give a package layout via ``package_dir``""" package_dir = self._package_dir.copy() # don't modify directly package_dir.pop("", None) # This falls under the "src-layout" umbrella root_dir = self._root_dir if not package_dir: return False log.debug(f"`explicit-layout` detected -- analysing {package_dir}") pkgs = chain_iter( _find_packages_within(pkg, os.path.join(root_dir, parent_dir)) for pkg, parent_dir in package_dir.items() ) self.dist.packages = list(pkgs) log.debug(f"discovered packages -- {self.dist.packages}") return True def _analyse_src_layout(self) -> bool: """Try to find all packages or modules under the ``src`` directory (or anything pointed by ``package_dir[""]``). 
The "src-layout" is relatively safe for automatic discovery. We assume that everything within is meant to be included in the distribution. If ``package_dir[""]`` is not given, but the ``src`` directory exists, this function will set ``package_dir[""] = "src"``. """ package_dir = self._package_dir src_dir = os.path.join(self._root_dir, package_dir.get("", "src")) if not os.path.isdir(src_dir): return False log.debug(f"`src-layout` detected -- analysing {src_dir}") package_dir.setdefault("", os.path.basename(src_dir)) self.dist.package_dir = package_dir # persist eventual modifications self.dist.packages = PEP420PackageFinder.find(src_dir) self.dist.py_modules = ModuleFinder.find(src_dir) log.debug(f"discovered packages -- {self.dist.packages}") log.debug(f"discovered py_modules -- {self.dist.py_modules}") return True def _analyse_flat_layout(self) -> bool: """Try to find all packages and modules under the project root. Since the ``flat-layout`` is more dangerous in terms of accidentally including extra files/directories, this function is more conservative and will raise an error if multiple packages or modules are found. This assumes that multi-package dists are uncommon and refuse to support that use case in order to be able to prevent unintended errors. 
""" log.debug(f"`flat-layout` detected -- analysing {self._root_dir}") return self._analyse_flat_packages() or self._analyse_flat_modules() def _analyse_flat_packages(self) -> bool: self.dist.packages = FlatLayoutPackageFinder.find(self._root_dir) top_level = remove_nested_packages(remove_stubs(self.dist.packages)) log.debug(f"discovered packages -- {self.dist.packages}") self._ensure_no_accidental_inclusion(top_level, "packages") return bool(top_level) def _analyse_flat_modules(self) -> bool: self.dist.py_modules = FlatLayoutModuleFinder.find(self._root_dir) log.debug(f"discovered py_modules -- {self.dist.py_modules}") self._ensure_no_accidental_inclusion(self.dist.py_modules, "modules") return bool(self.dist.py_modules) def _ensure_no_accidental_inclusion(self, detected: List[str], kind: str): if len(detected) > 1: from inspect import cleandoc from setuptools.errors import PackageDiscoveryError msg = f"""Multiple top-level {kind} discovered in a flat-layout: {detected}. To avoid accidental inclusion of unwanted files or directories, setuptools will not proceed with this build. If you are trying to create a single distribution with multiple {kind} on purpose, you should not rely on automatic discovery. Instead, consider the following options: 1. set up custom discovery (`find` directive with `include` or `exclude`) 2. use a `src-layout` 3. explicitly set `py_modules` or `packages` with a list of names To find more information, look for "package discovery" on setuptools docs. """ raise PackageDiscoveryError(cleandoc(msg)) def analyse_name(self): """The packages/modules are the essential contribution of the author. Therefore the name of the distribution can be derived from them. 
""" if self.dist.metadata.name or self.dist.name: # get_name() is not reliable (can return "UNKNOWN") return None log.debug("No `name` configuration, performing automatic discovery") name = ( self._find_name_single_package_or_module() or self._find_name_from_packages() ) if name: self.dist.metadata.name = name def _find_name_single_package_or_module(self) -> Optional[str]: """Exactly one module or package""" for field in ('packages', 'py_modules'): items = getattr(self.dist, field, None) or [] if items and len(items) == 1: log.debug(f"Single module/package detected, name: {items[0]}") return items[0] return None def _find_name_from_packages(self) -> Optional[str]: """Try to find the root package that is not a PEP 420 namespace""" if not self.dist.packages: return None packages = remove_stubs(sorted(self.dist.packages, key=len)) package_dir = self.dist.package_dir or {} parent_pkg = find_parent_package(packages, package_dir, self._root_dir) if parent_pkg: log.debug(f"Common parent package detected, name: {parent_pkg}") return parent_pkg log.warn("No parent package detected, impossible to derive `name`") return None def remove_nested_packages(packages: List[str]) -> List[str]: """Remove nested packages from a list of packages. >>> remove_nested_packages(["a", "a.b1", "a.b2", "a.b1.c1"]) ['a'] >>> remove_nested_packages(["a", "b", "c.d", "c.d.e.f", "g.h", "a.a1"]) ['a', 'b', 'c.d', 'g.h'] """ pkgs = sorted(packages, key=len) top_level = pkgs[:] size = len(pkgs) for i, name in enumerate(reversed(pkgs)): if any(name.startswith(f"{other}.") for other in top_level): top_level.pop(size - i - 1) return top_level def remove_stubs(packages: List[str]) -> List[str]: """Remove type stubs (:pep:`561`) from a list of packages. 
>>> remove_stubs(["a", "a.b", "a-stubs", "a-stubs.b.c", "b", "c-stubs"]) ['a', 'a.b', 'b'] """ return [pkg for pkg in packages if not pkg.split(".")[0].endswith("-stubs")] def find_parent_package( packages: List[str], package_dir: Mapping[str, str], root_dir: _Path ) -> Optional[str]: """Find the parent package that is not a namespace.""" packages = sorted(packages, key=len) common_ancestors = [] for i, name in enumerate(packages): if not all(n.startswith(f"{name}.") for n in packages[i+1:]): # Since packages are sorted by length, this condition is able # to find a list of all common ancestors. # When there is divergence (e.g. multiple root packages) # the list will be empty break common_ancestors.append(name) for name in common_ancestors: pkg_path = find_package_path(name, package_dir, root_dir) init = os.path.join(pkg_path, "__init__.py") if os.path.isfile(init): return name return None def find_package_path( name: str, package_dir: Mapping[str, str], root_dir: _Path ) -> str: """Given a package name, return the path where it should be found on disk, considering the ``package_dir`` option. 
>>> path = find_package_path("my.pkg", {"": "root/is/nested"}, ".") >>> path.replace(os.sep, "/") './root/is/nested/my/pkg' >>> path = find_package_path("my.pkg", {"my": "root/is/nested"}, ".") >>> path.replace(os.sep, "/") './root/is/nested/pkg' >>> path = find_package_path("my.pkg", {"my.pkg": "root/is/nested"}, ".") >>> path.replace(os.sep, "/") './root/is/nested' >>> path = find_package_path("other.pkg", {"my.pkg": "root/is/nested"}, ".") >>> path.replace(os.sep, "/") './other/pkg' """ parts = name.split(".") for i in range(len(parts), 0, -1): # Look backwards, the most specific package_dir first partial_name = ".".join(parts[:i]) if partial_name in package_dir: parent = package_dir[partial_name] return os.path.join(root_dir, parent, *parts[i:]) parent = package_dir.get("") or "" return os.path.join(root_dir, *parent.split("/"), *parts) def construct_package_dir(packages: List[str], package_path: _Path) -> Dict[str, str]: parent_pkgs = remove_nested_packages(packages) prefix = Path(package_path).parts return {pkg: "/".join([*prefix, *pkg.split(".")]) for pkg in parent_pkgs}
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/depends.py
import sys
import marshal
import contextlib
import dis

from setuptools.extern.packaging import version

from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
from . import _imp


__all__ = [
    'Require', 'find_module', 'get_module_constant', 'extract_constant'
]


class Require:
    """A prerequisite to building or installing a distribution"""

    def __init__(
            self, name, requested_version, module, homepage='',
            attribute=None, format=None):

        if format is None and requested_version is not None:
            format = version.Version

        if format is not None:
            requested_version = format(requested_version)
            if attribute is None:
                attribute = '__version__'

        # Store every constructor argument as an instance attribute, then
        # drop the spurious ``self`` entry introduced by ``locals()``.
        self.__dict__.update(locals())
        del self.self

    def full_name(self):
        """Return full package/distribution name, w/version"""
        if self.requested_version is not None:
            return '%s-%s' % (self.name, self.requested_version)
        return self.name

    def version_ok(self, version):
        """Is 'version' sufficiently up-to-date?"""
        return self.attribute is None or self.format is None or \
            str(version) != "unknown" and self.format(version) >= self.requested_version

    def get_version(self, paths=None, default="unknown"):
        """Get version number of installed module, 'None', or 'default'

        Search 'paths' for module.  If not found, return 'None'.  If found,
        return the extracted version attribute, or 'default' if no version
        attribute was specified, or the value cannot be determined without
        importing the module.  The version is formatted according to the
        requirement's version format (if any), unless it is 'None' or the
        supplied 'default'.
        """
        if self.attribute is None:
            # No version attribute to inspect: existence alone is enough.
            try:
                f, p, i = find_module(self.module, paths)
                if f:
                    f.close()
                return default
            except ImportError:
                return None

        v = get_module_constant(self.module, self.attribute, default, paths)

        if v is not None and v is not default and self.format is not None:
            return self.format(v)

        return v

    def is_present(self, paths=None):
        """Return true if dependency is present on 'paths'"""
        return self.get_version(paths) is not None

    def is_current(self, paths=None):
        """Return true if dependency is present and up-to-date on 'paths'"""
        version = self.get_version(paths)
        if version is None:
            return False
        return self.version_ok(str(version))


def maybe_close(f):
    """Return a context manager closing ``f`` on exit, or a no-op when
    ``f`` is falsy (e.g. no file object was returned by ``find_module``)."""
    @contextlib.contextmanager
    def empty():
        yield
        return
    if not f:
        return empty()

    return contextlib.closing(f)


def get_module_constant(module, symbol, default=-1, paths=None):
    """Find 'module' by searching 'paths', and extract 'symbol'

    Return 'None' if 'module' does not exist on 'paths', or it does not define
    'symbol'.  If the module defines 'symbol' as a constant, return the
    constant.  Otherwise, return 'default'."""

    try:
        f, path, (suffix, mode, kind) = info = find_module(module, paths)
    except ImportError:
        # Module doesn't exist
        return None

    with maybe_close(f):
        if kind == PY_COMPILED:
            f.read(8)  # skip magic & date
            code = marshal.load(f)
        elif kind == PY_FROZEN:
            code = _imp.get_frozen_object(module, paths)
        elif kind == PY_SOURCE:
            code = compile(f.read(), path, 'exec')
        else:
            # Not something we can parse; we'll have to import it.  :(
            imported = _imp.get_module(module, paths, info)
            return getattr(imported, symbol, None)

    return extract_constant(code, symbol, default)


def extract_constant(code, symbol, default=-1):
    """Extract the constant value of 'symbol' from 'code'

    If the name 'symbol' is bound to a constant value by the Python code
    object 'code', return that value.  If 'symbol' is bound to an expression,
    return 'default'.  Otherwise, return 'None'.

    Return value is based on the first assignment to 'symbol'.  'symbol' must
    be a global, or at least a non-"fast" local in the code block.  That is,
    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
    must be present in 'code.co_names'.
    """
    if symbol not in code.co_names:
        # name's not there, can't possibly be an assignment
        return None

    name_idx = list(code.co_names).index(symbol)

    # Resolve opcodes by name via dis.opmap instead of hard-coding their
    # numeric values (previously 90/97/100): opcode numbering is a CPython
    # implementation detail and has been renumbered between releases, which
    # would silently make this scan match the wrong instructions.
    STORE_NAME = dis.opmap['STORE_NAME']
    STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
    LOAD_CONST = dis.opmap['LOAD_CONST']

    const = default

    for byte_code in dis.Bytecode(code):
        op = byte_code.opcode
        arg = byte_code.arg

        if op == LOAD_CONST:
            const = code.co_consts[arg]
        elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
            # First store to 'symbol': report the immediately preceding
            # constant, or 'default' if the stored value was an expression.
            return const
        else:
            const = default


def _update_globals():
    """
    Patch the globals to remove the objects not available on some platforms.

    XXX it'd be better to test assertions about bytecode instead.
    """

    if not sys.platform.startswith('java') and sys.platform != 'cli':
        return
    # Jython/IronPython don't expose CPython bytecode, so the code-object
    # based helpers cannot work there.
    incompatible = 'extract_constant', 'get_module_constant'
    for name in incompatible:
        del globals()[name]
        __all__.remove(name)


_update_globals()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/monkey.py
""" Monkey patching of distutils. """ import sys import distutils.filelist import platform import types import functools from importlib import import_module import inspect import setuptools __all__ = [] """ Everything is private. Contact the project team if you think you need this functionality. """ def _get_mro(cls): """ Returns the bases classes for cls sorted by the MRO. Works around an issue on Jython where inspect.getmro will not return all base classes if multiple classes share the same name. Instead, this function will return a tuple containing the class itself, and the contents of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024. """ if platform.python_implementation() == "Jython": return (cls,) + cls.__bases__ return inspect.getmro(cls) def get_unpatched(item): lookup = ( get_unpatched_class if isinstance(item, type) else get_unpatched_function if isinstance(item, types.FunctionType) else lambda item: None ) return lookup(item) def get_unpatched_class(cls): """Protect against re-patching the distutils if reloaded Also ensures that no other distutils extension monkeypatched the distutils first. 
""" external_bases = ( cls for cls in _get_mro(cls) if not cls.__module__.startswith('setuptools') ) base = next(external_bases) if not base.__module__.startswith('distutils'): msg = "distutils has already been patched by %r" % cls raise AssertionError(msg) return base def patch_all(): # we can't patch distutils.cmd, alas distutils.core.Command = setuptools.Command has_issue_12885 = sys.version_info <= (3, 5, 3) if has_issue_12885: # fix findall bug in distutils (http://bugs.python.org/issue12885) distutils.filelist.findall = setuptools.findall needs_warehouse = ( (3, 4) < sys.version_info < (3, 4, 6) or (3, 5) < sys.version_info <= (3, 5, 3) ) if needs_warehouse: warehouse = 'https://upload.pypi.org/legacy/' distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse _patch_distribution_metadata() # Install Distribution throughout the distutils for module in distutils.dist, distutils.core, distutils.cmd: module.Distribution = setuptools.dist.Distribution # Install the patched Extension distutils.core.Extension = setuptools.extension.Extension distutils.extension.Extension = setuptools.extension.Extension if 'distutils.command.build_ext' in sys.modules: sys.modules['distutils.command.build_ext'].Extension = ( setuptools.extension.Extension ) patch_for_msvc_specialized_compiler() def _patch_distribution_metadata(): """Patch write_pkg_file and read_pkg_file for higher metadata standards""" for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'): new_val = getattr(setuptools.dist, attr) setattr(distutils.dist.DistributionMetadata, attr, new_val) def patch_func(replacement, target_mod, func_name): """ Patch func_name in target_mod with replacement Important - original must be resolved by name to avoid patching an already patched function. """ original = getattr(target_mod, func_name) # set the 'unpatched' attribute on the replacement to # point to the original. 
vars(replacement).setdefault('unpatched', original) # replace the function in the original module setattr(target_mod, func_name, replacement) def get_unpatched_function(candidate): return getattr(candidate, 'unpatched') def patch_for_msvc_specialized_compiler(): """ Patch functions in distutils to use standalone Microsoft Visual C++ compilers. """ # import late to avoid circular imports on Python < 3.5 msvc = import_module('setuptools.msvc') if platform.system() != 'Windows': # Compilers only available on Microsoft Windows return def patch_params(mod_name, func_name): """ Prepare the parameters for patch_func to patch indicated function. """ repl_prefix = 'msvc14_' repl_name = repl_prefix + func_name.lstrip('_') repl = getattr(msvc, repl_name) mod = import_module(mod_name) if not hasattr(mod, func_name): raise ImportError(func_name) return repl, mod, func_name # Python 3.5+ msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') try: # Patch distutils._msvccompiler._get_vc_env patch_func(*msvc14('_get_vc_env')) except ImportError: pass
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/installer.py
import glob
import os
import subprocess
import sys
import tempfile
from distutils import log
from distutils.errors import DistutilsError
from functools import partial

from . import _reqs
from .wheel import Wheel
from .warnings import SetuptoolsDeprecationWarning


def _fixup_find_links(find_links):
    """Ensure find-links option end-up being a list of strings."""
    if isinstance(find_links, str):
        return find_links.split()
    assert isinstance(find_links, (tuple, list))
    return find_links


def fetch_build_egg(dist, req):
    """Fetch an egg needed for building.

    Use pip/wheel to fetch/build a wheel."""
    _DeprecatedInstaller.emit()
    _warn_wheel_not_available(dist)
    return _fetch_build_egg_no_warn(dist, req)


def _fetch_build_eggs(dist, requires):
    """Resolve and install all of ``requires`` into the working set,
    fetching any missing distribution as an egg via pip."""
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    _DeprecatedInstaller.emit(stacklevel=3)
    _warn_wheel_not_available(dist)

    resolved_dists = pkg_resources.working_set.resolve(
        _reqs.parse(requires, pkg_resources.Requirement),  # required for compatibility
        installer=partial(_fetch_build_egg_no_warn, dist),  # avoid warning twice
        replace_conflicting=True,
    )
    for dist in resolved_dists:
        pkg_resources.working_set.add(dist, replace=True)
    return resolved_dists


def _fetch_build_egg_no_warn(dist, req):  # noqa: C901  # is too complex (16)  # FIXME
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    # Ignore environment markers; if supplied, it is required.
    req = strip_marker(req)
    # Take easy_install options into account, but do not override relevant
    # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll
    # take precedence.
    opts = dist.get_option_dict('easy_install')
    if 'allow_hosts' in opts:
        raise DistutilsError('the `allow-hosts` option is not supported '
                             'when using pip to install requirements.')
    quiet = 'PIP_QUIET' not in os.environ and 'PIP_VERBOSE' not in os.environ
    if 'PIP_INDEX_URL' in os.environ:
        # Let pip's own environment variable win.
        index_url = None
    elif 'index_url' in opts:
        index_url = opts['index_url'][1]
    else:
        index_url = None
    find_links = (
        _fixup_find_links(opts['find_links'][1])[:] if 'find_links' in opts
        else []
    )
    if dist.dependency_links:
        find_links.extend(dist.dependency_links)
    eggs_dir = os.path.realpath(dist.get_egg_cache_dir())
    environment = pkg_resources.Environment()
    # Reuse a previously fetched egg from the cache directory if one matches.
    for egg_dist in pkg_resources.find_distributions(eggs_dir):
        if egg_dist in req and environment.can_add(egg_dist):
            return egg_dist
    # Otherwise build a wheel with pip in a temp dir and install it as an egg.
    with tempfile.TemporaryDirectory() as tmpdir:
        cmd = [
            sys.executable, '-m', 'pip',
            '--disable-pip-version-check',
            'wheel', '--no-deps',
            '-w', tmpdir,
        ]
        if quiet:
            cmd.append('--quiet')
        if index_url is not None:
            cmd.extend(('--index-url', index_url))
        for link in find_links or []:
            cmd.extend(('--find-links', link))
        # If requirement is a PEP 508 direct URL, directly pass
        # the URL to pip, as `req @ url` does not work on the
        # command line.
        cmd.append(req.url or str(req))
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError as e:
            raise DistutilsError(str(e)) from e
        wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0])
        dist_location = os.path.join(eggs_dir, wheel.egg_name())
        wheel.install_as_egg(dist_location)
        dist_metadata = pkg_resources.PathMetadata(
            dist_location, os.path.join(dist_location, 'EGG-INFO'))
        dist = pkg_resources.Distribution.from_filename(
            dist_location, metadata=dist_metadata)
    return dist


def strip_marker(req):
    """
    Return a new requirement without the environment marker to avoid
    calling pip with something like `babel; extra == "i18n"`, which
    would always be ignored.
    """
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    # create a copy to avoid mutating the input
    req = pkg_resources.Requirement.parse(str(req))
    req.marker = None
    return req


def _warn_wheel_not_available(dist):
    # Best-effort advisory only: absence of `wheel` is not fatal here.
    import pkg_resources  # Delay import to avoid unnecessary side-effects

    try:
        pkg_resources.get_distribution('wheel')
    except pkg_resources.DistributionNotFound:
        dist.announce('WARNING: The wheel package is not available.', log.WARN)


class _DeprecatedInstaller(SetuptoolsDeprecationWarning):
    _SUMMARY = "setuptools.installer and fetch_build_eggs are deprecated."
    _DETAILS = """
    Requirements should be satisfied by a PEP 517 installer.
    If you are using pip, you can try `pip install --use-pep517`.
    """
    # _DUE_DATE not decided yet
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/_itertools.py
from setuptools.extern.more_itertools import consume  # noqa: F401


# copied from jaraco.itertools 6.1
def ensure_unique(iterable, key=lambda x: x):
    """
    Wrap an iterable to raise a ValueError if non-unique values are encountered.

    Elements are yielded lazily; the error is raised as soon as the first
    repeated key is seen.

    >>> list(ensure_unique('abc'))
    ['a', 'b', 'c']
    >>> consume(ensure_unique('abca'))
    Traceback (most recent call last):
    ...
    ValueError: Duplicate element 'a' encountered.
    """
    observed = set()
    for element in iterable:
        marker = key(element)
        if marker in observed:
            raise ValueError(f"Duplicate element {element!r} encountered.")
        observed.add(marker)
        yield element
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/py312compat.py
import sys
import shutil


def shutil_rmtree(path, ignore_errors=False, onexc=None):
    """Compatibility shim for ``shutil.rmtree``'s ``onexc`` parameter.

    Python 3.12 introduced ``onexc``, whose handler receives the exception
    instance; earlier versions only support ``onerror``, whose handler
    receives a ``sys.exc_info()`` triple.  On older interpreters we adapt
    the new-style callback to the old protocol.
    """
    if sys.version_info < (3, 12):
        # Old protocol: unwrap the exc_info triple before delegating to the
        # caller's new-style (exception-instance) callback.
        def _adapter(fn, affected_path, exc_info):
            return onexc(fn, affected_path, exc_info[1])

        return shutil.rmtree(path, ignore_errors, onerror=_adapter)

    return shutil.rmtree(path, ignore_errors, onexc=onexc)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/_importlib.py
import sys


def disable_importlib_metadata_finder(metadata):
    """
    Ensure importlib_metadata doesn't provide older, incompatible
    Distributions.

    Workaround for #3102.
    """
    try:
        import importlib_metadata
    except ImportError:
        # Third-party backport not installed: nothing to disable.
        return
    except AttributeError:
        from .warnings import SetuptoolsWarning

        SetuptoolsWarning.emit(
            "Incompatibility problem.",
            """
            `importlib-metadata` version is incompatible with `setuptools`.
            This problem is likely to be solved by installing an updated version of
            `importlib-metadata`.
            """,
            see_url="https://github.com/python/importlib_metadata/issues/396"
        )  # Ensure a descriptive message is shown.
        raise  # This exception can be suppressed by _distutils_hack

    if importlib_metadata is metadata:
        # The backport is the very module we selected below: keep it.
        return
    # Remove the backport's finders from sys.meta_path so its (older)
    # Distribution objects don't shadow the ones we rely on.
    to_remove = [
        ob
        for ob in sys.meta_path
        if isinstance(ob, importlib_metadata.MetadataPathFinder)
    ]
    for item in to_remove:
        sys.meta_path.remove(item)


# Select a metadata/resources provider by interpreter version: use the
# stdlib module when new enough, otherwise the vendored backport.
if sys.version_info < (3, 10):
    from setuptools.extern import importlib_metadata as metadata
    disable_importlib_metadata_finder(metadata)
else:
    import importlib.metadata as metadata  # noqa: F401

if sys.version_info < (3, 9):
    from setuptools.extern import importlib_resources as resources
else:
    import importlib.resources as resources  # noqa: F401
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/extension.py
import re
import functools
import distutils.core
import distutils.errors
import distutils.extension

from .monkey import get_unpatched


def _have_cython():
    """
    Return True if Cython can be imported.
    """
    cython_impl = 'Cython.Distutils.build_ext'
    try:
        # from (cython_impl) import build_ext
        __import__(cython_impl, fromlist=['build_ext']).build_ext
        return True
    except Exception:
        pass
    return False


# for compatibility
have_pyrex = _have_cython

# Resolve the pristine (un-monkeypatched) distutils Extension to subclass.
_Extension = get_unpatched(distutils.core.Extension)


class Extension(_Extension):
    """
    Describes a single extension module.

    This means that all source files will be compiled into a single binary file
    ``<module path>.<suffix>`` (with ``<module path>`` derived from ``name`` and
    ``<suffix>`` defined by one of the values in
    ``importlib.machinery.EXTENSION_SUFFIXES``).

    In the case ``.pyx`` files are passed as ``sources and`` ``Cython`` is **not**
    installed in the build environment, ``setuptools`` may also try to look for the
    equivalent ``.cpp`` or ``.c`` files.

    :arg str name:
      the full name of the extension, including any packages -- ie.
      *not* a filename or pathname, but Python dotted name

    :arg list[str] sources:
      list of source filenames, relative to the distribution root
      (where the setup script lives), in Unix form (slash-separated)
      for portability.  Source files may be C, C++, SWIG (.i),
      platform-specific resource files, or whatever else is recognized
      by the "build_ext" command as source for a Python extension.

    :keyword list[str] include_dirs:
      list of directories to search for C/C++ header files (in Unix
      form for portability)

    :keyword list[tuple[str, str|None]] define_macros:
      list of macros to define; each macro is defined using a 2-tuple:
      the first item corresponding to the name of the macro and the second
      item either a string with its value or None to
      define it without a particular value (equivalent of "#define
      FOO" in source or -DFOO on Unix C compiler command line)

    :keyword list[str] undef_macros:
      list of macros to undefine explicitly

    :keyword list[str] library_dirs:
      list of directories to search for C/C++ libraries at link time

    :keyword list[str] libraries:
      list of library names (not filenames or paths) to link against

    :keyword list[str] runtime_library_dirs:
      list of directories to search for C/C++ libraries at run time
      (for shared extensions, this is when the extension is loaded).
      Setting this will cause an exception during build on Windows
      platforms.

    :keyword list[str] extra_objects:
      list of extra files to link with (eg. object files not implied
      by 'sources', static library that must be explicitly specified,
      binary resource files, etc.)

    :keyword list[str] extra_compile_args:
      any extra platform- and compiler-specific information to use
      when compiling the source files in 'sources'.  For platforms and
      compilers where "command line" makes sense, this is typically a
      list of command-line arguments, but for other platforms it could
      be anything.

    :keyword list[str] extra_link_args:
      any extra platform- and compiler-specific information to use
      when linking object files together to create the extension (or
      to create a new static Python interpreter).  Similar
      interpretation as for 'extra_compile_args'.

    :keyword list[str] export_symbols:
      list of symbols to be exported from a shared extension.  Not
      used on all platforms, and not generally necessary for Python
      extensions, which typically export exactly one symbol: "init" +
      extension_name.

    :keyword list[str] swig_opts:
      any extra options to pass to SWIG if a source file has the .i
      extension.

    :keyword list[str] depends:
      list of files that the extension depends on

    :keyword str language:
      extension language (i.e. "c", "c++", "objc"). Will be detected
      from the source extensions if not provided.

    :keyword bool optional:
      specifies that a build failure in the extension should not abort the
      build process, but simply not install the failing extension.

    :keyword bool py_limited_api:
      opt-in flag for the usage of :doc:`Python's limited API <python:c-api/stable>`.

    :raises setuptools.errors.PlatformError: if 'runtime_library_dirs' is
      specified on Windows. (since v63)
    """

    def __init__(self, name, sources, *args, **kw):
        # The *args is needed for compatibility as calls may use positional
        # arguments. py_limited_api may be set only via keyword.
        self.py_limited_api = kw.pop("py_limited_api", False)
        super().__init__(name, sources, *args, **kw)

    def _convert_pyx_sources_to_lang(self):
        """
        Replace sources with .pyx extensions to sources with the target
        language extension. This mechanism allows language authors to supply
        pre-converted sources but to prefer the .pyx sources.
        """
        if _have_cython():
            # the build has Cython, so allow it to compile the .pyx files
            return
        lang = self.language or ''
        target_ext = '.cpp' if lang.lower() == 'c++' else '.c'
        sub = functools.partial(re.sub, '.pyx$', target_ext)
        self.sources = list(map(sub, self.sources))


class Library(Extension):
    """Just like a regular Extension, but built as a library instead"""
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/archive_util.py
"""Utilities for extracting common archive formats""" import zipfile import tarfile import os import shutil import posixpath import contextlib from distutils.errors import DistutilsError from ._path import ensure_directory __all__ = [ "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", "UnrecognizedFormat", "extraction_drivers", "unpack_directory", ] class UnrecognizedFormat(DistutilsError): """Couldn't recognize the archive type""" def default_filter(src, dst): """The default progress/filter callback; returns True for all files""" return dst def unpack_archive( filename, extract_dir, progress_filter=default_filter, drivers=None): """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` `progress_filter` is a function taking two arguments: a source path internal to the archive ('/'-separated), and a filesystem path where it will be extracted. The callback must return the desired extract path (which may be the same as the one passed in), or else ``None`` to skip that file or directory. The callback can thus be used to report on the progress of the extraction, as well as to filter the items extracted or alter their extraction paths. `drivers`, if supplied, must be a non-empty sequence of functions with the same signature as this function (minus the `drivers` argument), that raise ``UnrecognizedFormat`` if they do not support extracting the designated archive type. The `drivers` are tried in sequence until one is found that does not raise an error, or until all are exhausted (in which case ``UnrecognizedFormat`` is raised). If you do not supply a sequence of drivers, the module's ``extraction_drivers`` constant will be used, which means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that order. 
""" for driver in drivers or extraction_drivers: try: driver(filename, extract_dir, progress_filter) except UnrecognizedFormat: continue else: return else: raise UnrecognizedFormat( "Not a recognized archive type: %s" % filename ) def unpack_directory(filename, extract_dir, progress_filter=default_filter): """"Unpack" a directory, using the same interface as for archives Raises ``UnrecognizedFormat`` if `filename` is not a directory """ if not os.path.isdir(filename): raise UnrecognizedFormat("%s is not a directory" % filename) paths = { filename: ('', extract_dir), } for base, dirs, files in os.walk(filename): src, dst = paths[base] for d in dirs: paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) for f in files: target = os.path.join(dst, f) target = progress_filter(src + f, target) if not target: # skip non-files continue ensure_directory(target) f = os.path.join(base, f) shutil.copyfile(f, target) shutil.copystat(f, target) def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): """Unpack zip `filename` to `extract_dir` Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation of the `progress_filter` argument. """ if not zipfile.is_zipfile(filename): raise UnrecognizedFormat("%s is not a zip file" % (filename,)) with zipfile.ZipFile(filename) as z: _unpack_zipfile_obj(z, extract_dir, progress_filter) def _unpack_zipfile_obj(zipfile_obj, extract_dir, progress_filter=default_filter): """Internal/private API used by other parts of setuptools. Similar to ``unpack_zipfile``, but receives an already opened :obj:`zipfile.ZipFile` object instead of a filename. """ for info in zipfile_obj.infolist(): name = info.filename # don't extract absolute paths or ones with .. in them if name.startswith('/') or '..' 
in name.split('/'): continue target = os.path.join(extract_dir, *name.split('/')) target = progress_filter(name, target) if not target: continue if name.endswith('/'): # directory ensure_directory(target) else: # file ensure_directory(target) data = zipfile_obj.read(info.filename) with open(target, 'wb') as f: f.write(data) unix_attributes = info.external_attr >> 16 if unix_attributes: os.chmod(target, unix_attributes) def _resolve_tar_file_or_dir(tar_obj, tar_member_obj): """Resolve any links and extract link targets as normal files.""" while tar_member_obj is not None and ( tar_member_obj.islnk() or tar_member_obj.issym()): linkpath = tar_member_obj.linkname if tar_member_obj.issym(): base = posixpath.dirname(tar_member_obj.name) linkpath = posixpath.join(base, linkpath) linkpath = posixpath.normpath(linkpath) tar_member_obj = tar_obj._getmember(linkpath) is_file_or_dir = ( tar_member_obj is not None and (tar_member_obj.isfile() or tar_member_obj.isdir()) ) if is_file_or_dir: return tar_member_obj raise LookupError('Got unknown file type') def _iter_open_tar(tar_obj, extract_dir, progress_filter): """Emit member-destination pairs from a tar archive.""" # don't do any chowning! tar_obj.chown = lambda *args: None with contextlib.closing(tar_obj): for member in tar_obj: name = member.name # don't extract absolute paths or ones with .. in them if name.startswith('/') or '..' in name.split('/'): continue prelim_dst = os.path.join(extract_dir, *name.split('/')) try: member = _resolve_tar_file_or_dir(tar_obj, member) except LookupError: continue final_dst = progress_filter(name, prelim_dst) if not final_dst: continue if final_dst.endswith(os.sep): final_dst = final_dst[:-1] yield member, final_dst def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined by ``tarfile.open()``). 
See ``unpack_archive()`` for an explanation of the `progress_filter` argument. """ try: tarobj = tarfile.open(filename) except tarfile.TarError as e: raise UnrecognizedFormat( "%s is not a compressed or uncompressed tar file" % (filename,) ) from e for member, final_dst in _iter_open_tar( tarobj, extract_dir, progress_filter, ): try: # XXX Ugh tarobj._extract_member(member, final_dst) except tarfile.ExtractError: # chown/chmod/mkfifo/mknode/makedev failed pass return True extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/__init__.py
"""Extensions to the 'distutils' for large or complex distributions""" import functools import os import re import _distutils_hack.override # noqa: F401 import distutils.core from distutils.errors import DistutilsOptionError from distutils.util import convert_path as _convert_path from .warnings import SetuptoolsDeprecationWarning import setuptools.version from setuptools.extension import Extension from setuptools.dist import Distribution from setuptools.depends import Require from setuptools.discovery import PackageFinder, PEP420PackageFinder from . import monkey from . import logging __all__ = [ 'setup', 'Distribution', 'Command', 'Extension', 'Require', 'SetuptoolsDeprecationWarning', 'find_packages', 'find_namespace_packages', ] __version__ = setuptools.version.__version__ bootstrap_install_from = None find_packages = PackageFinder.find find_namespace_packages = PEP420PackageFinder.find def _install_setup_requires(attrs): # Note: do not use `setuptools.Distribution` directly, as # our PEP 517 backend patch `distutils.core.Distribution`. class MinimalDistribution(distutils.core.Distribution): """ A minimal version of a distribution for supporting the fetch_build_eggs interface. """ def __init__(self, attrs): _incl = 'dependency_links', 'setup_requires' filtered = {k: attrs[k] for k in set(_incl) & set(attrs)} super().__init__(filtered) # Prevent accidentally triggering discovery with incomplete set of attrs self.set_defaults._disable() def _get_project_config_files(self, filenames=None): """Ignore ``pyproject.toml``, they are not related to setup_requires""" try: cfg, toml = super()._split_standard_project_metadata(filenames) return cfg, () except Exception: return filenames, () def finalize_options(self): """ Disable finalize_options to avoid building the working set. Ref #2158. """ dist = MinimalDistribution(attrs) # Honor setup.cfg's options. 
dist.parse_config_files(ignore_option_errors=True) if dist.setup_requires: _fetch_build_eggs(dist) def _fetch_build_eggs(dist): try: dist.fetch_build_eggs(dist.setup_requires) except Exception as ex: msg = """ It is possible a package already installed in your system contains an version that is invalid according to PEP 440. You can try `pip install --use-pep517` as a workaround for this problem, or rely on a new virtual environment. If the problem refers to a package that is not installed yet, please contact that package's maintainers or distributors. """ if "InvalidVersion" in ex.__class__.__name__: if hasattr(ex, "add_note"): ex.add_note(msg) # PEP 678 else: dist.announce(f"\n{msg}\n") raise def setup(**attrs): # Make sure we have any requirements needed to interpret 'attrs'. logging.configure() _install_setup_requires(attrs) return distutils.core.setup(**attrs) setup.__doc__ = distutils.core.setup.__doc__ _Command = monkey.get_unpatched(distutils.core.Command) class Command(_Command): """ Setuptools internal actions are organized using a *command design pattern*. This means that each action (or group of closely related actions) executed during the build should be implemented as a ``Command`` subclass. These commands are abstractions and do not necessarily correspond to a command that can (or should) be executed via a terminal, in a CLI fashion (although historically they would). When creating a new command from scratch, custom defined classes **SHOULD** inherit from ``setuptools.Command`` and implement a few mandatory methods. Between these mandatory methods, are listed: .. method:: initialize_options(self) Set or (reset) all options/attributes/caches used by the command to their default values. Note that these values may be overwritten during the build. .. method:: finalize_options(self) Set final values for all options/attributes used by the command. Most of the time, each option/attribute/cache should only be set if it does not have any value yet (e.g. 
``if self.attr is None: self.attr = val``). .. method:: run(self) Execute the actions intended by the command. (Side effects **SHOULD** only take place when ``run`` is executed, for example, creating new files or writing to the terminal output). A useful analogy for command classes is to think of them as subroutines with local variables called "options". The options are "declared" in ``initialize_options()`` and "defined" (given their final values, aka "finalized") in ``finalize_options()``, both of which must be defined by every command class. The "body" of the subroutine, (where it does all the work) is the ``run()`` method. Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set the values for options/attributes based on user's input (or circumstance), which means that the implementation should be careful to not overwrite values in ``finalize_options`` unless necessary. Please note that other commands (or other parts of setuptools) may also overwrite the values of the command's options/attributes multiple times during the build process. Therefore it is important to consistently implement ``initialize_options()`` and ``finalize_options()``. For example, all derived attributes (or attributes that depend on the value of other attributes) **SHOULD** be recomputed in ``finalize_options``. When overwriting existing commands, custom defined classes **MUST** abide by the same APIs implemented by the original class. They also **SHOULD** inherit from the original class. """ command_consumes_arguments = False def __init__(self, dist, **kw): """ Construct the command for dist, updating vars(self) with any keyword parameters. 
""" super().__init__(dist) vars(self).update(kw) def _ensure_stringlike(self, option, what, default=None): val = getattr(self, option) if val is None: setattr(self, option, default) return default elif not isinstance(val, str): raise DistutilsOptionError( "'%s' must be a %s (got `%s`)" % (option, what, val) ) return val def ensure_string_list(self, option): r"""Ensure that 'option' is a list of strings. If 'option' is currently a string, we split it either on /,\s*/ or /\s+/, so "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become ["foo", "bar", "baz"]. .. TODO: This method seems to be similar to the one in ``distutils.cmd`` Probably it is just here for backward compatibility with old Python versions? :meta private: """ val = getattr(self, option) if val is None: return elif isinstance(val, str): setattr(self, option, re.split(r',\s*|\s+', val)) else: if isinstance(val, list): ok = all(isinstance(v, str) for v in val) else: ok = False if not ok: raise DistutilsOptionError( "'%s' must be a list of strings (got %r)" % (option, val) ) def reinitialize_command(self, command, reinit_subcommands=0, **kw): cmd = _Command.reinitialize_command(self, command, reinit_subcommands) vars(cmd).update(kw) return cmd def _find_all_simple(path): """ Find all files under 'path' """ results = ( os.path.join(base, file) for base, dirs, files in os.walk(path, followlinks=True) for file in files ) return filter(os.path.isfile, results) def findall(dir=os.curdir): """ Find all files under 'dir' and return the list of full filenames. Unless dir is '.', return full filenames with dir prepended. 
""" files = _find_all_simple(dir) if dir == os.curdir: make_rel = functools.partial(os.path.relpath, start=dir) files = map(make_rel, files) return list(files) @functools.wraps(_convert_path) def convert_path(pathname): SetuptoolsDeprecationWarning.emit( "Access to implementation detail", """ The function `convert_path` is not provided by setuptools itself, and therefore not part of the public API. Its direct usage by 3rd-party packages is considered improper and the function may be removed in the future. """, due_date=(2023, 12, 13) # initial deprecation 2022-03-25, see #3201 ) return _convert_path(pathname) class sic(str): """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)""" # Apply monkey patches monkey.patch_all()
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/namespaces.py
import os
from distutils import log
import itertools


# Flatten one level of nesting: iterable of iterables -> single iterable.
flatten = itertools.chain.from_iterable


class Installer:
    """Mixin that writes/removes a ``*-nspkg.pth`` file registering namespace
    packages at interpreter startup.

    NOTE(review): this class is a mixin — it reads ``self.outputs``,
    ``self.dry_run``, ``self.distribution`` and ``self.target`` from the
    install command it is mixed into; none of those are defined here.
    """

    # Suffix appended to the install target's stem to name the .pth file.
    nspkg_ext = '-nspkg.pth'

    def install_namespaces(self):
        """Write the -nspkg.pth file for all declared namespace packages."""
        nsp = self._get_all_ns_packages()
        if not nsp:
            # No namespace packages declared: nothing to install.
            return
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)

        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return

        with open(filename, 'wt') as f:
            f.writelines(lines)

    def uninstall_namespaces(self):
        """Remove the previously installed -nspkg.pth file, if present."""
        filename, ext = os.path.splitext(self._get_target())
        filename += self.nspkg_ext
        if not os.path.exists(filename):
            return
        log.info("Removing %s", filename)
        os.remove(filename)

    def _get_target(self):
        # Base path (without extension) the .pth filename is derived from.
        return self.target

    # Template statements joined with ';' into a single .pth line; the
    # %(root)s / %(pth)r / %(pkg)r placeholders are filled from locals()
    # in _gen_nspkg_line.
    _nspkg_tmpl = (
        "import sys, types, os",
        "has_mfs = sys.version_info > (3, 5)",
        "p = os.path.join(%(root)s, *%(pth)r)",
        "importlib = has_mfs and __import__('importlib.util')",
        "has_mfs and __import__('importlib.machinery')",
        (
            "m = has_mfs and "
            "sys.modules.setdefault(%(pkg)r, "
            "importlib.util.module_from_spec("
            "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
            "[os.path.dirname(p)])))"
        ),
        (
            "m = m or "
            "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"
        ),
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = (
        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
    )
    "additional line(s) when a parent package is indicated"

    def _get_root(self):
        # Evaluated inside the .pth file; site.py exposes 'sitedir' in the
        # caller's frame locals while processing .pth lines.
        return "sys._getframe(1).f_locals['sitedir']"

    def _gen_nspkg_line(self, pkg):
        """Render one .pth line for namespace package *pkg*."""
        pth = tuple(pkg.split('.'))
        root = self._get_root()
        tmpl_lines = self._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            # Sub-namespace: also bind the child module as an attribute
            # of its parent package.
            tmpl_lines += self._nspkg_tmpl_multi
        # locals() supplies pth/root/pkg/parent/child to the %-placeholders.
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        pkgs = self.distribution.namespace_packages or []
        return sorted(flatten(map(self._pkg_names, pkgs)))

    @staticmethod
    def _pkg_names(pkg):
        """
        Given a namespace package, yield the components of that
        package.

        >>> names = Installer._pkg_names('a.b.c')
        >>> set(names) == set(['a', 'a.b', 'a.b.c'])
        True
        """
        parts = pkg.split('.')
        while parts:
            yield '.'.join(parts)
            parts.pop()


class DevelopInstaller(Installer):
    """Installer variant for 'develop' (editable) installs: roots the
    namespace at the egg path and writes next to the egg link."""

    def _get_root(self):
        return repr(str(self.egg_path))

    def _get_target(self):
        return self.egg_link
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/version.py
from ._importlib import metadata try: __version__ = metadata.version('setuptools') or '0.dev0+unknown' except Exception: __version__ = '0.dev0+unknown'
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/errors.py
"""setuptools.errors Provides exceptions used by setuptools modules. """ from distutils import errors as _distutils_errors # Re-export errors from distutils to facilitate the migration to PEP632 ByteCompileError = _distutils_errors.DistutilsByteCompileError CCompilerError = _distutils_errors.CCompilerError ClassError = _distutils_errors.DistutilsClassError CompileError = _distutils_errors.CompileError ExecError = _distutils_errors.DistutilsExecError FileError = _distutils_errors.DistutilsFileError InternalError = _distutils_errors.DistutilsInternalError LibError = _distutils_errors.LibError LinkError = _distutils_errors.LinkError ModuleError = _distutils_errors.DistutilsModuleError OptionError = _distutils_errors.DistutilsOptionError PlatformError = _distutils_errors.DistutilsPlatformError PreprocessError = _distutils_errors.PreprocessError SetupError = _distutils_errors.DistutilsSetupError TemplateError = _distutils_errors.DistutilsTemplateError UnknownFileError = _distutils_errors.UnknownFileError # The root error class in the hierarchy BaseError = _distutils_errors.DistutilsError class RemovedCommandError(BaseError, RuntimeError): """Error used for commands that have been removed in setuptools. Since ``setuptools`` is built on ``distutils``, simply removing a command from ``setuptools`` will make the behavior fall back to ``distutils``; this error is raised if a command exists in ``distutils`` but has been actively removed in ``setuptools``. """ class PackageDiscoveryError(BaseError, RuntimeError): """Impossible to perform automatic discovery of packages and/or modules. The current project layout or given discovery options can lead to problems when scanning the project directory. Setuptools might also refuse to complete auto-discovery if an error prone condition is detected (e.g. when a project is organised as a flat-layout but contains multiple directories that can be taken as top-level packages inside a single distribution [*]_). 
In these situations the users are encouraged to be explicit about which packages to include or to make the discovery parameters more specific. .. [*] Since multi-package distributions are uncommon it is very likely that the developers did not intend for all the directories to be packaged, and are just leaving auxiliary code in the repository top-level, such as maintenance-related scripts. """
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/_normalization.py
""" Helpers for normalization as expected in wheel/sdist/module file names and core metadata """ import re from pathlib import Path from typing import Union from .extern import packaging from .warnings import SetuptoolsDeprecationWarning _Path = Union[str, Path] # https://packaging.python.org/en/latest/specifications/core-metadata/#name _VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I) _UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I) def safe_identifier(name: str) -> str: """Make a string safe to be used as Python identifier. >>> safe_identifier("12abc") '_12abc' >>> safe_identifier("__editable__.myns.pkg-78.9.3_local") '__editable___myns_pkg_78_9_3_local' """ safe = re.sub(r'\W|^(?=\d)', '_', name) assert safe.isidentifier() return safe def safe_name(component: str) -> str: """Escape a component used as a project name according to Core Metadata. >>> safe_name("hello world") 'hello-world' >>> safe_name("hello?world") 'hello-world' """ # See pkg_resources.safe_name return _UNSAFE_NAME_CHARS.sub("-", component) def safe_version(version: str) -> str: """Convert an arbitrary string into a valid version string. >>> safe_version("1988 12 25") '1988.12.25' >>> safe_version("v0.2.1") '0.2.1' >>> safe_version("v0.2?beta") '0.2b0' >>> safe_version("v0.2 beta") '0.2b0' >>> safe_version("ubuntu lts") Traceback (most recent call last): ... setuptools.extern.packaging.version.InvalidVersion: Invalid version: 'ubuntu.lts' """ v = version.replace(' ', '.') try: return str(packaging.version.Version(v)) except packaging.version.InvalidVersion: attempt = _UNSAFE_NAME_CHARS.sub("-", v) return str(packaging.version.Version(attempt)) def best_effort_version(version: str) -> str: """Convert an arbitrary string into a version-like string. 
>>> best_effort_version("v0.2 beta") '0.2b0' >>> import warnings >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning) >>> best_effort_version("ubuntu lts") 'ubuntu.lts' """ # See pkg_resources.safe_version try: return safe_version(version) except packaging.version.InvalidVersion: SetuptoolsDeprecationWarning.emit( f"Invalid version: {version!r}.", f""" Version {version!r} is not valid according to PEP 440. Please make sure to specify a valid version for your package. Also note that future releases of setuptools may halt the build process if an invalid version is given. """, see_url="https://peps.python.org/pep-0440/", due_date=(2023, 9, 26), # See setuptools/dist _validate_version ) v = version.replace(' ', '.') return safe_name(v) def filename_component(value: str) -> str: """Normalize each component of a filename (e.g. distribution/version part of wheel) Note: ``value`` needs to be already normalized. >>> filename_component("my-pkg") 'my_pkg' """ return value.replace("-", "_").strip("_") def safer_name(value: str) -> str: """Like ``safe_name`` but can be used as filename component for wheel""" # See bdist_wheel.safer_name return filename_component(safe_name(value)) def safer_best_effort_version(value: str) -> str: """Like ``best_effort_version`` but can be used as filename component for wheel""" # See bdist_wheel.safer_verion # TODO: Replace with only safe_version in the future (no need for best effort) return filename_component(best_effort_version(value))
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/unicode_utils.py
import unicodedata import sys # HFS Plus uses decomposed UTF-8 def decompose(path): if isinstance(path, str): return unicodedata.normalize('NFD', path) try: path = path.decode('utf-8') path = unicodedata.normalize('NFD', path) path = path.encode('utf-8') except UnicodeError: pass # Not UTF-8 return path def filesys_decode(path): """ Ensure that the given path is decoded, NONE when no expected encoding works """ if isinstance(path, str): return path fs_enc = sys.getfilesystemencoding() or 'utf-8' candidates = fs_enc, 'utf-8' for enc in candidates: try: return path.decode(enc) except UnicodeDecodeError: continue def try_encode(string, enc): "turn unicode encoding into a functional routine" try: return string.encode(enc) except UnicodeEncodeError: return None
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/sandbox.py
"""Run a ``setup.py`` script in a pseudo-chroot sandbox that restricts
filesystem writes to the script's own directory."""

import os
import sys
import tempfile
import operator
import functools
import itertools
import re
import contextlib
import pickle
import textwrap
import builtins

import pkg_resources
from distutils.errors import DistutilsError
from pkg_resources import working_set

if sys.platform.startswith('java'):
    # Jython: the real POSIX module lives in Java land.
    import org.python.modules.posix.PosixModule as _os
else:
    # The concrete platform module backing ``os`` (posix or nt).
    _os = sys.modules[os.name]
try:
    _file = file  # Python 2 ``file`` builtin; absent on Python 3.
except NameError:
    _file = None
# Keep a reference to the real ``open`` so the sandbox can restore it.
_open = open


__all__ = [
    "AbstractSandbox",
    "DirectorySandbox",
    "SandboxViolation",
    "run_setup",
]


def _execfile(filename, globals, locals=None):
    """
    Python 3 implementation of execfile.
    """
    mode = 'rb'
    with open(filename, mode) as stream:
        script = stream.read()
    if locals is None:
        locals = globals
    code = compile(script, filename, 'exec')
    exec(code, globals, locals)


@contextlib.contextmanager
def save_argv(repl=None):
    # Save sys.argv, optionally replace it with ``repl``, restore on exit.
    saved = sys.argv[:]
    if repl is not None:
        sys.argv[:] = repl
    try:
        yield saved
    finally:
        sys.argv[:] = saved


@contextlib.contextmanager
def save_path():
    # Save and restore sys.path around the context.
    saved = sys.path[:]
    try:
        yield saved
    finally:
        sys.path[:] = saved


@contextlib.contextmanager
def override_temp(replacement):
    """
    Monkey-patch tempfile.tempdir with replacement, ensuring it exists
    """
    os.makedirs(replacement, exist_ok=True)

    saved = tempfile.tempdir

    tempfile.tempdir = replacement

    try:
        yield
    finally:
        tempfile.tempdir = saved


@contextlib.contextmanager
def pushd(target):
    # chdir into ``target`` for the duration of the context.
    saved = os.getcwd()
    os.chdir(target)
    try:
        yield saved
    finally:
        os.chdir(saved)


class UnpickleableException(Exception):
    """
    An exception representing another Exception that could not be pickled.
    """

    @staticmethod
    def dump(type, exc):
        """
        Always return a dumped (pickled) type and exc. If exc can't be pickled,
        wrap it in UnpickleableException first.
        """
        try:
            return pickle.dumps(type), pickle.dumps(exc)
        except Exception:
            # get UnpickleableException inside the sandbox
            from setuptools.sandbox import UnpickleableException as cls

            return cls.dump(cls, cls(repr(exc)))


class ExceptionSaver:
    """
    A Context Manager that will save an exception, serialized, and restore it
    later.
    """

    def __enter__(self):
        return self

    def __exit__(self, type, exc, tb):
        if not exc:
            return

        # dump the exception
        self._saved = UnpickleableException.dump(type, exc)
        self._tb = tb

        # suppress the exception
        return True

    def resume(self):
        "restore and re-raise any exception"

        if '_saved' not in vars(self):
            # __exit__ saw no exception; nothing to re-raise.
            return

        type, exc = map(pickle.loads, self._saved)
        raise exc.with_traceback(self._tb)


@contextlib.contextmanager
def save_modules():
    """
    Context in which imported modules are saved.

    Translates exceptions internal to the context into the equivalent exception
    outside the context.
    """
    saved = sys.modules.copy()
    with ExceptionSaver() as saved_exc:
        yield saved

    sys.modules.update(saved)
    # remove any modules imported since
    del_modules = (
        mod_name
        for mod_name in sys.modules
        if mod_name not in saved
        # exclude any encodings modules. See #285
        and not mod_name.startswith('encodings.')
    )
    _clear_modules(del_modules)

    # Re-raise (outside the context) any exception captured inside it.
    saved_exc.resume()


def _clear_modules(module_names):
    # list() because we mutate sys.modules while iterating the names.
    for mod_name in list(module_names):
        del sys.modules[mod_name]


@contextlib.contextmanager
def save_pkg_resources_state():
    saved = pkg_resources.__getstate__()
    try:
        yield saved
    finally:
        pkg_resources.__setstate__(saved)


@contextlib.contextmanager
def setup_context(setup_dir):
    # Compose all the save/restore contexts needed to run a setup script
    # without leaking state into this process.
    temp_dir = os.path.join(setup_dir, 'temp')
    with save_pkg_resources_state():
        with save_modules():
            with save_path():
                hide_setuptools()
                with save_argv():
                    with override_temp(temp_dir):
                        with pushd(setup_dir):
                            # ensure setuptools commands are available
                            __import__('setuptools')
                            yield


# Top-level packages removed from sys.modules before running the script,
# so the script imports its own (possibly different) copies.
_MODULES_TO_HIDE = {
    'setuptools',
    'distutils',
    'pkg_resources',
    'Cython',
    '_distutils_hack',
}


def _needs_hiding(mod_name):
    """
    >>> _needs_hiding('setuptools')
    True
    >>> _needs_hiding('pkg_resources')
    True
    >>> _needs_hiding('setuptools_plugin')
    False
    >>> _needs_hiding('setuptools.__init__')
    True
    >>> _needs_hiding('distutils')
    True
    >>> _needs_hiding('os')
    False
    >>> _needs_hiding('Cython')
    True
    """
    base_module = mod_name.split('.', 1)[0]
    return base_module in _MODULES_TO_HIDE


def hide_setuptools():
    """
    Remove references to setuptools' modules from sys.modules to allow the
    invocation to import the most appropriate setuptools. This technique is
    necessary to avoid issues such as #315 where setuptools upgrading itself
    would fail to find a function declared in the metadata.
    """
    _distutils_hack = sys.modules.get('_distutils_hack', None)
    if _distutils_hack is not None:
        _distutils_hack._remove_shim()

    modules = filter(_needs_hiding, sys.modules)
    _clear_modules(modules)


def run_setup(setup_script, args):
    """Run a distutils setup script, sandboxed in its directory"""
    setup_dir = os.path.abspath(os.path.dirname(setup_script))
    with setup_context(setup_dir):
        try:
            sys.argv[:] = [setup_script] + list(args)
            sys.path.insert(0, setup_dir)
            # reset to include setup dir, w/clean callback list
            working_set.__init__()
            working_set.callbacks.append(lambda dist: dist.activate())

            with DirectorySandbox(setup_dir):
                ns = dict(__file__=setup_script, __name__='__main__')
                _execfile(setup_script, ns)
        except SystemExit as v:
            if v.args and v.args[0]:
                # Non-zero exit status: propagate as a failure.
                raise
            # Normal exit, just return


class AbstractSandbox:
    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""

    _active = False

    def __init__(self):
        # Names shared by the platform os module and this sandbox: these are
        # the attributes swapped in/out on __enter__/__exit__.
        self._attrs = [
            name
            for name in dir(_os)
            if not name.startswith('_') and hasattr(self, name)
        ]

    def _copy(self, source):
        # Point each wrapped ``os`` attribute at ``source``'s implementation.
        for name in self._attrs:
            setattr(os, name, getattr(source, name))

    def __enter__(self):
        self._copy(self)
        if _file:
            builtins.file = self._file
        builtins.open = self._open
        self._active = True

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore the real builtins and os functions.
        self._active = False
        if _file:
            builtins.file = _file
        builtins.open = _open
        self._copy(_os)

    def run(self, func):
        """Run 'func' under os sandboxing"""
        with self:
            return func()

    # NOTE: the ``_mk_*`` factories below execute at class-definition time;
    # the ``locals()[name] = ...`` assignments install one wrapper method per
    # os function that exists on this platform.

    def _mk_dual_path_wrapper(name):
        # Wrapper for os functions taking two paths (rename/link/symlink).
        original = getattr(_os, name)

        def wrap(self, src, dst, *args, **kw):
            if self._active:
                src, dst = self._remap_pair(name, src, dst, *args, **kw)
            return original(src, dst, *args, **kw)

        return wrap

    for name in ["rename", "link", "symlink"]:
        if hasattr(_os, name):
            locals()[name] = _mk_dual_path_wrapper(name)

    def _mk_single_path_wrapper(name, original=None):
        # Wrapper for os functions taking a single path argument.
        original = original or getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
            return original(path, *args, **kw)

        return wrap

    if _file:
        _file = _mk_single_path_wrapper('file', _file)
    _open = _mk_single_path_wrapper('open', _open)
    for name in [
        "stat",
        "listdir",
        "chdir",
        "open",
        "chmod",
        "chown",
        "mkdir",
        "remove",
        "unlink",
        "rmdir",
        "utime",
        "lchown",
        "chroot",
        "lstat",
        "startfile",
        "mkfifo",
        "mknod",
        "pathconf",
        "access",
    ]:
        if hasattr(_os, name):
            locals()[name] = _mk_single_path_wrapper(name)

    def _mk_single_with_return(name):
        # Wrapper for single-path functions whose return value is itself a
        # path (e.g. readlink) and must be remapped on the way out.
        original = getattr(_os, name)

        def wrap(self, path, *args, **kw):
            if self._active:
                path = self._remap_input(name, path, *args, **kw)
                return self._remap_output(name, original(path, *args, **kw))
            return original(path, *args, **kw)

        return wrap

    for name in ['readlink', 'tempnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_single_with_return(name)

    def _mk_query(name):
        # Wrapper for no-path functions returning a path (e.g. getcwd).
        original = getattr(_os, name)

        def wrap(self, *args, **kw):
            retval = original(*args, **kw)
            if self._active:
                return self._remap_output(name, retval)
            return retval

        return wrap

    for name in ['getcwd', 'tmpnam']:
        if hasattr(_os, name):
            locals()[name] = _mk_query(name)

    def _validate_path(self, path):
        """Called to remap or validate any path, whether input or output"""
        return path

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        return self._validate_path(path)

    def _remap_output(self, operation, path):
        """Called for path outputs"""
        return self._validate_path(path)

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        return (
            self._remap_input(operation + '-from', src, *args, **kw),
            self._remap_input(operation + '-to', dst, *args, **kw),
        )


if hasattr(os, 'devnull'):
    # Writing to the null device is always harmless; exempt it by default.
    _EXCEPTIONS = [os.devnull]
else:
    _EXCEPTIONS = []


class DirectorySandbox(AbstractSandbox):
    """Restrict operations to a single subdirectory - pseudo-chroot"""

    # os operations that (may) write to the filesystem and therefore require
    # the target path to be inside the sandbox.
    write_ops = dict.fromkeys(
        [
            "open",
            "chmod",
            "chown",
            "mkdir",
            "remove",
            "unlink",
            "rmdir",
            "utime",
            "lchown",
            "chroot",
            "mkfifo",
            "mknod",
            "tempnam",
        ]
    )

    _exception_patterns = []
    "exempt writing to paths that match the pattern"

    def __init__(self, sandbox, exceptions=_EXCEPTIONS):
        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
        # Trailing separator so startswith() can't match sibling dirs
        # sharing a prefix (e.g. /tmp/foo vs /tmp/foobar).
        self._prefix = os.path.join(self._sandbox, '')
        self._exceptions = [
            os.path.normcase(os.path.realpath(path)) for path in exceptions
        ]
        AbstractSandbox.__init__(self)

    def _violation(self, operation, *args, **kw):
        from setuptools.sandbox import SandboxViolation

        raise SandboxViolation(operation, args, kw)

    if _file:

        def _file(self, path, mode='r', *args, **kw):
            # Only write modes are restricted; plain reads are always OK.
            if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
                self._violation("file", path, mode, *args, **kw)
            return _file(path, mode, *args, **kw)

    def _open(self, path, mode='r', *args, **kw):
        # Only write modes are restricted; plain reads are always OK.
        if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path):
            self._violation("open", path, mode, *args, **kw)
        return _open(path, mode, *args, **kw)

    def tmpnam(self):
        self._violation("tmpnam")

    def _ok(self, path):
        # Temporarily deactivate the sandbox so realpath() can call the
        # wrapped os functions without recursing back into us.
        active = self._active
        try:
            self._active = False
            realpath = os.path.normcase(os.path.realpath(path))
            return (
                self._exempted(realpath)
                or realpath == self._sandbox
                or realpath.startswith(self._prefix)
            )
        finally:
            self._active = active

    def _exempted(self, filepath):
        start_matches = (
            filepath.startswith(exception) for exception in self._exceptions
        )
        pattern_matches = (
            re.match(pattern, filepath) for pattern in self._exception_patterns
        )
        candidates = itertools.chain(start_matches, pattern_matches)
        return any(candidates)

    def _remap_input(self, operation, path, *args, **kw):
        """Called for path inputs"""
        if operation in self.write_ops and not self._ok(path):
            self._violation(operation, os.path.realpath(path), *args, **kw)
        return path

    def _remap_pair(self, operation, src, dst, *args, **kw):
        """Called for path pairs like rename, link, and symlink operations"""
        if not self._ok(src) or not self._ok(dst):
            self._violation(operation, src, dst, *args, **kw)
        return (src, dst)

    def open(self, file, flags, mode=0o777, *args, **kw):
        """Called for low-level os.open()"""
        if flags & WRITE_FLAGS and not self._ok(file):
            self._violation("os.open", file, flags, mode, *args, **kw)
        return _os.open(file, flags, mode, *args, **kw)


# Bitmask of os.open flags that imply a write; missing flags contribute 0.
WRITE_FLAGS = functools.reduce(
    operator.or_,
    [
        getattr(_os, a, 0)
        for a in "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()
    ],
)


class SandboxViolation(DistutilsError):
    """A setup script attempted to modify the filesystem outside the sandbox"""

    tmpl = textwrap.dedent(
        """
        SandboxViolation: {cmd}{args!r} {kwargs}

        The package setup script has attempted to modify files on your system
        that are not within the EasyInstall build area, and has been aborted.

        This package cannot be safely installed by EasyInstall, and may not
        support alternate installation locations even if you run its setup
        script by hand.  Please inform the package's author and the EasyInstall
        maintainers to find out if a fix or workaround is available.
        """
    ).lstrip()

    def __str__(self):
        cmd, args, kwargs = self.args
        return self.tmpl.format(**locals())
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/setuptools/dist.py
__all__ = ['Distribution'] import io import sys import re import os import numbers import distutils.log import distutils.core import distutils.cmd import distutils.dist import distutils.command from distutils.util import strtobool from distutils.debug import DEBUG from distutils.fancy_getopt import translate_longopt from glob import iglob import itertools import textwrap from contextlib import suppress from typing import List, Optional, Set, TYPE_CHECKING from pathlib import Path from collections import defaultdict from email import message_from_file from distutils.errors import DistutilsOptionError, DistutilsSetupError from distutils.util import rfc822_escape from setuptools.extern import packaging from setuptools.extern import ordered_set from setuptools.extern.more_itertools import unique_everseen, partition import setuptools import setuptools.command from setuptools import windows_support from setuptools.monkey import get_unpatched from setuptools.config import setupcfg, pyprojecttoml from setuptools.discovery import ConfigDiscovery from setuptools.extern.packaging import version from . import _reqs from . import _entry_points from . 
import _normalization from ._importlib import metadata from .warnings import InformationOnly, SetuptoolsDeprecationWarning if TYPE_CHECKING: from email.message import Message __import__('setuptools.extern.packaging.specifiers') __import__('setuptools.extern.packaging.version') def get_metadata_version(self): mv = getattr(self, 'metadata_version', None) if mv is None: mv = version.Version('2.1') self.metadata_version = mv return mv def rfc822_unescape(content: str) -> str: """Reverse RFC-822 escaping by removing leading whitespaces from content.""" lines = content.splitlines() if len(lines) == 1: return lines[0].lstrip() return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:])))) def _read_field_from_msg(msg: "Message", field: str) -> Optional[str]: """Read Message header field.""" value = msg[field] if value == 'UNKNOWN': return None return value def _read_field_unescaped_from_msg(msg: "Message", field: str) -> Optional[str]: """Read Message header field and apply rfc822_unescape.""" value = _read_field_from_msg(msg, field) if value is None: return value return rfc822_unescape(value) def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]: """Read Message header field and return all results as list.""" values = msg.get_all(field, None) if values == []: return None return values def _read_payload_from_msg(msg: "Message") -> Optional[str]: value = msg.get_payload().strip() if value == 'UNKNOWN' or not value: return None return value def read_pkg_file(self, file): """Reads the metadata values from a file object.""" msg = message_from_file(file) self.metadata_version = version.Version(msg['metadata-version']) self.name = _read_field_from_msg(msg, 'name') self.version = _read_field_from_msg(msg, 'version') self.description = _read_field_from_msg(msg, 'summary') # we are filling author only. 
self.author = _read_field_from_msg(msg, 'author') self.maintainer = None self.author_email = _read_field_from_msg(msg, 'author-email') self.maintainer_email = None self.url = _read_field_from_msg(msg, 'home-page') self.download_url = _read_field_from_msg(msg, 'download-url') self.license = _read_field_unescaped_from_msg(msg, 'license') self.long_description = _read_field_unescaped_from_msg(msg, 'description') if ( self.long_description is None and self.metadata_version >= version.Version('2.1') ): self.long_description = _read_payload_from_msg(msg) self.description = _read_field_from_msg(msg, 'summary') if 'keywords' in msg: self.keywords = _read_field_from_msg(msg, 'keywords').split(',') self.platforms = _read_list_from_msg(msg, 'platform') self.classifiers = _read_list_from_msg(msg, 'classifier') # PEP 314 - these fields only exist in 1.1 if self.metadata_version == version.Version('1.1'): self.requires = _read_list_from_msg(msg, 'requires') self.provides = _read_list_from_msg(msg, 'provides') self.obsoletes = _read_list_from_msg(msg, 'obsoletes') else: self.requires = None self.provides = None self.obsoletes = None self.license_files = _read_list_from_msg(msg, 'license-file') def single_line(val): """ Quick and dirty validation for Summary pypa/setuptools#1390. """ if '\n' in val: # TODO: Replace with `raise ValueError("newlines not allowed")` # after reviewing #2893. msg = "newlines are not allowed in `summary` and will break in the future" SetuptoolsDeprecationWarning.emit("Invalid config.", msg) # due_date is undefined. Controversial change, there was a lot of push back. 
val = val.strip().split('\n')[0] return val # Based on Python 3.5 version def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME """Write the PKG-INFO format data to a file object.""" version = self.get_metadata_version() def write_field(key, value): file.write("%s: %s\n" % (key, value)) write_field('Metadata-Version', str(version)) write_field('Name', self.get_name()) write_field('Version', self.get_version()) summary = self.get_description() if summary: write_field('Summary', single_line(summary)) optional_fields = ( ('Home-page', 'url'), ('Download-URL', 'download_url'), ('Author', 'author'), ('Author-email', 'author_email'), ('Maintainer', 'maintainer'), ('Maintainer-email', 'maintainer_email'), ) for field, attr in optional_fields: attr_val = getattr(self, attr, None) if attr_val is not None: write_field(field, attr_val) license = self.get_license() if license: write_field('License', rfc822_escape(license)) for project_url in self.project_urls.items(): write_field('Project-URL', '%s, %s' % project_url) keywords = ','.join(self.get_keywords()) if keywords: write_field('Keywords', keywords) platforms = self.get_platforms() or [] for platform in platforms: write_field('Platform', platform) self._write_list(file, 'Classifier', self.get_classifiers()) # PEP 314 self._write_list(file, 'Requires', self.get_requires()) self._write_list(file, 'Provides', self.get_provides()) self._write_list(file, 'Obsoletes', self.get_obsoletes()) # Setuptools specific for PEP 345 if hasattr(self, 'python_requires'): write_field('Requires-Python', self.python_requires) # PEP 566 if self.long_description_content_type: write_field('Description-Content-Type', self.long_description_content_type) if self.provides_extras: for extra in self.provides_extras: write_field('Provides-Extra', extra) self._write_list(file, 'License-File', self.license_files or []) long_description = self.get_long_description() if long_description: file.write("\n%s" % long_description) if not 
long_description.endswith("\n"): file.write("\n") sequence = tuple, list def check_importable(dist, attr, value): try: ep = metadata.EntryPoint(value=value, name=None, group=None) assert not ep.extras except (TypeError, ValueError, AttributeError, AssertionError) as e: raise DistutilsSetupError( "%r must be importable 'module:attrs' string (got %r)" % (attr, value) ) from e def assert_string_list(dist, attr, value): """Verify that value is a string list""" try: # verify that value is a list or tuple to exclude unordered # or single-use iterables assert isinstance(value, (list, tuple)) # verify that elements of value are strings assert ''.join(value) != value except (TypeError, ValueError, AttributeError, AssertionError) as e: raise DistutilsSetupError( "%r must be a list of strings (got %r)" % (attr, value) ) from e def check_nsp(dist, attr, value): """Verify that namespace packages are valid""" ns_packages = value assert_string_list(dist, attr, ns_packages) for nsp in ns_packages: if not dist.has_contents_for(nsp): raise DistutilsSetupError( "Distribution contains no modules or packages for " + "namespace package %r" % nsp ) parent, sep, child = nsp.rpartition('.') if parent and parent not in ns_packages: distutils.log.warn( "WARNING: %r is declared as a package namespace, but %r" " is not: please correct this in setup.py", nsp, parent, ) SetuptoolsDeprecationWarning.emit( "The namespace_packages parameter is deprecated.", "Please replace its usage with implicit namespaces (PEP 420).", see_docs="references/keywords.html#keyword-namespace-packages" # TODO: define due_date, it may break old packages that are no longer # maintained (e.g. sphinxcontrib extensions) when installed from source. # Warning officially introduced in May 2022, however the deprecation # was mentioned much earlier in the docs (May 2020, see #2149). 
) def check_extras(dist, attr, value): """Verify that extras_require mapping is valid""" try: list(itertools.starmap(_check_extra, value.items())) except (TypeError, ValueError, AttributeError) as e: raise DistutilsSetupError( "'extras_require' must be a dictionary whose values are " "strings or lists of strings containing valid project/version " "requirement specifiers." ) from e def _check_extra(extra, reqs): name, sep, marker = extra.partition(':') try: _check_marker(marker) except packaging.markers.InvalidMarker: msg = f"Invalid environment marker: {marker} ({extra!r})" raise DistutilsSetupError(msg) from None list(_reqs.parse(reqs)) def _check_marker(marker): if not marker: return m = packaging.markers.Marker(marker) m.evaluate() def assert_bool(dist, attr, value): """Verify that value is True, False, 0, or 1""" if bool(value) != value: tmpl = "{attr!r} must be a boolean value (got {value!r})" raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) def invalid_unless_false(dist, attr, value): if not value: DistDeprecationWarning.emit(f"{attr} is ignored.") # TODO: should there be a `due_date` here? 
return raise DistutilsSetupError(f"{attr} is invalid.") def check_requirements(dist, attr, value): """Verify that install_requires is a valid requirements list""" try: list(_reqs.parse(value)) if isinstance(value, (dict, set)): raise TypeError("Unordered types are not allowed") except (TypeError, ValueError) as error: tmpl = ( "{attr!r} must be a string or list of strings " "containing valid project/version requirement specifiers; {error}" ) raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error def check_specifier(dist, attr, value): """Verify that value is a valid version specifier""" try: packaging.specifiers.SpecifierSet(value) except (packaging.specifiers.InvalidSpecifier, AttributeError) as error: tmpl = ( "{attr!r} must be a string " "containing valid version specifiers; {error}" ) raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error def check_entry_points(dist, attr, value): """Verify that entry_points map is parseable""" try: _entry_points.load(value) except Exception as e: raise DistutilsSetupError(e) from e def check_test_suite(dist, attr, value): if not isinstance(value, str): raise DistutilsSetupError("test_suite must be a string") def check_package_data(dist, attr, value): """Verify that value is a dictionary of package names to glob lists""" if not isinstance(value, dict): raise DistutilsSetupError( "{!r} must be a dictionary mapping package names to lists of " "string wildcard patterns".format(attr) ) for k, v in value.items(): if not isinstance(k, str): raise DistutilsSetupError( "keys of {!r} dict must be strings (got {!r})".format(attr, k) ) assert_string_list(dist, 'values of {!r} dict'.format(attr), v) def check_packages(dist, attr, value): for pkgname in value: if not re.match(r'\w+(\.\w+)*', pkgname): distutils.log.warn( "WARNING: %r not a valid package name; please use only " ".-separated package names in setup.py", pkgname, ) _Distribution = get_unpatched(distutils.core.Distribution) class 
Distribution(_Distribution):
    """Distribution with support for tests and package data

    This is an enhanced version of 'distutils.dist.Distribution' that
    effectively adds the following new optional keyword arguments to 'setup()':

     'install_requires' -- a string or sequence of strings specifying project
        versions that the distribution requires when installed, in the format
        used by 'pkg_resources.require()'.  They will be installed
        automatically when the package is installed.  If you wish to use
        packages that are not available in PyPI, or want to give your users an
        alternate download location, you can add a 'find_links' option to the
        '[easy_install]' section of your project's 'setup.cfg' file, and then
        setuptools will scan the listed web pages for links that satisfy the
        requirements.

     'extras_require' -- a dictionary mapping names of optional "extras" to the
        additional requirement(s) that using those extras incurs. For example,
        this::

            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])

        indicates that the distribution can optionally provide an extra
        capability called "reST", but it can only be used if docutils and
        reSTedit are installed.  If the user installs your package using
        EasyInstall and requests one of your extras, the corresponding
        additional requirements will be installed if needed.

     'test_suite' -- the name of a test suite to run for the 'test' command.
        If the user runs 'python setup.py test', the package will be installed,
        and the named test suite will be run.  The format is the same as
        would be used on a 'unittest.py' command line.  That is, it is the
        dotted name of an object to import and call to generate a test suite.

     'package_data' -- a dictionary mapping package names to lists of filenames
        or globs to use to find data files contained in the named packages.
        If the dictionary has filenames or globs listed under '""' (the empty
        string), those names will be searched for in every package, in addition
        to any names for the specific package.  Data files found using these
        names/globs will be installed along with the package, in the same
        location as the package.  Note that globs are allowed to reference
        the contents of non-package subdirectories, as long as you use '/' as
        a path separator.  (Globs are automatically converted to
        platform-specific paths at runtime.)

    In addition to these new keywords, this class also has several new methods
    for manipulating the distribution's contents.  For example, the 'include()'
    and 'exclude()' methods can be thought of as in-place add and subtract
    commands that add or remove packages, modules, extensions, and so on from
    the distribution.
    """

    # Metadata fields setuptools supports but plain distutils does not;
    # each maps to a zero-arg factory producing the field's default value.
    _DISTUTILS_UNSUPPORTED_METADATA = {
        'long_description_content_type': lambda: None,
        'project_urls': dict,
        'provides_extras': ordered_set.OrderedSet,
        'license_file': lambda: None,
        'license_files': lambda: None,
    }

    # Set by patch_missing_pkg_info when an installed dist's version is faked up.
    _patched_dist = None

    def patch_missing_pkg_info(self, attrs):
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        name = _normalization.safe_name(str(attrs['name'])).lower()
        with suppress(metadata.PackageNotFoundError):
            dist = metadata.distribution(name)
            # Only patch when the installed dist has no PKG-INFO of its own.
            if dist is not None and not dist.read_text('PKG-INFO'):
                dist._version = _normalization.safe_version(str(attrs['version']))
                self._patched_dist = dist

    def __init__(self, attrs=None):
        """Pop setuptools-specific options out of ``attrs`` before handing the
        remainder to the stock distutils ``Distribution`` constructor."""
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        attrs = attrs or {}
        self.dist_files = []
        # Filter-out setuptools' specific options.
        self.src_root = attrs.pop("src_root", None)
        self.patch_missing_pkg_info(attrs)
        self.dependency_links = attrs.pop('dependency_links', [])
        self.setup_requires = attrs.pop('setup_requires', [])
        # Pre-declare an attribute for every registered setup keyword so
        # plugins can rely on it existing (defaulting to None).
        for ep in metadata.entry_points(group='distutils.setup_keywords'):
            vars(self).setdefault(ep.name, None)
        _Distribution.__init__(
            self,
            {
                k: v
                for k, v in attrs.items()
                if k not in self._DISTUTILS_UNSUPPORTED_METADATA
            },
        )

        # Private API (setuptools-use only, not restricted to Distribution)
        # Stores files that are referenced by the configuration and need to be
        # in the sdist (e.g. `version = file: VERSION.txt`)
        self._referenced_files: Set[str] = set()

        # Save the original dependencies before they are processed into the
        # egg format
        self._orig_extras_require = {}
        self._orig_install_requires = []
        self._tmp_extras_require = defaultdict(ordered_set.OrderedSet)

        self.set_defaults = ConfigDiscovery(self)

        self._set_metadata_defaults(attrs)

        self.metadata.version = self._normalize_version(
            self._validate_version(self.metadata.version)
        )
        self._finalize_requires()

    def _validate_metadata(self):
        """Raise ``DistutilsSetupError`` if mandatory metadata is missing."""
        required = {"name"}
        provided = {
            key
            for key in vars(self.metadata)
            if getattr(self.metadata, key, None) is not None
        }
        missing = required - provided

        if missing:
            msg = f"Required package metadata is missing: {missing}"
            raise DistutilsSetupError(msg)

    def _set_metadata_defaults(self, attrs):
        """
        Fill-in missing metadata fields not supported by distutils.
        Some fields may have been set by other tools (e.g. pbr).
        Those fields (vars(self.metadata)) take precedence to
        supplied attrs.
        """
        for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items():
            vars(self.metadata).setdefault(option, attrs.get(option, default()))

    @staticmethod
    def _normalize_version(version):
        # setuptools.sic wraps a version the user wants left untouched.
        if isinstance(version, setuptools.sic) or version is None:
            return version

        normalized = str(packaging.version.Version(version))
        if version != normalized:
            InformationOnly.emit(f"Normalizing '{version}' to '{normalized}'")
            return normalized
        return version

    @staticmethod
    def _validate_version(version):
        if isinstance(version, numbers.Number):
            # Some people apparently take "version number" too literally :)
            version = str(version)

        if version is not None:
            try:
                packaging.version.Version(version)
            except (packaging.version.InvalidVersion, TypeError):
                SetuptoolsDeprecationWarning.emit(
                    f"Invalid version: {version!r}.",
                    """
                    The version specified is not a valid version according to PEP 440.
                    This may not work as expected with newer versions of
                    setuptools, pip, and PyPI.
                    """,
                    see_url="https://peps.python.org/pep-0440/",
                    due_date=(2023, 9, 26),
                    # Warning initially introduced in 26 Sept 2014
                    # pypa/packaging already removed legacy versions.
                )
                # Wrap in sic so the invalid version is passed through as-is.
                return setuptools.sic(version)
        return version

    def _finalize_requires(self):
        """
        Set `metadata.python_requires` and fix environment markers
        in `install_requires` and `extras_require`.
        """
        if getattr(self, 'python_requires', None):
            self.metadata.python_requires = self.python_requires

        if getattr(self, 'extras_require', None):
            # Save original before it is messed by _convert_extras_requirements
            self._orig_extras_require = self._orig_extras_require or self.extras_require
            for extra in self.extras_require.keys():
                # Since this gets called multiple times at points where the
                # keys have become 'converted' extras, ensure that we are only
                # truly adding extras we haven't seen before here.
                extra = extra.split(':')[0]
                if extra:
                    self.metadata.provides_extras.add(extra)

        if getattr(self, 'install_requires', None) and not self._orig_install_requires:
            # Save original before it is messed by _move_install_requirements_markers
            self._orig_install_requires = self.install_requires

        self._convert_extras_requirements()
        self._move_install_requirements_markers()

    def _convert_extras_requirements(self):
        """
        Convert requirements in `extras_require` of the form
        `"extra": ["barbazquux; {marker}"]` to
        `"extra:{marker}": ["barbazquux"]`.
        """
        spec_ext_reqs = getattr(self, 'extras_require', None) or {}
        tmp = defaultdict(ordered_set.OrderedSet)
        self._tmp_extras_require = getattr(self, '_tmp_extras_require', tmp)
        for section, v in spec_ext_reqs.items():
            # Do not strip empty sections.
            self._tmp_extras_require[section]
            for r in _reqs.parse(v):
                # Requirements with a marker get routed to a "name:marker" key.
                suffix = self._suffix_for(r)
                self._tmp_extras_require[section + suffix].append(r)

    @staticmethod
    def _suffix_for(req):
        """
        For a requirement, return the 'extras_require' suffix for
        that requirement.
        """
        return ':' + str(req.marker) if req.marker else ''

    def _move_install_requirements_markers(self):
        """
        Move requirements in `install_requires` that are using environment
        markers `extras_require`.
        """
        # divide the install_requires into two sets, simple ones still
        # handled by install_requires and more complex ones handled
        # by extras_require.

        def is_simple_req(req):
            # "Simple" means: carries no environment marker.
            return not req.marker

        spec_inst_reqs = getattr(self, 'install_requires', None) or ()
        inst_reqs = list(_reqs.parse(spec_inst_reqs))
        simple_reqs = filter(is_simple_req, inst_reqs)
        complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs)
        self.install_requires = list(map(str, simple_reqs))

        for r in complex_reqs:
            # Marker-bearing requirements become an unnamed ":marker" extra.
            self._tmp_extras_require[':' + str(r.marker)].append(r)
        self.extras_require = dict(
            # list(dict.fromkeys(...)) ensures a list of unique strings
            (k, list(dict.fromkeys(str(r) for r in map(self._clean_req, v))))
            for k, v in self._tmp_extras_require.items()
        )

    def _clean_req(self, req):
        """
        Given a Requirement, remove environment markers and return it.
        """
        req.marker = None
        return req

    def _finalize_license_files(self):
        """Compute names of all license files which should be included."""
        license_files: Optional[List[str]] = self.metadata.license_files
        patterns: List[str] = license_files if license_files else []

        license_file: Optional[str] = self.metadata.license_file
        if license_file and license_file not in patterns:
            patterns.append(license_file)

        if license_files is None and license_file is None:
            # Default patterns match the ones wheel uses
            # See https://wheel.readthedocs.io/en/stable/user_guide.html
            # -> 'Including license files in the generated wheel file'
            patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')

        self.metadata.license_files = list(
            unique_everseen(self._expand_patterns(patterns))
        )

    @staticmethod
    def _expand_patterns(patterns):
        """
        >>> list(Distribution._expand_patterns(['LICENSE']))
        ['LICENSE']
        >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*']))
        ['setup.cfg', 'LICENSE']
        """
        return (
            path
            for pattern in patterns
            for path in sorted(iglob(pattern))
            if not path.endswith('~') and os.path.isfile(path)
        )

    # FIXME: 'Distribution._parse_config_files' is too complex (14)
    def _parse_config_files(self, filenames=None):  # noqa: C901
        """
        Adapted from distutils.dist.Distribution.parse_config_files,
        this method provides the same functionality in subtly-improved ways.
        """
        from configparser import ConfigParser

        # Ignore install directory options if we have a venv
        ignore_options = (
            []
            if sys.prefix == sys.base_prefix
            else [
                'install-base',
                'install-platbase',
                'install-lib',
                'install-platlib',
                'install-purelib',
                'install-headers',
                'install-scripts',
                'install-data',
                'prefix',
                'exec-prefix',
                'home',
                'user',
                'root',
            ]
        )

        ignore_options = frozenset(ignore_options)

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG:
            self.announce("Distribution.parse_config_files():")

        parser = ConfigParser()
        # Preserve option-name case (default optionxform lowercases).
        parser.optionxform = str
        for filename in filenames:
            with io.open(filename, encoding='utf-8') as reader:
                if DEBUG:
                    # NOTE(review): message does not include the filename —
                    # upstream uses "  reading {filename}"; the placeholder
                    # appears to have been lost here. TODO confirm.
                    self.announce(" reading (unknown)".format(**locals()))
                parser.read_file(reader)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt == '__name__' or opt in ignore_options:
                        continue

                    val = parser.get(section, opt)
                    opt = self.warn_dash_deprecation(opt, section)
                    opt = self.make_option_lowercase(opt, section)
                    opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        if 'global' not in self.command_options:
            return

        # If there was a "global" section in the config file, use it
        # to set Distribution options.

        for (opt, (src, val)) in self.command_options['global'].items():
            alias = self.negative_opt.get(opt)
            if alias:
                val = not strtobool(val)
            elif opt in ('verbose', 'dry_run'):  # ugh!
                val = strtobool(val)

            try:
                setattr(self, alias or opt, val)
            except ValueError as e:
                raise DistutilsOptionError(e) from e

    def warn_dash_deprecation(self, opt, section):
        """Translate dash-separated option names to underscores, emitting a
        deprecation warning; returns the underscore form."""
        if section in (
            'options.extras_require',
            'options.data_files',
        ):
            # These sections legitimately use arbitrary keys; leave untouched.
            return opt

        underscore_opt = opt.replace('-', '_')
        commands = list(itertools.chain(
            distutils.command.__all__,
            self._setuptools_commands(),
        ))
        if (
            not section.startswith('options')
            and section != 'metadata'
            and section not in commands
        ):
            return underscore_opt

        if '-' in opt:
            SetuptoolsDeprecationWarning.emit(
                "Invalid dash-separated options",
                f"""
                Usage of dash-separated {opt!r} will not be supported in future
                versions. Please use the underscore name {underscore_opt!r} instead.
                """,
                see_docs="userguide/declarative_config.html",
                due_date=(2023, 9, 26),
                # Warning initially introduced in 3 Mar 2021
            )
        return underscore_opt

    def _setuptools_commands(self):
        """Names of commands provided by the installed setuptools dist."""
        try:
            return metadata.distribution('setuptools').entry_points.names
        except metadata.PackageNotFoundError:
            # during bootstrapping, distribution doesn't exist
            return []

    def make_option_lowercase(self, opt, section):
        """Lowercase uppercase [metadata] keys, emitting a deprecation warning."""
        if section != 'metadata' or opt.islower():
            return opt

        lowercase_opt = opt.lower()
        SetuptoolsDeprecationWarning.emit(
            "Invalid uppercase configuration",
            f"""
            Usage of uppercase key {opt!r} in {section!r} will not be supported in
            future versions. Please use lowercase {lowercase_opt!r} instead.
            """,
            see_docs="userguide/declarative_config.html",
            due_date=(2023, 9, 26),
            # Warning initially introduced in 6 Mar 2021
        )
        return lowercase_opt

    # FIXME: 'Distribution._set_command_options' is too complex (14)
    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
        """
        Set the options for 'command_obj' from 'option_dict'.  Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').

        'command_obj' must be a Command instance.  If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').

        (Adopted from distutils.dist.Distribution._set_command_options)
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)

        if DEBUG:
            self.announce(" setting options for '%s' command:" % command_name)
        for (option, (source, value)) in option_dict.items():
            if DEBUG:
                self.announce(" %s = %s (from %s)" % (option, value, source))
            try:
                bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}

            try:
                is_string = isinstance(value, str)
                # Config-file values arrive as strings and may need coercion;
                # values set programmatically are applied verbatim.
                if option in neg_opt and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError(
                        "error in %s: command '%s' has no such option '%s'"
                        % (source, command_name, option)
                    )
            except ValueError as e:
                raise DistutilsOptionError(e) from e

    def _get_project_config_files(self, filenames):
        """Add default file and split between INI and TOML"""
        tomlfiles = []
        standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml")
        if filenames is not None:
            parts = partition(lambda f: Path(f).suffix == ".toml", filenames)
            filenames = list(parts[0])  # 1st element => predicate is False
            tomlfiles = list(parts[1])  # 2nd element => predicate is True
        elif standard_project_metadata.exists():
            tomlfiles = [standard_project_metadata]
        return filenames, tomlfiles

    def parse_config_files(self, filenames=None, ignore_option_errors=False):
        """Parses configuration files from various levels
        and loads configuration.
        """
        inifiles, tomlfiles = self._get_project_config_files(filenames)

        self._parse_config_files(filenames=inifiles)

        setupcfg.parse_configuration(
            self, self.command_options, ignore_option_errors=ignore_option_errors
        )
        for filename in tomlfiles:
            pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)

        # Re-run finalization: config files may have changed requirements
        # and license metadata.
        self._finalize_requires()
        self._finalize_license_files()

    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        from setuptools.installer import _fetch_build_eggs

        return _fetch_build_eggs(self, requires)

    def finalize_options(self):
        """
        Allow plugins to apply arbitrary operations to the
        distribution. Each hook may optionally define a 'order'
        to influence the order of execution. Smaller numbers
        go first and the default is 0.
        """
        group = 'setuptools.finalize_distribution_options'

        def by_order(hook):
            return getattr(hook, 'order', 0)

        defined = metadata.entry_points(group=group)
        filtered = itertools.filterfalse(self._removed, defined)
        loaded = map(lambda e: e.load(), filtered)
        for ep in sorted(loaded, key=by_order):
            ep(self)

    @staticmethod
    def _removed(ep):
        """
        When removing an entry point, if metadata is loaded
        from an older version of Setuptools, that removed entry point
        will attempt to be loaded and will fail.
        See #2765 for more details.
        """
        removed = {
            # removed 2021-09-05
            '2to3_doctests',
        }
        return ep.name in removed

    def _finalize_setup_keywords(self):
        """Run each registered setup-keyword hook for attributes that were set."""
        for ep in metadata.entry_points(group='distutils.setup_keywords'):
            value = getattr(self, ep.name, None)
            if value is not None:
                ep.load()(self, ep.name, value)

    def get_egg_cache_dir(self):
        """Return the '.eggs' cache directory, creating it (with a README) on
        first use."""
        egg_cache_dir = os.path.join(os.curdir, '.eggs')
        if not os.path.exists(egg_cache_dir):
            os.mkdir(egg_cache_dir)
            windows_support.hide_file(egg_cache_dir)
            readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
            with open(readme_txt_filename, 'w') as f:
                f.write(
                    'This directory contains eggs that were downloaded '
                    'by setuptools to build, test, and run plug-ins.\n\n'
                )
                f.write(
                    'This directory caches those eggs to prevent '
                    'repeated downloads.\n\n'
                )
                f.write('However, it is safe to delete this directory.\n\n')

        return egg_cache_dir

    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""
        from setuptools.installer import fetch_build_egg

        return fetch_build_egg(self, req)

    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        if command in self.cmdclass:
            return self.cmdclass[command]

        # Look up the command via entry points, caching on first hit;
        # the for/else falls through to the distutils lookup when no
        # entry point matched.
        eps = metadata.entry_points(group='distutils.commands', name=command)
        for ep in eps:
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)

    def print_commands(self):
        """Load all entry-point commands into cmdclass, then defer to distutils."""
        for ep in metadata.entry_points(group='distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)

    def get_command_list(self):
        """Load all entry-point commands into cmdclass, then defer to distutils."""
        for ep in metadata.entry_points(group='distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.get_command_list(self)

    def include(self, **attrs):
        """Add items to distribution that are named in keyword arguments

        For example, 'dist.include(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.

        Currently, this method only supports inclusion for attributes that are
        lists or tuples.  If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        for k, v in attrs.items():
            include = getattr(self, '_include_' + k, None)
            if include:
                include(v)
            else:
                self._include_misc(k, v)

    def exclude_package(self, package):
        """Remove packages, modules, and extensions in named package"""
        pfx = package + '.'
        if self.packages:
            self.packages = [
                p for p in self.packages if p != package and not p.startswith(pfx)
            ]

        if self.py_modules:
            self.py_modules = [
                p for p in self.py_modules if p != package and not p.startswith(pfx)
            ]

        if self.ext_modules:
            self.ext_modules = [
                p
                for p in self.ext_modules
                if p.name != package and not p.name.startswith(pfx)
            ]

    def has_contents_for(self, package):
        """Return true if 'exclude_package(package)' would do something"""
        pfx = package + '.'

        for p in self.iter_distribution_names():
            if p == package or p.startswith(pfx):
                return True

    def _exclude_misc(self, name, value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError as e:
            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
        if old is not None and not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        elif old:
            setattr(self, name, [item for item in old if item not in value])

    def _include_misc(self, name, value):
        """Handle 'include()' for list/tuple attrs without a special handler"""

        if not isinstance(value, sequence):
            raise DistutilsSetupError("%s: setting must be a list (%r)" % (name, value))
        try:
            old = getattr(self, name)
        except AttributeError as e:
            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
        if old is None:
            setattr(self, name, value)
        elif not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        else:
            # Append only items not already present, preserving order.
            new = [item for item in value if item not in old]
            setattr(self, name, old + new)

    def exclude(self, **attrs):
        """Remove items from distribution that are named in keyword arguments

        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
        the distribution's 'py_modules' attribute.  Excluding packages uses
        the 'exclude_package()' method, so all of the package's contained
        packages, modules, and extensions are also excluded.

        Currently, this method only supports exclusion from attributes that are
        lists or tuples.  If you need to add support for excluding from other
        attributes in this or a subclass, you can add an '_exclude_X' method,
        where 'X' is the name of the attribute.  The method will be called with
        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
        handle whatever special exclusion logic is needed.
        """
        for k, v in attrs.items():
            exclude = getattr(self, '_exclude_' + k, None)
            if exclude:
                exclude(v)
            else:
                self._exclude_misc(k, v)

    def _exclude_packages(self, packages):
        if not isinstance(packages, sequence):
            raise DistutilsSetupError(
                "packages: setting must be a list or tuple (%r)" % (packages,)
            )
        list(map(self.exclude_package, packages))

    def _parse_command_opts(self, parser, args):
        """Expand command aliases and handle argument-consuming commands before
        delegating to the distutils parser."""
        # Remove --with-X/--without-X options when processing command args
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt

        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src, alias = aliases[command]
            del aliases[command]  # ensure each alias can expand only once!
            import shlex

            args[:1] = shlex.split(alias, True)
            command = args[0]

        nargs = _Distribution._parse_command_opts(self, parser, args)

        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class, 'command_consumes_arguments', None):
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []

        return nargs

    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options

        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores.  If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.

        Note that options provided by config files are intentionally excluded.
        """

        d = {}

        for cmd, opts in self.command_options.items():
            for opt, (src, val) in opts.items():
                if src != "command line":
                    continue

                opt = opt.replace('_', '-')

                if val == 0:
                    # A zero value means a negative option was used; map it
                    # back to the positive option name with no value.
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                    for neg, pos in neg_opt.items():
                        if pos == opt:
                            opt = neg
                            val = None
                            break
                    else:
                        raise AssertionError("Shouldn't be able to get here")

                elif val == 1:
                    val = None

                d.setdefault(cmd, {})[opt] = val

        return d

    def iter_distribution_names(self):
        """Yield all packages, modules, and extension names in distribution"""

        for pkg in self.packages or ():
            yield pkg

        for module in self.py_modules or ():
            yield module

        for ext in self.ext_modules or ():
            if isinstance(ext, tuple):
                name, buildinfo = ext
            else:
                name = ext.name
            if name.endswith('module'):
                name = name[:-6]
            yield name

    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys

        if self.help_commands:
            return _Distribution.handle_display_options(self, option_order)

        # Stdout may be StringIO (e.g. in tests)
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)

        # Don't wrap stdout if utf-8 is already the encoding. Provides
        # workaround for #334.
        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
            return _Distribution.handle_display_options(self, option_order)

        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        sys.stdout.reconfigure(encoding='utf-8')
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            # Always restore the original encoding afterwards.
            sys.stdout.reconfigure(encoding=encoding)

    def run_command(self, command):
        """Run 'command', applying auto-discovered defaults first."""
        self.set_defaults()
        # Postpone defaults until all explicit configuration is considered
        # (setup() args, config files, command line and plugins)

        super().run_command(command)


class DistDeprecationWarning(SetuptoolsDeprecationWarning):
    """Class for warning about deprecations in dist in
    setuptools. Not ignored by default, unlike DeprecationWarning."""