index
int64
0
731k
package
stringlengths
2
98
name
stringlengths
1
76
docstring
stringlengths
0
281k
code
stringlengths
4
1.07M
signature
stringlengths
2
42.8k
24,349
gotrue._sync.gotrue_client
sign_in_with_sso
Attempts a single-sign on using an enterprise Identity Provider. A successful SSO attempt will redirect the current page to the identity provider authorization page. The redirect URL is implementation and SSO protocol specific. You can use it by providing a SSO domain. Typically you can extract this domain by asking users for their email address. If this domain is registered on the Auth instance the redirect will use that organization's currently active SSO Identity Provider for the login. If you have built an organization-specific login page, you can use the organization's SSO Identity Provider UUID directly instead.
def sign_in_with_sso(self, credentials: SignInWithSSOCredentials):
    """Attempt single sign-on via an enterprise Identity Provider.

    A successful SSO attempt redirects to the identity provider's
    authorization page; the redirect URL is implementation and SSO
    protocol specific. Provide either a registered SSO ``domain``
    (typically extracted from the user's email address) or the
    organization's SSO Identity Provider UUID (``provider_id``).
    """
    self._remove_session()
    provider_id = credentials.get("provider_id")
    domain = credentials.get("domain")
    options = credentials.get("options", {})
    redirect_to = options.get("redirect_to")
    captcha_token = options.get("captcha_token")
    # HTTPX currently does not follow redirects:
    # https://www.python-httpx.org/compatibility/
    # Unlike the JS client, Python runs server side, so an automatic
    # browser redirect for the user is not possible.
    skip_http_redirect = options.get("skip_http_redirect", True)
    if not domain and not provider_id:
        raise AuthInvalidCredentialsError(
            "You must provide either a domain or provider_id"
        )
    body = {
        "skip_http_redirect": skip_http_redirect,
        "gotrue_meta_security": {
            "captcha_token": captcha_token,
        },
    }
    # Domain takes precedence when both are supplied (matches the
    # original branch order).
    if domain:
        body["domain"] = domain
    else:
        body["provider_id"] = provider_id
    return self._request(
        "POST",
        "sso",
        body=body,
        redirect_to=redirect_to,
        xform=parse_sso_response,
    )
(self, credentials: 'SignInWithSSOCredentials')
24,350
gotrue._sync.gotrue_client
sign_out
Inside a browser context, `sign_out` will remove the logged in user from the browser session and log them out - removing all items from localstorage and then trigger a `"SIGNED_OUT"` event. For server-side management, you can revoke all refresh tokens for a user by passing a user's JWT through to `api.sign_out`. There is no way to revoke a user's access token jwt until it expires. It is recommended to set a shorter expiry on the jwt for this reason.
def sign_out(self, options: "Optional[SignOutOptions]" = None) -> None:
    """Sign the current user out.

    In a browser context this removes the logged-in user from the
    session, clears local storage, and triggers a ``"SIGNED_OUT"``
    event. Server side, all refresh tokens for a user can be revoked
    by passing the user's JWT through to ``api.sign_out``. An access
    token JWT cannot be revoked before it expires, so prefer a short
    JWT expiry.

    Parameters
    ----------
    options
        Sign-out options; defaults to ``{"scope": "global"}``.
        (Fix: the original used a mutable dict as the default
        argument, which is shared across all calls.)
    """
    if options is None:
        options = {"scope": "global"}
    # Best-effort: ignore API errors so local sign-out still happens.
    with suppress(AuthApiError):
        session = self.get_session()
        access_token = session.access_token if session else None
        if access_token:
            self.admin.sign_out(access_token, options["scope"])
    if options["scope"] != "others":
        self._remove_session()
        self._notify_all_subscribers("SIGNED_OUT", None)
(self, options: gotrue.types.SignOutOptions = {'scope': 'global'}) -> NoneType
24,351
gotrue._sync.gotrue_client
sign_up
Creates a new user.
def sign_up(
    self,
    credentials: SignUpWithPasswordCredentials,
) -> AuthResponse:
    """Create a new user from email+password or phone+password credentials."""
    self._remove_session()
    email = credentials.get("email")
    phone = credentials.get("phone")
    password = credentials.get("password")
    options = credentials.get("options", {})
    redirect_to = options.get("redirect_to")
    data = options.get("data") or {}
    captcha_token = options.get("captcha_token")
    if not email and not phone:
        raise AuthInvalidCredentialsError(
            "You must provide either an email or phone number and a password"
        )
    body = {
        "password": password,
        "data": data,
        "gotrue_meta_security": {
            "captcha_token": captcha_token,
        },
    }
    if email:
        body["email"] = email
        # redirect_to only applies to the email confirmation flow.
        response = self._request(
            "POST",
            "signup",
            body=body,
            redirect_to=redirect_to,
            xform=parse_auth_response,
        )
    else:
        body["phone"] = phone
        response = self._request(
            "POST",
            "signup",
            body=body,
            xform=parse_auth_response,
        )
    if response.session:
        self._save_session(response.session)
        self._notify_all_subscribers("SIGNED_IN", response.session)
    return response
(self, credentials: Union[gotrue.types.SignUpWithEmailAndPasswordCredentials, gotrue.types.SignUpWithPhoneAndPasswordCredentials]) -> gotrue.types.AuthResponse
24,352
gotrue._sync.gotrue_client
unlink_identity
null
def unlink_identity(self, identity):
    """POST to the identities endpoint to unlink the given identity."""
    endpoint = f"/user/identities/{identity.id}"
    return self._request("POST", endpoint)
(self, identity)
24,353
gotrue._sync.gotrue_client
update_user
Updates user data, if there is a logged in user.
def update_user(self, attributes: UserAttributes) -> UserResponse:
    """Update the logged-in user's data; raises if no session exists."""
    session = self.get_session()
    if session is None:
        raise AuthSessionMissingError()
    response = self._request(
        "PUT",
        "user",
        body=attributes,
        jwt=session.access_token,
        xform=parse_user_response,
    )
    # Keep the cached session's user in sync with the server response.
    session.user = response.user
    self._save_session(session)
    self._notify_all_subscribers("USER_UPDATED", session)
    return response
(self, attributes: gotrue.types.UserAttributes) -> gotrue.types.UserResponse
24,354
gotrue._sync.gotrue_client
verify_otp
Log in a user given a User supplied OTP received via mobile.
def verify_otp(self, params: VerifyOtpParams) -> AuthResponse:
    """Log in a user given a user-supplied OTP received via mobile."""
    self._remove_session()
    # Hoist the options lookup (the original fetched it twice).
    opts = params.get("options", {})
    response = self._request(
        "POST",
        "verify",
        body={
            "gotrue_meta_security": {
                "captcha_token": opts.get("captcha_token"),
            },
            **params,
        },
        redirect_to=opts.get("redirect_to"),
        xform=parse_auth_response,
    )
    if response.session:
        self._save_session(response.session)
        self._notify_all_subscribers("SIGNED_IN", response.session)
    return response
(self, params: Union[gotrue.types.VerifyEmailOtpParams, gotrue.types.VerifyMobileOtpParams, gotrue.types.VerifyTokenHashParams]) -> gotrue.types.AuthResponse
24,355
supabase.lib.realtime_client
SupabaseRealtimeClient
null
class SupabaseRealtimeClient:
    """Wrapper over a realtime Socket channel for one schema (and table)."""

    def __init__(self, socket: Socket, schema: str, table_name: str):
        # "*" subscribes to every table in the schema.
        if table_name == "*":
            topic = f"realtime:{schema}"
        else:
            topic = f"realtime:{schema}:{table_name}"
        self.subscription = socket.set_channel(topic)

    @staticmethod
    def get_payload_records(payload: Any):
        """Extract new/old record dicts from a change payload."""
        records: dict = {"new": {}, "old": {}}
        if payload.type in ["INSERT", "UPDATE"]:
            records["new"] = payload.record
            # NOTE(review): convert_change_data's return value is
            # discarded here — confirm whether it mutates in place.
            convert_change_data(payload.columns, payload.record)
        if payload.type in ["UPDATE", "DELETE"]:
            # NOTE(review): "old" is filled from payload.record while the
            # conversion uses payload.old_record — looks inconsistent;
            # verify upstream intent before changing.
            records["old"] = payload.record
            convert_change_data(payload.columns, payload.old_record)
        return records

    def on(self, event, callback: Callable[..., Any]):
        """Register callback for event; payloads are enriched with metadata."""

        def _handler(payload):
            enriched = {
                "schema": payload.schema,
                "table": payload.table,
                "commit_timestamp": payload.commit_timestamp,
                "event_type": payload.type,
                "new": {},
                "old": {},
            }
            enriched = {**enriched, **self.get_payload_records(payload)}
            callback(enriched)

        self.subscription.join().on(event, _handler)
        return self

    def subscribe(self, callback: Callable[..., Any]):
        """Join the channel and wire status callbacks; returns the subscription."""
        # TODO: Handle state change callbacks for error and close
        # NOTE(review): callback("SUBSCRIBED") is invoked immediately and
        # its return value registered as the "ok" handler — confirm this
        # is the intended contract before altering it.
        self.subscription.join().on("ok", callback("SUBSCRIBED"))
        self.subscription.join().on(
            "error", lambda x: callback("SUBSCRIPTION_ERROR", x)
        )
        self.subscription.join().on(
            "timeout", lambda: callback("RETRYING_AFTER_TIMEOUT")
        )
        return self.subscription
(socket: realtime.connection.Socket, schema: str, table_name: str)
24,356
supabase.lib.realtime_client
__init__
null
def __init__(self, socket: Socket, schema: str, table_name: str):
    """Create a channel for the schema, or for one table when not "*"."""
    if table_name == "*":
        topic = f"realtime:{schema}"
    else:
        topic = f"realtime:{schema}:{table_name}"
    self.subscription = socket.set_channel(topic)
(self, socket: realtime.connection.Socket, schema: str, table_name: str)
24,357
supabase.lib.realtime_client
get_payload_records
null
@staticmethod
def get_payload_records(payload: Any):
    """Extract new/old record dicts from a realtime change payload."""
    records: dict = {"new": {}, "old": {}}
    if payload.type in ["INSERT", "UPDATE"]:
        records["new"] = payload.record
        convert_change_data(payload.columns, payload.record)
    if payload.type in ["UPDATE", "DELETE"]:
        # NOTE(review): "old" comes from payload.record while the
        # conversion uses payload.old_record — verify upstream intent.
        records["old"] = payload.record
        convert_change_data(payload.columns, payload.old_record)
    return records
(payload: Any)
24,358
supabase.lib.realtime_client
on
null
def on(self, event, callback: Callable[..., Any]):
    """Register callback for event; payloads are enriched with metadata."""

    def _handler(payload):
        enriched = {
            "schema": payload.schema,
            "table": payload.table,
            "commit_timestamp": payload.commit_timestamp,
            "event_type": payload.type,
            "new": {},
            "old": {},
        }
        enriched = {**enriched, **self.get_payload_records(payload)}
        callback(enriched)

    self.subscription.join().on(event, _handler)
    return self
(self, event, callback: Callable[..., Any])
24,359
supabase.lib.realtime_client
subscribe
null
def subscribe(self, callback: Callable[..., Any]):
    """Join the channel and wire status callbacks; returns the subscription."""
    # TODO: Handle state change callbacks for error and close
    # NOTE(review): callback("SUBSCRIBED") is invoked immediately and its
    # return value registered as the "ok" handler — confirm intent.
    self.subscription.join().on("ok", callback("SUBSCRIBED"))
    self.subscription.join().on(
        "error", lambda x: callback("SUBSCRIPTION_ERROR", x)
    )
    self.subscription.join().on(
        "timeout", lambda: callback("RETRYING_AFTER_TIMEOUT")
    )
    return self.subscription
(self, callback: Callable[..., Any])
24,360
storage3._sync.client
SyncStorageClient
Manage storage buckets and files.
class SyncStorageClient(SyncStorageBucketAPI):
    """Manage storage buckets and files."""

    def __init__(
        self, url: str, headers: dict[str, str], timeout: int = DEFAULT_TIMEOUT
    ) -> None:
        # Caller headers win over the default User-Agent.
        merged = {
            "User-Agent": f"supabase-py/storage3 v{__version__}",
            **headers,
        }
        self.session = self._create_session(url, merged, timeout)
        super().__init__(self.session)

    def _create_session(
        self, base_url: str, headers: dict[str, str], timeout: int
    ) -> SyncClient:
        return SyncClient(base_url=base_url, headers=headers, timeout=timeout)

    def __enter__(self) -> SyncStorageClient:
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        self.aclose()

    def aclose(self) -> None:
        """Close the underlying HTTP session."""
        self.session.aclose()

    def from_(self, id: str) -> SyncBucketProxy:
        """Run a storage file operation.

        Parameters
        ----------
        id
            The unique identifier of the bucket
        """
        # NOTE(review): uses self._client, presumably set by the base
        # class from the session passed to super().__init__ — confirm.
        return SyncBucketProxy(id, self._client)
(url: 'str', headers: 'dict[str, str]', timeout: 'int' = 20) -> 'None'
24,361
storage3._sync.client
__enter__
null
def __enter__(self) -> SyncStorageClient:
    """Enter the context manager, returning the client itself."""
    return self
(self) -> storage3._sync.client.SyncStorageClient
24,362
storage3._sync.client
__exit__
null
def __exit__(self, exc_type, exc, tb) -> None:
    """Close the client when leaving the context manager."""
    self.aclose()
(self, exc_type, exc, tb) -> NoneType
24,364
storage3._sync.client
_create_session
null
def _create_session(
    self, base_url: str, headers: dict[str, str], timeout: int
) -> SyncClient:
    """Build the underlying HTTP client for this storage client."""
    return SyncClient(
        base_url=base_url,
        headers=headers,
        timeout=timeout,
    )
(self, base_url: str, headers: dict[str, str], timeout: int) -> storage3.utils.SyncClient
24,365
storage3._sync.bucket
_request
null
def _request(
    self,
    method: RequestMethod,
    url: str,
    json: Optional[dict[Any, Any]] = None,
) -> Response:
    """Issue a request and raise StorageException on HTTP error status."""
    resp = self._client.request(method, url, json=json)
    try:
        resp.raise_for_status()
    except HTTPError:
        # Surface the server's error body plus the status code.
        raise StorageException({**resp.json(), "statusCode": resp.status_code})
    return resp
(self, method: Literal['GET', 'POST', 'DELETE', 'PUT', 'HEAD'], url: str, json: Optional[dict[Any, Any]] = None) -> httpx.Response
24,366
storage3._sync.client
aclose
null
def aclose(self) -> None:
    """Close the underlying HTTP session."""
    self.session.aclose()
(self) -> NoneType
24,367
storage3._sync.bucket
create_bucket
Creates a new storage bucket. Parameters ---------- id A unique identifier for the bucket you are creating. name A name for the bucket you are creating. If not passed, the id is used as the name as well. options Extra options to send while creating the bucket. Valid options are `public`, `file_size_limit` and `allowed_mime_types`.
def create_bucket(
    self,
    id: str,
    name: Optional[str] = None,
    options: Optional[CreateOrUpdateBucketOptions] = None,
) -> dict[str, str]:
    """Create a new storage bucket.

    Parameters
    ----------
    id
        A unique identifier for the bucket you are creating.
    name
        A name for the bucket; the id is used when omitted.
    options
        Extra creation options: `public`, `file_size_limit` and
        `allowed_mime_types`.
    """
    body: dict[str, Any] = {"id": id, "name": name or id}
    if options:
        body.update(**options)
    res = self._request("POST", "/bucket", json=body)
    return res.json()
(self, id: str, name: Optional[str] = None, options: Optional[storage3.types.CreateOrUpdateBucketOptions] = None) -> dict[str, str]
24,368
storage3._sync.bucket
delete_bucket
Deletes an existing bucket. Note that you cannot delete buckets with existing objects inside. You must first `empty()` the bucket. Parameters ---------- id The unique identifier of the bucket you would like to delete.
def delete_bucket(self, id: str) -> dict[str, str]:
    """Delete an existing bucket.

    Buckets with objects inside cannot be deleted; `empty()` them first.

    Parameters
    ----------
    id
        The unique identifier of the bucket you would like to delete.
    """
    res = self._request("DELETE", f"/bucket/{id}", json={})
    return res.json()
(self, id: str) -> dict[str, str]
24,369
storage3._sync.bucket
empty_bucket
Removes all objects inside a single bucket. Parameters ---------- id The unique identifier of the bucket you would like to empty.
def empty_bucket(self, id: str) -> dict[str, str]:
    """Remove all objects inside a single bucket.

    Parameters
    ----------
    id
        The unique identifier of the bucket you would like to empty.
    """
    res = self._request("POST", f"/bucket/{id}/empty", json={})
    return res.json()
(self, id: str) -> dict[str, str]
24,370
storage3._sync.client
from_
Run a storage file operation. Parameters ---------- id The unique identifier of the bucket
def from_(self, id: str) -> SyncBucketProxy:
    """Run a storage file operation.

    Parameters
    ----------
    id
        The unique identifier of the bucket
    """
    return SyncBucketProxy(id, self._client)
(self, id: str) -> storage3._sync.file_api.SyncBucketProxy
24,371
storage3._sync.bucket
get_bucket
Retrieves the details of an existing storage bucket. Parameters ---------- id The unique identifier of the bucket you would like to retrieve.
def get_bucket(self, id: str) -> SyncBucket:
    """Retrieve the details of an existing storage bucket.

    Parameters
    ----------
    id
        The unique identifier of the bucket you would like to retrieve.
    """
    res = self._request("GET", f"/bucket/{id}")
    payload = res.json()
    return SyncBucket(**payload, _client=self._client)
(self, id: str) -> storage3._sync.file_api.SyncBucket
24,372
storage3._sync.bucket
list_buckets
Retrieves the details of all storage buckets within an existing product.
def list_buckets(self) -> list[SyncBucket]:
    """Retrieve the details of all storage buckets within an existing product."""
    # A non-erroring request is assured to return a list.
    res = self._request("GET", "/bucket")
    return [SyncBucket(**item, _client=self._client) for item in res.json()]
(self) -> list[storage3._sync.file_api.SyncBucket]
24,373
storage3._sync.bucket
update_bucket
Update a storage bucket. Parameters ---------- id The unique identifier of the bucket you would like to update. options The properties you want to update. Valid options are `public`, `file_size_limit` and `allowed_mime_types`.
def update_bucket(
    self, id: str, options: CreateOrUpdateBucketOptions
) -> dict[str, str]:
    """Update a storage bucket.

    Parameters
    ----------
    id
        The unique identifier of the bucket you would like to update.
    options
        The properties to update: `public`, `file_size_limit` and
        `allowed_mime_types`.
    """
    body = {"id": id, "name": id, **options}
    res = self._request("PUT", f"/bucket/{id}", json=body)
    return res.json()
(self, id: str, options: storage3.types.CreateOrUpdateBucketOptions) -> dict[str, str]
24,376
supabase._async.client
create_client
Create client function to instantiate supabase client like JS runtime. Parameters ---------- supabase_url: str The URL to the Supabase instance that should be connected to. supabase_key: str The API key to the Supabase instance that should be connected to. **options Any extra settings to be optionally specified - also see the `DEFAULT_OPTIONS` dict. Examples -------- Instantiating the client. >>> import os >>> from supabase import create_client, Client >>> >>> url: str = os.environ.get("SUPABASE_TEST_URL") >>> key: str = os.environ.get("SUPABASE_TEST_KEY") >>> supabase: Client = create_client(url, key) Returns ------- Client
def _listen_to_auth_events(
    self, event: AuthChangeEvent, session: Union[Session, None]
):
    """Refresh the auth header and drop cached sub-clients on auth changes."""
    access_token = self.supabase_key
    if event in ["SIGNED_IN", "TOKEN_REFRESHED", "SIGNED_OUT"]:
        # Reset lazily-built clients so they pick up the new credentials.
        self._postgrest = None
        self._storage = None
        self._functions = None
        if session:
            access_token = session.access_token
    self.options.headers["Authorization"] = self._create_auth_header(access_token)
(supabase_url: str, supabase_key: str, options: Optional[supabase.lib.client_options.ClientOptions] = None) -> supabase._async.client.AsyncClient
24,377
supabase._sync.client
create_client
Create client function to instantiate supabase client like JS runtime. Parameters ---------- supabase_url: str The URL to the Supabase instance that should be connected to. supabase_key: str The API key to the Supabase instance that should be connected to. **options Any extra settings to be optionally specified - also see the `DEFAULT_OPTIONS` dict. Examples -------- Instantiating the client. >>> import os >>> from supabase import create_client, Client >>> >>> url: str = os.environ.get("SUPABASE_TEST_URL") >>> key: str = os.environ.get("SUPABASE_TEST_KEY") >>> supabase: Client = create_client(url, key) Returns ------- Client
def create_client(
    supabase_url: str,
    supabase_key: str,
    options: Union[ClientOptions, None] = None,
) -> SyncClient:
    """Instantiate a Supabase client, mirroring the JS runtime factory.

    Parameters
    ----------
    supabase_url: str
        The URL of the Supabase instance to connect to.
    supabase_key: str
        The API key of the Supabase instance to connect to.
    options
        Optional extra settings — see the `DEFAULT_OPTIONS` dict.

    Examples
    --------
    >>> import os
    >>> from supabase import create_client, Client
    >>>
    >>> url: str = os.environ.get("SUPABASE_TEST_URL")
    >>> key: str = os.environ.get("SUPABASE_TEST_KEY")
    >>> supabase: Client = create_client(url, key)

    Returns
    -------
    Client
    """
    return SyncClient.create(
        supabase_url=supabase_url,
        supabase_key=supabase_key,
        options=options,
    )
(supabase_url: str, supabase_key: str, options: Optional[supabase.lib.client_options.ClientOptions] = None) -> supabase._sync.client.SyncClient
24,379
py3rijndael.paddings
Pkcs7Padding
Technique for padding a string as defined in RFC 2315, section 10.3, note #2
class Pkcs7Padding(PaddingBase):
    """
    Technique for padding a string as defined in RFC 2315, section 10.3,
    note #2
    """

    def encode(self, source):
        # Pad length is in 1..block_size; aligned input gets a full block.
        amount = self.block_size - (len(source) % self.block_size)
        if amount == 0:  # kept from the original; unreachable in practice
            amount = self.block_size
        return source + chr(amount).encode() * amount

    def decode(self, source):
        # Last byte encodes the pad length; no validation is performed.
        pad_len = source[-1]
        return source[:-pad_len]
(block_size)
24,380
py3rijndael.paddings
__init__
null
def __init__(self, block_size):
    """Store the padding block size."""
    self.block_size = block_size
(self, block_size)
24,381
py3rijndael.paddings
decode
null
def decode(self, source):
    """Strip PKCS#7 padding using the count stored in the last byte."""
    pad_len = source[-1]
    return source[:-pad_len]
(self, source)
24,382
py3rijndael.paddings
encode
null
def encode(self, source):
    """Append PKCS#7 padding; aligned input gets a full extra block."""
    amount = self.block_size - (len(source) % self.block_size)
    if amount == 0:  # kept from the original; unreachable in practice
        amount = self.block_size
    return source + chr(amount).encode() * amount
(self, source)
24,383
py3rijndael.rijndael
Rijndael
null
class Rijndael:
    """Rijndael block cipher: key schedule plus single-block encrypt/decrypt."""

    def __init__(self, key, block_size: int = 16):
        if block_size not in (16, 24, 32):
            raise ValueError('Invalid block size: %s' % str(block_size))
        if len(key) not in (16, 24, 32):
            raise ValueError('Invalid key size: %s' % str(len(key)))
        self.block_size = block_size
        self.key = key
        rounds = num_rounds[len(key)][block_size]
        b_c = block_size // 4
        # encryption round keys
        k_e = [[0] * b_c for _ in range(rounds + 1)]
        # decryption round keys
        k_d = [[0] * b_c for _ in range(rounds + 1)]
        round_key_count = (rounds + 1) * b_c
        k_c = len(key) // 4
        # copy user material bytes into temporary ints
        tk = []
        for i in range(0, k_c):
            tk.append(
                (ord(key[i * 4:i * 4 + 1]) << 24) |
                (ord(key[i * 4 + 1:i * 4 + 1 + 1]) << 16) |
                (ord(key[i * 4 + 2:i * 4 + 2 + 1]) << 8) |
                ord(key[i * 4 + 3:i * 4 + 3 + 1])
            )
        # copy values into round key arrays
        t = 0
        j = 0
        while j < k_c and t < round_key_count:
            k_e[t // b_c][t % b_c] = tk[j]
            k_d[rounds - (t // b_c)][t % b_c] = tk[j]
            j += 1
            t += 1
        r_con_pointer = 0
        while t < round_key_count:
            # extrapolate using phi (the round key evolution function)
            tt = tk[k_c - 1]
            tk[0] ^= (
                (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^
                (S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^
                (S[tt & 0xFF] & 0xFF) << 8 ^
                (S[(tt >> 24) & 0xFF] & 0xFF) ^
                (r_con[r_con_pointer] & 0xFF) << 24
            )
            r_con_pointer += 1
            if k_c != 8:
                for i in range(1, k_c):
                    tk[i] ^= tk[i - 1]
            else:
                for i in range(1, k_c // 2):
                    tk[i] ^= tk[i - 1]
                tt = tk[k_c // 2 - 1]
                tk[k_c // 2] ^= (
                    (S[tt & 0xFF] & 0xFF) ^
                    (S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^
                    (S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^
                    (S[(tt >> 24) & 0xFF] & 0xFF) << 24
                )
                for i in range(k_c // 2 + 1, k_c):
                    tk[i] ^= tk[i - 1]
            # copy values into round key arrays
            j = 0
            while j < k_c and t < round_key_count:
                k_e[t // b_c][t % b_c] = tk[j]
                k_d[rounds - (t // b_c)][t % b_c] = tk[j]
                j += 1
                t += 1
        # inverse MixColumn where needed
        for r in range(1, rounds):
            for j in range(b_c):
                tt = k_d[r][j]
                k_d[r][j] = (
                    U1[(tt >> 24) & 0xFF] ^
                    U2[(tt >> 16) & 0xFF] ^
                    U3[(tt >> 8) & 0xFF] ^
                    U4[tt & 0xFF]
                )
        self.Ke = k_e
        self.Kd = k_d

    def encrypt(self, source):
        """Encrypt exactly one block of plaintext bytes."""
        if len(source) != self.block_size:
            raise ValueError(
                'Wrong block length, expected %s got %s' % (
                    str(self.block_size), str(len(source))
                )
            )
        k_e = self.Ke
        b_c = self.block_size // 4
        rounds = len(k_e) - 1
        if b_c == 4:
            s_c = 0
        elif b_c == 6:
            s_c = 1
        else:
            s_c = 2
        s1 = shifts[s_c][1][0]
        s2 = shifts[s_c][2][0]
        s3 = shifts[s_c][3][0]
        a = [0] * b_c  # temporary work array
        t = []
        # source to ints + key
        for i in range(b_c):
            t.append(
                (ord(source[i * 4:i * 4 + 1]) << 24 |
                 ord(source[i * 4 + 1:i * 4 + 1 + 1]) << 16 |
                 ord(source[i * 4 + 2:i * 4 + 2 + 1]) << 8 |
                 ord(source[i * 4 + 3:i * 4 + 3 + 1])) ^ k_e[0][i]
            )
        # apply round transforms
        for r in range(1, rounds):
            for i in range(b_c):
                a[i] = (
                    T1[(t[i] >> 24) & 0xFF] ^
                    T2[(t[(i + s1) % b_c] >> 16) & 0xFF] ^
                    T3[(t[(i + s2) % b_c] >> 8) & 0xFF] ^
                    T4[t[(i + s3) % b_c] & 0xFF]
                ) ^ k_e[r][i]
            t = copy.copy(a)
        # last round is special
        result = []
        for i in range(b_c):
            tt = k_e[rounds][i]
            result.append((S[(t[i] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((S[(t[(i + s1) % b_c] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((S[(t[(i + s2) % b_c] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((S[t[(i + s3) % b_c] & 0xFF] ^ tt) & 0xFF)
        return bytes(result)

    def decrypt(self, cipher):
        """Decrypt exactly one block of ciphertext bytes."""
        if len(cipher) != self.block_size:
            raise ValueError(
                'wrong block length, expected %s got %s' % (
                    str(self.block_size), str(len(cipher))
                )
            )
        k_d = self.Kd
        b_c = self.block_size // 4
        rounds = len(k_d) - 1
        if b_c == 4:
            s_c = 0
        elif b_c == 6:
            s_c = 1
        else:
            s_c = 2
        s1 = shifts[s_c][1][1]
        s2 = shifts[s_c][2][1]
        s3 = shifts[s_c][3][1]
        a = [0] * b_c  # temporary work array
        t = [0] * b_c
        # cipher to ints + key
        for i in range(b_c):
            t[i] = (
                ord(cipher[i * 4:i * 4 + 1]) << 24 |
                ord(cipher[i * 4 + 1:i * 4 + 1 + 1]) << 16 |
                ord(cipher[i * 4 + 2:i * 4 + 2 + 1]) << 8 |
                ord(cipher[i * 4 + 3:i * 4 + 3 + 1])
            ) ^ k_d[0][i]
        # apply round transforms
        for r in range(1, rounds):
            for i in range(b_c):
                a[i] = (
                    T5[(t[i] >> 24) & 0xFF] ^
                    T6[(t[(i + s1) % b_c] >> 16) & 0xFF] ^
                    T7[(t[(i + s2) % b_c] >> 8) & 0xFF] ^
                    T8[t[(i + s3) % b_c] & 0xFF]
                ) ^ k_d[r][i]
            t = copy.copy(a)
        # last round is special
        result = []
        for i in range(b_c):
            tt = k_d[rounds][i]
            result.append((Si[(t[i] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
            result.append((Si[(t[(i + s1) % b_c] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
            result.append((Si[(t[(i + s2) % b_c] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
            result.append((Si[t[(i + s3) % b_c] & 0xFF] ^ tt) & 0xFF)
        return bytes(result)
(key, block_size: int = 16)
24,384
py3rijndael.rijndael
__init__
null
def __init__(self, key, block_size: int = 16):
    """Validate sizes and expand the key schedule (Ke/Kd round keys)."""
    if block_size not in (16, 24, 32):
        raise ValueError('Invalid block size: %s' % str(block_size))
    if len(key) not in (16, 24, 32):
        raise ValueError('Invalid key size: %s' % str(len(key)))
    self.block_size = block_size
    self.key = key
    rounds = num_rounds[len(key)][block_size]
    b_c = block_size // 4
    # encryption round keys
    k_e = [[0] * b_c for _ in range(rounds + 1)]
    # decryption round keys
    k_d = [[0] * b_c for _ in range(rounds + 1)]
    round_key_count = (rounds + 1) * b_c
    k_c = len(key) // 4
    # copy user material bytes into temporary ints
    tk = []
    for i in range(0, k_c):
        tk.append(
            (ord(key[i * 4:i * 4 + 1]) << 24) |
            (ord(key[i * 4 + 1:i * 4 + 1 + 1]) << 16) |
            (ord(key[i * 4 + 2:i * 4 + 2 + 1]) << 8) |
            ord(key[i * 4 + 3:i * 4 + 3 + 1])
        )
    # copy values into round key arrays
    t = 0
    j = 0
    while j < k_c and t < round_key_count:
        k_e[t // b_c][t % b_c] = tk[j]
        k_d[rounds - (t // b_c)][t % b_c] = tk[j]
        j += 1
        t += 1
    r_con_pointer = 0
    while t < round_key_count:
        # extrapolate using phi (the round key evolution function)
        tt = tk[k_c - 1]
        tk[0] ^= (
            (S[(tt >> 16) & 0xFF] & 0xFF) << 24 ^
            (S[(tt >> 8) & 0xFF] & 0xFF) << 16 ^
            (S[tt & 0xFF] & 0xFF) << 8 ^
            (S[(tt >> 24) & 0xFF] & 0xFF) ^
            (r_con[r_con_pointer] & 0xFF) << 24
        )
        r_con_pointer += 1
        if k_c != 8:
            for i in range(1, k_c):
                tk[i] ^= tk[i - 1]
        else:
            for i in range(1, k_c // 2):
                tk[i] ^= tk[i - 1]
            tt = tk[k_c // 2 - 1]
            tk[k_c // 2] ^= (
                (S[tt & 0xFF] & 0xFF) ^
                (S[(tt >> 8) & 0xFF] & 0xFF) << 8 ^
                (S[(tt >> 16) & 0xFF] & 0xFF) << 16 ^
                (S[(tt >> 24) & 0xFF] & 0xFF) << 24
            )
            for i in range(k_c // 2 + 1, k_c):
                tk[i] ^= tk[i - 1]
        # copy values into round key arrays
        j = 0
        while j < k_c and t < round_key_count:
            k_e[t // b_c][t % b_c] = tk[j]
            k_d[rounds - (t // b_c)][t % b_c] = tk[j]
            j += 1
            t += 1
    # inverse MixColumn where needed
    for r in range(1, rounds):
        for j in range(b_c):
            tt = k_d[r][j]
            k_d[r][j] = (
                U1[(tt >> 24) & 0xFF] ^
                U2[(tt >> 16) & 0xFF] ^
                U3[(tt >> 8) & 0xFF] ^
                U4[tt & 0xFF]
            )
    self.Ke = k_e
    self.Kd = k_d
(self, key, block_size: int = 16)
24,385
py3rijndael.rijndael
decrypt
null
def decrypt(self, cipher):
    """Decrypt exactly one block of ciphertext bytes."""
    if len(cipher) != self.block_size:
        raise ValueError(
            'wrong block length, expected %s got %s' % (
                str(self.block_size), str(len(cipher))
            )
        )
    k_d = self.Kd
    b_c = self.block_size // 4
    rounds = len(k_d) - 1
    if b_c == 4:
        s_c = 0
    elif b_c == 6:
        s_c = 1
    else:
        s_c = 2
    s1 = shifts[s_c][1][1]
    s2 = shifts[s_c][2][1]
    s3 = shifts[s_c][3][1]
    a = [0] * b_c  # temporary work array
    t = [0] * b_c
    # cipher to ints + key
    for i in range(b_c):
        t[i] = (
            ord(cipher[i * 4:i * 4 + 1]) << 24 |
            ord(cipher[i * 4 + 1:i * 4 + 1 + 1]) << 16 |
            ord(cipher[i * 4 + 2:i * 4 + 2 + 1]) << 8 |
            ord(cipher[i * 4 + 3:i * 4 + 3 + 1])
        ) ^ k_d[0][i]
    # apply round transforms
    for r in range(1, rounds):
        for i in range(b_c):
            a[i] = (
                T5[(t[i] >> 24) & 0xFF] ^
                T6[(t[(i + s1) % b_c] >> 16) & 0xFF] ^
                T7[(t[(i + s2) % b_c] >> 8) & 0xFF] ^
                T8[t[(i + s3) % b_c] & 0xFF]
            ) ^ k_d[r][i]
        t = copy.copy(a)
    # last round is special
    result = []
    for i in range(b_c):
        tt = k_d[rounds][i]
        result.append((Si[(t[i] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
        result.append((Si[(t[(i + s1) % b_c] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
        result.append((Si[(t[(i + s2) % b_c] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
        result.append((Si[t[(i + s3) % b_c] & 0xFF] ^ tt) & 0xFF)
    return bytes(result)
(self, cipher)
24,386
py3rijndael.rijndael
encrypt
null
def encrypt(self, source):
    """Encrypt exactly one block of plaintext bytes."""
    if len(source) != self.block_size:
        raise ValueError(
            'Wrong block length, expected %s got %s' % (
                str(self.block_size), str(len(source))
            )
        )
    k_e = self.Ke
    b_c = self.block_size // 4
    rounds = len(k_e) - 1
    if b_c == 4:
        s_c = 0
    elif b_c == 6:
        s_c = 1
    else:
        s_c = 2
    s1 = shifts[s_c][1][0]
    s2 = shifts[s_c][2][0]
    s3 = shifts[s_c][3][0]
    a = [0] * b_c  # temporary work array
    t = []
    # source to ints + key
    for i in range(b_c):
        t.append(
            (ord(source[i * 4:i * 4 + 1]) << 24 |
             ord(source[i * 4 + 1:i * 4 + 1 + 1]) << 16 |
             ord(source[i * 4 + 2:i * 4 + 2 + 1]) << 8 |
             ord(source[i * 4 + 3:i * 4 + 3 + 1])) ^ k_e[0][i]
        )
    # apply round transforms
    for r in range(1, rounds):
        for i in range(b_c):
            a[i] = (
                T1[(t[i] >> 24) & 0xFF] ^
                T2[(t[(i + s1) % b_c] >> 16) & 0xFF] ^
                T3[(t[(i + s2) % b_c] >> 8) & 0xFF] ^
                T4[t[(i + s3) % b_c] & 0xFF]
            ) ^ k_e[r][i]
        t = copy.copy(a)
    # last round is special
    result = []
    for i in range(b_c):
        tt = k_e[rounds][i]
        result.append((S[(t[i] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF)
        result.append((S[(t[(i + s1) % b_c] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF)
        result.append((S[(t[(i + s2) % b_c] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF)
        result.append((S[t[(i + s3) % b_c] & 0xFF] ^ tt) & 0xFF)
    return bytes(result)
(self, source)
24,387
py3rijndael.rijndael
RijndaelCbc
null
class RijndaelCbc(Rijndael):
    """Rijndael in CBC mode with a pluggable padding scheme."""

    def __init__(self, key: bytes, iv: bytes, padding: PaddingBase, block_size: int = 16):
        super().__init__(key=key, block_size=block_size)
        self.iv = iv
        self.padding = padding

    def encrypt(self, source: bytes):
        """Pad the plaintext and encrypt it block-by-block with chaining."""
        ppt = self.padding.encode(source)
        ct = bytes()
        v = self.iv
        for offset in range(0, len(ppt), self.block_size):
            block = self.x_or_block(ppt[offset:offset + self.block_size], v)
            block = super().encrypt(block)
            ct += block
            v = block  # chain: next block is XORed with this ciphertext
        return ct

    def decrypt(self, cipher):
        """Decrypt block-by-block with chaining, then strip padding."""
        assert len(cipher) % self.block_size == 0
        ppt = bytes()
        v = self.iv
        for offset in range(0, len(cipher), self.block_size):
            block = cipher[offset:offset + self.block_size]
            ppt += self.x_or_block(super().decrypt(block), v)
            v = block
        return self.padding.decode(ppt)

    def x_or_block(self, b1, b2):
        """XOR two block_size-length byte strings byte-by-byte."""
        out = bytes()
        for i in range(self.block_size):
            out += bytes([ord(b1[i:i + 1]) ^ ord(b2[i:i + 1])])
        return out
(key: bytes, iv: bytes, padding: py3rijndael.paddings.PaddingBase, block_size: int = 16)
24,388
py3rijndael.rijndael
__init__
null
def __init__(self, key: bytes, iv: bytes, padding: PaddingBase, block_size: int = 16):
    """Set up the underlying cipher, the IV, and the padding scheme."""
    super().__init__(key=key, block_size=block_size)
    self.iv = iv
    self.padding = padding
(self, key: bytes, iv: bytes, padding: py3rijndael.paddings.PaddingBase, block_size: int = 16)
24,389
py3rijndael.rijndael
decrypt
null
def decrypt(self, cipher):
    """CBC-decrypt ``cipher`` and strip padding.

    Args:
        cipher: ciphertext whose length is a multiple of ``self.block_size``.

    Returns:
        The unpadded plaintext bytes.
    """
    # NOTE(review): `assert` is stripped under python -O; an explicit
    # ValueError would validate input length more robustly.
    assert len(cipher) % self.block_size == 0
    ppt = bytes()
    offset = 0
    v = self.iv  # chaining value: IV for the first block
    while offset < len(cipher):
        block = cipher[offset:offset + self.block_size]
        decrypted = super().decrypt(block)
        # CBC: plaintext block = D(ciphertext block) XOR previous ciphertext (or IV)
        ppt += self.x_or_block(decrypted, v)
        offset += self.block_size
        v = block
    pt = self.padding.decode(ppt)
    return pt
(self, cipher)
24,390
py3rijndael.rijndael
encrypt
null
def encrypt(self, source: bytes):
    """CBC-encrypt ``source``.

    The plaintext is padded to a whole number of blocks, then each block
    is XORed with the previous ciphertext block (IV for the first) and
    encrypted with the underlying Rijndael cipher.

    Args:
        source: plaintext bytes of any length.

    Returns:
        The full ciphertext bytes.
    """
    ppt = self.padding.encode(source)
    offset = 0
    ct = bytes()
    v = self.iv  # chaining value: IV for the first block
    while offset < len(ppt):
        block = ppt[offset:offset + self.block_size]
        block = self.x_or_block(block, v)
        block = super().encrypt(block)
        ct += block
        offset += self.block_size
        v = block  # next block chains on this ciphertext block
    return ct
(self, source: bytes)
24,391
py3rijndael.rijndael
x_or_block
null
def x_or_block(self, b1, b2):
    """Return the byte-wise XOR of the first ``block_size`` bytes of b1 and b2.

    Inputs longer than one block are truncated to ``self.block_size``.
    """
    size = self.block_size
    return bytes(b1[j] ^ b2[j] for j in range(size))
(self, b1, b2)
24,392
py3rijndael.paddings
ZeroPadding
Specified for hashes and MACs as Padding Method 1 in ISO/IEC 10118-1 and ISO/IEC 9797-1.
class ZeroPadding(PaddingBase):
    """
    Specified for hashes and MACs as Padding Method 1
    in ISO/IEC 10118-1 and ISO/IEC 9797-1.
    """

    def encode(self, source):
        """Append NUL bytes so the length becomes a multiple of block_size."""
        fill = -len(source) % self.block_size
        return source + b'\0' * fill

    def decode(self, source):
        """Strip trailing NUL padding from the final block.

        At most ``block_size - 1`` bytes are removed (padding never spans
        more than one block); an all-zero final block keeps its first byte.
        """
        assert len(source) % self.block_size == 0
        if not source:
            return b''
        floor = len(source) - self.block_size + 1
        keep = max(len(source.rstrip(b'\0')), floor)
        return source[:keep]
(block_size)
24,394
py3rijndael.paddings
decode
null
def decode(self, source):
    """Strip trailing NUL padding from the final block of ``source``.

    Only the last block is inspected, so at most ``block_size - 1`` bytes
    are removed; an all-zero final block keeps its first byte.
    """
    assert len(source) % self.block_size == 0
    if not source:
        return b''
    # Padding never spans more than one block: never cut below this index.
    floor = len(source) - self.block_size + 1
    keep = max(len(source.rstrip(b'\0')), floor)
    return source[:keep]
(self, source)
24,395
py3rijndael.paddings
encode
null
def encode(self, source):
    """Pad ``source`` with NUL bytes up to the next multiple of block_size.

    Input that is already block-aligned (including empty input) is
    returned unchanged.
    """
    fill = -len(source) % self.block_size
    return source + b'\0' * fill
(self, source)
24,399
okta_jwt_verifier.jwt_verifier
AccessTokenVerifier
null
class AccessTokenVerifier():
    """Convenience verifier for Okta access tokens.

    Thin wrapper around ``BaseJWTVerifier`` with a stub client_id, since
    access-token verification does not check the client id claim.
    """

    def __init__(self, issuer=None, audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
        """
        Args:
            issuer: string, full URI of the token issuer, required
            audience: string, expected audience, optional
            request_executor: RequestExecutor class or its subclass, optional
            max_retries: int, number of times to retry a failed network request, optional
            request_timeout: int, max request timeout, optional
            max_requests: int, max number of concurrent requests
            leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
            cache_jwks: bool, optional
        """
        # client_id is irrelevant for access tokens, so a stub value is used.
        self._jwt_verifier = BaseJWTVerifier(issuer=issuer,
                                             client_id='client_id_stub',
                                             audience=audience,
                                             request_executor=request_executor,
                                             max_retries=max_retries,
                                             request_timeout=request_timeout,
                                             max_requests=max_requests,
                                             leeway=leeway,
                                             cache_jwks=cache_jwks,
                                             proxy=proxy)

    async def verify(self, token, claims_to_verify=('iss', 'aud', 'exp')):
        """Verify an access token; raises on any validation failure."""
        await self._jwt_verifier.verify_access_token(token, claims_to_verify)
(issuer=None, audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,400
okta_jwt_verifier.jwt_verifier
__init__
Args: issuer: string, full URI of the token issuer, required audience: string, expected audience, optional request_executor: RequestExecutor class or its subclass, optional max_retries: int, number of times to retry a failed network request, optional request_timeout: int, max request timeout, optional max_requests: int, max number of concurrent requests leeway: int, amount of time to expand the window for token expiration (to work around clock skew) cache_jwks: bool, optional
def __init__(self, issuer=None, audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
    """
    Args:
        issuer: string, full URI of the token issuer, required
        audience: string, expected audience, optional
        request_executor: RequestExecutor class or its subclass, optional
        max_retries: int, number of times to retry a failed network request, optional
        request_timeout: int, max request timeout, optional
        max_requests: int, max number of concurrent requests
        leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
        cache_jwks: bool, optional
    """
    # client_id is not checked for access tokens, so a stub value is passed.
    self._jwt_verifier = BaseJWTVerifier(issuer=issuer,
                                         client_id='client_id_stub',
                                         audience=audience,
                                         request_executor=request_executor,
                                         max_retries=max_retries,
                                         request_timeout=request_timeout,
                                         max_requests=max_requests,
                                         leeway=leeway,
                                         cache_jwks=cache_jwks,
                                         proxy=proxy)
(self, issuer=None, audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,401
okta_jwt_verifier.jwt_verifier
verify
null
# NOTE(review): this dataset row is labelled "verify" with signature
# (self, token, claims_to_verify=('iss', 'aud', 'exp')), but the code below is
# AccessTokenVerifier.__init__ — presumably an extraction error; confirm
# against the okta-jwt-verifier source.
def __init__(self, issuer=None, audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
    """
    Args:
        issuer: string, full URI of the token issuer, required
        audience: string, expected audience, optional
        request_executor: RequestExecutor class or its subclass, optional
        max_retries: int, number of times to retry a failed network request, optional
        request_timeout: int, max request timeout, optional
        max_requests: int, max number of concurrent requests
        leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
        cache_jwks: bool, optional
    """
    self._jwt_verifier = BaseJWTVerifier(issuer=issuer,
                                         client_id='client_id_stub',
                                         audience=audience,
                                         request_executor=request_executor,
                                         max_retries=max_retries,
                                         request_timeout=request_timeout,
                                         max_requests=max_requests,
                                         leeway=leeway,
                                         cache_jwks=cache_jwks,
                                         proxy=proxy)
(self, token, claims_to_verify=('iss', 'aud', 'exp'))
24,402
okta_jwt_verifier.jwt_verifier
BaseJWTVerifier
null
class BaseJWTVerifier():
    """Core Okta JWT verifier: downloads JWKs and validates access/id tokens."""

    def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
        """
        Args:
            issuer: string, full URI of the token issuer, required
            client_id: string, expected client_id, required
            audience: string, expected audience, optional
            request_executor: RequestExecutor class or its subclass, optional
            max_retries: int, number of times to retry a failed network request, optional
            request_timeout: int, max request timeout, optional
            max_requests: int, max number of concurrent requests
            leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
            cache_jwks: bool, optional
        """
        # validate input data before any processing
        config = {'issuer': issuer,
                  'client_id': client_id,
                  'audience': audience,
                  'max_retries': max_retries,
                  'request_timeout': request_timeout,
                  'max_requests': max_requests,
                  'leeway': leeway,
                  'cache_jwks': cache_jwks}
        ConfigValidator(config).validate_config()
        self.issuer = issuer
        self.client_id = client_id
        self.audience = audience
        self.request_executor = request_executor(max_retries=max_retries,
                                                 max_requests=max_requests,
                                                 request_timeout=request_timeout,
                                                 proxy=proxy)
        self.max_retries = max_retries
        self.request_timeout = request_timeout
        self.max_requests = max_requests
        self.leeway = leeway
        self.cache_jwks = cache_jwks

    def parse_token(self, token):
        """Parse JWT token, get headers, claims and signature.

        Return:
            tuple (headers, claims, signing_input, signature)
        """
        return JWTUtils.parse_token(token)

    async def verify_access_token(self, token, claims_to_verify=('iss', 'aud', 'exp')):
        """Verify access token.

        Algorithm:
        1. Retrieve and parse your Okta JSON Web Keys (JWK), which should be
           checked periodically and cached by your application.
        2. Decode the access token, which is in JSON Web Token format.
        3. Verify the signature used to sign the access token.
        4. Verify the claims found inside the access token.

        Default claims to verify for access token:
        'exp' Expiration - The time after which the token is invalid.
        'iss' Issuer - The principal that issued the JWT.
        'aud' Audience - The recipient that the JWT is intended for.

        Raise an Exception if any validation fails, return None otherwise.
        """
        try:
            headers, claims, signing_input, signature = self.parse_token(token)
            if headers.get('alg') != 'RS256':
                raise JWTValidationException('Header claim "alg" is invalid.')
            self.verify_claims(claims,
                               claims_to_verify=claims_to_verify,
                               leeway=self.leeway)
            okta_jwk = await self.get_jwk(headers['kid'])
            self.verify_signature(token, okta_jwk)
        except JWTValidationException:
            raise
        except Exception as err:
            # wrap any unexpected failure so callers see a single exception type
            raise JWTValidationException(str(err))

    async def verify_id_token(self, token, claims_to_verify=('iss', 'exp'), nonce=None):
        """Verify id token.

        Algorithm:
        1. Retrieve and parse your Okta JSON Web Keys (JWK), which should be
           checked periodically and cached by your application.
        2. Decode the access token, which is in JSON Web Token format.
        3. Verify the signature used to sign the access token.
        4. Verify the claims found inside the access token.
        5. Verify claim "cid" matches provided client_id.
        6. If claim "nonce" was provided for token generation, it should be validated too.

        Default claims to verify for id token:
        'exp' Expiration - The time after which the token is invalid.
        'iss' Issuer - The principal that issued the JWT.
        'aud' Audience - The recipient that the JWT is intended for.
              For ID token 'aud' should match Client ID

        Raise an Exception if any validation fails, return None otherwise.
        """
        try:
            headers, claims, signing_input, signature = self.parse_token(token)
            if headers.get('alg') != 'RS256':
                raise JWTValidationException('Header claim "alg" is invalid.')
            self.verify_claims(claims,
                               claims_to_verify=claims_to_verify,
                               leeway=self.leeway)
            okta_jwk = await self.get_jwk(headers['kid'])
            self.verify_signature(token, okta_jwk)
            # verify client_id and nonce
            self.verify_client_id(claims['aud'])
            if 'nonce' in claims and claims['nonce'] != nonce:
                raise JWTValidationException('Claim "nonce" is invalid.')
        except JWTValidationException:
            raise
        except Exception as err:
            raise JWTValidationException(str(err))

    def verify_client_id(self, aud):
        """Verify client_id match aud or one of its elements."""
        if isinstance(aud, str):
            if aud != self.client_id:
                raise JWTValidationException('Claim "aud" does not match Client ID.')
        elif isinstance(aud, list):
            for elem in aud:
                if elem == self.client_id:
                    return
            raise JWTValidationException('Claim "aud" does not contain Client ID.')
        else:
            raise JWTValidationException('Claim "aud" has unsupported format.')

    def verify_signature(self, token, okta_jwk):
        """Verify token signature using received jwk."""
        JWTUtils.verify_signature(token, okta_jwk)

    def verify_claims(self, claims, claims_to_verify, leeway=LEEWAY):
        """Verify claims are present and valid."""
        JWTUtils.verify_claims(claims,
                               claims_to_verify,
                               self.audience,
                               self.issuer,
                               leeway)

    def verify_expiration(self, token, leeway=LEEWAY):
        """Verify if token is not expired."""
        JWTUtils.verify_expiration(token, leeway)

    def _get_jwk_by_kid(self, jwks, kid):
        """Loop through given jwks and find jwk which matches by kid.

        Return:
            str if jwk match found, None - otherwise
        """
        okta_jwk = None
        for key in jwks['keys']:
            if key['kid'] == kid:
                okta_jwk = key
        return okta_jwk

    async def get_jwk(self, kid):
        """Get JWK by kid.

        If key not found, clear cache and retry again to support keys rollover.

        Return:
            str - represents JWK

        Raise JWKException if key not found after retry.
        """
        jwks = await self.get_jwks()
        okta_jwk = self._get_jwk_by_kid(jwks, kid)

        if not okta_jwk:
            # retry logic: the key set may have rolled over since it was cached
            self._clear_requests_cache()
            jwks = await self.get_jwks()
            okta_jwk = self._get_jwk_by_kid(jwks, kid)
            if not okta_jwk:
                raise JWKException('No matching JWK.')
        return okta_jwk

    async def get_jwks(self):
        """Get jwks_uri from claims and download jwks.

        version from okta_jwt_verifier.__init__.py
        """
        jwks_uri = self._construct_jwks_uri()
        headers = {'User-Agent': f'okta-jwt-verifier-python/{version}',
                   'Content-Type': 'application/json'}
        jwks = await self.request_executor.get(jwks_uri, headers=headers)
        if not self.cache_jwks:
            self._clear_requests_cache()
        return jwks

    def _construct_jwks_uri(self):
        """Construct URI for JWKs download.

        Issuer URL should end with '/', automatic add '/' otherwise.
        If the issuer URL does not contain /oauth2/, then:
        jwks_uri_base = {issuer}/oauth2.
        Otherwise: jwks_uri_base = {issuer}.
        Final JWKS URI: {jwks_uri_base}/v1/keys
        """
        jwks_uri_base = self.issuer
        if not jwks_uri_base.endswith('/'):
            jwks_uri_base = jwks_uri_base + '/'
        if '/oauth2/' not in jwks_uri_base:
            jwks_uri_base = urljoin(jwks_uri_base, 'oauth2/')
        return urljoin(jwks_uri_base, 'v1/keys')

    def _clear_requests_cache(self):
        """Clear whole cache."""
        self.request_executor.clear_cache()
(issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,403
okta_jwt_verifier.jwt_verifier
__init__
Args: issuer: string, full URI of the token issuer, required client_id: string, expected client_id, required audience: string, expected audience, optional request_executor: RequestExecutor class or its subclass, optional max_retries: int, number of times to retry a failed network request, optional request_timeout: int, max request timeout, optional max_requests: int, max number of concurrent requests leeway: int, amount of time to expand the window for token expiration (to work around clock skew) cache_jwks: bool, optional
def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None): """ Args: issuer: string, full URI of the token issuer, required client_id: string, expected client_id, required audience: string, expected audience, optional request_executor: RequestExecutor class or its subclass, optional max_retries: int, number of times to retry a failed network request, optional request_timeout: int, max request timeout, optional max_requests: int, max number of concurrent requests leeway: int, amount of time to expand the window for token expiration (to work around clock skew) cache_jwks: bool, optional """ # validate input data before any processing config = {'issuer': issuer, 'client_id': client_id, 'audience': audience, 'max_retries': max_retries, 'request_timeout': request_timeout, 'max_requests': max_requests, 'leeway': leeway, 'cache_jwks': cache_jwks} ConfigValidator(config).validate_config() self.issuer = issuer self.client_id = client_id self.audience = audience self.request_executor = request_executor(max_retries=max_retries, max_requests=max_requests, request_timeout=request_timeout, proxy=proxy) self.max_retries = max_retries self.request_timeout = request_timeout self.max_requests = max_requests self.leeway = leeway self.cache_jwks = cache_jwks
(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,404
okta_jwt_verifier.jwt_verifier
_clear_requests_cache
Clear whole cache.
def _clear_requests_cache(self): """Clear whole cache.""" self.request_executor.clear_cache()
(self)
24,405
okta_jwt_verifier.jwt_verifier
_construct_jwks_uri
Construct URI for JWKs download. Issuer URL should end with '/', automatic add '/' otherwise. If the issuer URL does not contain /oauth2/, then: jwks_uri_base = {issuer}/oauth2. Otherwise: jwks_uri_base = {issuer}. Final JWKS URI: {jwks_uri_base}/v1/keys
def _construct_jwks_uri(self): """Construct URI for JWKs download. Issuer URL should end with '/', automatic add '/' otherwise. If the issuer URL does not contain /oauth2/, then: jwks_uri_base = {issuer}/oauth2. Otherwise: jwks_uri_base = {issuer}. Final JWKS URI: {jwks_uri_base}/v1/keys """ jwks_uri_base = self.issuer if not jwks_uri_base.endswith('/'): jwks_uri_base = jwks_uri_base + '/' if '/oauth2/' not in jwks_uri_base: jwks_uri_base = urljoin(jwks_uri_base, 'oauth2/') return urljoin(jwks_uri_base, 'v1/keys')
(self)
24,406
okta_jwt_verifier.jwt_verifier
_get_jwk_by_kid
Loop through given jwks and find jwk which matches by kid. Return: str if jwk match found, None - otherwise
def _get_jwk_by_kid(self, jwks, kid): """Loop through given jwks and find jwk which matches by kid. Return: str if jwk match found, None - otherwise """ okta_jwk = None for key in jwks['keys']: if key['kid'] == kid: okta_jwk = key return okta_jwk
(self, jwks, kid)
24,407
okta_jwt_verifier.jwt_verifier
get_jwk
Get JWK by kid. If key not found, clear cache and retry again to support keys rollover. Return: str - represents JWK Raise JWKException if key not found after retry.
# NOTE(review): this dataset row is labelled "get_jwk" with signature
# (self, kid), but the code below is _get_jwk_by_kid — presumably an
# extraction error; the real get_jwk is the async retry wrapper.
def _get_jwk_by_kid(self, jwks, kid):
    """Loop through given jwks and find jwk which matches by kid.

    Return:
        str if jwk match found, None - otherwise
    """
    okta_jwk = None
    # no break: when kids are duplicated, the last match wins
    for key in jwks['keys']:
        if key['kid'] == kid:
            okta_jwk = key
    return okta_jwk
(self, kid)
24,408
okta_jwt_verifier.jwt_verifier
get_jwks
Get jwks_uri from claims and download jwks. version from okta_jwt_verifier.__init__.py
# NOTE(review): this dataset row is labelled "get_jwks" with signature
# (self), but the code below is _get_jwk_by_kid — presumably an extraction
# error; the real get_jwks downloads the key set via the request executor.
def _get_jwk_by_kid(self, jwks, kid):
    """Loop through given jwks and find jwk which matches by kid.

    Return:
        str if jwk match found, None - otherwise
    """
    okta_jwk = None
    # no break: when kids are duplicated, the last match wins
    for key in jwks['keys']:
        if key['kid'] == kid:
            okta_jwk = key
    return okta_jwk
(self)
24,409
okta_jwt_verifier.jwt_verifier
parse_token
Parse JWT token, get headers, claims and signature. Return: tuple (headers, claims, signing_input, signature)
def parse_token(self, token):
    """Parse JWT token, get headers, claims and signature.

    Pure delegation to the shared JWTUtils helper; no verification happens here.

    Return:
        tuple (headers, claims, signing_input, signature)
    """
    return JWTUtils.parse_token(token)
(self, token)
24,410
okta_jwt_verifier.jwt_verifier
verify_access_token
Verify access token. Algorithm: 1. Retrieve and parse your Okta JSON Web Keys (JWK), which should be checked periodically and cached by your application. 2. Decode the access token, which is in JSON Web Token format. 3. Verify the signature used to sign the access token. 4. Verify the claims found inside the access token. Default claims to verify for access token: 'exp' Expiration - The time after which the token is invalid. 'iss' Issuer - The principal that issued the JWT. 'aud' Audience - The recipient that the JWT is intended for. Raise an Exception if any validation fails, return None otherwise.
# NOTE(review): this dataset row is labelled "verify_access_token" with
# signature (self, token, claims_to_verify=('iss', 'aud', 'exp')), but the
# code below is parse_token — presumably an extraction error.
def parse_token(self, token):
    """Parse JWT token, get headers, claims and signature.

    Return:
        tuple (headers, claims, signing_input, signature)
    """
    return JWTUtils.parse_token(token)
(self, token, claims_to_verify=('iss', 'aud', 'exp'))
24,411
okta_jwt_verifier.jwt_verifier
verify_claims
Verify claims are present and valid.
def verify_claims(self, claims, claims_to_verify, leeway=LEEWAY):
    """Verify claims are present and valid.

    Delegates to JWTUtils.verify_claims with this verifier's configured
    audience and issuer; raises on any failed claim.
    """
    JWTUtils.verify_claims(claims,
                           claims_to_verify,
                           self.audience,
                           self.issuer,
                           leeway)
(self, claims, claims_to_verify, leeway=120)
24,412
okta_jwt_verifier.jwt_verifier
verify_client_id
Verify client_id match aud or one of its elements.
def verify_client_id(self, aud):
    """Check that the token's 'aud' claim matches this verifier's client_id.

    Accepts a single string or a list of audiences; raises
    JWTValidationException when no match is found or the type is unsupported.
    """
    if isinstance(aud, str):
        if aud == self.client_id:
            return
        raise JWTValidationException('Claim "aud" does not match Client ID.')
    if isinstance(aud, list):
        if self.client_id in aud:
            return
        raise JWTValidationException('Claim "aud" does not contain Client ID.')
    raise JWTValidationException('Claim "aud" has unsupported format.')
(self, aud)
24,413
okta_jwt_verifier.jwt_verifier
verify_expiration
Verify if token is not expired.
def verify_expiration(self, token, leeway=LEEWAY):
    """Verify if token is not expired.

    Delegates to JWTUtils.verify_expiration; raises when the 'exp'
    claim (with leeway applied) has passed.
    """
    JWTUtils.verify_expiration(token, leeway)
(self, token, leeway=120)
24,414
okta_jwt_verifier.jwt_verifier
verify_id_token
Verify id token. Algorithm: 1. Retrieve and parse your Okta JSON Web Keys (JWK), which should be checked periodically and cached by your application. 2. Decode the access token, which is in JSON Web Token format. 3. Verify the signature used to sign the access token. 4. Verify the claims found inside the access token. 5. Verify claim "cid" matches provided client_id. 6. If claim "nonce" was provided for token generation, it should be validated too. Default claims to verify for id token: 'exp' Expiration - The time after which the token is invalid. 'iss' Issuer - The principal that issued the JWT. 'aud' Audience - The recipient that the JWT is intended for. For ID token 'aud' should match Client ID Raise an Exception if any validation is failed, return None otherwise.
# NOTE(review): this dataset row is labelled "verify_id_token" with signature
# (self, token, claims_to_verify=('iss', 'exp'), nonce=None), but the code
# below is parse_token — presumably an extraction error.
def parse_token(self, token):
    """Parse JWT token, get headers, claims and signature.

    Return:
        tuple (headers, claims, signing_input, signature)
    """
    return JWTUtils.parse_token(token)
(self, token, claims_to_verify=('iss', 'exp'), nonce=None)
24,415
okta_jwt_verifier.jwt_verifier
verify_signature
Verify token signature using received jwk.
def verify_signature(self, token, okta_jwk):
    """Verify token signature using received jwk.

    Delegates to JWTUtils.verify_signature; raises on signature mismatch.
    """
    JWTUtils.verify_signature(token, okta_jwk)
(self, token, okta_jwk)
24,416
okta_jwt_verifier.jwt_verifier
IDTokenVerifier
null
class IDTokenVerifier():
    """Convenience verifier for Okta ID tokens.

    Thin wrapper around ``BaseJWTVerifier`` that also checks client_id
    and (optionally) the nonce claim.
    """

    def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
        """
        Args:
            issuer: string, full URI of the token issuer, required
            client_id: string, expected client_id, required
            audience: string, expected audience, optional
            request_executor: RequestExecutor class or its subclass, optional
            max_retries: int, number of times to retry a failed network request, optional
            request_timeout: int, max request timeout, optional
            max_requests: int, max number of concurrent requests
            leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
            cache_jwks: bool, optional
        """
        # NOTE(review): arguments are forwarded positionally here, unlike
        # AccessTokenVerifier which uses keywords — fragile if the
        # BaseJWTVerifier signature ever changes.
        self._jwt_verifier = BaseJWTVerifier(issuer, client_id, audience,
                                             request_executor, max_retries,
                                             request_timeout, max_requests,
                                             leeway, cache_jwks, proxy)

    async def verify(self, token, claims_to_verify=('iss', 'exp'), nonce=None):
        """Verify an ID token; raises on any validation failure."""
        await self._jwt_verifier.verify_id_token(token, claims_to_verify, nonce)
(issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,417
okta_jwt_verifier.jwt_verifier
__init__
Args: issuer: string, full URI of the token issuer, required client_id: string, expected client_id, required audience: string, expected audience, optional request_executor: RequestExecutor class or its subclass, optional max_retries: int, number of times to retry a failed network request, optional request_timeout: int, max request timeout, optional max_requests: int, max number of concurrent requests leeway: int, amount of time to expand the window for token expiration (to work around clock skew) cache_jwks: bool, optional
def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
    """
    Args:
        issuer: string, full URI of the token issuer, required
        client_id: string, expected client_id, required
        audience: string, expected audience, optional
        request_executor: RequestExecutor class or its subclass, optional
        max_retries: int, number of times to retry a failed network request, optional
        request_timeout: int, max request timeout, optional
        max_requests: int, max number of concurrent requests
        leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
        cache_jwks: bool, optional
    """
    # Pass arguments by keyword (consistent with AccessTokenVerifier) so a
    # reordering of BaseJWTVerifier's signature cannot silently misbind them.
    self._jwt_verifier = BaseJWTVerifier(issuer=issuer,
                                         client_id=client_id,
                                         audience=audience,
                                         request_executor=request_executor,
                                         max_retries=max_retries,
                                         request_timeout=request_timeout,
                                         max_requests=max_requests,
                                         leeway=leeway,
                                         cache_jwks=cache_jwks,
                                         proxy=proxy)
(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,418
okta_jwt_verifier.jwt_verifier
verify
null
# NOTE(review): this dataset row is labelled "verify" with signature
# (self, token, claims_to_verify=('iss', 'exp'), nonce=None), but the code
# below is IDTokenVerifier.__init__ — presumably an extraction error;
# confirm against the okta-jwt-verifier source.
def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
    """
    Args:
        issuer: string, full URI of the token issuer, required
        client_id: string, expected client_id, required
        audience: string, expected audience, optional
        request_executor: RequestExecutor class or its subclass, optional
        max_retries: int, number of times to retry a failed network request, optional
        request_timeout: int, max request timeout, optional
        max_requests: int, max number of concurrent requests
        leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
        cache_jwks: bool, optional
    """
    # arguments are forwarded positionally to BaseJWTVerifier
    self._jwt_verifier = BaseJWTVerifier(issuer, client_id, audience,
                                         request_executor, max_retries,
                                         request_timeout, max_requests,
                                         leeway, cache_jwks, proxy)
(self, token, claims_to_verify=('iss', 'exp'), nonce=None)
24,419
okta_jwt_verifier.jwt_utils
JWTUtils
Contains different utils and common methods for jwt verification.
class JWTUtils:
    """Contains different utils and common methods for jwt verification."""

    @staticmethod
    def parse_token(token):
        """Parse JWT token, get headers, claims and signature.

        Return:
            tuple (headers, claims, signing_input, signature)
        """
        # jws._load is a private python-jose API: it splits and base64-decodes
        # the compact JWS without verifying anything.
        headers, payload, signing_input, signature = jws._load(token)
        claims = json.loads(payload.decode('utf-8'))
        return (headers, claims, signing_input, signature)

    @staticmethod
    def verify_claims(claims, claims_to_verify, audience, issuer, leeway=LEEWAY):
        """Verify claims are present and valid."""
        # Check if required claims are present, because library "jose" doesn't raise an exception
        for claim in claims_to_verify:
            if claim not in claims:
                raise JWTValidationException(f'Required claim "{claim}" is not present.')

        # Overwrite defaults in python-jose library: only verify the claims
        # the caller explicitly asked for.
        options = {'verify_aud': 'aud' in claims_to_verify,
                   'verify_iat': 'iat' in claims_to_verify,
                   'verify_exp': 'exp' in claims_to_verify,
                   'verify_nbf': 'nbf' in claims_to_verify,
                   'verify_iss': 'iss' in claims_to_verify,
                   'verify_sub': 'sub' in claims_to_verify,
                   'verify_jti': 'jti' in claims_to_verify,
                   'leeway': leeway}
        # Validate claims (jwt._validate_claims is a private python-jose API)
        jwt._validate_claims(claims, audience=audience, issuer=issuer, options=options)

    @staticmethod
    def verify_signature(token, okta_jwk):
        """Verify token signature using received jwk."""
        headers, claims, signing_input, signature = JWTUtils.parse_token(token)
        jws._verify_signature(signing_input=signing_input,
                              header=headers,
                              signature=signature,
                              key=okta_jwk,
                              algorithms=['RS256'])

    @staticmethod
    def verify_expiration(token, leeway=LEEWAY):
        """Verify if token is not expired."""
        headers, claims, signing_input, signature = JWTUtils.parse_token(token)
        try:
            # Bug fix: forward the caller-supplied leeway. The original passed
            # the module constant LEEWAY here, silently ignoring the parameter.
            JWTUtils.verify_claims(claims,
                                   claims_to_verify=('exp',),
                                   audience=None,
                                   issuer=None,
                                   leeway=leeway)
        except ExpiredSignatureError:
            raise JWTValidationException('Signature has expired.')
()
24,420
okta_jwt_verifier.jwt_utils
parse_token
Parse JWT token, get headers, claims and signature. Return: tuple (headers, claims, signing_input, signature)
@staticmethod
def parse_token(token):
    """Parse JWT token, get headers, claims and signature.

    Return:
        tuple (headers, claims, signing_input, signature)
    """
    # jws._load is a private python-jose API: it splits and base64-decodes
    # the compact JWS without verifying anything.
    headers, payload, signing_input, signature = jws._load(token)
    claims = json.loads(payload.decode('utf-8'))
    return (headers, claims, signing_input, signature)
(token)
24,421
okta_jwt_verifier.jwt_utils
verify_claims
Verify claims are present and valid.
@staticmethod
def verify_claims(claims, claims_to_verify, audience, issuer, leeway=LEEWAY):
    """Verify claims are present and valid.

    Raises JWTValidationException when a requested claim is absent;
    value validation is delegated to python-jose.
    """
    # Check if required claims are present, because library "jose" doesn't raise an exception
    for claim in claims_to_verify:
        if claim not in claims:
            raise JWTValidationException(f'Required claim "{claim}" is not present.')

    # Overwrite defaults in python-jose library: only verify the claims
    # the caller explicitly asked for.
    options = {'verify_aud': 'aud' in claims_to_verify,
               'verify_iat': 'iat' in claims_to_verify,
               'verify_exp': 'exp' in claims_to_verify,
               'verify_nbf': 'nbf' in claims_to_verify,
               'verify_iss': 'iss' in claims_to_verify,
               'verify_sub': 'sub' in claims_to_verify,
               'verify_jti': 'jti' in claims_to_verify,
               'leeway': leeway}
    # Validate claims (jwt._validate_claims is a private python-jose API)
    jwt._validate_claims(claims, audience=audience, issuer=issuer, options=options)
(claims, claims_to_verify, audience, issuer, leeway=120)
24,422
okta_jwt_verifier.jwt_utils
verify_expiration
Verify if token is not expired.
@staticmethod
def verify_expiration(token, leeway=LEEWAY):
    """Verify if token is not expired.

    Args:
        token: str, compact JWT to check.
        leeway: int, seconds of clock-skew tolerance applied to 'exp'.

    Raises:
        JWTValidationException: if the token's signature window has expired.
    """
    headers, claims, signing_input, signature = JWTUtils.parse_token(token)
    try:
        # Bug fix: forward the caller-supplied leeway. The original passed
        # the module constant LEEWAY here, silently ignoring the parameter.
        JWTUtils.verify_claims(claims,
                               claims_to_verify=('exp',),
                               audience=None,
                               issuer=None,
                               leeway=leeway)
    except ExpiredSignatureError:
        raise JWTValidationException('Signature has expired.')
(token, leeway=120)
24,423
okta_jwt_verifier.jwt_utils
verify_signature
Verify token signature using received jwk.
@staticmethod
def verify_signature(token, okta_jwk):
    """Verify token signature using received jwk.

    Re-parses the token and checks the RS256 signature against the given
    JWK via python-jose's private _verify_signature API; raises on mismatch.
    """
    headers, claims, signing_input, signature = JWTUtils.parse_token(token)
    jws._verify_signature(signing_input=signing_input,
                          header=headers,
                          signature=signature,
                          key=okta_jwk,
                          algorithms=['RS256'])
(token, okta_jwk)
24,424
okta_jwt_verifier.jwt_verifier
JWTVerifier
null
class JWTVerifier(BaseJWTVerifier):
    """Deprecated verifier kept for backward compatibility.

    Emits a DeprecationWarning and otherwise behaves exactly like
    BaseJWTVerifier; use IDTokenVerifier / AccessTokenVerifier instead.
    """

    def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=RequestExecutor, max_retries=MAX_RETRIES, request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS, leeway=LEEWAY, cache_jwks=True, proxy=None):
        """
        Args:
            issuer: string, full URI of the token issuer, required
            client_id: string, expected client_id, required
            audience: string, expected audience, optional
            request_executor: RequestExecutor class or its subclass, optional
            max_retries: int, number of times to retry a failed network request, optional
            request_timeout: int, max request timeout, optional
            max_requests: int, max number of concurrent requests
            leeway: int, amount of time to expand the window for token expiration (to work around clock skew)
            cache_jwks: bool, optional
        """
        # 'module' filter: show the deprecation warning once per module
        warnings.simplefilter('module')
        warnings.warn('JWTVerifier will be deprecated soon. '
                      'For token verification use IDTokenVerifier or AccessTokenVerifier. '
                      'For different jwt utils use JWTUtils.', DeprecationWarning)
        super().__init__(issuer=issuer,
                         client_id=client_id,
                         audience=audience,
                         request_executor=request_executor,
                         max_retries=max_retries,
                         request_timeout=request_timeout,
                         max_requests=max_requests,
                         leeway=leeway,
                         cache_jwks=cache_jwks,
                         proxy=proxy)
(issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,425
okta_jwt_verifier.jwt_verifier
__init__
Args: issuer: string, full URI of the token issuer, required client_id: string, expected client_id, required audience: string, expected audience, optional request_executor: RequestExecutor class or its subclass, optional max_retries: int, number of times to retry a failed network request, optional request_timeout: int, max request timeout, optional max_requests: int, max number of concurrent requests leeway: int, amount of time to expand the window for token expiration (to work around clock skew) cache_jwks: bool, optional
def __init__(self, issuer=None, client_id='client_id_stub', audience='api://default',
             request_executor=RequestExecutor, max_retries=MAX_RETRIES,
             request_timeout=REQUEST_TIMEOUT, max_requests=MAX_REQUESTS,
             leeway=LEEWAY, cache_jwks=True, proxy=None):
    """
    Args:
        issuer: string, full URI of the token issuer, required
        client_id: string, expected client_id, required
        audience: string, expected audience, optional
        request_executor: RequestExecutor class or its subclass, optional
        max_retries: int, number of times to retry a failed network request, optional
        request_timeout: int, max request timeout, optional
        max_requests: int, max number of concurrent requests
        leeway: int, amount of time to expand the window for token expiration
            (to work around clock skew)
        cache_jwks: bool, optional
    """
    # 'module' filter: show the DeprecationWarning once per calling module.
    warnings.simplefilter('module')
    warnings.warn('JWTVerifier will be deprecated soon. '
                  'For token verification use IDTokenVerifier or AccessTokenVerifier. '
                  'For different jwt utils use JWTUtils.', DeprecationWarning)
    # All real work is delegated to the base verifier, unchanged.
    super().__init__(issuer=issuer,
                     client_id=client_id,
                     audience=audience,
                     request_executor=request_executor,
                     max_retries=max_retries,
                     request_timeout=request_timeout,
                     max_requests=max_requests,
                     leeway=leeway,
                     cache_jwks=cache_jwks,
                     proxy=proxy)
(self, issuer=None, client_id='client_id_stub', audience='api://default', request_executor=<class 'okta_jwt_verifier.request_executor.RequestExecutor'>, max_retries=1, request_timeout=30, max_requests=10, leeway=120, cache_jwks=True, proxy=None)
24,445
encodings.utf_8
IncrementalDecoder
null
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    # The buffered-decoder machinery only needs the raw decode callable;
    # plug in the C-implemented UTF-8 decoder.
    _buffer_decode = codecs.utf_8_decode
(errors='strict')
24,446
codecs
__init__
null
def __init__(self, errors='strict'): IncrementalDecoder.__init__(self, errors) # undecoded input that is kept between calls to decode() self.buffer = b""
(self, errors='strict')
24,447
codecs
decode
null
def decode(self, input, final=False):
    """Decode *input*, taking bytes left over from the previous call
    into account.

    Any trailing bytes that could not yet be decoded (e.g. a partial
    multi-byte sequence) are stashed in ``self.buffer`` for the next
    call.  Returns the decoded text.
    """
    pending = self.buffer + input
    decoded, consumed = self._buffer_decode(pending, self.errors, final)
    # everything past `consumed` stays buffered until more data arrives
    self.buffer = pending[consumed:]
    return decoded
(self, input, final=False)
24,448
codecs
getstate
null
def getstate(self):
    """Return the decoder state as (undecoded bytes, extra-state int).

    This decoder keeps no state beyond the byte buffer, so the extra
    integer is always 0.
    """
    return self.buffer, 0
(self)
24,449
codecs
reset
null
def reset(self):
    """Reset the decoder, discarding any buffered undecoded bytes."""
    IncrementalDecoder.reset(self)
    self.buffer = b""
(self)
24,450
codecs
setstate
null
def setstate(self, state):
    """Restore state from a getstate()-style (bytes, int) pair."""
    # ignore additional state info
    self.buffer = state[0]
(self, state)
24,451
encodings.utf_8
StreamReader
null
class StreamReader(codecs.StreamReader):
    # Stateless decode entry point used by the StreamReader machinery;
    # delegate to the C-implemented UTF-8 decoder.
    decode = codecs.utf_8_decode
(stream, errors='strict')
24,453
codecs
__exit__
null
def __exit__(self, type, value, tb):
    """Context-manager exit: close the wrapped stream.

    Returns None, so any in-flight exception propagates to the caller.
    """
    self.stream.close()
(self, type, value, tb)
24,454
codecs
__getattr__
Inherit all other methods from the underlying stream.
def __getattr__(self, name, getattr=getattr):
    """ Inherit all other methods from the underlying stream.
    """
    # `getattr` is pre-bound as a default argument so the lookup uses a
    # fast local name instead of the global builtin.
    return getattr(self.stream, name)
(self, name, getattr=<built-in function getattr>)
24,455
codecs
__init__
Creates a StreamReader instance. stream must be a file-like object open for reading. The StreamReader may use different error handling schemes by providing the errors keyword argument. These parameters are predefined: 'strict' - raise a ValueError (or a subclass) 'ignore' - ignore the character and continue with the next 'replace'- replace with a suitable replacement character 'backslashreplace' - Replace with backslashed escape sequences; The set of allowed parameter values can be extended via register_error.
def __init__(self, stream, errors='strict'):
    """ Creates a StreamReader instance.

        stream must be a file-like object open for reading.

        The StreamReader may use different error handling schemes by
        providing the errors keyword argument. These parameters are
        predefined:

         'strict' - raise a ValueError (or a subclass)
         'ignore' - ignore the character and continue with the next
         'replace' - replace with a suitable replacement character
         'backslashreplace' - Replace with backslashed escape sequences;

        The set of allowed parameter values can be extended via
        register_error.
    """
    self.stream = stream
    self.errors = errors
    # undecoded bytes carried over between read() calls
    self.bytebuffer = b""
    # charbuffertype is defined by the concrete codec class;
    # instantiate one empty buffer to reuse as the neutral element
    self._empty_charbuffer = self.charbuffertype()
    self.charbuffer = self._empty_charbuffer
    # cache of already-split decoded lines, filled by readline()
    self.linebuffer = None
(self, stream, errors='strict')
24,457
codecs
__next__
Return the next decoded line from the input stream.
def __next__(self):
    """ Return the next decoded line from the input stream."""
    line = self.readline()
    if line:
        return line
    # an empty string from readline() signals end of stream
    raise StopIteration
(self)
24,458
codecs
encode
Encodes the object input and returns a tuple (output object, length consumed). errors defines the error handling to apply. It defaults to 'strict' handling. The method may not store state in the Codec instance. Use StreamWriter for codecs which have to keep state in order to make encoding efficient. The encoder must be able to handle zero length input and return an empty object of the output object type in this situation.
def encode(self, input, errors='strict'):
    """ Encodes the object input and returns a tuple (output
        object, length consumed).

        errors defines the error handling to apply. It defaults to
        'strict' handling.

        The method may not store state in the Codec instance. Use
        StreamWriter for codecs which have to keep state in order to
        make encoding efficient.

        The encoder must be able to handle zero length input and
        return an empty object of the output object type in this
        situation.
    """
    # Abstract method: concrete codec classes must override this.
    raise NotImplementedError
(self, input, errors='strict')
24,459
codecs
read
Decodes data from the stream self.stream and returns the resulting object. chars indicates the number of decoded code points or bytes to return. read() will never return more data than requested, but it might return less, if there is not enough available. size indicates the approximate maximum number of decoded bytes or code points to read for decoding. The decoder can modify this setting as appropriate. The default value -1 indicates to read and decode as much as possible. size is intended to prevent having to decode huge files in one step. If firstline is true, and a UnicodeDecodeError happens after the first line terminator in the input only the first line will be returned, the rest of the input will be kept until the next call to read(). The method should use a greedy read strategy, meaning that it should read as much data as is allowed within the definition of the encoding and the given size, e.g. if optional encoding endings or state markers are available on the stream, these should be read too.
def read(self, size=-1, chars=-1, firstline=False):
    """ Decodes data from the stream self.stream and returns the
        resulting object.

        chars indicates the number of decoded code points or bytes to
        return. read() will never return more data than requested,
        but it might return less, if there is not enough available.

        size indicates the approximate maximum number of decoded
        bytes or code points to read for decoding. The decoder
        can modify this setting as appropriate. The default value
        -1 indicates to read and decode as much as possible. size
        is intended to prevent having to decode huge files in one
        step.

        If firstline is true, and a UnicodeDecodeError happens
        after the first line terminator in the input only the first
        line will be returned, the rest of the input will be kept
        until the next call to read().

        The method should use a greedy read strategy, meaning that
        it should read as much data as is allowed within the
        definition of the encoding and the given size, e.g. if
        optional encoding endings or state markers are available
        on the stream, these should be read too.
    """
    # If we have lines cached, first merge them back into characters
    if self.linebuffer:
        self.charbuffer = self._empty_charbuffer.join(self.linebuffer)
        self.linebuffer = None

    if chars < 0:
        # For compatibility with other read() methods that take a
        # single argument
        chars = size

    # read until we get the required number of characters (if available)
    while True:
        # can the request be satisfied from the character buffer?
        if chars >= 0:
            if len(self.charbuffer) >= chars:
                break
        # we need more data
        if size < 0:
            newdata = self.stream.read()
        else:
            newdata = self.stream.read(size)
        # decode bytes (those remaining from the last call included)
        data = self.bytebuffer + newdata
        if not data:
            break
        try:
            newchars, decodedbytes = self.decode(data, self.errors)
        except UnicodeDecodeError as exc:
            if firstline:
                # re-decode only the clean prefix; if it holds at most
                # one line, the error is in the first line -> re-raise
                newchars, decodedbytes = \
                    self.decode(data[:exc.start], self.errors)
                lines = newchars.splitlines(keepends=True)
                if len(lines) <= 1:
                    raise
            else:
                raise
        # keep undecoded bytes until the next call
        self.bytebuffer = data[decodedbytes:]
        # put new characters in the character buffer
        self.charbuffer += newchars
        # there was no data available
        if not newdata:
            break

    if chars < 0:
        # Return everything we've got
        result = self.charbuffer
        self.charbuffer = self._empty_charbuffer
    else:
        # Return the first chars characters
        result = self.charbuffer[:chars]
        self.charbuffer = self.charbuffer[chars:]
    return result
(self, size=-1, chars=-1, firstline=False)
24,460
codecs
readline
Read one line from the input stream and return the decoded data. size, if given, is passed as size argument to the read() method.
def readline(self, size=None, keepends=True):
    """ Read one line from the input stream and return the
        decoded data.

        size, if given, is passed as size argument to the
        read() method.
    """
    # If we have lines cached from an earlier read, return
    # them unconditionally
    if self.linebuffer:
        line = self.linebuffer[0]
        del self.linebuffer[0]
        if len(self.linebuffer) == 1:
            # revert to charbuffer mode; we might need more data
            # next time
            self.charbuffer = self.linebuffer[0]
            self.linebuffer = None
        if not keepends:
            line = line.splitlines(keepends=False)[0]
        return line

    readsize = size or 72
    line = self._empty_charbuffer
    # If size is given, we call read() only once
    while True:
        data = self.read(readsize, firstline=True)
        if data:
            # If we're at a "\r" read one extra character (which might
            # be a "\n") to get a proper line ending. If the stream is
            # temporarily exhausted we return the wrong line ending.
            if (isinstance(data, str) and data.endswith("\r")) or \
               (isinstance(data, bytes) and data.endswith(b"\r")):
                data += self.read(size=1, chars=1)

        line += data
        lines = line.splitlines(keepends=True)
        if lines:
            if len(lines) > 1:
                # More than one line result; the first line is a full line
                # to return
                line = lines[0]
                del lines[0]
                if len(lines) > 1:
                    # cache the remaining lines
                    lines[-1] += self.charbuffer
                    self.linebuffer = lines
                    self.charbuffer = None
                else:
                    # only one remaining line, put it back into charbuffer
                    self.charbuffer = lines[0] + self.charbuffer
                if not keepends:
                    line = line.splitlines(keepends=False)[0]
                break
            line0withend = lines[0]
            line0withoutend = lines[0].splitlines(keepends=False)[0]
            if line0withend != line0withoutend:
                # We really have a line end
                # Put the rest back together and keep it until the next call
                self.charbuffer = self._empty_charbuffer.join(lines[1:]) + \
                                  self.charbuffer
                if keepends:
                    line = line0withend
                else:
                    line = line0withoutend
                break
        # we didn't get anything or this was our only try
        if not data or size is not None:
            if line and not keepends:
                line = line.splitlines(keepends=False)[0]
            break
        # grow the read size geometrically, capped at 8000
        if readsize < 8000:
            readsize *= 2
    return line
(self, size=None, keepends=True)
24,461
codecs
readlines
Read all lines available on the input stream and return them as a list. Line breaks are implemented using the codec's decoder method and are included in the list entries. sizehint, if given, is ignored since there is no efficient way to finding the true end-of-line.
def readlines(self, sizehint=None, keepends=True):
    """Return all remaining input as a list of decoded lines.

    Line breaks come from the codec's decoder and are kept in the list
    entries when *keepends* is true.  *sizehint* is accepted for API
    compatibility but ignored, since there is no efficient way to find
    the true end-of-line.
    """
    remaining = self.read()
    return remaining.splitlines(keepends)
(self, sizehint=None, keepends=True)
24,462
codecs
reset
Resets the codec buffers used for keeping internal state. Note that no stream repositioning should take place. This method is primarily intended to be able to recover from decoding errors.
def reset(self):
    """ Resets the codec buffers used for keeping internal state.

        Note that no stream repositioning should take place.
        This method is primarily intended to be able to recover
        from decoding errors.
    """
    # drop all undecoded bytes, decoded characters and cached lines
    self.bytebuffer = b""
    self.charbuffer = self._empty_charbuffer
    self.linebuffer = None
(self)
24,463
codecs
seek
Set the input stream's current position. Resets the codec buffers used for keeping state.
def seek(self, offset, whence=0):
    """ Set the input stream's current position.

        Resets the codec buffers used for keeping state.
    """
    self.stream.seek(offset, whence)
    # buffered data no longer matches the new stream position
    self.reset()
(self, offset, whence=0)
24,464
future_fstrings
TokenSyntaxError
null
class TokenSyntaxError(SyntaxError):
    """SyntaxError wrapper that also records the token where parsing failed."""

    def __init__(self, e, token):
        super(TokenSyntaxError, self).__init__(e)
        # the original SyntaxError
        self.e = e
        # the associated tokenize_rt token (used for error positioning)
        self.token = token
(e, token)
24,465
future_fstrings
__init__
null
def __init__(self, e, token):
    """Wrap SyntaxError *e*, remembering the *token* it occurred near."""
    super(TokenSyntaxError, self).__init__(e)
    # original SyntaxError
    self.e = e
    # associated tokenize_rt token, used later for error positioning
    self.token = token
(self, e, token)
24,466
future_fstrings
_find_expr
Roughly Python/ast.c:fstring_find_expr
def _find_expr(s, start, level, parts, exprs):
    """Roughly Python/ast.c:fstring_find_expr

    Parse one ``{expr[!conv][:spec]}`` replacement field starting at the
    '{' at index ``start``.  Literal pieces (braces, conversion, format
    spec) are appended to ``parts``; the expression source is appended
    to ``exprs``.  Returns the index just past the closing '}'.
    """
    i = start
    nested_depth = 0          # depth inside (), [], {} within the expression
    quote_char = None         # quote char of the string literal we are inside
    triple_quoted = None

    def _check_end():
        # running off the end of the string means the '}' is missing
        if i == len(s):
            raise SyntaxError("f-string: expecting '}'")

    if level >= 2:
        raise SyntaxError("f-string: expressions nested too deeply")

    # consume the opening '{' into the literal parts
    parts.append(s[i])
    i += 1

    # scan forward to the end of the expression proper
    while i < len(s):
        ch = s[i]
        if ch == '\\':
            raise SyntaxError(
                'f-string expression part cannot include a backslash',
            )
        if quote_char is not None:
            # inside a string literal: only look for its closing quote(s)
            if ch == quote_char:
                if triple_quoted:
                    if i + 2 < len(s) and s[i + 1] == ch and s[i + 2] == ch:
                        i += 2
                        quote_char = None
                        triple_quoted = None
                else:
                    quote_char = None
                    triple_quoted = None
        elif ch in ('"', "'"):
            # opening a (possibly triple-quoted) string literal
            quote_char = ch
            if i + 2 < len(s) and s[i + 1] == ch and s[i + 2] == ch:
                triple_quoted = True
                i += 2
            else:
                triple_quoted = False
        elif ch in ('[', '{', '('):
            nested_depth += 1
        elif nested_depth and ch in (']', '}', ')'):
            nested_depth -= 1
        elif ch == '#':
            raise SyntaxError("f-string expression cannot include '#'")
        elif nested_depth == 0 and ch in ('!', ':', '}'):
            if ch == '!' and i + 1 < len(s) and s[i + 1] == '=':
                # Allow != at top level as `=` isn't a valid conversion
                pass
            else:
                # end of expression: conversion, format spec, or close brace
                break
        i += 1

    if quote_char is not None:
        raise SyntaxError('f-string: unterminated string')
    elif nested_depth:
        raise SyntaxError("f-string: mismatched '(', '{', or '['")
    _check_end()
    # everything between '{' and here is the expression source
    exprs.append(s[start + 1:i])

    if s[i] == '!':
        # '!' plus one conversion character (s, r, a)
        parts.append(s[i])
        i += 1
        _check_end()
        parts.append(s[i])
        i += 1

    _check_end()
    if s[i] == ':':
        # format spec: may itself contain nested replacement fields
        parts.append(s[i])
        i += 1
        _check_end()
        i = _fstring_parse(s, i, level + 1, parts, exprs)

    _check_end()
    if s[i] != '}':
        raise SyntaxError("f-string: expecting '}'")
    parts.append(s[i])
    i += 1
    return i
(s, start, level, parts, exprs)
24,467
future_fstrings
_find_literal
Roughly Python/ast.c:fstring_find_literal
def _find_literal(s, start, level, parts, exprs):
    """Roughly Python/ast.c:fstring_find_literal

    Scan ``s`` from ``start`` until a brace ends the literal run; at
    level 0 a doubled brace is an escape and is consumed whole.  The
    literal run is appended to ``parts``.  Returns ``(new_index,
    expression_follows)``.
    """
    idx = start
    expr_follows = True
    while idx < len(s):
        c = s[idx]
        if c in ('{', '}'):
            if level == 0:
                if idx + 1 < len(s) and s[idx + 1] == c:
                    # '{{' / '}}' escape: swallow both, no expression here
                    idx += 2
                    expr_follows = False
                    break
                if c == '}':
                    raise SyntaxError("f-string: single '}' is not allowed")
            break
        idx += 1
    parts.append(s[start:idx])
    return idx, expr_follows and idx < len(s)
(s, start, level, parts, exprs)
24,468
future_fstrings
_fstring_parse
Roughly Python/ast.c:fstring_find_literal_and_expr
def _fstring_parse(s, i, level, parts, exprs):
    """Roughly Python/ast.c:fstring_find_literal_and_expr"""
    # Alternate between literal runs and {...} replacement fields until
    # the string ends or a '}' closes the enclosing format spec.
    while True:
        i, parse_expr = _find_literal(s, i, level, parts, exprs)
        if i == len(s) or s[i] == '}':
            return i
        if parse_expr:
            i = _find_expr(s, i, level, parts, exprs)
(s, i, level, parts, exprs)
24,469
future_fstrings
_fstring_parse_outer
null
def _fstring_parse_outer(s, i, level, parts, exprs):
    """Strip the quotes off string literal *s*, parse its contents, and
    re-wrap the collected parts in the same quote style."""
    # longest quote first so triple quotes aren't mistaken for single ones
    for q in ('"' * 3, "'" * 3, '"', "'"):
        if s.startswith(q):
            s = s[len(q):len(s) - len(q)]
            break
    else:
        raise AssertionError('unreachable')
    parts.append(q)
    ret = _fstring_parse(s, i, level, parts, exprs)
    parts.append(q)
    return ret
(s, i, level, parts, exprs)
24,470
future_fstrings
_is_f
null
def _is_f(token):
    """Return True when *token* is a string literal with an f prefix."""
    import tokenize_rt
    literal_prefix, _ = tokenize_rt.parse_string_literal(token.src)
    return 'f' in literal_prefix.lower()
(token)
24,471
future_fstrings
_make_fstring
null
def _make_fstring(tokens):
    """Rewrite a run of STRING tokens containing at least one f-string
    into plain strings followed by a synthetic ``.format(...)`` token."""
    import tokenize_rt

    new_tokens = []
    exprs = []
    for i, token in enumerate(tokens):
        if token.name == 'STRING' and _is_f(token):
            prefix, s = tokenize_rt.parse_string_literal(token.src)
            parts = []
            try:
                _fstring_parse_outer(s, 0, 0, parts, exprs)
            except SyntaxError as e:
                # attach the previous token so the caller can point at
                # the right source position
                raise TokenSyntaxError(e, tokens[i - 1])
            if 'r' in prefix.lower():
                # raw literal: re-escape backslashes in the kept parts
                parts = [s.replace('\\', '\\\\') for s in parts]
            token = token._replace(src=''.join(parts))
        elif token.name == 'STRING':
            # plain string adjacent to an f-string: escape its braces so
            # the appended .format() call leaves it untouched
            new_src = token.src.replace('{', '{{').replace('}', '}}')
            token = token._replace(src=new_src)
        new_tokens.append(token)

    # parenthesize each expression and emit one trailing .format(...) call
    exprs = ('({})'.format(expr) for expr in exprs)
    format_src = '.format({})'.format(', '.join(exprs))
    new_tokens.append(tokenize_rt.Token('FORMAT', src=format_src))
    return new_tokens
(tokens)
24,472
future_fstrings
_natively_supports_fstrings
null
def _natively_supports_fstrings():
    """Detect whether the running interpreter parses f-string syntax itself."""
    try:
        result = eval('f"hi"')
    except SyntaxError:
        # pre-3.6 interpreters reject the syntax outright
        return False
    return result == 'hi'
()
24,475
encodings.utf_8
decode
null
def decode(input, errors='strict'):
    """Stateless UTF-8 decode of *input*.

    Returns ``(decoded string, bytes consumed)``.  The ``final=True``
    flag makes an incomplete trailing sequence an error rather than
    leaving it for a later call.
    """
    decoded, consumed = codecs.utf_8_decode(input, errors, True)
    return decoded, consumed
(input, errors='strict')
24,477
future_fstrings
decode
null
def decode(b, errors='strict'): import tokenize_rt # pip install future-fstrings[rewrite] u, length = utf_8.decode(b, errors) tokens = tokenize_rt.src_to_tokens(u) to_replace = [] start = end = seen_f = None for i, token in enumerate(tokens): if start is None: if token.name == 'STRING': start, end = i, i + 1 seen_f = _is_f(token) elif token.name == 'STRING': end = i + 1 seen_f |= _is_f(token) elif token.name not in tokenize_rt.NON_CODING_TOKENS: if seen_f: to_replace.append((start, end)) start = end = seen_f = None for start, end in reversed(to_replace): try: tokens[start:end] = _make_fstring(tokens[start:end]) except TokenSyntaxError as e: msg = str(e.e) line = u.splitlines()[e.token.line - 1] bts = line.encode('UTF-8')[:e.token.utf8_byte_offset] indent = len(bts.decode('UTF-8')) raise SyntaxError(msg + '\n\n' + line + '\n' + ' ' * indent + '^') return tokenize_rt.tokens_to_src(tokens), length
(b, errors='strict')
24,479
future_fstrings
main
null
def main(argv=None):
    """CLI entry point: print the f-string-transformed source of a file."""
    arg_parser = argparse.ArgumentParser(description='Prints transformed source.')
    arg_parser.add_argument('filename')
    parsed = arg_parser.parse_args(argv)
    with open(parsed.filename, 'rb') as source_file:
        transformed, _ = fstring_decode(source_file.read())
    # write bytes when stdout has a binary buffer (py3), else the stream itself
    out = getattr(sys.stdout, 'buffer', sys.stdout)
    out.write(transformed.encode('UTF-8'))
(argv=None)
24,480
future_fstrings
register
null
def register():  # pragma: no cover
    """Register this package's codec lookup with the codecs machinery."""
    # presumably codec_map maps encoding names to CodecInfo objects, so
    # .get returns None for unknown names and codecs falls through to
    # the next registered search function — TODO confirm against the
    # codec_map definition
    codecs.register(codec_map.get)
()
24,490
aws_lambda_context
LambdaClientContext
null
class LambdaClientContext:
    """Typed container for the AWS Lambda client context object."""
    # mobile client metadata (type declared elsewhere in this module)
    client: LambdaClientContextMobileClient
    # caller-supplied custom key/value pairs
    custom: LambdaDict
    # caller-supplied environment key/value pairs
    env: LambdaDict
()