Python
def unpack_sequence(data: typing.Union[ASN1Value, bytes]) -> typing.Dict[int, ASN1Value]:
    """ Helper function that can unpack a sequence as either its raw bytes or the already unpacked ASN.1 tuple. """
    if not isinstance(data, ASN1Value):
        data = unpack_asn1(data)[0]

    return unpack_asn1_tagged_sequence(data)
Python
def pack(self) -> bytes:
    """ Packs the NegoData as a byte string. """
    return pack_asn1_sequence([
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_octet_string(self.nego_token)),
    ])
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "NegoData":
    """ Unpacks the NegoData TLV value. """
    nego_data = unpack_sequence(b_data)
    nego_token = get_sequence_value(nego_data, 0, 'NegoData', 'negoToken', unpack_asn1_octet_string)

    return NegoData(nego_token)
Python
def pack(self) -> bytes:
    """ Packs the TSRequest as a byte string. """
    elements = [
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_integer(self.version))
    ]
    if self.nego_tokens:
        nego_tokens = [token.pack() for token in self.nego_tokens]
        elements.append(pack_asn1(TagClass.context_specific, True, 1, pack_asn1_sequence(nego_tokens)))

    value_map = [
        (2, self.auth_info, pack_asn1_octet_string),
        (3, self.pub_key_auth, pack_asn1_octet_string),
        (4, self.error_code, pack_asn1_integer),
        (5, self.client_nonce, pack_asn1_octet_string),
    ]
    for tag, value, pack_func in value_map:
        if value is not None:
            elements.append(pack_asn1(TagClass.context_specific, True, tag, pack_func(value)))

    return pack_asn1_sequence(elements)
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSRequest":
    """ Unpacks the TSRequest TLV value. """
    request = unpack_sequence(b_data)
    version = get_sequence_value(request, 0, 'TSRequest', 'version', unpack_asn1_integer)

    nego_tokens = get_sequence_value(request, 1, 'TSRequest', 'negoTokens')
    if nego_tokens is not None:
        remaining_bytes = nego_tokens.b_data
        nego_tokens = []
        while remaining_bytes:
            nego_tokens.append(NegoData.unpack(remaining_bytes))
            remaining_bytes = unpack_asn1(remaining_bytes)[1]

    auth_info = get_sequence_value(request, 2, 'TSRequest', 'authInfo', unpack_asn1_octet_string)
    pub_key_auth = get_sequence_value(request, 3, 'TSRequest', 'pubKeyAuth', unpack_asn1_octet_string)
    error_code = get_sequence_value(request, 4, 'TSRequest', 'errorCode', unpack_asn1_integer)
    client_nonce = get_sequence_value(request, 5, 'TSRequest', 'clientNonce', unpack_asn1_octet_string)

    return TSRequest(version, nego_tokens=nego_tokens, auth_info=auth_info, pub_key_auth=pub_key_auth,
                     error_code=error_code, client_nonce=client_nonce)
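To see how the pair fits together, here is a minimal round-trip sketch. It assumes TSRequest and NegoData from the snippets above are in scope; the version number 6 and the token bytes are placeholder values, not a real SPNEGO exchange.

Python
request = TSRequest(6, nego_tokens=[NegoData(b"spnego-token")])
b_request = request.pack()  # the encoded TSRequest ready for the CredSSP TLS channel

parsed = TSRequest.unpack(b_request)
assert parsed.version == 6
assert parsed.nego_tokens[0].nego_token == b"spnego-token"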
Python
@property
def cred_type(self) -> int:
    """ The credential type of the wrapped credentials as an integer. """
    if isinstance(self.credentials, TSPasswordCreds):
        return 1
    elif isinstance(self.credentials, TSSmartCardCreds):
        return 2
    elif isinstance(self.credentials, TSRemoteGuardCreds):
        return 6
    else:
        raise ValueError('Invalid credential type set')
Python
def pack(self) -> bytes:
    """ Packs the TSCredentials as a byte string. """
    cred_type = self.cred_type
    credentials = self.credentials.pack()

    return pack_asn1_sequence([
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_integer(cred_type)),
        pack_asn1(TagClass.context_specific, True, 1, pack_asn1_octet_string(credentials)),
    ])
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSCredentials":
    """ Unpacks the TSCredentials TLV value. """
    credential = unpack_sequence(b_data)
    cred_type = get_sequence_value(credential, 0, 'TSCredentials', 'credType', unpack_asn1_integer)
    credentials_raw = get_sequence_value(credential, 1, 'TSCredentials', 'credentials', unpack_asn1_octet_string)

    cred_class = {
        1: TSPasswordCreds,
        2: TSSmartCardCreds,
        6: TSRemoteGuardCreds,
    }.get(cred_type)
    if not cred_class:
        raise ValueError('Unknown credType %s in TSCredentials, cannot unpack' % cred_type)

    credentials = cred_class.unpack(credentials_raw)

    return TSCredentials(credentials)
Python
def pack(self) -> bytes:
    """ Packs the TSPasswordCreds as a byte string. """
    elements = []
    for idx, value in enumerate([self.domain_name, self.username, self.password]):
        b_value = value.encode('utf-16-le')
        elements.append(pack_asn1(TagClass.context_specific, True, idx, pack_asn1_octet_string(b_value)))

    return pack_asn1_sequence(elements)
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSPasswordCreds":
    """ Unpacks the TSPasswordCreds TLV value. """
    creds = unpack_sequence(b_data)
    domain_name = unpack_text_field(creds, 0, 'TSPasswordCreds', 'domainName')
    username = unpack_text_field(creds, 1, 'TSPasswordCreds', 'userName')
    password = unpack_text_field(creds, 2, 'TSPasswordCreds', 'password')

    return TSPasswordCreds(domain_name, username, password)
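TSCredentials and TSPasswordCreds combine naturally: the wrapper derives credType from the class of the inner credential. A round-trip sketch with throwaway account values, assuming the classes above are in scope:

Python
creds = TSCredentials(TSPasswordCreds("DOMAIN", "user", "Password01"))
assert creds.cred_type == 1  # TSPasswordCreds maps to credType 1

parsed = TSCredentials.unpack(creds.pack())
assert parsed.credentials.username == "user"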
Python
def pack(self) -> bytes:
    """ Packs the TSSmartCardCreds as a byte string. """
    elements = [
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_octet_string(self.pin.encode('utf-16-le'))),
        pack_asn1(TagClass.context_specific, True, 1, self.csp_data.pack()),
    ]
    for idx, value in [(2, self.user_hint), (3, self.domain_hint)]:
        if value:
            b_value = value.encode('utf-16-le')
            elements.append(pack_asn1(TagClass.context_specific, True, idx, pack_asn1_octet_string(b_value)))

    return pack_asn1_sequence(elements)
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSSmartCardCreds":
    """ Unpacks the TSSmartCardCreds TLV value. """
    creds = unpack_sequence(b_data)
    pin = unpack_text_field(creds, 0, 'TSSmartCardCreds', 'pin')
    csp_data = get_sequence_value(creds, 1, 'TSSmartCardCreds', 'cspData', TSCspDataDetail.unpack)
    user_hint = unpack_text_field(creds, 2, 'TSSmartCardCreds', 'userHint', default=None)
    domain_hint = unpack_text_field(creds, 3, 'TSSmartCardCreds', 'domainHint', default=None)

    return TSSmartCardCreds(pin, csp_data, user_hint, domain_hint)
Python
def pack(self) -> bytes:
    """ Packs the TSCspDataDetail as a byte string. """
    elements = [
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_integer(self.key_spec)),
    ]
    value_map = [
        (1, self.card_name),
        (2, self.reader_name),
        (3, self.container_name),
        (4, self.csp_name),
    ]
    for idx, value in value_map:
        if value:
            b_value = value.encode('utf-16-le')
            elements.append(pack_asn1(TagClass.context_specific, True, idx, pack_asn1_octet_string(b_value)))

    return pack_asn1_sequence(elements)
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSCspDataDetail":
    """ Unpacks the TSCspDataDetail TLV value. """
    csp_data = unpack_sequence(b_data)
    key_spec = get_sequence_value(csp_data, 0, 'TSCspDataDetail', 'keySpec', unpack_asn1_integer)
    card_name = unpack_text_field(csp_data, 1, 'TSCspDataDetail', 'cardName', default=None)
    reader_name = unpack_text_field(csp_data, 2, 'TSCspDataDetail', 'readerName', default=None)
    container_name = unpack_text_field(csp_data, 3, 'TSCspDataDetail', 'containerName', default=None)
    csp_name = unpack_text_field(csp_data, 4, 'TSCspDataDetail', 'cspName', default=None)

    return TSCspDataDetail(key_spec, card_name, reader_name, container_name, csp_name)
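The smart-card variant nests a TSCspDataDetail inside the TSSmartCardCreds. A sketch with illustrative values only: key spec 1 corresponds to AT_KEYEXCHANGE in wincrypt, and the container and CSP names are example strings.

Python
csp = TSCspDataDetail(1, None, None, "container-1", "Microsoft Base Smart Card Crypto Provider")
sc_creds = TSSmartCardCreds("1234", csp, None, None)

parsed = TSSmartCardCreds.unpack(sc_creds.pack())
assert parsed.pin == "1234"
assert parsed.csp_data.key_spec == 1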
Python
def pack(self) -> bytes:
    """ Packs the TSRemoteGuardCreds as a byte string. """
    elements = [pack_asn1(TagClass.context_specific, True, 0, self.logon_cred.pack())]
    if self.supplemental_creds is not None:
        supplemental_creds = [cred.pack() for cred in self.supplemental_creds]
        elements.append(pack_asn1(TagClass.context_specific, True, 1, pack_asn1_sequence(supplemental_creds)))

    return pack_asn1_sequence(elements)
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSRemoteGuardCreds":
    """ Unpacks the TSRemoteGuardCreds TLV value. """
    cred = unpack_sequence(b_data)
    logon_cred = get_sequence_value(cred, 0, 'TSRemoteGuardCreds', 'logonCred', TSRemoteGuardPackageCred.unpack)

    raw_supplemental_creds = get_sequence_value(cred, 1, 'TSRemoteGuardCreds', 'supplementalCreds')
    if raw_supplemental_creds:
        supplemental_creds = []
        remaining_bytes = raw_supplemental_creds.b_data
        while remaining_bytes:
            supplemental_creds.append(TSRemoteGuardPackageCred.unpack(remaining_bytes))
            remaining_bytes = unpack_asn1(remaining_bytes)[1]
    else:
        supplemental_creds = None

    return TSRemoteGuardCreds(logon_cred, supplemental_creds)
Python
def pack(self) -> bytes:
    """ Packs the TSRemoteGuardPackageCred as a byte string. """
    b_package_name = self.package_name.encode('utf-16-le')

    return pack_asn1_sequence([
        pack_asn1(TagClass.context_specific, True, 0, pack_asn1_octet_string(b_package_name)),
        pack_asn1(TagClass.context_specific, True, 1, pack_asn1_octet_string(self.cred_buffer)),
    ])
Python
def unpack(b_data: typing.Union[ASN1Value, bytes]) -> "TSRemoteGuardPackageCred":
    """ Unpacks the TSRemoteGuardPackageCred TLV value. """
    package_cred = unpack_sequence(b_data)
    package_name = unpack_text_field(package_cred, 0, 'TSRemoteGuardPackageCred', 'packageName')
    cred_buffer = get_sequence_value(package_cred, 1, 'TSRemoteGuardPackageCred', 'credBuffer',
                                     unpack_asn1_octet_string)

    return TSRemoteGuardPackageCred(package_name, cred_buffer)
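TSRemoteGuardCreds follows the same optional-field pattern: the logon credential is mandatory while supplemental credentials may be omitted. A round-trip sketch with placeholder package names and buffers:

Python
logon = TSRemoteGuardPackageCred("Kerberos", b"logon-buffer")
rg_creds = TSRemoteGuardCreds(logon, [TSRemoteGuardPackageCred("NTLM", b"extra-buffer")])

parsed = TSRemoteGuardCreds.unpack(rg_creds.pack())
assert parsed.logon_cred.package_name == "Kerberos"
assert parsed.supplemental_creds[0].cred_buffer == b"extra-buffer"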
Python
def pack_mech_type_list(
    mech_list: typing.Union[str, typing.List[str], typing.Tuple[str, ...], typing.Set[str]],
) -> bytes:
    """Packs a list of OIDs for the mechListMIC value.

    Will pack a list of object identifiers to the raw byte string value for the mechListMIC.

    Args:
        mech_list: The list of OIDs to pack.

    Returns:
        bytes: The byte string of the packed ASN.1 MechTypeList SEQUENCE OF value.
    """
    if not isinstance(mech_list, (list, tuple, set)):
        mech_list = [mech_list]

    return pack_asn1_sequence([pack_asn1_object_identifier(oid) for oid in mech_list])
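As a usage sketch, packing the NTLM mechanism OID (1.3.6.1.4.1.311.2.2.10) shows that a bare string is wrapped into a single-element list automatically:

Python
b_list = pack_mech_type_list("1.3.6.1.4.1.311.2.2.10")
assert b_list == pack_mech_type_list(["1.3.6.1.4.1.311.2.2.10"])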
Python
def unpack_token(
    b_data: bytes,
    mech: typing.Optional[GSSMech] = None,
    unwrap: bool = False,
    encoding: typing.Optional[str] = None,
) -> typing.Any:
    """Unpacks a raw GSSAPI/SPNEGO token to a Python object.

    Unpacks the byte string into a Python object that represents the token passed in. This can return many
    different token types such as:

        * NTLM message(s)
        * SPNEGO/Negotiate init or response
        * Kerberos message(s)

    Args:
        b_data: The raw byte string to unpack.
        mech: A hint as to what the byte string is for.
        unwrap: Whether to unwrap raw bytes to a structured message or return the raw token bytes.
        encoding: Optional encoding used when unwrapping NTLM messages.

    Returns:
        any: The unpacked SPNEGO, Kerberos, or NTLM token.
    """
    # First check if the message is an NTLM message.
    if b_data.startswith(b"NTLMSSP\x00"):
        if unwrap:
            return NTLMMessage.unpack(b_data, encoding=encoding)
        else:
            return b_data

    if mech and mech.is_kerberos_oid:
        # A Kerberos value inside an InitialContextToken contains 2 bytes which we ignore.
        raw_data = unpack_asn1(b_data[2:])[0]
    else:
        raw_data = unpack_asn1(b_data)[0]

    if raw_data.tag_class == TagClass.application and mech and mech.is_kerberos_oid:
        return KerberosV5Msg.unpack(unpack_asn1(raw_data.b_data)[0])

    elif raw_data.tag_class == TagClass.application:
        # The first token is encapsulated in an InitialContextToken.
        if raw_data.tag_number != 0:
            raise ValueError("Expecting a tag number of 0 not %s for InitialContextToken" % raw_data.tag_number)

        initial_context_token = InitialContextToken.unpack(raw_data.b_data)

        # unwrap=True is called from python -m spnego and we don't want to lose any info in the output.
        if unwrap:
            return initial_context_token

        this_mech: typing.Optional[GSSMech]
        try:
            this_mech = GSSMech.from_oid(initial_context_token.this_mech)
        except ValueError:
            this_mech = None

        # We currently only support SPNEGO, or raw Kerberos here.
        if this_mech and (this_mech == GSSMech.spnego or (this_mech.is_kerberos_oid and unwrap)):
            return unpack_token(initial_context_token.inner_context_token, mech=this_mech)

        return b_data

    elif raw_data.tag_class == TagClass.context_specific:
        # This is a raw NegotiationToken that is wrapped in a CHOICE of 0 or 1.
        if raw_data.tag_number == 0:
            return NegTokenInit.unpack(raw_data.b_data)
        elif raw_data.tag_number == 1:
            return NegTokenResp.unpack(raw_data.b_data)
        else:
            raise ValueError("Unknown NegotiationToken CHOICE %d, only expecting 0 or 1" % raw_data.tag_number)

    elif unwrap:
        # Could also be the ASN.1 Sequence of the Kerberos message.
        return KerberosV5Msg.unpack(raw_data)

    else:
        return b_data
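A small sketch of the NTLM fast path: without unwrap=True the NTLMSSP magic causes the raw bytes to be returned untouched. The message body here is padding, not a valid Negotiate message.

Python
b_token = b"NTLMSSP\x00\x01\x00\x00\x00" + b"\x00" * 28
assert unpack_token(b_token) == b_token  # raw passthrough when unwrap is False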
Python
def unpack(b_data: bytes) -> "InitialContextToken":
    """ Unpacks the InitialContextToken TLV value. """
    this_mech, inner_context_token = unpack_asn1(b_data)
    mech = unpack_asn1_object_identifier(this_mech)

    return InitialContextToken(mech, inner_context_token)
Python
def pack(self) -> bytes:
    """ Packs the NegTokenInit as a byte string. """
    def pack_elements(value_map):
        elements = []
        for tag, value, pack_func in value_map:
            if value is not None:
                elements.append(pack_asn1(TagClass.context_specific, True, tag, pack_func(value)))

        return elements

    req_flags = struct.pack("B", self.req_flags) if self.req_flags is not None else None
    base_map = [
        (0, self.mech_types, pack_mech_type_list),
        (1, req_flags, pack_asn1_bit_string),
        (2, self.mech_token, pack_asn1_octet_string),
    ]

    # The placement of the mechListMIC is dependent on whether we are packing a NegTokenInit with or without the
    # negHints field.
    neg_hints_map = [
        (0, self.hint_name, pack_asn1_general_string),
        (1, self.hint_address, pack_asn1_octet_string),
    ]
    neg_hints = pack_elements(neg_hints_map)
    if neg_hints:
        base_map.append((3, neg_hints, pack_asn1_sequence))
        base_map.append((4, self.mech_list_mic, pack_asn1_octet_string))
    else:
        base_map.append((3, self.mech_list_mic, pack_asn1_octet_string))

    init_sequence = pack_elements(base_map)

    # The NegTokenInit will always be wrapped in an InitialContextToken -> NegotiationToken - CHOICE 0.
    b_data = pack_asn1_sequence(init_sequence)
    return InitialContextToken(GSSMech.spnego.value, pack_asn1(TagClass.context_specific, True, 0, b_data)).pack()
Python
def unpack(b_data: bytes) -> "NegTokenInit":
    """ Unpacks the NegTokenInit TLV value. """
    neg_seq = unpack_asn1_tagged_sequence(unpack_asn1(b_data)[0])

    mech_types = [unpack_asn1_object_identifier(m)
                  for m in get_sequence_value(neg_seq, 0, 'NegTokenInit', 'mechTypes', unpack_asn1_sequence) or []]

    req_flags = get_sequence_value(neg_seq, 1, 'NegTokenInit', 'reqFlags', unpack_asn1_bit_string)
    if req_flags:
        # Can be up to 32 bits in length but RFC 4178 states "Implementations should not expect to receive exactly
        # 32 bits in an encoding of ContextFlags." The spec also documents req flags up to 6 so let's just get the
        # last byte. In reality we shouldn't ever receive this but it's left here for posterity.
        req_flags = ContextFlags(bytearray(req_flags)[-1])

    mech_token = get_sequence_value(neg_seq, 2, 'NegTokenInit', 'mechToken', unpack_asn1_octet_string)

    hint_name = hint_address = mech_list_mic = None
    if 3 in neg_seq:
        # Microsoft helpfully sends a NegTokenInit2 payload which sets 'negHints [3] NegHints OPTIONAL' and the
        # mechListMIC is actually at the 4th sequence entry. Because the NegTokenInit2 has the same choice in
        # NegotiationToken as NegTokenInit ([0]) we can only differentiate when unpacking based on the class/tags.
        tag_class = neg_seq[3].tag_class
        tag_number = neg_seq[3].tag_number

        if tag_class == TagClass.universal and tag_number == TypeTagNumber.sequence:
            neg_hints = unpack_asn1_tagged_sequence(neg_seq[3].b_data)

            # Windows 2000, 2003, and XP put the SPN encoded in the OEM code page; because there's no sane way
            # to decode this without prior knowledge, a GeneralString stays a byte string in Python.
            # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-spng/211417c4-11ef-46c0-a8fb-f178a51c2088#Appendix_A_5
            hint_name = get_sequence_value(neg_hints, 0, 'NegHints', 'hintName', unpack_asn1_general_string)
            hint_address = get_sequence_value(neg_hints, 1, 'NegHints', 'hintAddress', unpack_asn1_octet_string)
        else:
            # Wasn't a sequence, should be mechListMIC.
            mech_list_mic = get_sequence_value(neg_seq, 3, 'NegTokenInit', 'mechListMIC', unpack_asn1_octet_string)

    if not mech_list_mic:
        mech_list_mic = get_sequence_value(neg_seq, 4, 'NegTokenInit2', 'mechListMIC', unpack_asn1_octet_string)

    return NegTokenInit(mech_types, req_flags, mech_token, hint_name, hint_address, mech_list_mic)
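Because pack always emits the full InitialContextToken wrapping, the simplest way to round-trip a NegTokenInit is through unpack_token. The constructor arguments below follow the positional order used in the unpack above; the NTLM OID and the mech token bytes are placeholders.

Python
init = NegTokenInit(["1.3.6.1.4.1.311.2.2.10"], None, b"NTLMSSP\x00", None, None, None)

parsed = unpack_token(init.pack())
assert isinstance(parsed, NegTokenInit)
assert parsed.mech_types == ["1.3.6.1.4.1.311.2.2.10"]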
Python
def pack(self) -> bytes:
    """ Packs the NegTokenResp as a byte string. """
    value_map: typing.List[typing.Tuple[int, typing.Any, typing.Callable[[typing.Any], bytes]]] = [
        (0, self.neg_state, pack_asn1_enumerated),
        (1, self.supported_mech, pack_asn1_object_identifier),
        (2, self.response_token, pack_asn1_octet_string),
        (3, self.mech_list_mic, pack_asn1_octet_string),
    ]
    elements = []
    for tag, value, pack_func in value_map:
        if value is not None:
            elements.append(pack_asn1(TagClass.context_specific, True, tag, pack_func(value)))

    # The NegTokenResp will always be wrapped in a NegotiationToken - CHOICE 1.
    b_data = pack_asn1_sequence(elements)

    return pack_asn1(TagClass.context_specific, True, 1, b_data)
Python
def unpack(b_data: bytes) -> "NegTokenResp":
    """ Unpacks the NegTokenResp TLV value. """
    neg_seq = unpack_asn1_tagged_sequence(unpack_asn1(b_data)[0])

    neg_state = get_sequence_value(neg_seq, 0, 'NegTokenResp', 'negState', unpack_asn1_enumerated)
    if neg_state is not None:
        neg_state = NegState(neg_state)

    supported_mech = get_sequence_value(neg_seq, 1, 'NegTokenResp', 'supportedMech', unpack_asn1_object_identifier)
    response_token = get_sequence_value(neg_seq, 2, 'NegTokenResp', 'responseToken', unpack_asn1_octet_string)
    mech_list_mic = get_sequence_value(neg_seq, 3, 'NegTokenResp', 'mechListMIC', unpack_asn1_octet_string)

    return NegTokenResp(neg_state, supported_mech, response_token, mech_list_mic)
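NegTokenResp round-trips the same way since unpack_token recognises the CHOICE 1 wrapper. NegState(1) corresponds to accept-incomplete in RFC 4178; the enum being constructible from the raw value matches its use in the unpack above, and the OID and token bytes are placeholders.

Python
resp = NegTokenResp(NegState(1), "1.3.6.1.4.1.311.2.2.10", b"response", None)

parsed = unpack_token(resp.pack())
assert isinstance(parsed, NegTokenResp)
assert parsed.supported_mech == "1.3.6.1.4.1.311.2.2.10"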
Python
def _rebuild_context_list(
    self,
    mech_types: typing.Optional[typing.List[str]] = None,
) -> typing.List[str]:
    """ Builds a new context list that is available to the client. """
    context_kwargs = {
        'username': self.username,
        'password': self.password,
        'hostname': self._hostname,
        'service': self._service,
        'channel_bindings': self.channel_bindings,
        'context_req': self.context_req,
        'usage': self.usage,
        'options': self.options,
        '_is_wrapped': True,
    }
    gssapi_protocols = GSSAPIProxy.available_protocols(options=self.options)
    all_protocols = self._preferred_mech_list()

    self._context_list = {}
    mech_list = []
    last_err = None
    for mech in all_protocols:
        if mech_types and mech.value not in mech_types:
            continue

        protocol = mech.name

        try:
            proxy_obj = GSSAPIProxy if protocol in gssapi_protocols else NTLMProxy
            log.debug("Checking %s with %s when building SPNEGO mech list" % (proxy_obj.__name__, protocol))
            context = proxy_obj(protocol=protocol, **context_kwargs)
            first_token = context.step() if self.usage == 'initiate' else None
        except Exception as e:
            last_err = e
            log.debug("Failed to create context for SPNEGO protocol %s: %s", protocol, str(e))
            continue

        self._context_list[mech] = (context, first_token)
        mech_list.append(mech.value)

    if not mech_list:
        raise BadMechanismError(context_msg="Unable to negotiate common mechanism", base_error=last_err)

    return mech_list
Python
def _get_credential_file() -> typing.Optional[str]:
    """Get the path to the NTLM credential store.

    Returns the path to the NTLM credential store specified by the environment variable `NTLM_USER_FILE`.

    Returns:
        Optional[str]: The path to the NTLM credential file or None if not set or found.
    """
    user_file_path = os.environ.get('NTLM_USER_FILE', None)
    if not user_file_path:
        return None

    file_path = to_text(user_file_path, encoding='utf-8')
    if os.path.isfile(file_path):
        return file_path

    return None
Python
def _get_credential(
    store: str,
    domain: typing.Optional[str] = None,
    username: typing.Optional[str] = None,
) -> typing.Tuple[str, str, bytes, bytes]:
    """Look up NTLM credentials from the common flat file.

    Retrieves the LM and NT hash for use with authentication or validating a credential from an initiator.

    Each line in the store can be in the Heimdal format `DOMAIN:USER:PASSWORD` like::

        testdom:testuser:Password01
        :[email protected]:Password01

    Or it can use the `smbpasswd`_ file format `USERNAME:UID:LM_HASH:NT_HASH:ACCT_FLAGS:TIMESTAMP` like::

        testuser:1000:278623D830DABE161104594F8C2EF12B:C3C6F4FD8A02A6C1268F1A8074B6E7E0:[U]:LCT-1589398321
        TESTDOM\testuser:1000:4588C64B89437893AAD3B435B51404EE:65202355FA01AEF26B89B19E00F52679:[U]:LCT-1589398321
        [email protected]:1000:00000000000000000000000000000000:8ADB9B997580D69E69CAA2BBB68F4697:[U]:LCT-1589398321

    While only the `USERNAME`, `LM_HASH`, and `NT_HASH` fields are used, the colons are still required to
    differentiate between the two formats. See `ntlm hash generator`_ for ways to generate the `LM_HASH` and
    `NT_HASH`.

    The username is case insensitive but the format of the domain and user part must match up with the value used
    as the username specified by the caller.

    While each line can use a different format, it is recommended to stick to one throughout the file. The same
    env var and format can also be read with gss-ntlmssp.

    Args:
        store: The credential store to look up the credential from.
        domain: The domain for the user to get the credentials for. Should be `None` for a user in the UPN form.
        username: The username to get the credentials for. If omitted then the first entry in the store is used.

    Returns:
        Tuple[str, str, bytes, bytes]: The domain, username, LM, and NT hash of the user specified.

    .. _smbpasswd:
        https://www.samba.org/samba/docs/current/man-html/smbpasswd.5.html

    .. _ntlm hash generator:
        https://asecuritysite.com/encryption/lmhash
    """
    if not store:
        raise OperationNotAvailableError(context_msg="Retrieving NTLM store without NTLM_USER_FILE set to a "
                                                     "filepath")

    domain = domain or ""

    def store_lines(text):
        for line in text.splitlines():
            line_split = line.split(':')

            if len(line_split) == 3:
                yield line_split[0], line_split[1], line_split[2], None, None

            elif len(line_split) == 6:
                domain_entry, user_entry = split_username(line_split[0])
                lm_entry = base64.b16decode(line_split[2].upper())
                nt_entry = base64.b16decode(line_split[3].upper())

                yield domain_entry or "", user_entry, None, lm_entry, nt_entry

    with open(store, mode='rb') as fd:
        cred_text = fd.read().decode()

        for line_domain, line_user, line_password, lm_hash, nt_hash in store_lines(cred_text):
            if not username or (username.upper() == line_user.upper() and domain.upper() == line_domain.upper()):
                # The Heimdal format uses the password so if the LM or NT hash isn't set generate it ourselves.
                if not lm_hash:
                    lm_hash = lmowfv1(line_password)
                if not nt_hash:
                    nt_hash = ntowfv1(line_password)

                # Favour the explicit username/password value, otherwise use what was in the credential file.
                if not username:
                    username = line_user

                if not domain:
                    domain = line_domain or None

                return domain, username, lm_hash, nt_hash

        else:
            raise SpnegoError(ErrorCode.failure, context_msg="Failed to find any matching credential in "
                                                             "NTLM_USER_FILE credential store.")
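A sketch of the lookup flow: write a Heimdal-format line to a temporary file, point NTLM_USER_FILE at it, and resolve the hashes. The account values are throwaway; the hashes come back derived from the password via lmowfv1/ntowfv1.

Python
import os
import tempfile

with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as fd:
    fd.write("testdom:testuser:Password01\n")

os.environ["NTLM_USER_FILE"] = fd.name
store = _get_credential_file()

domain, username, lm_hash, nt_hash = _get_credential(store, "testdom", "testuser")
assert (domain, username) == ("testdom", "testuser")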
Python
def _get_workstation() -> typing.Optional[str]:
    """Get the current workstation name.

    This gets the current workstation name that respects `NETBIOS_COMPUTER_NAME`. The env var is used by the
    library that gss-ntlmssp calls, and honouring it here keeps this Python implementation closer to gss-ntlmssp
    in its behaviour.

    Returns:
        Optional[str]: The workstation to supply in the NTLM authentication message or None.
    """
    if 'NETBIOS_COMPUTER_NAME' in os.environ:
        workstation = os.environ['NETBIOS_COMPUTER_NAME']
    else:
        workstation = socket.gethostname().upper()

    # An empty workstation should be None so we don't set it in the message.
    return to_text(workstation) if workstation else None
Python
def _step_accept_negotiate(self, token: bytes) -> bytes:
    """ Process the Negotiate message from the initiator. """
    negotiate = Negotiate.unpack(token)

    flags = negotiate.flags | NegotiateFlags.request_target | NegotiateFlags.ntlm | \
        NegotiateFlags.always_sign | NegotiateFlags.target_info | NegotiateFlags.target_type_server

    # Make sure either UNICODE or OEM is set, not both.
    if flags & NegotiateFlags.unicode:
        flags &= ~NegotiateFlags.oem
    elif flags & NegotiateFlags.oem == 0:
        raise SpnegoError(ErrorCode.failure, context_msg="Neither NEGOTIATE_OEM nor NEGOTIATE_UNICODE flags were "
                                                         "set, cannot derive encoding for text fields")

    if flags & NegotiateFlags.extended_session_security:
        flags &= ~NegotiateFlags.lm_key

    server_challenge = os.urandom(8)
    target_name = to_text(socket.gethostname()).upper()

    target_info = TargetInfo()
    target_info[AvId.nb_computer_name] = target_name
    target_info[AvId.nb_domain_name] = "WORKSTATION"
    target_info[AvId.dns_computer_name] = to_text(socket.getfqdn())
    target_info[AvId.timestamp] = FileTime.now()

    challenge = Challenge(flags, server_challenge, target_name=target_name, target_info=target_info)

    self._temp_msg = {
        'negotiate': negotiate,
        'challenge': challenge,
    }

    return challenge.pack()
Python
def _step_accept_authenticate(self, token: bytes) -> None:
    """ Process the Authenticate message from the initiator. """
    challenge = self._temp_msg['challenge']
    server_challenge = challenge.server_challenge
    auth = Authenticate.unpack(token)

    # TODO: Add anonymous user support.
    if not auth.user_name or (not auth.nt_challenge_response and (not auth.lm_challenge_response or
                                                                  auth.lm_challenge_response == b"\x00")):
        raise OperationNotAvailableError(context_msg="Anonymous user authentication not implemented")

    self._credential = _NTLMCredential(domain=auth.domain_name, username=auth.user_name)
    expected_mic = None

    if auth.nt_challenge_response and len(auth.nt_challenge_response) > 24:
        nt_hash = ntowfv2(self._credential.username, self._credential.nt_hash, self._credential.domain)

        nt_challenge = NTClientChallengeV2.unpack(auth.nt_challenge_response[16:])
        time = nt_challenge.time_stamp
        client_challenge = nt_challenge.challenge_from_client
        target_info = nt_challenge.av_pairs

        expected_nt, expected_lm, key_exchange_key = compute_response_v2(
            nt_hash, server_challenge, client_challenge, time, target_info)

        if self.channel_bindings:
            if AvId.channel_bindings not in target_info:
                raise BadBindingsError(context_msg="Acceptor bindings specified but not present in initiator "
                                                   "response")

            expected_bindings = target_info[AvId.channel_bindings]
            actual_bindings = md5(self.channel_bindings.pack())
            if expected_bindings not in [actual_bindings, b"\x00" * 16]:
                raise BadBindingsError(context_msg="Acceptor bindings do not match initiator bindings")

        if target_info.get(AvId.flags, 0) & AvFlags.mic:
            expected_mic = auth.mic

    else:
        if not self._nt_v1:
            raise InvalidTokenError(context_msg="Acceptor settings are set to reject NTv1 responses")

        elif not auth.nt_challenge_response and not self._lm:
            raise InvalidTokenError(context_msg="Acceptor settings are set to reject LM responses")

        client_challenge = None
        if auth.flags & NegotiateFlags.extended_session_security:
            client_challenge = auth.lm_challenge_response[:8]

        expected_nt, expected_lm, key_exchange_key = compute_response_v1(
            auth.flags, self._credential.nt_hash, self._credential.lm_hash, server_challenge, client_challenge,
            no_lm_response=not self._lm)

    auth_success = False

    if auth.nt_challenge_response:
        auth_success = auth.nt_challenge_response == expected_nt

    elif auth.lm_challenge_response:
        auth_success = auth.lm_challenge_response == expected_lm

    if not auth_success:
        raise InvalidTokenError(context_msg="Invalid NTLM response from initiator")

    if auth.flags & NegotiateFlags.key_exch and \
            (auth.flags & NegotiateFlags.sign or auth.flags & NegotiateFlags.seal):
        self._session_key = rc4k(key_exchange_key, auth.encrypted_random_session_key)

    else:
        self._session_key = key_exchange_key

    if expected_mic:
        auth.mic = b"\x00" * 16
        actual_mic = self._calculate_mic(self._temp_msg['negotiate'].pack(), challenge.pack(), auth.pack())

        if actual_mic != expected_mic:
            raise InvalidTokenError(context_msg="Invalid MIC in NTLM authentication message")

    self._context_attr = auth.flags
    self._complete = True
Python
def _calculate_mic(
    self,
    negotiate: bytes,
    challenge: bytes,
    authenticate: bytes,
) -> bytes:
    """ Calculates the MIC value for the negotiated context. """
    return hmac_md5(self._session_key, negotiate + challenge + authenticate)
Python
def _compute_response(
    self,
    challenge: Challenge,
    credential: _NTLMCredential,
) -> typing.Tuple[bytes, bytes, bytes]:
    """ Compute the NT and LM responses and the key exchange key. """
    client_challenge = os.urandom(8)

    if self._nt_v2:
        target_info = challenge.target_info.copy() if challenge.target_info else TargetInfo()

        if AvId.timestamp in target_info:
            time = target_info[AvId.timestamp]
            self._mic_required = True
        else:
            time = FileTime.now()

        # The docs seem to indicate that a 0'd bindings hash means to ignore it but that does not seem to be the
        # case. Instead only add the bindings if they have been specified by the caller.
        if self.channel_bindings:
            target_info[AvId.channel_bindings] = md5(self.channel_bindings.pack())

        target_info[AvId.target_name] = self.spn or ""

        if self._mic_required:
            target_info[AvId.flags] = target_info.get(AvId.flags, AvFlags(0)) | AvFlags.mic

        ntv2_hash = ntowfv2(credential.username, credential.nt_hash, credential.domain)
        nt_challenge, lm_challenge, key_exchange_key = compute_response_v2(
            ntv2_hash, challenge.server_challenge, client_challenge, time, target_info)

        if self._mic_required:
            lm_challenge = b"\x00" * 24

        return nt_challenge, lm_challenge, key_exchange_key

    else:
        return compute_response_v1(challenge.flags, credential.nt_hash, credential.lm_hash,
                                   challenge.server_challenge, client_challenge, no_lm_response=not self._lm)
Python
def reset(self) -> None:
    """ Resets the cipher stream back to its original state. """
    arc4 = algorithms.ARC4(self._key)
    cipher = Cipher(arc4, mode=None, backend=default_backend())
    self._handle = cipher.encryptor()
def compute_response_v1(
        flags: int,
        nt_hash: bytes,
        lm_hash: bytes,
        server_challenge: bytes,
        client_challenge: bytes,
        no_lm_response: bool = True,
) -> typing.Tuple[bytes, bytes, bytes]:
    """Compute NT and LM Response for NTLMv1.

    Computes the NT and LM Response for NTLMv1 messages. The response is dependent on the flags that were
    negotiated between the client and server.

    The pseudo-code for this function as documented under `NTLM v1 Authentication`_ is::

        Define ComputeResponse(NegFlg, ResponseKeyNT, ResponseKeyLM, CHALLENGE_MESSAGE.ServerChallenge,
            ClientChallenge, Time, ServerName) As

        If (User is set to "" AND Passwd is set to "")
            -- Special case for anonymous authentication
            Set NtChallengeResponseLen to 0
            Set NtChallengeResponseMaxLen to 0
            Set NtChallengeResponseBufferOffset to 0

            Set LmChallengeResponse to Z(1)
        ElseIf (NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY flag is set in NegFlg)
            Set NtChallengeResponse to DESL(ResponseKeyNT,
                MD5(ConcatenationOf(CHALLENGE_MESSAGE.ServerChallenge, ClientChallenge))[0..7])

            Set LmChallengeResponse to ConcatenationOf{ClientChallenge, Z(16)}
        Else
            Set NtChallengeResponse to DESL(ResponseKeyNT, CHALLENGE_MESSAGE.ServerChallenge)

            If (NoLMResponseNTLMv1 is TRUE)
                Set LmChallengeResponse to NtChallengeResponse
            Else
                Set LmChallengeResponse to DESL(ResponseKeyLM, CHALLENGE_MESSAGE.ServerChallenge)
            EndIf
        EndIf

        Set SessionBaseKey to MD4(NTOWF)

    Args:
        flags: The negotiated flags between the initiator and acceptor.
        nt_hash: The response key computed by :meth:`ntowfv1`.
        lm_hash: The response key computed by :meth:`lmowfv1`.
        server_challenge: The 8 byte nonce generated by the acceptor.
        client_challenge: The 8 byte nonce generated by the initiator.
        no_lm_response: When extended session security was not negotiated, reuse the NTChallengeResponse as the
            LmChallengeResponse (True) or compute a separate LM response (False).

    Returns:
        Tuple[bytes, bytes, bytes]: Returns the NTChallengeResponse, LMChallengeResponse and KeyExchangeKey.

    .. _NTLM v1 Authentication:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/464551a8-9fc4-428e-b3d3-bc5bfb2e73a5
    """
    if flags & NegotiateFlags.extended_session_security:
        nt_response = desl(nt_hash, md5(server_challenge + client_challenge[:8]))
        lm_response = client_challenge + (b"\x00" * 16)

    else:
        nt_response = lm_response = desl(nt_hash, server_challenge)

        if not no_lm_response:
            lm_response = desl(lm_hash, server_challenge)

    session_base_key = md4(nt_hash)
    key_exchange_key = kxkey(flags, session_base_key, lm_hash, lm_response, server_challenge)

    return nt_response, lm_response, key_exchange_key
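# Usage sketch (not from the original source): a minimal call assuming the ntowfv1/lmowfv1
# helpers defined later in this collection and hypothetical 8-byte challenges.
import os

server_challenge = os.urandom(8)  # in reality these come from the Negotiate/Challenge exchange
client_challenge = os.urandom(8)
flags = NegotiateFlags.extended_session_security  # assumed negotiated flags

nt_resp, lm_resp, kek = compute_response_v1(
    flags, ntowfv1("Password01"), lmowfv1("Password01"),
    server_challenge, client_challenge)
assert len(nt_resp) == 24  # DESL always produces 24 bytes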
def compute_response_v2(
        nt_hash: bytes,
        server_challenge: bytes,
        client_challenge: bytes,
        time: FileTime,
        av_pairs: TargetInfo,
) -> typing.Tuple[bytes, bytes, bytes]:
    """Compute NT and LM Response for NTLMv2.

    Computes the NT and LM Response for NTLMv2 messages. The response is dependent on the flags that were
    negotiated between the client and server.

    The pseudo-code for this function as documented under `NTLM v2 Authentication`_ is::

        Define ComputeResponse(NegFlg, ResponseKeyNT, ResponseKeyLM, CHALLENGE_MESSAGE.ServerChallenge,
            ClientChallenge, Time, ServerName) As

        If (User is set to "" && Passwd is set to "")
            -- Special case for anonymous authentication
            Set NtChallengeResponseLen to 0
            Set NtChallengeResponseMaxLen to 0
            Set NtChallengeResponseBufferOffset to 0

            Set LmChallengeResponse to Z(1)
        Else
            Set temp to ConcatenationOf(Responserversion, HiResponserversion, Z(6), Time, ClientChallenge, Z(4),
                ServerName, Z(4))

            Set NTProofStr to HMAC_MD5(ResponseKeyNT, ConcatenationOf(CHALLENGE_MESSAGE.ServerChallenge, temp))

            Set NtChallengeResponse to ConcatenationOf(NTProofStr, temp)

            Set LmChallengeResponse to ConcatenationOf(
                HMAC_MD5(ResponseKeyLM, ConcatenationOf(CHALLENGE_MESSAGE.ServerChallenge, ClientChallenge)),
                ClientChallenge)
        EndIf

        Set SessionBaseKey to HMAC_MD5(ResponseKeyNT, NTProofStr)

    Args:
        nt_hash: The response key computed by :meth:`ntowfv2`. The `ResponseKeyLM` is the same value so we only
            pass in the 1 key.
        server_challenge: The 8 byte nonce generated by the acceptor.
        client_challenge: The 8 byte nonce generated by the initiator.
        time: The FileTime to place in the NT response.
        av_pairs: The TargetInfo AvPairs fields that are placed in the Authenticate message.

    Returns:
        Tuple[bytes, bytes, bytes]: Returns the NTChallengeResponse, LMChallengeResponse and KeyExchangeKey.

    .. _NTLM v2 Authentication:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/5e550938-91d4-459f-b67d-75d70009e3f3
    """
    temp = NTClientChallengeV2(time_stamp=time, client_challenge=client_challenge, av_pairs=av_pairs)
    b_temp = temp.pack() + b"\x00\x00\x00\x00"

    nt_proof_str = hmac_md5(nt_hash, server_challenge + b_temp)
    nt_response = nt_proof_str + b_temp

    lm_response = hmac_md5(nt_hash, server_challenge + client_challenge) + client_challenge

    session_base_key = hmac_md5(nt_hash, nt_proof_str)

    return nt_response, lm_response, session_base_key
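# Usage sketch (not from the original source): hypothetical values, using the ntowfv2,
# FileTime and TargetInfo names assumed from this module.
import os

server_challenge = os.urandom(8)
client_challenge = os.urandom(8)
nt_hash = ntowfv2("user", ntowfv1("Password01"), "DOMAIN")

nt_resp, lm_resp, sbk = compute_response_v2(
    nt_hash, server_challenge, client_challenge, FileTime.now(), TargetInfo())
# The first 16 bytes of the NT response are the NTProofStr; for v2 the session
# base key (an HMAC over the NTProofStr) doubles as the key exchange key.
assert sbk == hmac_md5(nt_hash, nt_resp[:16])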
def kxkey(
        flags: int,
        session_base_key: bytes,
        lmowf: bytes,
        lm_response: bytes,
        server_challenge: bytes,
) -> bytes:
    """NTLM KXKEY function.

    The MS-NLMP `KXKEY`_ function used to derive the key exchange key for a security context. This is only for
    NTLMv1 contexts as NTLMv2 just re-uses the session base key.

    Args:
        flags: The negotiate flags in the Challenge msg.
        session_base_key: The session base key from :meth:`compute_response_v1`.
        lmowf: The LM hash from :meth:`lmowfv1`.
        lm_response: The lm response from :meth:`compute_response_v1`.
        server_challenge: The server challenge in the Challenge msg.

    Returns:
        bytes: The derived key exchange key.

    .. _KXKEY:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/d86303b5-b29e-4fb9-b119-77579c761370
    """
    if flags & NegotiateFlags.extended_session_security:
        return hmac_md5(session_base_key, server_challenge + lm_response[:8])

    elif flags & NegotiateFlags.lm_key:
        b_data = lm_response[:8]
        return des(lmowf[:7], b_data) + des(lmowf[7:8] + b"\xBD\xBD\xBD\xBD\xBD\xBD", b_data)

    elif flags & NegotiateFlags.non_nt_session_key:
        return lmowf[:8] + b"\x00" * 8

    else:
        return session_base_key
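# Worked sketch (not from the original source, hypothetical byte values): with extended
# session security the key exchange key is an HMAC over the server challenge plus the
# first 8 bytes of the LM response.
sbk = b"\x01" * 16
kek = kxkey(NegotiateFlags.extended_session_security, sbk, b"\x00" * 16, b"\x02" * 24, b"\x03" * 8)
assert kek == hmac_md5(sbk, b"\x03" * 8 + b"\x02" * 8)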
def lmowfv1(password: str) -> bytes:
    """NTLMv1 LMOWFv1 function

    The Lan Manager v1 one way function as documented under `NTLM v1 Authentication`_.

    The pseudo-code for this function is::

        Define LMOWFv1(Passwd, User, UserDom) as
            ConcatenationOf(
                DES(UpperCase(Passwd)[0..6], "KGS!@#$%"),
                DES(UpperCase(Passwd)[7..13], "KGS!@#$%"),
            );

    Args:
        password: The password for the user.

    Returns:
        bytes: The LMv1 one way hash of the user's password.

    .. _NTLM v1 Authentication:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/464551a8-9fc4-428e-b3d3-bc5bfb2e73a5
    """
    if is_ntlm_hash(password):
        return base64.b16decode(password.split(':')[0].upper())

    # Fix the password to upper case and pad the length to exactly 14 bytes. While LM-only authentication will
    # fail if the password exceeds 14 bytes, it is typically used in conjunction with the NTv1 hash, which has no
    # such restriction.
    b_password = password.upper().encode('utf-8').ljust(14, b"\x00")[:14]

    b_hash = io.BytesIO()
    for start, end in [(0, 7), (7, 14)]:
        b_hash.write(des(b_password[start:end], b'KGS!@#$%'))

    return b_hash.getvalue()
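# Property sketch (not from the original source): the LM hash is case-insensitive because
# the password is upper-cased first, and two DES blocks always yield 16 bytes.
assert lmowfv1("Password") == lmowfv1("PASSWORD")
assert len(lmowfv1("Password")) == 16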
def ntowfv1(password: str) -> bytes:
    """NTLMv1 NTOWFv1 function

    The NT v1 one way function as documented under `NTLM v1 Authentication`_.

    The pseudo-code for this function is::

        Define NTOWFv1(Passwd, User, UserDom) as MD4(UNICODE(Passwd))

    Args:
        password: The password for the user.

    Returns:
        bytes: The NTv1 one way hash of the user's password.

    .. _NTLM v1 Authentication:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/464551a8-9fc4-428e-b3d3-bc5bfb2e73a5
    """
    if is_ntlm_hash(password):
        return base64.b16decode(password.split(':')[1].upper())

    return md4(password.encode('utf-16-le'))
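# Usage sketch (not from the original source): a password given in "LMHASH:NTHASH" form is
# decoded rather than hashed (assuming is_ntlm_hash recognises that format). The value below
# is the well-known NT hash of the empty password.
b_hash = ntowfv1("aad3b435b51404eeaad3b435b51404ee:31d6cfe0d16ae931b73c59d7e0c089c0")
assert b_hash == md4("".encode('utf-16-le'))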
def rc4k(k: bytes, d: bytes) -> bytes:
    """RC4 encryption with an explicit key.

    Indicates the encryption of data item `d` with the key `k` using the `RC4K`_ algorithm.

    Args:
        k: The key to use for the RC4 cipher.
        d: The data to encrypt.

    Returns:
        bytes: The RC4 encrypted bytes.

    .. _RC4K:
        https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-nlmp/26c42637-9549-46ae-be2e-90f6f1360193
    """
    return rc4init(k).update(d)
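# Property sketch (not from the original source, hypothetical key/data): RC4 is symmetric
# and each rc4k call re-initialises the keystream, so applying it twice round-trips.
key, data = b"\x01" * 16, b"secret"
assert rc4k(key, rc4k(key, data)) == data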
def auth_url_encode(byte_data):
    """
    URL-safe base64 encoding that substitutes the + and / characters and strips the = padding.

    :param byte_data: bytes to encode
    :return: URL-safe base64 string without padding
    """
    return base64.urlsafe_b64encode(byte_data).decode('utf-8').replace('=', '')
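# Worked example (not from the original source): b'\xff' encodes to '_w==' under urlsafe
# base64 ('/' becomes '_'), and the padding is then stripped.
import base64

assert base64.urlsafe_b64encode(b'\xff') == b'_w=='
assert auth_url_encode(b'\xff') == '_w'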
def retrieve(self):
    """Retrieve results data for the United Kingdom's 2015 General Election."""
    url = 'https://www.electoralcommission.org.uk/sites/default/files/2019-08/'
    filename = '2015-UK-general-election-data-results%20-%20CSV.zip'
    target = self.directory / 'raw'
    os.makedirs(target, exist_ok=True)  # create directory if it doesn't exist

    print(f'Downloading into {target.resolve()}')
    with open(target / filename, 'wb') as f:
        response = requests.get(url + filename)
        f.write(response.content)

    print(f'Extracting into {target.resolve()}')
    with zipfile.ZipFile(target / filename, 'r') as f:
        f.extractall(target)

    print('Cleaning up')
    os.remove(target / filename)
def process(self):
    """Process results data for the United Kingdom's 2015 General Election."""
    processed_results_filename = 'general_election-uk-2015-results.csv'
    processed_results_full_filename = 'general_election-uk-2015-results-full.csv'
    processed_results_location = self.directory / 'processed' / processed_results_filename
    processed_results_full_location = self.directory / 'processed' / processed_results_full_filename
    os.makedirs(self.directory / 'processed', exist_ok=True)  # create directory if it doesn't exist

    # TODO: Refactor these sections into functions to make it easier to read.

    ##########################
    # GENERAL ELECTION RESULTS
    ##########################
    print('Read and clean RESULTS FOR ANALYSIS.csv')

    # Import general election results
    results = pd.read_csv(self.directory / 'raw' / 'RESULTS FOR ANALYSIS.csv')

    # Remove 'Unnamed: 9' column
    del results['Unnamed: 9']

    # Fix bad column name (' Total number of valid votes counted ' to 'Valid Votes')
    results.columns = list(results.columns[:8]) + ['Valid Votes'] + list(results.columns[9:])

    # Remove rows where Constituency Name is blank
    blank_rows = results['Constituency Name'].isnull()
    results = results[-blank_rows].copy()

    # Remove commas & coerce Electorate and Total number of valid votes counted
    for col in ['Electorate', 'Valid Votes']:
        results[col] = results[col].apply(lambda x: float(x.replace(',', '')))

    # Set NA vals to zero
    for col in results.columns[9:]:
        results[col] = results[col].fillna(0)

    # Checks
    assert results.shape == (650, 146)

    ###################
    # CONSTITUENCY DATA
    ###################
    print('Read and clean CONSTITUENCY.csv')

    # Import constituency data
    constituency = pd.read_csv(self.directory / 'raw' / 'CONSTITUENCY.csv', encoding='latin1')

    # Remove rows where Constituency Name is blank
    blank_rows = constituency['Constituency Name'].isnull()
    constituency = constituency[-blank_rows].copy()

    # Remove 'Unnamed: 6' column
    del constituency['Unnamed: 6']

    # Checks
    assert constituency.shape == (650, 10)

    #######
    # MERGE
    #######
    print('Merging in constituency identifiers')

    # Pre-merge checks
    match_col = 'Constituency ID'
    assert len(set(constituency[match_col]).intersection(set(results[match_col]))) == 650
    assert len(set(constituency[match_col]).difference(set(results[match_col]))) == 0
    assert len(set(results[match_col]).difference(set(constituency[match_col]))) == 0

    # Merge on Constituency ID
    results = pd.merge(
        left=results,
        right=constituency[['Constituency ID', 'Region ID', 'County']],
        how='left',
        on='Constituency ID',
    )
    column_order = [
        'Press Association ID Number',
        'Constituency ID',
        'Constituency Name',
        'Constituency Type',
        'County',
        'Region ID',
        'Region',
        'Country',
        'Election Year',
        'Electorate',
        'Valid Votes',
    ] + list(results.columns[9:146])
    results = results[column_order].copy()

    ############################
    # ADDITIONAL TRANSFORMATIONS
    ############################

    # Some MPs are members of both the Labour Party and the Co-operative Party, which plays havoc with modelling.
    # We will therefore consider them all members of the Labour party.
    results['Lab'] = results['Lab'] + results['Lab Co-op']
    del results['Lab Co-op']

    # Save this for convenience
    results_full = results.copy()

    # Filter to metadata cols + parties of interest
    parties_lookup = {
        'C': 'con',
        'Lab': 'lab',
        'LD': 'ld',
        'UKIP': 'ukip',
        'Green': 'grn',
        'SNP': 'snp',
        'PC': 'pc',
        'Other': 'other',
    }
    other_parties = list(set(results.columns) - set(results.columns[:11]) - set(parties_lookup.keys()))
    results['Other'] = results.loc[:, other_parties].sum(axis=1)
    results = results.loc[:, list(results.columns[:11]) + list(parties_lookup.keys())]

    # Rename parties
    results.columns = [parties_lookup[x] if x in parties_lookup else x for x in results.columns]

    # Calculate constituency level vote share
    for party in parties_lookup.values():
        results[party + '_pc'] = results[party] / results['Valid Votes']

    # Create PANO -> geo lookup
    geo_lookup = {x[1][0]: x[1][1] for x in results[['Press Association ID Number', 'Country']].iterrows()}
    assert geo_lookup[14.0] == 'Northern Ireland'

    # Add London boroughs
    london_panos = results[results.County == 'London']['Press Association ID Number'].values
    for pano in london_panos:
        geo_lookup[pano] = 'London'
    assert geo_lookup[237.0] == 'London'

    # Rename other England
    for k in geo_lookup:
        if geo_lookup[k] == 'England':
            geo_lookup[k] = 'England_not_london'
        elif geo_lookup[k] == 'Northern Ireland':
            geo_lookup[k] = 'NI'
    results['geo'] = results['Press Association ID Number'].map(geo_lookup)

    # Calculate geo-level vote share
    # TODO: Do we use this?
    results_by_geo = results.loc[:, ['Valid Votes', 'geo'] + list(parties_lookup.values())].groupby('geo').sum()
    results_by_geo_voteshare = results_by_geo.div(results_by_geo['Valid Votes'], axis=0)
    del results_by_geo_voteshare['Valid Votes']

    # Who won?
    def winner(row):
        all_parties = set(results_full.columns[11:]) - set(['Other'])
        winning_party = row[all_parties].sort_values(ascending=False).index[0]
        if winning_party in parties_lookup.keys():
            winning_party = parties_lookup[winning_party]
        elif winning_party == 'Speaker':
            winning_party = 'other'
        return winning_party

    results['winner'] = results_full.apply(winner, axis=1)

    # Check Conservatives won 330 seats in 2015.
    assert results.groupby('winner').count()['Constituency Name'].sort_values(ascending=False)[0] == 330

    # EXPORT
    print(f'Exporting dataset to {processed_results_location.resolve()}')
    results.to_csv(processed_results_location, index=False)
    results_full.to_csv(processed_results_full_location, index=False)
def retrieve(self):
    """Check whether the required datasets already exist in the directory tree; otherwise retrieve them by
    executing the necessary code from within this repo."""
    destination_target = self.directory / 'raw'
    os.makedirs(destination_target, exist_ok=True)  # create directory if it doesn't exist
    data_directory = (self.directory / '..' / '..' / '..' / '..').resolve()  # sensible guess
    data = [
        # (identifier, type, filename)
        ('general-election/UK/2010/results', 'processed', 'general_election-uk-2010-results.csv'),
        ('general-election/UK/2010/results', 'processed', 'general_election-uk-2010-results-full.csv'),
        ('general-election/UK/2015/results', 'processed', 'general_election-uk-2015-results.csv'),
        ('general-election/UK/2015/results', 'processed', 'general_election-uk-2015-results-full.csv'),
        ('general-election/UK/polls', 'processed', 'general_election-uk-polls.csv'),
        ('general-election/UK/polls', 'processed', 'general_election-london-polls.csv'),
        ('general-election/UK/polls', 'processed', 'general_election-scotland-polls.csv'),
        ('general-election/UK/polls', 'processed', 'general_election-wales-polls.csv'),
        ('general-election/UK/polls', 'processed', 'general_election-ni-polls.csv'),
    ]
    for identifier, data_type, filename in data:
        source_target = f'{identifier}/{data_type}/{filename}'
        if not (data_directory / source_target).is_file():
            print(f'Dataset {identifier} not found - retrieving now')
            maven.get(identifier, data_directory=data_directory)
        shutil.copyfile(src=data_directory / source_target, dst=destination_target / filename)
def retrieve(self):
    """Retrieve results data for the United Kingdom's 2010 General Election."""
    target = self.directory / 'raw'
    os.makedirs(target, exist_ok=True)  # create directory if it doesn't exist
    for url, filename in self.sources:
        response = requests.get(url + filename)
        if response.status_code == 200:
            with open(target / filename, 'wb') as file:
                file.write(response.content)
            print(f'Successfully downloaded raw data into {target.resolve()}')
            return
        warnings.warn(f'Received status {response.status_code} when trying to retrieve {url}{filename}')
    raise RuntimeError('Unable to download UK 2010 General Election results data.')
def process(self):
    """Process results data for the United Kingdom's 2010 General Election."""
    filename = self.sources[0][1]
    processed_results_filename = 'general_election-uk-2010-results.csv'
    processed_results_full_filename = 'general_election-uk-2010-results-full.csv'
    processed_results_location = self.directory / 'processed' / processed_results_filename
    processed_results_full_location = self.directory / 'processed' / processed_results_full_filename
    os.makedirs(self.directory / 'processed', exist_ok=True)  # create directory if it doesn't exist

    ##########################
    # GENERAL ELECTION RESULTS
    ##########################
    print(f'Read and clean {filename}')

    # Import general election results
    results = pd.read_excel(self.directory / 'raw' / filename, sheet_name='Party vote share')

    # Remove rows where Constituency Name is blank (one row only - the last row of the sheet)
    blank_rows = results['Constituency Name'].isnull()
    results = results[-blank_rows].copy()

    # Set NA vals to zero (NA => zero votes for that party within the constituency)
    for party_vote_result in results.columns[6:]:  # first 6 cols are not party votes
        results[party_vote_result] = results[party_vote_result].fillna(0)
    assert results.shape == (650, 144)

    # Save this for convenience
    results_full = results.copy()

    ############################
    # ADDITIONAL TRANSFORMATIONS
    ############################

    # Filter to metadata cols + parties of interest (from ~139 parties to ~11).
    parties_lookup = {
        'Con': 'con',
        'Lab': 'lab',
        'LD': 'ld',
        'UKIP': 'ukip',
        'Grn': 'grn',
        # Northern Ireland
        'DUP': 'dup',
        'SF': 'sf',
        'SDLP': 'sdlp',
        # Scotland
        'SNP': 'snp',
        # Wales
        'PC': 'pc',
        # Other
        'Other': 'other',
    }
    other_parties = list(set(results.columns) - set(results.columns[:6]) - set(parties_lookup.keys()))
    results['Other'] = results.loc[:, other_parties].sum(axis=1)
    results = results.loc[:, list(results.columns[:6]) + list(parties_lookup.keys())]

    # Rename parties (if in parties_lookup, else ignore)
    # TODO: Cleaner with .rename()
    results.columns = [parties_lookup[x] if x in parties_lookup else x for x in results.columns]

    # Calculate constituency level vote share % (pc = percent)
    for party in parties_lookup.values():
        results[party + '_pc'] = results[party] / results['Votes']

    # Create PANO -> geo lookup
    results['geo'] = results.Region.map(
        {
            'East Midlands': 'England_not_london',
            'Eastern': 'England_not_london',
            'London': 'London',
            'North East': 'England_not_london',
            'North West': 'England_not_london',
            'Northern Ireland': 'NI',
            'Scotland': 'Scotland',
            'South East': 'England_not_london',
            'South West': 'England_not_london',
            'Wales': 'Wales',
            'West Midlands': 'England_not_london',
            'Yorkshire and the Humber': 'England_not_london',
        }
    )
    assert results.loc[237.0, 'geo'] == 'London'

    # Who won?
    def winner(row):
        """Return winning party for given row of constituency outcomes."""
        # Need to remove Other as this represents multiple parties so (usually) not the actual FPTP winner.
        all_parties = set(results_full.columns[6:]) - set(['Other'])
        winning_party = row[all_parties].sort_values(ascending=False).index[0]
        if winning_party in parties_lookup.keys():
            winning_party = parties_lookup[winning_party]
        elif winning_party == 'Speaker':
            winning_party = 'other'
        return winning_party

    results['winner'] = results_full.apply(winner, axis=1)
    assert results.winner.value_counts()[0] == 306  # Check Conservatives won 306 seats in 2010

    # EXPORT
    print(f'Exporting dataset to {processed_results_location.resolve()}')
    results.to_csv(processed_results_location, index=False)
    print(f'Exporting dataset to {processed_results_full_location.resolve()}')
    results_full.to_csv(processed_results_full_location, index=False)
def winner(row):
    """Return winning party for given row of constituency outcomes."""
    # Need to remove Other as this represents multiple parties so (usually) not the actual FPTP winner.
    all_parties = set(results_full.columns[6:]) - set(['Other'])
    winning_party = row[all_parties].sort_values(ascending=False).index[0]
    if winning_party in parties_lookup.keys():
        winning_party = parties_lookup[winning_party]
    elif winning_party == 'Speaker':
        winning_party = 'other'
    return winning_party
def _get_validated_task(self, parsed_data, key):
    """ Validate parsed data with labeling config and task structure """
    if not isinstance(parsed_data, dict):
        raise TaskValidationError('Error at ' + str(key) + ':\n'
                                  'Cloud storage supports one task (one dict object) per JSON file only. ')
    return parsed_data
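# Usage sketch (not from the original source): `storage` is an assumed instance of the
# class that defines this method.
task = storage._get_validated_task({'data': {'text': 'hello'}}, key='task1.json')

# A JSON file containing a list (i.e. multiple tasks) is rejected:
# storage._get_validated_task([{'data': {}}], key='tasks.json')  -> raises TaskValidationError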
def is_gce_instance(cls):
    """Check if it's GCE instance via DNS lookup to metadata server"""
    try:
        socket.getaddrinfo('metadata.google.internal', 80)
    except socket.gaierror:
        return False
    return True
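# Usage sketch (not from the original source): `GCSStorage` is an assumed name for the
# owning class; the check succeeds only where the GCE metadata hostname resolves.
if GCSStorage.is_gce_instance():
    print('Running on Google Compute Engine - metadata server is reachable')
else:
    print('Not a GCE instance')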
def generate_download_signed_url_v4(self, bucket_name, blob_name):
    """Generates a v4 signed URL for downloading a blob.

    Note that this method requires a service account key file. You can not use
    this if you are using Application Default Credentials from Google Compute
    Engine or from the Google Cloud SDK.
    """
    # bucket_name = 'your-bucket-name'
    # blob_name = 'your-object-name'
    client = self.get_client()
    bucket = self.get_bucket(client, bucket_name)
    blob = bucket.blob(blob_name)

    url = blob.generate_signed_url(
        version="v4",
        # This URL is valid for `presign_ttl` minutes
        expiration=timedelta(minutes=self.presign_ttl),
        # Allow GET requests using this URL.
        method="GET",
    )

    logger.debug('Generated GCS signed url: ' + url)
    return url
def delete_tasks_annotations(project, queryset, **kwargs):
    """ Delete all annotations by tasks ids

    :param project: project instance
    :param queryset: filtered tasks db queryset
    """
    task_ids = queryset.values_list('id', flat=True)
    annotations = Annotation.objects.filter(task__id__in=task_ids)
    count = annotations.count()
    annotations.delete()
    return {'processed_items': count, 'detail': 'Deleted ' + str(count) + ' annotations'}
def delete_tasks_predictions(project, queryset, **kwargs):
    """ Delete all predictions by tasks ids

    :param project: project instance
    :param queryset: filtered tasks db queryset
    """
    task_ids = queryset.values_list('id', flat=True)
    predictions = Prediction.objects.filter(task__id__in=task_ids)
    count = predictions.count()
    predictions.delete()
    return {'processed_items': count, 'detail': 'Deleted ' + str(count) + ' predictions'}
def perform_action(action_id, project, queryset, **kwargs):
    """ Perform action using entry point from actions """
    if action_id not in settings.DATA_MANAGER_ACTIONS:
        raise DataManagerException("Can't find '" + action_id + "' in registered actions")

    try:
        result = settings.DATA_MANAGER_ACTIONS[action_id]['entry_point'](project, queryset, **kwargs)
    except Exception as e:
        text = 'Error while performing action: ' + action_id + '\n' + tb.format_exc()
        logger.error(text, extra={'sentry_skip': True})
        raise e

    return result
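# Usage sketch (not from the original source): registering an action in the registry that
# perform_action reads (only the 'entry_point' key is relied on above; any other keys in
# the registry dict are assumptions), then invoking it.
settings.DATA_MANAGER_ACTIONS['delete_tasks_annotations'] = {
    'entry_point': delete_tasks_annotations,
}
result = perform_action('delete_tasks_annotations', project, queryset, request=request)
# => {'processed_items': <count>, 'detail': 'Deleted <count> annotations'}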
def reset(self, _request):
    """
    delete:
    Reset project views

    Reset all views for a specific project.
    """
    queryset = self.filter_queryset(self.get_queryset())
    queryset.all().delete()
    return Response(status=204)
def tasks(self, request, pk=None):
    """
    get:
    Get task list for view

    Retrieve a list of tasks with pagination for a specific view using filters and ordering.
    """
    view = self.get_object()
    queryset = self.get_task_queryset(request, view)
    context = {'proxy': bool_from_request(request.GET, 'proxy', True), 'resolve_uri': True, 'request': request}
    project = view.project

    # paginated tasks
    self.pagination_class = TaskPagination
    page = self.paginate_queryset(queryset)
    if page is not None:
        # retrieve ML predictions if tasks don't have them
        if project.evaluate_predictions_automatically:
            ids = [task.id for task in page]  # page is a list already
            tasks_for_predictions = Task.objects.filter(id__in=ids, predictions__isnull=True)
            evaluate_predictions(tasks_for_predictions)

        serializer = self.task_serializer_class(page, many=True, context=context)
        return self.get_paginated_response(serializer.data)

    # all tasks
    if project.evaluate_predictions_automatically:
        evaluate_predictions(queryset.filter(predictions__isnull=True))
    serializer = self.task_serializer_class(queryset, many=True, context=context)
    return Response(serializer.data)
def post(self, request):
    """
    post:
    Post actions

    Perform an action with the selected items from a specific view.
    """
    pk = int_from_request(request.GET, "project", None)
    project = get_object_with_check_and_log(request, Project, pk=pk)
    self.check_object_permissions(request, project)

    queryset = get_prepared_queryset(request, project)

    # no selected items on tab
    if not queryset.exists():
        response = {'detail': 'No selected items for specified view'}
        return Response(response, status=404)

    # wrong action id
    action_id = request.GET.get('id', None)
    if action_id is None:
        response = {'detail': 'No action id "' + str(action_id) + '", use ?id=<action-id>'}
        return Response(response, status=422)

    # perform action and return the result dict
    kwargs = {'request': request}  # pass advanced params to actions
    result = perform_action(action_id, project, queryset, **kwargs)
    code = result.pop('response_code', 200)

    return Response(result, status=code)
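# Usage sketch (not from the original source): exercising the view with DRF's test client.
# The URL path and `user` are assumptions; only the `project` and `id` query parameters
# come from the code above.
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=user)  # `user` is an assumed existing account
response = client.post('/api/dm/actions?project=1&id=delete_tasks_annotations')
print(response.status_code, response.json())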
def windows_dll_fix():
    """ Copy sqlite.dll to the current directory and use it """
    auto_agree = any([a == '--agree-fix-sqlite' for a in sys.argv])
    force_fix = any([a == '--force-fix-sqlite' for a in sys.argv])

    # nothing to do on non-Windows platforms
    if sys.platform != 'win32':
        return
    print(f'Current platform is {sys.platform}, apply sqlite fix')

    # set env
    import ctypes
    path_to_dll = os.path.abspath('.')
    os.environ['PATH'] = path_to_dll + os.pathsep + os.environ['PATH']
    try:
        ctypes.CDLL(os.path.join(path_to_dll, 'sqlite3.dll'))
        print('Add current directory to PATH for DLL search: ' + path_to_dll)
    except OSError:
        print("Can't load sqlite3.dll from current directory")

    # check sqlite version
    import sqlite3
    v = sqlite3.sqlite_version_info
    if v[0] >= 3 and v[1] >= 35 and not force_fix:
        return

    # check python version and warn
    print(f'python version: {sys.version_info.major}.{sys.version_info.minor}, '
          f'sqlite version: {v[0]}.{v[1]}.{v[2]}')
    if sys.version_info.major == 3 and sys.version_info.minor in [6, 7, 8]:
        print('\n' + colorama.Fore.LIGHTYELLOW_EX + 'You are on ' + colorama.Fore.LIGHTRED_EX +
              f'Windows Python {sys.version_info.major}.{sys.version_info.minor}.\n' +
              colorama.Fore.LIGHTYELLOW_EX +
              f"This Python version uses SQLite "
              f"{colorama.Fore.LIGHTRED_EX}{v[0]}.{v[1]}.{v[2]} " +
              colorama.Fore.LIGHTYELLOW_EX + f"which does not support JSON Field.\n" +
              'Read more about this issue: ' + colorama.Fore.LIGHTWHITE_EX +
              'https://code.djangoproject.com/wiki/JSON1Extension [Windows section]\n')

        agree = 'n'
        if not auto_agree:
            print(colorama.Fore.WHITE + 'Label Studio can try to resolve this issue by downloading the correct '
                  'sqlite.dll from https://sqlite.org in the current directory, '
                  'do you want to proceed? \n [y/n] > ', end='')
            agree = input()

        if agree == 'y' or auto_agree:
            start_fix()

        print(colorama.Fore.WHITE)
def build_pipeline_options(args):
    """
    Apache Beam Pipelines must receive a set of options for setting how the engine should run.

    Args
    ----
      args: argparse.Namespace

    Returns
    -------
      pipeline_options: defines how to run beam job.
    """
    options = {}
    options['runner'] = args.runner
    if args.temp_location:
        options['temp_location'] = args.temp_location
    if args.project:
        options['project'] = args.project
    if args.staging_location:
        options['staging_location'] = args.staging_location
    if args.job_name:
        options['job_name'] = args.job_name
    if args.max_num_workers:
        options['max_num_workers'] = args.max_num_workers
    if args.machine_type:
        options['machine_type'] = args.machine_type
    options.update({'save_main_session': True})
    options.update({'setup_file': './setup.py'})
    pipeline_options = PipelineOptions(**options)
    return pipeline_options
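# Usage sketch (not from the original source): a hypothetical argparse.Namespace - only
# `runner` must be set, the remaining attributes may be None and are then skipped.
import argparse

args = argparse.Namespace(
    runner='DirectRunner', temp_location=None, project=None,
    staging_location=None, job_name=None, max_num_workers=None,
    machine_type=None)
options = build_pipeline_options(args)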
def aggregate_customers_sessions(sessions):
    """
    Receives as input what products customers interacted with and returns
    their final aggregation.

    Args
    ----
    sessions: list of list of dicts.
        List where each element is a list of dicts of type:
        [{'action': '', 'sku': ''}]

    Returns
    -------
    results: list of dicts
        Each resulting dict is aggregated on the sku and action level
        (repeated entries are filtered out).
    """
    result = []
    for session in sessions:
        for hit in session:
            result.append(hit)
    # dicts are not hashable, so deduplicate via their (key, value) tuples
    return [dict(t) for t in {tuple(d.items()) for d in result}]
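A quick illustration of the deduplication above; the skus and actions are made up.

sessions = [
    [{'action': 'Browsed', 'sku': 'A'}, {'action': 'AddedToBasket', 'sku': 'A'}],
    [{'action': 'Browsed', 'sku': 'A'}, {'action': 'Browsed', 'sku': 'B'}],
]
# order is not guaranteed because a set is used for deduplication
print(aggregate_customers_sessions(sessions))
# -> e.g. [{'action': 'Browsed', 'sku': 'A'},
#          {'action': 'AddedToBasket', 'sku': 'A'},
#          {'action': 'Browsed', 'sku': 'B'}]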
Python
def read_input_data(args, pipeline, flag): """ Reads train and test pipelines. args: input args. pipeline: input pipeline where all transformations will take place. flag: either train or test. """ if args.input_sql: train_query = build_bq_query(args.input_sql, args.project, args.train_init_date, args.train_end_date) test_query = build_bq_query(args.input_sql, args.project, args.test_init_date, args.test_end_date) data = ( pipeline | '{} read'.format(flag) >> beam.io.Read(beam.io.BigQuerySource( query=train_query if flag == 'train' else test_query, use_standard_sql=True) ) ) else: data = ( pipeline | '{} read'.format(flag) >> beam.io.ReadFromText( args.input_train_data if flag == 'train' else args.input_test_data ) | '{} to json'.format(flag) >> beam.Map(lambda x: ast.literal_eval(x)) ) data = ( data | '{} filter empty hits'.format(flag) >> beam.Filter(lambda x: x['hits']) | '{} prepare customer grouping'.format(flag) >> beam.Map(lambda x: ( x['customer_id'], [{'action': e['action'], 'sku': e['productSku']} for e in x['hits'] if e['action'] in ['Browsed', 'AddedToBasket']]) ) | '{} group customers'.format(flag) >> beam.GroupByKey() | '{} aggregate customers sessions'.format(flag) >> beam.Map(lambda x: ( x[0], aggregate_customers_sessions(x[1])) ) | '{} flatten'.format(flag) >> beam.ParDo(FlattenInteractionsFn()) ) return data
Python
def write_tfrecords(data, schema, filename, name):
    """
    Converts input pcollection into a file of tfrecords following schema.

    Args
    ----
    data: pcollection.
    schema: dataset_schema from tensorflow transform.
    filename: str, path prefix for the output tfrecord files.
    name: str to identify operations.
    """
    _ = (
        data
        | '{} tfrecords write'.format(name) >> beam.io.tfrecordio.WriteToTFRecord(
            filename,
            coder=example_proto_coder.ExampleProtoCoder(dataset_schema.Schema(schema)))
    )
Python
def aggregate_transformed_data(transformed_data, flag):
    """
    One of the final steps in our pipeline transformations, where data that
    has been transformed (in our case, skus went from string names to
    integer indices) is aggregated on the user level.

    Args
    ----
    transformed_data: pcollection.
    flag: identifies train or test.

    Returns
    -------
    transformed_data aggregated on user level.
    """
    if flag == 'test':
        transformed_data = (
            transformed_data
            | 'test filter out invalid skus' >> beam.Filter(lambda x: x['sku'] != -1)
        )

    transformed_agg_data = (
        transformed_data
        | '{} prepare grouping'.format(flag) >> beam.Map(lambda x: (
            x['customer_id'], {'sku': x['sku'], 'action': x['action']})
        )
        | '{} transformed agg group'.format(flag) >> beam.GroupByKey()
        | '{} final results'.format(flag) >> beam.Map(lambda x: build_final_results(x))
    )
    return transformed_agg_data
Python
def run_tft_pipeline(args):
    """
    This is where all the data we have available in our database
    is processed and transformed into Tensorflow tfrecords for later
    training and testing.

    The code runs in a distributed manner automatically in the engine
    chosen by the `runner` argument in input.
    """
    pipeline_options = build_pipeline_options(args)

    temp_tft_folder = (
        tempfile.mkdtemp(dir='/tmp/') if not args.tft_temp
        else args.tft_temp
    )
    tft_transform_folder = (
        tempfile.mkdtemp(dir='/tmp/') if not args.tft_transform
        else args.tft_transform
    )

    with beam.Pipeline(options=pipeline_options) as pipeline:
        with beam_impl.Context(temp_dir=temp_tft_folder):
            train_data = read_input_data(args, pipeline, 'train')

            write_total_distinct_keys_to_file(train_data,
                                              args.nitems_filename, 'sku')

            train_dataset = (train_data, metadata.RAW_DATA_METADATA)
            (train_data, transformed_train_metadata), transform_fn = (
                train_dataset | beam_impl.AnalyzeAndTransformDataset(preprocess_fn)
            )

            _ = (
                transform_fn
                | 'WriteTransformFn' >>
                transform_fn_io.WriteTransformFn(tft_transform_folder)
            )

            train_data = aggregate_transformed_data(
                train_data, 'train'
            )

            write_tfrecords(train_data, metadata.OUTPUT_TRAIN_SCHEMA,
                            args.output_train_filename, 'output train')

            test_data = read_input_data(args, pipeline, 'test')
            test_dataset = (test_data, metadata.RAW_DATA_METADATA)

            (test_data, _) = (
                (test_dataset, transform_fn) | beam_impl.TransformDataset())

            test_data = aggregate_transformed_data(
                test_data, 'test'
            )

            test_data = aggregate_final_test_data(
                train_data, test_data
            )

            write_tfrecords(test_data, metadata.OUTPUT_TEST_SCHEMA,
                            args.output_test_filename, 'output test')
Python
def main():
    '''
    This module is used for real time classification.
    '''
    logging.basicConfig(
        format='[%(asctime)s] %(name)s %(levelname)s: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    module_levels = {'sensor_measurement': logging.INFO,
                     'sensors': logging.INFO,
                     __name__: logging.DEBUG}
    for module, level in module_levels.items():
        logging.getLogger(module).setLevel(level=level)

    copy_filename = input("Name CSV File: ")

    # If the file name already exists, proceed to load the model
    if os.path.isdir('../keras_model/' + copy_filename):
        choice = input('Found the same name of the file. Load the model? [y/n]:').lower()
        if choice == 'y':
            copyfile('../csv/' + copy_filename + '.csv', TEMP_CSV_PATH)
            _, _, _, _, mean, std = ml.data()
            model = load_model('../model/' + copy_filename)
    else:
        choice = 'n'

    sensor = SensorMeasurement(eeg_port=EEG_PORT, gsr_port=GSR_PORT)
    sensor.start_sensor_connection()

    # set 1 : calibration
    # set 2 : focus data for train data
    # set 3 : rest data for train data
    # set 4 : focus data for validation
    # set 5 : rest data for validation
    if choice == 'n':
        # Collect EEG, GSR data for ML
        sensor.collect_data_for_ml(sessions=[60, 120, 120, 120, 120],
                                   filename=copy_filename,
                                   step=STEP)
        sensor.pause_sensor_connection()
        # Pick the best ml model based on the recorded data
        model, mean, std = ml.auto_tuning(copy_filename)
        sensor.continue_eeg_sensor()

    # Start real time detection
    values = []
    sec = 0.0
    for i in range(30):
        row = []
        row.extend([sensor.GSR])
        row.extend(sensor.EEG)
        values.append(row)
        logger.info(f'{sec} sec : {row}')
        sec += STEP
        sleep(STEP)
    values = np.array(values).reshape(1, 30, 9)

    while True:
        try:
            # predict the result
            norm_value = (values - mean) / std
            result = model.predict(norm_value)
            if result >= 0.5:
                logger.info("FOCUS\n")
            else:
                logger.info("NOT FOCUS\n")

            # delete the oldest value
            values = np.delete(values, 0, axis=1)

            # append the newest value
            row = []
            row.extend([sensor.GSR])
            row.extend(sensor.EEG)
            values = np.insert(values, 29, row, axis=1)
            logger.debug(f'{sec} sec : {row}')
            sec += STEP
            sleep(STEP)
        except KeyboardInterrupt:
            logger.info('Keyboard Interrupt')
            sensor.terminate_sensor_connection()
            exit(0)
Python
def start_eeg_sensor(self):
    '''
    Main process
    Keep getting data packets from the sensor
    '''
    logger.info('Connect to EEG sensor')
    for pkt in thinkgear.ThinkGearProtocol(self.__port).get_packets():
        if self.__terminate:
            break
        if not self.__pause:
            for d in pkt:
                if isinstance(d, thinkgear.ThinkGearPoorSignalData) and d.value > 10:
                    logger.warning('Signal quality is poor')
                if isinstance(d, thinkgear.ThinkGearEEGPowerData):
                    logger.debug('Scanning sensor data...')
                    self.delta = d.value.delta
                    self.theta = d.value.theta
                    self.low_alpha = d.value.lowalpha
                    self.high_alpha = d.value.highalpha
                    self.low_beta = d.value.lowbeta
                    self.high_beta = d.value.highbeta
                    self.low_gamma = d.value.lowgamma
                    self.mid_gamma = d.value.midgamma
                    logger.debug(f'delta:{self.delta}')
                    logger.debug(f'theta:{self.theta}')
                    logger.debug(f'lowAlpha:{self.low_alpha}')
                    logger.debug(f'highAlpha:{self.high_alpha}')
                    logger.debug(f'lowBeta:{self.low_beta}')
                    logger.debug(f'highBeta:{self.high_beta}')
                    logger.debug(f'lowGamma:{self.low_gamma}')
                    logger.debug(f'midGamma:{self.mid_gamma}')
Python
def terminate_eeg_sensor(self):
    '''
    Terminate the connection to the EEG sensor
    '''
    logger.info('Terminate connection')
    # self.__pause = True
    self.__terminate = True
Python
def crop(cls, left, right, top, bottom, keep_size=False):
    """
    The image augmentation sequence.
    Crops based on a region of interest among other things.
    left, right, top & bottom are the number of pixels to crop.
    """
    augmentation = iaa.Crop(px=(top, right, bottom, left),
                            keep_size=keep_size)
    return augmentation
Python
def trapezoidal_mask(cls, lower_left, lower_right, upper_left, upper_right,
                     min_y, max_y):
    """
    Uses a binary mask to generate a trapezoidal region of interest.
    Especially useful in filtering out uninteresting features from an
    input image.
    """
    def _transform_images(images, random_state, parents, hooks):
        # Transform a batch of images. The mask is built once from the
        # first image and reused, so all images in the batch are assumed
        # to share the same shape.
        transformed = []
        mask = None
        for image in images:
            if mask is None:
                mask = np.zeros(image.shape, dtype=np.int32)
                # Keep only the trapezoid spanned by:
                #
                #      ul ________ ur       min_y
                #        /        \
                #    ll /__________\ lr     max_y
                points = [
                    [upper_left, min_y],
                    [upper_right, min_y],
                    [lower_right, max_y],
                    [lower_left, max_y]
                ]
                cv2.fillConvexPoly(mask,
                                   np.array(points, dtype=np.int32),
                                   [255, 255, 255])
                mask = np.asarray(mask, dtype='bool')

            masked = np.multiply(image, mask)
            transformed.append(masked)

        return transformed

    def _transform_keypoints(keypoints_on_images, random_state,
                             parents, hooks):
        # No-op: keypoints are left untouched by the mask
        return keypoints_on_images

    augmentation = iaa.Lambda(func_images=_transform_images,
                              func_keypoints=_transform_keypoints)
    return augmentation
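A minimal usage sketch of the mask above, assuming imgaug's callable-augmenter interface; the image size and trapezoid coordinates are illustrative only.

import numpy as np

image = np.zeros((120, 160, 3), dtype=np.uint8)
mask_aug = Augmentations.trapezoidal_mask(
    lower_left=0, lower_right=160, upper_left=40, upper_right=120,
    min_y=60, max_y=120)
masked = mask_aug(images=[image])[0]  # pixels outside the trapezoid are zeroed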
Python
def create(cls, aug_type: str, config: Config) -> iaa.meta.Augmenter:
    """
    Augmentation factory. Cropping and trapezoidal mask are
    transformations which should be applied in training, validation
    and inference. Multiply, Blur and similar are augmentations
    which should be used only in training.
    """
    if aug_type == 'CROP':
        logger.info(f'Creating augmentation {aug_type} with ROI_CROP '
                    f'L: {config.ROI_CROP_LEFT}, '
                    f'R: {config.ROI_CROP_RIGHT}, '
                    f'B: {config.ROI_CROP_BOTTOM}, '
                    f'T: {config.ROI_CROP_TOP}')

        return Augmentations.crop(left=config.ROI_CROP_LEFT,
                                  right=config.ROI_CROP_RIGHT,
                                  bottom=config.ROI_CROP_BOTTOM,
                                  top=config.ROI_CROP_TOP,
                                  keep_size=True)
    elif aug_type == 'TRAPEZE':
        logger.info(f'Creating augmentation {aug_type}')
        return Augmentations.trapezoidal_mask(
                    lower_left=config.ROI_TRAPEZE_LL,
                    lower_right=config.ROI_TRAPEZE_LR,
                    upper_left=config.ROI_TRAPEZE_UL,
                    upper_right=config.ROI_TRAPEZE_UR,
                    min_y=config.ROI_TRAPEZE_MIN_Y,
                    max_y=config.ROI_TRAPEZE_MAX_Y)

    elif aug_type == 'MULTIPLY':
        interval = getattr(config, 'AUG_MULTIPLY_RANGE', (0.5, 1.5))
        logger.info(f'Creating augmentation {aug_type} {interval}')
        return iaa.Multiply(interval)

    elif aug_type == 'BLUR':
        interval = getattr(config, 'AUG_BLUR_RANGE', (0.0, 3.0))
        logger.info(f'Creating augmentation {aug_type} {interval}')
        return iaa.GaussianBlur(sigma=interval)

    else:
        raise ValueError(f'Unknown augmentation type {aug_type}')
Python
def start(self, on_input=None, edge: int = PinEdge.RISING) -> None: """ Start the pin in input mode. :param on_input: function to call when an edge is detected, or None to ignore :param edge: type of edge(s) that trigger on_input; default is PinEdge.RISING This raises a RuntimeError if the pin is already started. You can check to see if the pin is started by calling state() and checking for PinState.NOT_STARTED """ pass
Python
def state(self) -> int:
    """
    Return most recent input state.  This does not re-read the input pin,
    it just returns the last value read by the input() method.
    If the pin is not started or has been stopped,
    this will return PinState.NOT_STARTED
    """
    return PinState.NOT_STARTED
Python
def input(self) -> int: """ Read the input state from the pin. :return: PinState.LOW/HIGH or PinState.NOT_STARTED if pin not started """ return PinState.NOT_STARTED
Python
def start(self, state: int = PinState.LOW) -> None:
    """
    Start the pin in output mode and with given starting state.
    This raises a RuntimeError if the pin is already started.
    You can check to see if the pin is started by calling
    state() and checking for PinState.NOT_STARTED
    """
    pass
Python
def output(self, state: int) -> None:
    """
    Set the output state of the pin to either PinState.LOW or PinState.HIGH
    :param state: PinState.LOW or PinState.HIGH
    :except: RuntimeError if pin not started.
    """
    pass
Python
def start(self, duty: float = 0) -> None:
    """
    Start the pin in output mode and with given starting state.
    This raises a RuntimeError if the pin is already started.
    You can check to see if the pin is started by calling
    state() and checking for PinState.NOT_STARTED
    :param duty: duty cycle in range 0 to 1
    """
    pass
Python
def state(self) -> float:
    """
    Return most recent output state.  This does not re-read the pin,
    it just returns the last value set by the output() method.
    If the pin is not started or has been stopped,
    this will return PinState.NOT_STARTED
    :return: most recent output duty_cycle
    """
    return PinState.NOT_STARTED
Python
def output_pin_by_id(pin_id: str, frequency_hz: int = 60) -> OutputPin: """ Select a ttl output pin given a pin id. :param pin_id: pin specifier string :param frequency_hz: duty cycle frequency in hertz (only necessary for PCA9685) :return: OutputPin """ parts = pin_id.split(".") if parts[0] == PinProvider.PCA9685: pin_provider = parts[0] i2c_bus, i2c_address = parts[1].split(":") i2c_bus = int(i2c_bus) i2c_address = int(i2c_address, base=16) frequency_hz = int(frequency_hz) pin_number = int(parts[2]) return output_pin(pin_provider, pin_number, i2c_bus=i2c_bus, i2c_address=i2c_address, frequency_hz=frequency_hz) if parts[0] == PinProvider.RPI_GPIO: pin_provider = parts[0] pin_scheme = parts[1] pin_number = int(parts[2]) return output_pin(pin_provider, pin_number, pin_scheme=pin_scheme) if parts[0] == PinProvider.PIGPIO: pin_provider = parts[0] if PinScheme.BCM != parts[1]: raise ValueError("Pin scheme must be BCM for PIGPIO") pin_number = int(parts[2]) return output_pin(pin_provider, pin_number, pin_scheme=PinScheme.BCM) raise ValueError(f"Unknown pin provider {parts[0]}")
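The pin id format implied by the parser above is <provider>.<bus-or-scheme>.<pin>, with the PCA9685 middle segment written as <i2c_bus>:<hex_address>. A sketch with illustrative ids:

# PCA9685 on I2C bus 1, address 0x40, channel 13 (illustrative values)
ttl = output_pin_by_id("PCA9685.1:40.13", frequency_hz=60)

# RPi.GPIO using the BOARD numbering scheme, physical pin 33
gpio = output_pin_by_id("RPI_GPIO.BOARD.33")

# PIGPIO only accepts the BCM scheme
bcm = output_pin_by_id("PIGPIO.BCM.13")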
Python
def pwm_pin_by_id(pin_id: str, frequency_hz: int = 60) -> PwmPin: """ Select a pwm output pin given a pin id. :param pin_id: pin specifier string :param frequency_hz: duty cycle frequency in hertz :return: PwmPin """ parts = pin_id.split(".") if parts[0] == PinProvider.PCA9685: pin_provider = parts[0] i2c_bus, i2c_address = parts[1].split(":") i2c_bus = int(i2c_bus) i2c_address = int(i2c_address, base=16) pin_number = int(parts[2]) return pwm_pin(pin_provider, pin_number, i2c_bus=i2c_bus, i2c_address=i2c_address, frequency_hz=frequency_hz) if parts[0] == PinProvider.RPI_GPIO: pin_provider = parts[0] pin_scheme = parts[1] pin_number = int(parts[2]) return pwm_pin(pin_provider, pin_number, pin_scheme=pin_scheme, frequency_hz=frequency_hz) if parts[0] == PinProvider.PIGPIO: pin_provider = parts[0] if PinScheme.BCM != parts[1]: raise ValueError("Pin scheme must be BCM for PIGPIO") pin_number = int(parts[2]) return pwm_pin(pin_provider, pin_number, pin_scheme=PinScheme.BCM, frequency_hz=frequency_hz) raise ValueError(f"Unknown pin provider {parts[0]}")
Python
def input_pin_by_id(pin_id: str, pull: int = PinPull.PULL_NONE) -> InputPin: """ Select a ttl input pin given a pin id. """ parts = pin_id.split(".") if parts[0] == PinProvider.PCA9685: raise RuntimeError("PinProvider.PCA9685 does not implement InputPin") if parts[0] == PinProvider.RPI_GPIO: pin_provider = parts[0] pin_scheme = parts[1] pin_number = int(parts[2]) return input_pin(pin_provider, pin_number, pin_scheme=pin_scheme, pull=pull) if parts[0] == PinProvider.PIGPIO: pin_provider = parts[0] if PinScheme.BCM != parts[1]: raise ValueError("Pin scheme must be BCM for PIGPIO") pin_number = int(parts[2]) return input_pin(pin_provider, pin_number, pin_scheme=PinScheme.BCM, pull=pull) raise ValueError(f"Unknown pin provider {parts[0]}")
Python
def input_pin( pin_provider: str, pin_number: int, pin_scheme: str = PinScheme.BOARD, pull: int = PinPull.PULL_NONE) -> InputPin: """ construct an InputPin using the given pin provider. Note that PCA9685 can NOT provide an InputPin. :param pin_provider: PinProvider string :param pin_number: zero based pin number :param pin_scheme: PinScheme string :param pull: PinPull value :return: InputPin :except: RuntimeError if pin_provider is not valid. """ if pin_provider == PinProvider.RPI_GPIO: return InputPinGpio(pin_number, pin_scheme, pull) if pin_provider == PinProvider.PCA9685: raise RuntimeError("PinProvider.PCA9685 does not implement InputPin") if pin_provider == PinProvider.PIGPIO: if pin_scheme != PinScheme.BCM: raise ValueError("Pin scheme must be PinScheme.BCM for PIGPIO") return InputPinPigpio(pin_number, pull) raise RuntimeError(f"UnknownPinProvider ({pin_provider})")
Python
def output_pin(
        pin_provider: str,
        pin_number: int,
        pin_scheme: str = PinScheme.BOARD,
        i2c_bus: int = 0,
        i2c_address: int = 40,
        frequency_hz: int = 60) -> OutputPin:
    """
    construct an OutputPin using the given pin provider
    Note that PCA9685 can NOT provide an InputPin.
    :param pin_provider: PinProvider string
    :param pin_number: zero based pin number
    :param pin_scheme: PinScheme string
    :param i2c_bus: I2C bus number for I2C devices
    :param i2c_address: I2C address for I2C devices
    :param frequency_hz: duty cycle frequency in hertz (for PCA9685)
    :return: OutputPin
    :except: RuntimeError if pin_provider is not valid.
    """
    if pin_provider == PinProvider.RPI_GPIO:
        return OutputPinGpio(pin_number, pin_scheme)
    if pin_provider == PinProvider.PCA9685:
        return OutputPinPCA9685(pin_number, pca9685(i2c_bus, i2c_address, frequency_hz))
    if pin_provider == PinProvider.PIGPIO:
        if pin_scheme != PinScheme.BCM:
            raise ValueError("Pin scheme must be PinScheme.BCM for PIGPIO")
        return OutputPinPigpio(pin_number)
    raise RuntimeError(f"UnknownPinProvider ({pin_provider})")
Python
def pwm_pin(
        pin_provider: str,
        pin_number: int,
        pin_scheme: str = PinScheme.BOARD,
        frequency_hz: int = 60,
        i2c_bus: int = 0,
        i2c_address: int = 40) -> PwmPin:
    """
    construct a PwmPin using the given pin provider
    :param pin_provider: PinProvider string
    :param pin_number: zero based pin number
    :param pin_scheme: PinScheme string
    :param i2c_bus: I2C bus number for I2C devices
    :param i2c_address: I2C address for I2C devices
    :param frequency_hz: duty cycle frequency in hertz
    :return: PwmPin
    :except: RuntimeError if pin_provider is not valid.
    """
    if pin_provider == PinProvider.RPI_GPIO:
        return PwmPinGpio(pin_number, pin_scheme, frequency_hz)
    if pin_provider == PinProvider.PCA9685:
        return PwmPinPCA9685(pin_number, pca9685(i2c_bus, i2c_address, frequency_hz))
    if pin_provider == PinProvider.PIGPIO:
        if pin_scheme != PinScheme.BCM:
            raise ValueError("Pin scheme must be PinScheme.BCM for PIGPIO")
        return PwmPinPigpio(pin_number, frequency_hz)
    raise RuntimeError(f"UnknownPinProvider ({pin_provider})")
Python
def gpio_fn(pin_scheme: int, fn: Callable[[], Any]):
    """
    Convenience method to enforce the desired GPIO pin scheme
    before calling a GPIO function.
    RPi.GPIO allows only a single scheme to be set at runtime.
    If the pin scheme is already set to a different scheme, then
    this will raise a RuntimeError to prevent erroneous pin outputs.

    :param pin_scheme:int GPIO.BOARD or GPIO.BCM
    :param fn:Callable[[], Any] no-arg function to call after setting pin scheme.
    :return:any return value from called function
    :exception:RuntimeError if pin scheme is already set to a different scheme.
    """
    prev_scheme = GPIO.getmode()
    if prev_scheme is None:
        GPIO.setmode(pin_scheme)
    elif prev_scheme != pin_scheme:
        raise RuntimeError(f"Attempt to change GPIO pin scheme from ({prev_scheme}) to ({pin_scheme})"
                           " after it has been set.  All RPi.GPIO users must use the same pin scheme.")
    return fn()
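A small usage sketch of the wrapper above, assuming RPi.GPIO is importable as GPIO; the pin number is illustrative.

# configure and drive BCM pin 18, enforcing the BCM scheme on every call
gpio_fn(GPIO.BCM, lambda: GPIO.setup(18, GPIO.OUT))
gpio_fn(GPIO.BCM, lambda: GPIO.output(18, GPIO.HIGH))
# a later call that passes GPIO.BOARD would raise RuntimeError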
Python
def pca9685(busnum: int, address: int, frequency: int = 60):
    """
    pca9685 factory allocates driver for pca9685
    at given bus number and i2c address.
    If we have already created one for that bus/addr
    pair then use that singleton.  If frequency is
    not the same, then error.
    :param busnum: I2C bus number of PCA9685
    :param address: address of PCA9685 on I2C bus
    :param frequency: frequency in hertz of duty cycle
    :except: PCA9685 has a single frequency for all channels,
             so attempts to allocate a controller at a
             given bus number and address with different
             frequencies will raise a ValueError
    """
    key = str(busnum) + ":" + hex(address)
    pca = _pca9685.get(key)
    if pca is None:
        pca = PCA9685(busnum, address, frequency)
        _pca9685[key] = pca  # cache the singleton for this bus/address pair
    if pca.get_frequency() != frequency:
        raise ValueError(
            f"Frequency {frequency} conflicts with pca9685 at {key} "
            f"with frequency {pca.get_frequency()}")
    return pca
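A quick illustration of the singleton behavior above; the bus and address values are illustrative.

a = pca9685(1, 0x40, frequency=60)
b = pca9685(1, 0x40, frequency=60)
assert a is b                      # same bus/address -> same driver instance
pca9685(1, 0x40, frequency=100)    # raises ValueError: frequency conflict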
Python
def start(self, state: int = PinState.LOW) -> None: """ Start the pin in output mode. This raises a RuntimeError if the pin is already started. You can check to see if the pin is started by calling state() and checking for PinState.NOT_STARTED :param state: PinState to start with :except: RuntimeError if pin is already started. """ if self.state() != PinState.NOT_STARTED: raise RuntimeError(f"Attempt to start pin ({self.pin_number}) that is already started") self._state = 0 # hack to allow first output to work self.output(state)
Python
def stop(self) -> None: """ Stop the pin and return it to PinState.NOT_STARTED """ if self.state() != PinState.NOT_STARTED: self.output(PinState.LOW) self._state = PinState.NOT_STARTED
Python
def state(self) -> int:
    """
    Return most recent output state.
    If the pin is not started or has been stopped,
    this will return PinState.NOT_STARTED
    :return: PinState
    """
    return self._state
Python
def output(self, state: int) -> None: """ Write output state to the pin. :param state: PinState.LOW or PinState.HIGH """ if self.state() == PinState.NOT_STARTED: raise RuntimeError(f"Attempt to use pin ({self.pin_number}) that is not started") if state == PinState.HIGH: self.pca9685.set_high(self.pin_number) else: self.pca9685.set_low(self.pin_number) self._state = state
Python
def start(self, duty: float = 0) -> None:
    """
    Start pin with given duty cycle
    :param duty: duty cycle in range 0 to 1
    :except: RuntimeError if pin is already started.
    """
    if self.state() != PinState.NOT_STARTED:
        raise RuntimeError(f"Attempt to start pin ({self.pin_number}) that is already started")
    if duty < 0 or duty > 1:
        raise ValueError("duty_cycle must be in range 0 to 1")
    self._state = 0  # hack to allow first duty_cycle to work
    self.duty_cycle(duty)  # duty_cycle() also records duty in self._state
Python
def duty_cycle(self, duty: float) -> None: """ Write a duty cycle to the output pin :param duty: duty cycle in range 0 to 1 :except: RuntimeError if not started """ if self.state() == PinState.NOT_STARTED: raise RuntimeError(f"Attempt to use pin ({self.pin_number}) that is not started") if duty < 0 or duty > 1: raise ValueError("duty_cycle must be in range 0 to 1") self.pca9685.set_duty_cycle(self.pin_number, duty) self._state = duty
Python
def start(self, on_input=None, edge=PinEdge.RISING) -> None: """ Start the input pin and optionally set callback. :param on_input: function to call when an edge is detected, or None to ignore :param edge: type of edge(s) that trigger on_input; default is PinEdge.RISING """ if self.state() != PinState.NOT_STARTED: raise RuntimeError(f"Attempt to start InputPinPigpio({self.pin_number}) that is already started.") self.pgpio = self.pgpio or pigpio.pi() self.pgpio.set_mode(self.pin_number, pigpio.INPUT) self.pgpio.set_pull_up_down(self.pin_number, self.pull) if on_input is not None: self.on_input = on_input self.pgpio.callback(self.pin_number, pigpio_pin_edge[edge], self._callback) self._state = self.pgpio.read(self.pin_number)
Python
def input(self) -> int:
    """
    Read the input pin's state.
    :return: PinState.LOW/HIGH or PinState.NOT_STARTED if not started
    """
    if self.state() != PinState.NOT_STARTED:
        self._state = self.pgpio.read(self.pin_number)
    return self._state
Python
def start(self, state: int = PinState.LOW) -> None: """ Start the pin in output mode. This raises a RuntimeError if the pin is already started. You can check to see if the pin is started by calling state() and checking for PinState.NOT_STARTED :param state: PinState to start with :except: RuntimeError if pin is already started. """ if self.state() != PinState.NOT_STARTED: raise RuntimeError("Attempt to start OutputPin that is already started.") self.pgpio = self.pgpio or pigpio.pi() self.pgpio.set_mode(self.pin_number, pigpio.OUTPUT) self.pgpio.write(self.pin_number, state) # set initial state self._state = state
Python
def output(self, state: int) -> None: """ Write output state to the pin. :param state: PinState.LOW or PinState.HIGH """ if self.state() != PinState.NOT_STARTED: self.pgpio.write(self.pin_number, state) self._state = state
Python
def start(self, duty: float = 0) -> None:
    """
    Start pin with given duty cycle.
    :param duty: duty cycle in range 0 to 1
    :except: RuntimeError if pin is already started.
    """
    if self.state() != PinState.NOT_STARTED:
        raise RuntimeError(f"Attempt to start PwmPinPigpio({self.pin_number}) that is already started.")
    if duty < 0 or duty > 1:
        raise ValueError("duty_cycle must be in range 0 to 1")

    self.pgpio = self.pgpio or pigpio.pi()
    self.pgpio.set_mode(self.pin_number, pigpio.OUTPUT)
    self.pgpio.set_PWM_frequency(self.pin_number, self.frequency)
    self.pgpio.set_PWM_range(self.pin_number, 4095)  # 12 bits, same as PCA9685
    self.pgpio.set_PWM_dutycycle(self.pin_number, int(duty * 4095))  # set initial state
    self._state = duty
Python
def duty_cycle(self, duty: float) -> None: """ Write a duty cycle to the output pin :param duty: duty cycle in range 0 to 1 :except: RuntimeError if not started """ if duty < 0 or duty > 1: raise ValueError("duty_cycle must be in range 0 to 1") if self.state() != PinState.NOT_STARTED: self.pgpio.set_PWM_dutycycle(self.pin_number, int(duty * 4095)) self._state = duty
Python
def load(self, selection):
    """ Store the chosen file path and trigger the load action """
    self.file_path = str(selection[0])
    self.popup.dismiss()
    self.load_action()
Python
def load_action(self): """ Load the config from the file path""" if self.file_path: try: path = os.path.join(self.file_path, 'config.py') self.config = load_config(path) # If load successful, store into app config rc_handler.data['car_dir'] = self.file_path except FileNotFoundError: Logger.error(f'Config: Directory {self.file_path} has no ' f'config.py') except Exception as e: Logger.error(f'Config: {e}')
Python
def load_action(self): """ Update tub from the file path""" if self.update_tub(): # If update successful, store into app config rc_handler.data['last_tub'] = self.file_path
Python
def update(self, record):
    """ This function is called every time the current record is updated """
    if not record:
        return
    field, index = decompose(self.field)
    if field in record.underlying:
        val = record.underlying[field]
        if index is not None:
            val = val[index]
        # Update bar if a field property for this field is known
        if self.field_property:
            norm_value = get_norm_value(val, self.config,
                                        self.field_property)
            new_bar_val = (norm_value + 1) * 50 if \
                self.field_property.centered else norm_value * 100
            self.ids.bar.value = new_bar_val
        self.ids.field_label.text = self.field
        if isinstance(val, (float, np.float32)):
            text = f'{val:+07.3f}'
        elif isinstance(val, int):
            text = f'{val:10}'
        else:
            text = str(val)
        self.ids.value_label.text = text
    else:
        Logger.error(f'Record: Bad record {record.underlying["_index"]} - '
                     f'missing field {self.field}')
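A worked example of the bar arithmetic above: a centered field (e.g. a steering-like value normalized to [-1, 1]) maps onto [0, 100], while a non-centered field simply scales its [0, 1] value by 100.

# centered field: norm in [-1, 1] -> bar in [0, 100]
assert ((-1) + 1) * 50 == 0 and (0 + 1) * 50 == 50 and (1 + 1) * 50 == 100
# non-centered field: norm in [0, 1] -> bar in [0, 100]
assert 0.25 * 100 == 25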