repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_documentation_string
stringlengths
1
47.2k
func_code_url
stringlengths
85
339
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/search.py
StorageRecordSearch.fetch
async def fetch(self, limit: int = None) -> Sequence[StorageRecord]:
    """
    Fetch next batch of search results.

    Raise BadSearch if search is closed, WalletState if wallet is closed.

    :param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK)
    :return: next batch of records found
    """
    LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit)

    if not self.opened:
        LOGGER.debug('StorageRecordSearch.fetch <!< Storage record search is closed')
        raise BadSearch('Storage record search is closed')
    if not self._wallet.opened:
        LOGGER.debug('StorageRecordSearch.fetch <!< Wallet %s is closed', self._wallet.name)
        raise WalletState('Wallet {} is closed'.format(self._wallet.name))

    chunk = limit or Wallet.DEFAULT_CHUNK
    found_json = await non_secrets.fetch_wallet_search_next_records(
        self._wallet.handle,
        self.handle,
        chunk)
    found = json.loads(found_json)['records'] or []  # indy-sdk sets 'records' to None at exhaustion

    batch = []
    for rec in found:
        batch.append(StorageRecord(typ=rec['type'], value=rec['value'], tags=rec['tags'], ident=rec['id']))

    LOGGER.debug('StorageRecordSearch.fetch <<< %s', batch)
    return batch
python
async def fetch(self, limit: int = None) -> Sequence[StorageRecord]: """ Fetch next batch of search results. Raise BadSearch if search is closed, WalletState if wallet is closed. :param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK) :return: next batch of records found """ LOGGER.debug('StorageRecordSearch.fetch >>> limit: %s', limit) if not self.opened: LOGGER.debug('StorageRecordSearch.fetch <!< Storage record search is closed') raise BadSearch('Storage record search is closed') if not self._wallet.opened: LOGGER.debug('StorageRecordSearch.fetch <!< Wallet %s is closed', self._wallet.name) raise WalletState('Wallet {} is closed'.format(self._wallet.name)) records = json.loads(await non_secrets.fetch_wallet_search_next_records( self._wallet.handle, self.handle, limit or Wallet.DEFAULT_CHUNK))['records'] or [] # at exhaustion results['records'] = None rv = [StorageRecord(typ=rec['type'], value=rec['value'], tags=rec['tags'], ident=rec['id']) for rec in records] LOGGER.debug('StorageRecordSearch.fetch <<< %s', rv) return rv
Fetch next batch of search results. Raise BadSearch if search is closed, WalletState if wallet is closed. :param limit: maximum number of records to return (default value Wallet.DEFAULT_CHUNK) :return: next batch of records found
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/search.py#L120-L147
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/search.py
StorageRecordSearch.close
async def close(self) -> None:
    """
    Close search: release the underlying wallet search handle, if any.
    """
    LOGGER.debug('StorageRecordSearch.close >>>')

    if self._handle:
        # release indy-sdk search resources, then mark search closed
        await non_secrets.close_wallet_search(self.handle)
        self._handle = None

    LOGGER.debug('StorageRecordSearch.close <<<')
python
async def close(self) -> None: """ Close search. """ LOGGER.debug('StorageRecordSearch.close >>>') if self._handle: await non_secrets.close_wallet_search(self.handle) self._handle = None LOGGER.debug('StorageRecordSearch.close <<<')
Close search.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/search.py#L164-L175
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/nodepool.py
NodePool.cache_id
def cache_id(self) -> str:
    """
    Return identifier for archivable caches, computing it first and retaining
    it if need be.

    Raise AbsentPool if ledger configuration is not yet available.

    :return: archivable cache identifier
    """
    # fix: docstring previously documented a ':param name:' that this method does not take
    if self._cache_id:  # memoized on first computation
        return self._cache_id

    # derive a canonical identifier from the pool's genesis transaction file
    with open(join(expanduser('~'), '.indy_client', 'pool', self.name, '{}.txn'.format(self.name))) as fh_genesis:
        genesis = [json.loads(line) for line in fh_genesis.readlines() if line]
    hps = []
    for gen_txn in genesis:
        hps.append(self.protocol.genesis_host_port(gen_txn))
    hps.sort()  # canonicalize to make order irrelevant
    self._cache_id = ':'.join('{}:{}'.format(hp[0], hp[1]) for hp in hps)

    return self._cache_id
python
def cache_id(self) -> str: """ Return identifier for archivable caches, computing it first and retaining it if need be. Raise AbsentPool if ledger configuration is not yet available. :param name: pool name :return: archivable cache identifier """ if self._cache_id: return self._cache_id with open(join(expanduser('~'), '.indy_client', 'pool', self.name, '{}.txn'.format(self.name))) as fh_genesis: genesis = [json.loads(line) for line in fh_genesis.readlines() if line] hps = [] for gen_txn in genesis: hps.append(self.protocol.genesis_host_port(gen_txn)) hps.sort() # canonicalize to make order irrelevant self._cache_id = ':'.join('{}:{}'.format(hp[0], hp[1]) for hp in hps) return self._cache_id
Return identifier for archivable caches, computing it first and retaining it if need be. Raise AbsentPool if ledger configuration is not yet available. :param name: pool name :return: archivable cache identifier
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/nodepool.py#L101-L122
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/nodepool.py
NodePool.open
async def open(self) -> 'NodePool':
    """
    Explicit entry. Opens pool as configured, for later closure via close().
    Creates pool if it does not yet exist, using configured genesis transaction file.
    For use when keeping pool open across multiple calls.

    Raise any AbsentPool if node pool ledger configuration is not available.

    :return: current object
    """
    LOGGER.debug('NodePool.open >>>')

    await pool.set_protocol_version(self.protocol.indy())
    LOGGER.info('Pool ledger %s set protocol %s', self.name, self.protocol)

    try:
        self._handle = await pool.open_pool_ledger(self.name, json.dumps(self.config))
    except IndyError as x_indy:
        if x_indy.error_code != ErrorCode.PoolLedgerNotCreatedError:
            LOGGER.debug(
                'NodePool.open <!< cannot open node pool %s: indy error code %s',
                self.name,
                x_indy.error_code)
            raise
        LOGGER.debug('NodePool.open <!< Absent node pool %s ledger configuration', self.name)
        raise AbsentPool('Absent node pool {} ledger configuration'.format(self.name))

    LOGGER.debug('NodePool.open <<<')
    return self
python
async def open(self) -> 'NodePool': """ Explicit entry. Opens pool as configured, for later closure via close(). Creates pool if it does not yet exist, using configured genesis transaction file. For use when keeping pool open across multiple calls. Raise any AbsentPool if node pool ledger configuration is not available. :return: current object """ LOGGER.debug('NodePool.open >>>') await pool.set_protocol_version(self.protocol.indy()) LOGGER.info('Pool ledger %s set protocol %s', self.name, self.protocol) try: self._handle = await pool.open_pool_ledger(self.name, json.dumps(self.config)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.PoolLedgerNotCreatedError: LOGGER.debug('NodePool.open <!< Absent node pool %s ledger configuration', self.name) raise AbsentPool('Absent node pool {} ledger configuration'.format(self.name)) LOGGER.debug( 'NodePool.open <!< cannot open node pool %s: indy error code %s', self.name, x_indy.error_code) raise LOGGER.debug('NodePool.open <<<') return self
Explicit entry. Opens pool as configured, for later closure via close(). Creates pool if it does not yet exist, using configured genesis transaction file. For use when keeping pool open across multiple calls. Raise any AbsentPool if node pool ledger configuration is not available. :return: current object
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/nodepool.py#L140-L169
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/nodepool.py
NodePool.close
async def close(self) -> None:
    """
    Explicit exit. Closes pool.
    For use when keeping pool open across multiple calls.
    """
    LOGGER.debug('NodePool.close >>>')

    if self.handle:
        await pool.close_pool_ledger(self.handle)
        self._handle = None
    else:
        # idempotent: closing an already-closed pool only logs a warning
        LOGGER.warning('Abstaining from closing pool %s: already closed', self.name)

    LOGGER.debug('NodePool.close <<<')
python
async def close(self) -> None: """ Explicit exit. Closes pool. For use when keeping pool open across multiple calls. """ LOGGER.debug('NodePool.close >>>') if not self.handle: LOGGER.warning('Abstaining from closing pool %s: already closed', self.name) else: await pool.close_pool_ledger(self.handle) self._handle = None LOGGER.debug('NodePool.close <<<')
Explicit exit. Closes pool. For use when keeping pool open across multiple calls.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/nodepool.py#L188-L201
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/nodepool.py
NodePool.refresh
async def refresh(self) -> None:
    """
    Refresh local copy of pool ledger and update node pool connections.
    """
    LOGGER.debug('NodePool.refresh >>>')
    await pool.refresh_pool_ledger(self.handle)
    LOGGER.debug('NodePool.refresh <<<')
python
async def refresh(self) -> None: """ Refresh local copy of pool ledger and update node pool connections. """ LOGGER.debug('NodePool.refresh >>>') await pool.refresh_pool_ledger(self.handle) LOGGER.debug('NodePool.refresh <<<')
Refresh local copy of pool ledger and update node pool connections.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/nodepool.py#L203-L212
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.reseed
async def reseed(self, seed: str = None) -> None:
    """
    Rotate key for VON anchor: generate new key, submit to ledger, update wallet.

    Raise WalletState if wallet is currently closed.

    :param seed: new seed for ed25519 key pair (default random)
    """
    LOGGER.debug('BaseAnchor.reseed >>> seed: [SEED]')

    verkey = await self.wallet.reseed_init(seed)
    nym_role_token = (await self.get_nym_role()).token()
    req_json = await ledger.build_nym_request(
        self.did,
        self.did,
        verkey,
        self.name,
        nym_role_token)
    await self._sign_submit(req_json)
    # commit the new key in the wallet only after the ledger accepted it
    await self.wallet.reseed_apply()

    LOGGER.debug('BaseAnchor.reseed <<<')
python
async def reseed(self, seed: str = None) -> None: """ Rotate key for VON anchor: generate new key, submit to ledger, update wallet. Raise WalletState if wallet is currently closed. :param seed: new seed for ed25519 key pair (default random) """ LOGGER.debug('BaseAnchor.reseed >>> seed: [SEED]') verkey = await self.wallet.reseed_init(seed) req_json = await ledger.build_nym_request( self.did, self.did, verkey, self.name, (await self.get_nym_role()).token()) await self._sign_submit(req_json) await self.wallet.reseed_apply() LOGGER.debug('BaseAnchor.reseed <<<')
Rotate key for VON anchor: generate new key, submit to ledger, update wallet. Raise WalletState if wallet is currently closed. :param seed: new seed for ed25519 key pair (default random)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L202-L222
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.get_nym
async def get_nym(self, target_did: str = None) -> str:
    """
    Get json cryptonym (including current verification key) for input (anchor) DID from ledger.
    Return empty production {} if the ledger has no such cryptonym.

    Raise BadLedgerTxn on failure. Raise WalletState if target DID is default (own DID) value
    but wallet does not have it (neither created nor opened since initialization).

    :param target_did: DID of cryptonym to fetch (default own DID)
    :return: cryptonym json
    """
    LOGGER.debug('BaseAnchor.get_nym >>> target_did: %s', target_did)

    if target_did and not ok_did(target_did):
        LOGGER.debug('BaseAnchor.get_nym <!< Bad DID %s', target_did)
        raise BadIdentifier('Bad DID {}'.format(target_did))
    if not (target_did or self.did):
        LOGGER.debug('BaseAnchor.get_nym <!< Bad wallet state: DID for %s unavailable', self.name)
        raise WalletState('Bad wallet state: DID for {} unavailable'.format(self.name))

    get_nym_req = await ledger.build_get_nym_request(self.did, target_did or self.did)
    resp_json = await self._submit(get_nym_req)
    data_json = json.loads(resp_json)['result']['data']  # it's double-encoded on the ledger
    rv = data_json if data_json else json.dumps({})

    LOGGER.debug('BaseAnchor.get_nym <<< %s', rv)
    return rv
python
async def get_nym(self, target_did: str = None) -> str: """ Get json cryptonym (including current verification key) for input (anchor) DID from ledger. Return empty production {} if the ledger has no such cryptonym. Raise BadLedgerTxn on failure. Raise WalletState if target DID is default (own DID) value but wallet does not have it (neither created nor opened since initialization). :param target_did: DID of cryptonym to fetch (default own DID) :return: cryptonym json """ LOGGER.debug('BaseAnchor.get_nym >>> target_did: %s', target_did) if target_did and not ok_did(target_did): LOGGER.debug('BaseAnchor.get_nym <!< Bad DID %s', target_did) raise BadIdentifier('Bad DID {}'.format(target_did)) if not (target_did or self.did): LOGGER.debug('BaseAnchor.get_nym <!< Bad wallet state: DID for %s unavailable', self.name) raise WalletState('Bad wallet state: DID for {} unavailable'.format(self.name)) rv = json.dumps({}) get_nym_req = await ledger.build_get_nym_request(self.did, target_did or self.did) resp_json = await self._submit(get_nym_req) data_json = (json.loads(resp_json))['result']['data'] # it's double-encoded on the ledger if data_json: rv = data_json LOGGER.debug('BaseAnchor.get_nym <<< %s', rv) return rv
Get json cryptonym (including current verification key) for input (anchor) DID from ledger. Return empty production {} if the ledger has no such cryptonym. Raise BadLedgerTxn on failure. Raise WalletState if target DID is default (own DID) value but wallet does not have it (neither created nor opened since initialization). :param target_did: DID of cryptonym to fetch (default own DID) :return: cryptonym json
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L224-L255
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.get_nym_role
async def get_nym_role(self, target_did: str = None) -> Role:
    """
    Return the cryptonym role for input did from the ledger - note that this may exceed
    the role of least privilege for the class.

    Raise AbsentNym if current anchor has no cryptonym on the ledger, or WalletState
    if current DID unavailable.

    :param target_did: DID of cryptonym role to fetch (default own DID)
    :return: identifier for current cryptonym role on ledger
    """
    LOGGER.debug('BaseAnchor.get_nym_role >>> target_did: %s', target_did)

    cryptonym = json.loads(await self.get_nym(target_did))
    if not cryptonym:
        LOGGER.debug('BaseAnchor.get_nym_role <!< Ledger has no cryptonym for anchor %s', self.name)
        raise AbsentNym('Ledger has no cryptonym for anchor {}'.format(self.name))

    rv = Role.get(cryptonym['role'])
    LOGGER.debug('BaseAnchor.get_nym_role <<< %s', rv)
    return rv
python
async def get_nym_role(self, target_did: str = None) -> Role: """ Return the cryptonym role for input did from the ledger - note that this may exceed the role of least privilege for the class. Raise AbsentNym if current anchor has no cryptonym on the ledger, or WalletState if current DID unavailable. :param target_did: DID of cryptonym role to fetch (default own DID) :return: identifier for current cryptonym role on ledger """ LOGGER.debug('BaseAnchor.get_nym_role >>> target_did: %s', target_did) nym = json.loads(await self.get_nym(target_did)) if not nym: LOGGER.debug('BaseAnchor.get_nym_role <!< Ledger has no cryptonym for anchor %s', self.name) raise AbsentNym('Ledger has no cryptonym for anchor {}'.format(self.name)) rv = Role.get(nym['role']) LOGGER.debug('BaseAnchor.get_nym_role <<< %s', rv) return rv
Return the cryptonym role for input did from the ledger - note that this may exceed the role of least privilege for the class. Raise AbsentNym if current anchor has no cryptonym on the ledger, or WalletState if current DID unavailable. :param target_did: DID of cryptonym role to fetch (default own DID) :return: identifier for current cryptonym role on ledger
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L257-L278
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.least_role
def least_role() -> Role:
    """
    Return the indy-sdk role of least privilege for an anchor (class) in building
    its cryptonym for the trust anchor to send to the ledger.

    :return: role of least privilege by anchor class
    """
    LOGGER.debug('BaseAnchor.least_role >>>')

    rv = Role.TRUST_ANCHOR

    LOGGER.debug('BaseAnchor.least_role <<< %s', rv)
    return rv
python
def least_role() -> Role: """ Return the indy-sdk role of least privilege for an anchor (class) in building its cryptonym for the trust anchor to send to the ledger. :return: role of least privilege by anchor class """ LOGGER.debug('BaseAnchor.least_role >>>') rv = Role.TRUST_ANCHOR LOGGER.debug('BaseAnchor.least_role <<< %s', rv) return rv
Return the indy-sdk role of least privilege for an anchor (class) in building its cryptonym for the trust anchor to send to the ledger. :return: role of least privilege by anchor class
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L281-L294
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.set_did_endpoint
async def set_did_endpoint(self, remote_did: str, did_endpoint: str) -> EndpointInfo:
    """
    Set endpoint as metadata for pairwise remote DID in wallet. Pick up (transport)
    verification key from pairwise relation and return with endpoint in EndpointInfo.

    Raise BadIdentifier on bad DID. Raise WalletState if wallet is closed.
    Raise AbsentRecord if pairwise relation not present in wallet.

    :param remote_did: pairwise remote DID
    :param did_endpoint: value to set as endpoint in wallet and cache
    :return: endpoint and (transport) verification key
    """
    # fix: docstring previously documented the second parameter as ':param endpoint:'
    LOGGER.debug('BaseAnchor.set_did_endpoint >>> remote_did: %s, did_endpoint: %s', remote_did, did_endpoint)

    if not ok_did(remote_did):
        LOGGER.debug('BaseAnchor.set_did_endpoint <!< Bad DID %s', remote_did)
        raise BadIdentifier('Bad DID {}'.format(remote_did))

    pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did, None)
    if not pairwise_info:
        LOGGER.debug(
            'BaseAnchor.set_did_endpoint <!< Anchor %s has no pairwise relation for remote DID %s',
            self.name,
            remote_did)
        raise AbsentRecord('Anchor {} has no pairwise relation for remote DID {}'.format(
            self.name,
            remote_did))
    # store the endpoint as pairwise metadata alongside the existing relation
    await self.wallet.write_pairwise(
        pairwise_info.their_did,
        pairwise_info.their_verkey,
        pairwise_info.my_did,
        {'did_endpoint': did_endpoint})

    rv = EndpointInfo(did_endpoint, pairwise_info.their_verkey)

    LOGGER.debug('BaseAnchor.set_did_endpoint <<< %s', rv)
    return rv
python
async def set_did_endpoint(self, remote_did: str, did_endpoint: str) -> EndpointInfo: """ Set endpoint as metadata for pairwise remote DID in wallet. Pick up (transport) verification key from pairwise relation and return with endpoint in EndpointInfo. Raise BadIdentifier on bad DID. Raise WalletState if wallet is closed. Raise AbsentRecord if pairwise relation not present in wallet. :param remote_did: pairwise remote DID :param endpoint: value to set as endpoint in wallet and cache :return: endpoint and (transport) verification key """ LOGGER.debug('BaseAnchor.set_did_endpoint >>> remote_did: %s, did_endpoint: %s', remote_did, did_endpoint) if not ok_did(remote_did): LOGGER.debug('BaseAnchor.set_did_endpoint <!< Bad DID %s', remote_did) raise BadIdentifier('Bad DID {}'.format(remote_did)) pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did, None) if not pairwise_info: LOGGER.debug( 'BaseAnchor.set_did_endpoint <!< Anchor %s has no pairwise relation for remote DID %s', self.name, remote_did) raise AbsentRecord('Anchor {} has no pairwise relation for remote DID {}'.format( self.name, remote_did)) await self.wallet.write_pairwise( pairwise_info.their_did, pairwise_info.their_verkey, pairwise_info.my_did, {'did_endpoint': did_endpoint}) rv = EndpointInfo(did_endpoint, pairwise_info.their_verkey) LOGGER.debug('BaseAnchor.set_did_endpoint <<< %s', rv) return rv
Set endpoint as metadata for pairwise remote DID in wallet. Pick up (transport) verification key from pairwise relation and return with endpoint in EndpointInfo. Raise BadIdentifier on bad DID. Raise WalletState if wallet is closed. Raise AbsentRecord if pairwise relation not present in wallet. :param remote_did: pairwise remote DID :param endpoint: value to set as endpoint in wallet and cache :return: endpoint and (transport) verification key
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L296-L335
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.get_did_endpoint
async def get_did_endpoint(self, remote_did: str) -> EndpointInfo:
    """
    Return endpoint info for remote DID.

    Raise BadIdentifier for bad remote DID. Raise WalletState if bypassing cache
    but wallet is closed. Raise AbsentRecord for no such endpoint.

    :param remote_did: pairwise remote DID
    :return: endpoint and (transport) verification key as EndpointInfo
    """
    LOGGER.debug('BaseAnchor.get_did_endpoint >>> remote_did: %s', remote_did)

    if not ok_did(remote_did):
        LOGGER.debug('BaseAnchor.get_did_endpoint <!< Bad DID %s', remote_did)
        raise BadIdentifier('Bad DID {}'.format(remote_did))
    if not self.wallet.handle:
        LOGGER.debug('BaseAnchor.get_did_endpoint <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did, None)
    if not pairwise_info or 'did_endpoint' not in pairwise_info.metadata:
        LOGGER.debug('BaseAnchor.get_did_endpoint <!< No endpoint for remote DID %s', remote_did)
        raise AbsentRecord('No endpoint for remote DID {}'.format(remote_did))

    rv = EndpointInfo(pairwise_info.metadata['did_endpoint'], pairwise_info.their_verkey)
    LOGGER.debug('BaseAnchor.get_did_endpoint <<< %s', rv)
    return rv
python
async def get_did_endpoint(self, remote_did: str) -> EndpointInfo: """ Return endpoint info for remote DID. Raise BadIdentifier for bad remote DID. Raise WalletState if bypassing cache but wallet is closed. Raise AbsentRecord for no such endpoint. :param remote_did: pairwise remote DID :return: endpoint and (transport) verification key as EndpointInfo """ LOGGER.debug('BaseAnchor.get_did_endpoint >>> remote_did: %s', remote_did) if not ok_did(remote_did): LOGGER.debug('BaseAnchor.get_did_endpoint <!< Bad DID %s', remote_did) raise BadIdentifier('Bad DID {}'.format(remote_did)) if not self.wallet.handle: LOGGER.debug('BaseAnchor.get_did_endpoint <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) pairwise_info = (await self.wallet.get_pairwise(remote_did)).get(remote_did, None) if not (pairwise_info and 'did_endpoint' in pairwise_info.metadata): LOGGER.debug('BaseAnchor.get_did_endpoint <!< No endpoint for remote DID %s', remote_did) raise AbsentRecord('No endpoint for remote DID {}'.format(remote_did)) rv = EndpointInfo(pairwise_info.metadata['did_endpoint'], pairwise_info.their_verkey) LOGGER.debug('BaseAnchor.get_did_endpoint <<< %s', rv) return rv
Return endpoint info for remote DID. Raise BadIdentifier for bad remote DID. Raise WalletState if bypassing cache but wallet is closed. Raise AbsentRecord for no such endpoint. :param remote_did: pairwise remote DID :return: endpoint and (transport) verification key as EndpointInfo
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L337-L364
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.send_endpoint
async def send_endpoint(self, endpoint: str) -> None:
    """
    Send anchor's own endpoint attribute to ledger (and endpoint cache),
    if ledger does not yet have input value. Specify None to clear.

    Raise BadIdentifier on endpoint not formatted as '<ip-address>:<port>',
    BadLedgerTxn on failure, WalletState if wallet is closed.

    :param endpoint: value to set as endpoint attribute on ledger and cache:
        specify URL or None to clear.
    """
    LOGGER.debug('BaseAnchor.send_endpoint >>> endpoint: %s', endpoint)

    ledger_endpoint = await self.get_endpoint()
    if ledger_endpoint == endpoint:
        LOGGER.info('%s endpoint already set as %s', self.name, endpoint)
        # fix: this debug call had two %s placeholders but no arguments, logging literal '%s'
        LOGGER.debug('BaseAnchor.send_endpoint <<< (%s already set for %s)', endpoint, self.name)
        return

    attr_json = json.dumps({
        'endpoint': {
            'endpoint': endpoint  # indy-sdk likes 'ha' here but won't map 'ha' to a URL, only ip:port
        }
    })
    req_json = await ledger.build_attrib_request(self.did, self.did, None, attr_json, None)
    await self._sign_submit(req_json)

    for _ in range(16):  # reasonable timeout
        if await self.get_endpoint(None, False) == endpoint:  # bypass cache to read back from ledger
            break
        await asyncio.sleep(1)
        LOGGER.info('Sent endpoint %s to ledger, waiting 1s for its confirmation', endpoint)
    else:
        LOGGER.debug('BaseAnchor.send_endpoint <!< timed out waiting on send endpoint %s', endpoint)
        raise BadLedgerTxn('Timed out waiting on sent endpoint {}'.format(endpoint))

    LOGGER.debug('BaseAnchor.send_endpoint <<<')
python
async def send_endpoint(self, endpoint: str) -> None: """ Send anchor's own endpoint attribute to ledger (and endpoint cache), if ledger does not yet have input value. Specify None to clear. Raise BadIdentifier on endpoint not formatted as '<ip-address>:<port>', BadLedgerTxn on failure, WalletState if wallet is closed. :param endpoint: value to set as endpoint attribute on ledger and cache: specify URL or None to clear. """ LOGGER.debug('BaseAnchor.send_endpoint >>> endpoint: %s', endpoint) ledger_endpoint = await self.get_endpoint() if ledger_endpoint == endpoint: LOGGER.info('%s endpoint already set as %s', self.name, endpoint) LOGGER.debug('BaseAnchor.send_endpoint <<< (%s already set for %s )') return attr_json = json.dumps({ 'endpoint': { 'endpoint': endpoint # indy-sdk likes 'ha' here but won't map 'ha' to a URL, only ip:port } }) req_json = await ledger.build_attrib_request(self.did, self.did, None, attr_json, None) await self._sign_submit(req_json) for _ in range(16): # reasonable timeout if await self.get_endpoint(None, False) == endpoint: break await asyncio.sleep(1) LOGGER.info('Sent endpoint %s to ledger, waiting 1s for its confirmation', endpoint) else: LOGGER.debug('BaseAnchor.send_endpoint <!< timed out waiting on send endpoint %s', endpoint) raise BadLedgerTxn('Timed out waiting on sent endpoint {}'.format(endpoint)) LOGGER.debug('BaseAnchor.send_endpoint <<<')
Send anchor's own endpoint attribute to ledger (and endpoint cache), if ledger does not yet have input value. Specify None to clear. Raise BadIdentifier on endpoint not formatted as '<ip-address>:<port>', BadLedgerTxn on failure, WalletState if wallet is closed. :param endpoint: value to set as endpoint attribute on ledger and cache: specify URL or None to clear.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L366-L403
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.get_endpoint
async def get_endpoint(self, target_did: str = None, from_cache: bool = True) -> str:
    """
    Get endpoint attribute for anchor having input DID (default own DID).

    Raise WalletState if target DID is default (own DID) value but wallet does not
    have it (neither created nor opened since initialization).

    :param target_did: DID of anchor for which to find endpoint attribute on ledger
    :param from_cache: check endpoint cache first before visiting ledger;
        always update cache with ledger value
    :return: endpoint attribute value, or None for no such value
    """
    LOGGER.debug('BaseAnchor.get_endpoint >>> target_did: %s, from_cache: %s', target_did, from_cache)

    rv = None

    if not (target_did or self.did):
        LOGGER.debug('BaseAnchor.get_endpoint <!< Bad wallet state: DID for %s unavailable', self.name)
        raise WalletState('Bad wallet state: DID for {} unavailable'.format(self.name))
    target_did = target_did or self.did
    if not ok_did(target_did):
        LOGGER.debug('BaseAnchor.get_endpoint <!< Bad DID %s', target_did)
        raise BadIdentifier('Bad DID {}'.format(target_did))

    if from_cache:
        with ENDPOINT_CACHE.lock:
            if target_did in ENDPOINT_CACHE:
                LOGGER.info('BaseAnchor.get_endpoint: got endpoint for %s from cache', target_did)
                rv = ENDPOINT_CACHE[target_did]
                LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)
                return rv

    req_json = await ledger.build_get_attrib_request(
        self.did,
        target_did,
        'endpoint',
        None,
        None)
    resp_json = await self._submit(req_json)

    data_json = (json.loads(resp_json))['result']['data']  # it's double-encoded on the ledger
    if data_json:
        rv = json.loads(data_json)['endpoint'].get('endpoint', None)
    else:
        # fix: log tag said '_AgentCore.get_endpoint', inconsistent with this class's logging convention
        LOGGER.info('BaseAnchor.get_endpoint: ledger query returned response with no data')

    # keep cache coherent with ledger: store found value, evict absent one
    with ENDPOINT_CACHE.lock:
        if rv:
            ENDPOINT_CACHE[target_did] = rv
        else:
            ENDPOINT_CACHE.pop(target_did, None)
            assert target_did not in ENDPOINT_CACHE

    LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv)
    return rv
python
async def get_endpoint(self, target_did: str = None, from_cache: bool = True) -> str: """ Get endpoint attribute for anchor having input DID (default own DID). Raise WalletState if target DID is default (own DID) value but wallet does not have it (neither created nor opened since initialization). :param target_did: DID of anchor for which to find endpoint attribute on ledger :param from_cache: check endpoint cache first before visiting ledger; always update cache with ledger value :return: endpoint attribute value, or None for no such value """ LOGGER.debug('BaseAnchor.get_endpoint >>> target_did: %s, from_cache: %s', target_did, from_cache) rv = None if not (target_did or self.did): LOGGER.debug('BaseAnchor.get_endpoint <!< Bad wallet state: DID for %s unavailable', self.name) raise WalletState('Bad wallet state: DID for {} unavailable'.format(self.name)) target_did = target_did or self.did if not ok_did(target_did): LOGGER.debug('BaseAnchor.get_endpoint <!< Bad DID %s', target_did) raise BadIdentifier('Bad DID {}'.format(target_did)) if from_cache: with ENDPOINT_CACHE.lock: if target_did in ENDPOINT_CACHE: LOGGER.info('BaseAnchor.get_endpoint: got endpoint for %s from cache', target_did) rv = ENDPOINT_CACHE[target_did] LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv) return rv req_json = await ledger.build_get_attrib_request( self.did, target_did, 'endpoint', None, None) resp_json = await self._submit(req_json) data_json = (json.loads(resp_json))['result']['data'] # it's double-encoded on the ledger if data_json: rv = json.loads(data_json)['endpoint'].get('endpoint', None) else: LOGGER.info('_AgentCore.get_endpoint: ledger query returned response with no data') with ENDPOINT_CACHE.lock: if rv: ENDPOINT_CACHE[target_did] = rv else: ENDPOINT_CACHE.pop(target_did, None) assert target_did not in ENDPOINT_CACHE LOGGER.debug('BaseAnchor.get_endpoint <<< %s', rv) return rv
Get endpoint attribute for anchor having input DID (default own DID). Raise WalletState if target DID is default (own DID) value but wallet does not have it (neither created nor opened since initialization). :param target_did: DID of anchor for which to find endpoint attribute on ledger :param from_cache: check endpoint cache first before visiting ledger; always update cache with ledger value :return: endpoint attribute value, or None for no such value
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L405-L459
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor._submit
async def _submit(self, req_json: str) -> str: """ Submit (json) request to ledger; return (json) result. Raise AbsentPool for no pool, ClosedPool if pool is not yet open, or BadLedgerTxn on failure. :param req_json: json of request to sign and submit :return: json response """ LOGGER.debug('BaseAnchor._submit >>> req_json: %s', req_json) if not self.pool: LOGGER.debug('BaseAnchor._submit <!< absent pool') raise AbsentPool('Cannot submit request: absent pool') if not self.pool.handle: LOGGER.debug('BaseAnchor._submit <!< closed pool %s', self.pool.name) raise ClosedPool('Cannot submit request to closed pool {}'.format(self.pool.name)) rv_json = await ledger.submit_request(self.pool.handle, req_json) await asyncio.sleep(0) resp = json.loads(rv_json) if resp.get('op', '') in ('REQNACK', 'REJECT'): LOGGER.debug('BaseAnchor._submit <!< ledger rejected request: %s', resp['reason']) raise BadLedgerTxn('Ledger rejected transaction request: {}'.format(resp['reason'])) LOGGER.debug('BaseAnchor._submit <<< %s', rv_json) return rv_json
python
async def _submit(self, req_json: str) -> str: """ Submit (json) request to ledger; return (json) result. Raise AbsentPool for no pool, ClosedPool if pool is not yet open, or BadLedgerTxn on failure. :param req_json: json of request to sign and submit :return: json response """ LOGGER.debug('BaseAnchor._submit >>> req_json: %s', req_json) if not self.pool: LOGGER.debug('BaseAnchor._submit <!< absent pool') raise AbsentPool('Cannot submit request: absent pool') if not self.pool.handle: LOGGER.debug('BaseAnchor._submit <!< closed pool %s', self.pool.name) raise ClosedPool('Cannot submit request to closed pool {}'.format(self.pool.name)) rv_json = await ledger.submit_request(self.pool.handle, req_json) await asyncio.sleep(0) resp = json.loads(rv_json) if resp.get('op', '') in ('REQNACK', 'REJECT'): LOGGER.debug('BaseAnchor._submit <!< ledger rejected request: %s', resp['reason']) raise BadLedgerTxn('Ledger rejected transaction request: {}'.format(resp['reason'])) LOGGER.debug('BaseAnchor._submit <<< %s', rv_json) return rv_json
Submit (json) request to ledger; return (json) result. Raise AbsentPool for no pool, ClosedPool if pool is not yet open, or BadLedgerTxn on failure. :param req_json: json of request to sign and submit :return: json response
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L461-L490
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor._verkey_for
async def _verkey_for(self, target: str) -> str: """ Given a DID, retrieve its verification key, looking in wallet, then pool. Given a verification key or None, return input. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. If no such verification key is on the ledger, raise AbsentNym. :param target: verification key, or DID to resolve to such :return: verification key """ LOGGER.debug('BaseAnchor._verkey_for >>> target: %s', target) rv = target if rv is None or not ok_did(rv): # it's None or already a verification key LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv if self.wallet.handle: try: rv = await did.key_for_local_did(self.wallet.handle, target) LOGGER.info('Anchor %s got verkey for DID %s from wallet', self.name, target) LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv except IndyError as x_indy: if x_indy.error_code != ErrorCode.WalletItemNotFound: # on not found, try the pool LOGGER.debug( 'BaseAnchor._verkey_for <!< key lookup for local DID %s raised indy error code %s', target, x_indy.error_code) raise nym = json.loads(await self.get_nym(target)) if not nym: LOGGER.debug( 'BaseAnchor._verkey_for <!< Wallet %s closed and ledger has no cryptonym for DID %s', self.name, target) raise AbsentNym('Wallet {} closed, and ledger has no cryptonym for DID {}'.format(self.name, target)) rv = json.loads(await self.get_nym(target))['verkey'] LOGGER.info('Anchor %s got verkey for DID %s from pool %s', self.name, target, self.pool.name) LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv
python
async def _verkey_for(self, target: str) -> str: """ Given a DID, retrieve its verification key, looking in wallet, then pool. Given a verification key or None, return input. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. If no such verification key is on the ledger, raise AbsentNym. :param target: verification key, or DID to resolve to such :return: verification key """ LOGGER.debug('BaseAnchor._verkey_for >>> target: %s', target) rv = target if rv is None or not ok_did(rv): # it's None or already a verification key LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv if self.wallet.handle: try: rv = await did.key_for_local_did(self.wallet.handle, target) LOGGER.info('Anchor %s got verkey for DID %s from wallet', self.name, target) LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv except IndyError as x_indy: if x_indy.error_code != ErrorCode.WalletItemNotFound: # on not found, try the pool LOGGER.debug( 'BaseAnchor._verkey_for <!< key lookup for local DID %s raised indy error code %s', target, x_indy.error_code) raise nym = json.loads(await self.get_nym(target)) if not nym: LOGGER.debug( 'BaseAnchor._verkey_for <!< Wallet %s closed and ledger has no cryptonym for DID %s', self.name, target) raise AbsentNym('Wallet {} closed, and ledger has no cryptonym for DID {}'.format(self.name, target)) rv = json.loads(await self.get_nym(target))['verkey'] LOGGER.info('Anchor %s got verkey for DID %s from pool %s', self.name, target, self.pool.name) LOGGER.debug('BaseAnchor._verkey_for <<< %s', rv) return rv
Given a DID, retrieve its verification key, looking in wallet, then pool. Given a verification key or None, return input. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. If no such verification key is on the ledger, raise AbsentNym. :param target: verification key, or DID to resolve to such :return: verification key
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L547-L593
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.encrypt
async def encrypt(self, message: bytes, authn: bool = False, recip: str = None) -> bytes: """ Encrypt plaintext for owner of DID or verification key, anonymously or via authenticated encryption scheme. If given DID, first check wallet and then pool for corresponding verification key. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param recip: DID or verification key of recipient, None for anchor's own :return: ciphertext, as bytes """ LOGGER.debug('BaseAnchor.encrypt >>> message: %s, authn: %s, recip: %s', message, authn, recip) if not self.wallet.handle: LOGGER.debug('BaseAnchor.encrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await self.wallet.encrypt(message, authn, await self._verkey_for(recip)) LOGGER.debug('BaseAnchor.auth_encrypt <<< %s', rv) return rv
python
async def encrypt(self, message: bytes, authn: bool = False, recip: str = None) -> bytes: """ Encrypt plaintext for owner of DID or verification key, anonymously or via authenticated encryption scheme. If given DID, first check wallet and then pool for corresponding verification key. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param recip: DID or verification key of recipient, None for anchor's own :return: ciphertext, as bytes """ LOGGER.debug('BaseAnchor.encrypt >>> message: %s, authn: %s, recip: %s', message, authn, recip) if not self.wallet.handle: LOGGER.debug('BaseAnchor.encrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await self.wallet.encrypt(message, authn, await self._verkey_for(recip)) LOGGER.debug('BaseAnchor.auth_encrypt <<< %s', rv) return rv
Encrypt plaintext for owner of DID or verification key, anonymously or via authenticated encryption scheme. If given DID, first check wallet and then pool for corresponding verification key. Raise WalletState if the wallet is closed. Given a recipient DID not in the wallet, raise AbsentPool if the instance has no pool or ClosedPool if its pool is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param recip: DID or verification key of recipient, None for anchor's own :return: ciphertext, as bytes
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L747-L771
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.decrypt
async def decrypt(self, ciphertext: bytes, sender: str = None) -> (bytes, str): """ Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation reveals sender key distinct from current verification key of owner of input DID. Raise WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param sender: DID or verification key of sender, None for anonymously encrypted ciphertext :return: decrypted bytes and sender verification key (None for anonymous decryption) """ LOGGER.debug('BaseAnchor.decrypt >>> ciphertext: %s, sender: %s', ciphertext, sender) if not self.wallet.handle: LOGGER.debug('BaseAnchor.decrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) from_verkey = None if sender: from_verkey = await self._verkey_for(sender) rv = await self.wallet.decrypt( ciphertext, True if from_verkey else None, to_verkey=None, from_verkey=from_verkey) LOGGER.debug('BaseAnchor.decrypt <<< %s', rv) return rv
python
async def decrypt(self, ciphertext: bytes, sender: str = None) -> (bytes, str): """ Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation reveals sender key distinct from current verification key of owner of input DID. Raise WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param sender: DID or verification key of sender, None for anonymously encrypted ciphertext :return: decrypted bytes and sender verification key (None for anonymous decryption) """ LOGGER.debug('BaseAnchor.decrypt >>> ciphertext: %s, sender: %s', ciphertext, sender) if not self.wallet.handle: LOGGER.debug('BaseAnchor.decrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) from_verkey = None if sender: from_verkey = await self._verkey_for(sender) rv = await self.wallet.decrypt( ciphertext, True if from_verkey else None, to_verkey=None, from_verkey=from_verkey) LOGGER.debug('BaseAnchor.decrypt <<< %s', rv) return rv
Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation reveals sender key distinct from current verification key of owner of input DID. Raise WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param sender: DID or verification key of sender, None for anonymously encrypted ciphertext :return: decrypted bytes and sender verification key (None for anonymous decryption)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L773-L801
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.sign
async def sign(self, message: bytes) -> bytes: """ Sign message; return signature. Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :return: signature, as bytes """ LOGGER.debug('BaseAnchor.sign >>> message: %s', message) if not self.wallet.handle: LOGGER.debug('BaseAnchor.sign <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await self.wallet.sign(message) LOGGER.debug('BaseAnchor.sign <<< %s', rv) return rv
python
async def sign(self, message: bytes) -> bytes: """ Sign message; return signature. Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :return: signature, as bytes """ LOGGER.debug('BaseAnchor.sign >>> message: %s', message) if not self.wallet.handle: LOGGER.debug('BaseAnchor.sign <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await self.wallet.sign(message) LOGGER.debug('BaseAnchor.sign <<< %s', rv) return rv
Sign message; return signature. Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :return: signature, as bytes
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L803-L820
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.verify
async def verify(self, message: bytes, signature: bytes, signer: str = None) -> bool: """ Verify signature with input signer verification key (via lookup by DID first if need be). Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param signer: signer DID or verification key; omit for anchor's own :return: whether signature is valid """ LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s', signer, message, signature) if not self.wallet.handle: LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) verkey = None if signer: verkey = await self._verkey_for(signer) rv = await self.wallet.verify(message, signature, verkey) LOGGER.debug('BaseAnchor.verify <<< %s', rv) return rv
python
async def verify(self, message: bytes, signature: bytes, signer: str = None) -> bool: """ Verify signature with input signer verification key (via lookup by DID first if need be). Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param signer: signer DID or verification key; omit for anchor's own :return: whether signature is valid """ LOGGER.debug('BaseAnchor.verify >>> signer: %s, message: %s, signature: %s', signer, message, signature) if not self.wallet.handle: LOGGER.debug('BaseAnchor.verify <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) verkey = None if signer: verkey = await self._verkey_for(signer) rv = await self.wallet.verify(message, signature, verkey) LOGGER.debug('BaseAnchor.verify <<< %s', rv) return rv
Verify signature with input signer verification key (via lookup by DID first if need be). Raise WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param signer: signer DID or verification key; omit for anchor's own :return: whether signature is valid
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L822-L845
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/base.py
BaseAnchor.get_txn
async def get_txn(self, seq_no: int) -> str: """ Find a transaction on the distributed ledger by its sequence number. :param seq_no: transaction number :return: json sequence number of transaction, null for no match """ LOGGER.debug('BaseAnchor.get_txn >>> seq_no: %s', seq_no) rv_json = json.dumps({}) req_json = await ledger.build_get_txn_request(self.did, None, seq_no) resp = json.loads(await self._submit(req_json)) rv_json = self.pool.protocol.txn2data(resp) LOGGER.debug('BaseAnchor.get_txn <<< %s', rv_json) return rv_json
python
async def get_txn(self, seq_no: int) -> str: """ Find a transaction on the distributed ledger by its sequence number. :param seq_no: transaction number :return: json sequence number of transaction, null for no match """ LOGGER.debug('BaseAnchor.get_txn >>> seq_no: %s', seq_no) rv_json = json.dumps({}) req_json = await ledger.build_get_txn_request(self.did, None, seq_no) resp = json.loads(await self._submit(req_json)) rv_json = self.pool.protocol.txn2data(resp) LOGGER.debug('BaseAnchor.get_txn <<< %s', rv_json) return rv_json
Find a transaction on the distributed ledger by its sequence number. :param seq_no: transaction number :return: json sequence number of transaction, null for no match
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/base.py#L847-L864
PSPC-SPAC-buyandsell/von_anchor
von_anchor/a2a/service.py
Service.to_dict
def to_dict(self): """ Return dict representation of service to embed in DID document. """ rv = { 'id': self.id, 'type': self.type, 'priority': self.priority } if self.recip_keys: rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.recip_keys] if self.routing_keys: rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.routing_keys] rv['serviceEndpoint'] = self.endpoint return rv
python
def to_dict(self): """ Return dict representation of service to embed in DID document. """ rv = { 'id': self.id, 'type': self.type, 'priority': self.priority } if self.recip_keys: rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.recip_keys] if self.routing_keys: rv['routingKeys'] = [canon_ref(k.did, k.id, '#') for k in self.routing_keys] rv['serviceEndpoint'] = self.endpoint return rv
Return dict representation of service to embed in DID document.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/a2a/service.py#L131-L147
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver._assert_link_secret
async def _assert_link_secret(self, action: str) -> str: """ Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set. :param action: action requiring link secret """ rv = await self.wallet.get_link_secret_label() if rv is None: LOGGER.debug('HolderProver._assert_link_secret: action %s requires link secret but it is not set', action) raise AbsentLinkSecret('Action {} requires link secret but it is not set'.format(action)) return rv
python
async def _assert_link_secret(self, action: str) -> str: """ Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set. :param action: action requiring link secret """ rv = await self.wallet.get_link_secret_label() if rv is None: LOGGER.debug('HolderProver._assert_link_secret: action %s requires link secret but it is not set', action) raise AbsentLinkSecret('Action {} requires link secret but it is not set'.format(action)) return rv
Return current wallet link secret label. Raise AbsentLinkSecret if link secret is not set. :param action: action requiring link secret
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L104-L116
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver._sync_revoc_for_proof
async def _sync_revoc_for_proof(self, rr_id: str) -> None: """ Pick up tails file reader handle for input revocation registry identifier. If no symbolic link is present, get the revocation registry definition to retrieve its tails file hash, then find the tails file and link it. Raise AbsentTails for missing corresponding tails file. :param rr_id: revocation registry identifier """ LOGGER.debug('HolderProver._sync_revoc_for_proof >>> rr_id: %s', rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver._sync_revoc_for_proof <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) (cd_id, tag) = rev_reg_id2cred_def_id_tag(rr_id) try: json.loads(await self.get_cred_def(cd_id)) except AbsentCredDef: LOGGER.debug( 'HolderProver._sync_revoc_for_proof <!< corrupt tails tree %s may be for another ledger', self._dir_tails) raise AbsentCredDef('Corrupt tails tree {} may be for another ledger'.format(self._dir_tails)) except ClosedPool: pass # carry on, may be OK from cache only with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = revo_cache_entry.tails if revo_cache_entry else None if tails is None: # it's not yet set in cache try: tails = await Tails(self._dir_tails, cd_id, tag).open() except AbsentTails: # get hash from ledger and check for tails file rr_def = json.loads(await self.get_rev_reg_def(rr_id)) tails_hash = rr_def['value']['tailsHash'] path_tails = join(Tails.dir(self._dir_tails, rr_id), tails_hash) if not isfile(path_tails): LOGGER.debug('HolderProver._sync_revoc_for_proof <!< No tails file present at %s', path_tails) raise AbsentTails('No tails file present at {}'.format(path_tails)) Tails.associate(self._dir_tails, rr_id, tails_hash) tails = await Tails(self._dir_tails, cd_id, tag).open() # OK now since tails file present if revo_cache_entry is None: REVO_CACHE[rr_id] = RevoCacheEntry(None, tails) else: REVO_CACHE[rr_id].tails = tails LOGGER.debug('HolderProver._sync_revoc_for_proof <<<')
python
async def _sync_revoc_for_proof(self, rr_id: str) -> None: """ Pick up tails file reader handle for input revocation registry identifier. If no symbolic link is present, get the revocation registry definition to retrieve its tails file hash, then find the tails file and link it. Raise AbsentTails for missing corresponding tails file. :param rr_id: revocation registry identifier """ LOGGER.debug('HolderProver._sync_revoc_for_proof >>> rr_id: %s', rr_id) if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver._sync_revoc_for_proof <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) (cd_id, tag) = rev_reg_id2cred_def_id_tag(rr_id) try: json.loads(await self.get_cred_def(cd_id)) except AbsentCredDef: LOGGER.debug( 'HolderProver._sync_revoc_for_proof <!< corrupt tails tree %s may be for another ledger', self._dir_tails) raise AbsentCredDef('Corrupt tails tree {} may be for another ledger'.format(self._dir_tails)) except ClosedPool: pass # carry on, may be OK from cache only with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = revo_cache_entry.tails if revo_cache_entry else None if tails is None: # it's not yet set in cache try: tails = await Tails(self._dir_tails, cd_id, tag).open() except AbsentTails: # get hash from ledger and check for tails file rr_def = json.loads(await self.get_rev_reg_def(rr_id)) tails_hash = rr_def['value']['tailsHash'] path_tails = join(Tails.dir(self._dir_tails, rr_id), tails_hash) if not isfile(path_tails): LOGGER.debug('HolderProver._sync_revoc_for_proof <!< No tails file present at %s', path_tails) raise AbsentTails('No tails file present at {}'.format(path_tails)) Tails.associate(self._dir_tails, rr_id, tails_hash) tails = await Tails(self._dir_tails, cd_id, tag).open() # OK now since tails file present if revo_cache_entry is None: REVO_CACHE[rr_id] = RevoCacheEntry(None, tails) else: REVO_CACHE[rr_id].tails = tails LOGGER.debug('HolderProver._sync_revoc_for_proof <<<')
Pick up tails file reader handle for input revocation registry identifier. If no symbolic link is present, get the revocation registry definition to retrieve its tails file hash, then find the tails file and link it. Raise AbsentTails for missing corresponding tails file. :param rr_id: revocation registry identifier
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L149-L199
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.dir_tails
def dir_tails(self, rr_id: str) -> str: """ Return path to the correct directory for the tails file on input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails dir for input revocation registry identifier """ LOGGER.debug('HolderProver.dir_tails >>>') if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver.dir_tails <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = Tails.dir(self._dir_tails, rr_id) LOGGER.debug('HolderProver.dir_tails <<< %s', rv) return rv
python
def dir_tails(self, rr_id: str) -> str: """ Return path to the correct directory for the tails file on input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails dir for input revocation registry identifier """ LOGGER.debug('HolderProver.dir_tails >>>') if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver.dir_tails <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) rv = Tails.dir(self._dir_tails, rr_id) LOGGER.debug('HolderProver.dir_tails <<< %s', rv) return rv
Return path to the correct directory for the tails file on input revocation registry identifier. :param rr_id: revocation registry identifier of interest :return: path to tails dir for input revocation registry identifier
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L257-L273
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.open
async def open(self) -> 'HolderProver': """ Explicit entry. Perform ancestor opening operations, then parse cache from archive if so configured, and synchronize revocation registry to tails tree content. :return: current object """ LOGGER.debug('HolderProver.open >>>') await super().open() if self.config.get('parse-caches-on-open', False): ArchivableCaches.parse(self.dir_cache) for path_rr_id in Tails.links(self._dir_tails): await self._sync_revoc_for_proof(basename(path_rr_id)) LOGGER.debug('HolderProver.open <<<') return self
python
async def open(self) -> 'HolderProver': """ Explicit entry. Perform ancestor opening operations, then parse cache from archive if so configured, and synchronize revocation registry to tails tree content. :return: current object """ LOGGER.debug('HolderProver.open >>>') await super().open() if self.config.get('parse-caches-on-open', False): ArchivableCaches.parse(self.dir_cache) for path_rr_id in Tails.links(self._dir_tails): await self._sync_revoc_for_proof(basename(path_rr_id)) LOGGER.debug('HolderProver.open <<<') return self
Explicit entry. Perform ancestor opening operations, then parse cache from archive if so configured, and synchronize revocation registry to tails tree content. :return: current object
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L275-L294
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.close
async def close(self) -> None: """ Explicit exit. If so configured, populate cache to prove all creds in wallet offline if need be, archive cache, and purge prior cache archives. :return: current object """ LOGGER.debug('HolderProver.close >>>') if self.config.get('archive-holder-prover-caches-on-close', False): await self.load_cache_for_proof(True) ArchivableCaches.purge_archives(self.dir_cache, True) await BaseAnchor.close(self) for path_rr_id in Tails.links(self._dir_tails): rr_id = basename(path_rr_id) try: await self._sync_revoc_for_proof(rr_id) except ClosedPool: LOGGER.warning('HolderProver sync-revoc on close required ledger for %s but pool was closed', rr_id) LOGGER.debug('HolderProver.close <<<')
python
async def close(self) -> None: """ Explicit exit. If so configured, populate cache to prove all creds in wallet offline if need be, archive cache, and purge prior cache archives. :return: current object """ LOGGER.debug('HolderProver.close >>>') if self.config.get('archive-holder-prover-caches-on-close', False): await self.load_cache_for_proof(True) ArchivableCaches.purge_archives(self.dir_cache, True) await BaseAnchor.close(self) for path_rr_id in Tails.links(self._dir_tails): rr_id = basename(path_rr_id) try: await self._sync_revoc_for_proof(rr_id) except ClosedPool: LOGGER.warning('HolderProver sync-revoc on close required ledger for %s but pool was closed', rr_id) LOGGER.debug('HolderProver.close <<<')
Explicit exit. If so configured, populate cache to prove all creds in wallet offline if need be, archive cache, and purge prior cache archives. :return: current object
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L296-L318
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.rev_regs
async def rev_regs(self) -> list: """ Return list of revocation registry identifiers for which HolderProver has associated tails files. The operation creates associations for any (newly copied, via service wrapper API) tails files without. :return: list of revocation registry identifiers for which HolderProver has associated tails files """ LOGGER.debug('HolderProver.rev_regs >>>') for path_rr_id in Tails.links(self._dir_tails): await self._sync_revoc_for_proof(basename(path_rr_id)) rv = [basename(f) for f in Tails.links(self._dir_tails)] LOGGER.debug('HolderProver.rev_regs <<< %s', rv) return rv
python
async def rev_regs(self) -> list: """ Return list of revocation registry identifiers for which HolderProver has associated tails files. The operation creates associations for any (newly copied, via service wrapper API) tails files without. :return: list of revocation registry identifiers for which HolderProver has associated tails files """ LOGGER.debug('HolderProver.rev_regs >>>') for path_rr_id in Tails.links(self._dir_tails): await self._sync_revoc_for_proof(basename(path_rr_id)) rv = [basename(f) for f in Tails.links(self._dir_tails)] LOGGER.debug('HolderProver.rev_regs <<< %s', rv) return rv
Return list of revocation registry identifiers for which HolderProver has associated tails files. The operation creates associations for any (newly copied, via service wrapper API) tails files without. :return: list of revocation registry identifiers for which HolderProver has associated tails files
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L320-L335
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.offline_intervals
async def offline_intervals(self, cd_ids: Union[str, Sequence[str]]) -> dict: """ Return default non-revocation intervals for input cred def ids, based on content of revocation cache, for augmentation into specification for Verifier.build_proof_req_json. Note that the close() call to set the anchor off-line extends all revocation cache registry delta entries to its time of execution: in this case, the intervals will all be single timestamps rather than (to, fro) pairs. Raise CacheIndex if proof request cites credential definition without corresponding content in cred def cache or revocation cache. :param cd_ids: credential definition identifier or sequence thereof :return: dict mapping revocable cred def ids to interval specifications to augment into cd_id2spec parameter for Verifier.build_proof_req_json(), and non-revocable cred def ids to empty dict; e.g., :: { 'Vx4E82R17q...:3:CL:16:tag': { 'interval': (1528111730, 1528115832) }, 'R17v42T4pk...:3:CL:19:tag': {}, 'Z9ccax812j...:3:CL:27:tag': { 'interval': (1528112408, 1528116008) }, '9cHbp54C8n...:3:CL:37:tag': { 'interval': 1528116426 }, '6caBcmLi33...:tag:CL:41:tag': {}, ... 
} """ LOGGER.debug('HolderProver.offline_intervals >>> cd_ids: %s', cd_ids) rv = {} for cd_id in [cd_ids] if isinstance(cd_ids, str) else cd_ids: if not ok_cred_def_id(cd_id): LOGGER.debug('HolderProver.offline_intervals <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) try: cred_def = json.loads(await self.get_cred_def(cd_id)) except ClosedPool: LOGGER.debug('HolderProver.offline_intervals <!< no such cred def %s in cred def cache', cd_id) raise CacheIndex('No cached delta for non-revoc interval on {}'.format(cd_id)) rv[cd_id] = {} if 'revocation' in cred_def['value']: with REVO_CACHE.lock: (fro, to) = REVO_CACHE.dflt_interval(cd_id) if not (fro and to): LOGGER.debug( 'HolderProver.offline_intervals <!< no cached delta for non-revoc interval on %s', cd_id) raise CacheIndex('No cached delta for non-revoc interval on {}'.format(cd_id)) rv[cd_id]['interval'] = to if fro == to else (fro, to) LOGGER.debug('HolderProver.offline_intervals <<< %s', rv) return rv
python
async def offline_intervals(self, cd_ids: Union[str, Sequence[str]]) -> dict: """ Return default non-revocation intervals for input cred def ids, based on content of revocation cache, for augmentation into specification for Verifier.build_proof_req_json. Note that the close() call to set the anchor off-line extends all revocation cache registry delta entries to its time of execution: in this case, the intervals will all be single timestamps rather than (to, fro) pairs. Raise CacheIndex if proof request cites credential definition without corresponding content in cred def cache or revocation cache. :param cd_ids: credential definition identifier or sequence thereof :return: dict mapping revocable cred def ids to interval specifications to augment into cd_id2spec parameter for Verifier.build_proof_req_json(), and non-revocable cred def ids to empty dict; e.g., :: { 'Vx4E82R17q...:3:CL:16:tag': { 'interval': (1528111730, 1528115832) }, 'R17v42T4pk...:3:CL:19:tag': {}, 'Z9ccax812j...:3:CL:27:tag': { 'interval': (1528112408, 1528116008) }, '9cHbp54C8n...:3:CL:37:tag': { 'interval': 1528116426 }, '6caBcmLi33...:tag:CL:41:tag': {}, ... 
} """ LOGGER.debug('HolderProver.offline_intervals >>> cd_ids: %s', cd_ids) rv = {} for cd_id in [cd_ids] if isinstance(cd_ids, str) else cd_ids: if not ok_cred_def_id(cd_id): LOGGER.debug('HolderProver.offline_intervals <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) try: cred_def = json.loads(await self.get_cred_def(cd_id)) except ClosedPool: LOGGER.debug('HolderProver.offline_intervals <!< no such cred def %s in cred def cache', cd_id) raise CacheIndex('No cached delta for non-revoc interval on {}'.format(cd_id)) rv[cd_id] = {} if 'revocation' in cred_def['value']: with REVO_CACHE.lock: (fro, to) = REVO_CACHE.dflt_interval(cd_id) if not (fro and to): LOGGER.debug( 'HolderProver.offline_intervals <!< no cached delta for non-revoc interval on %s', cd_id) raise CacheIndex('No cached delta for non-revoc interval on {}'.format(cd_id)) rv[cd_id]['interval'] = to if fro == to else (fro, to) LOGGER.debug('HolderProver.offline_intervals <<< %s', rv) return rv
Return default non-revocation intervals for input cred def ids, based on content of revocation cache, for augmentation into specification for Verifier.build_proof_req_json. Note that the close() call to set the anchor off-line extends all revocation cache registry delta entries to its time of execution: in this case, the intervals will all be single timestamps rather than (to, fro) pairs. Raise CacheIndex if proof request cites credential definition without corresponding content in cred def cache or revocation cache. :param cd_ids: credential definition identifier or sequence thereof :return: dict mapping revocable cred def ids to interval specifications to augment into cd_id2spec parameter for Verifier.build_proof_req_json(), and non-revocable cred def ids to empty dict; e.g., :: { 'Vx4E82R17q...:3:CL:16:tag': { 'interval': (1528111730, 1528115832) }, 'R17v42T4pk...:3:CL:19:tag': {}, 'Z9ccax812j...:3:CL:27:tag': { 'interval': (1528112408, 1528116008) }, '9cHbp54C8n...:3:CL:37:tag': { 'interval': 1528116426 }, '6caBcmLi33...:tag:CL:41:tag': {}, ... }
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L337-L396
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.create_link_secret
async def create_link_secret(self, label: str) -> None: """ Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret """ LOGGER.debug('HolderProver.create_link_secret >>> label: %s', label) await self.wallet.create_link_secret(label) LOGGER.debug('HolderProver.create_link_secret <<<')
python
async def create_link_secret(self, label: str) -> None: """ Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret """ LOGGER.debug('HolderProver.create_link_secret >>> label: %s', label) await self.wallet.create_link_secret(label) LOGGER.debug('HolderProver.create_link_secret <<<')
Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L398-L413
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.store_cred
async def store_cred(self, cred_json: str, cred_req_metadata_json: str) -> str: """ Store cred in wallet as HolderProver, return its credential identifier as created in wallet. Raise AbsentTails if tails file not available for revocation registry for input credential. Raise WalletState if wallet is closed. :param cred_json: credential json as HolderProver created :param cred_req_metadata_json: credential request metadata json as HolderProver created via create_cred_req() :return: credential identifier within wallet """ LOGGER.debug( 'HolderProver.store_cred >>> cred_json: %s, cred_req_metadata_json: %s', cred_json, cred_req_metadata_json) if not self.wallet.handle: LOGGER.debug('HolderProver.store_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) cred = json.loads(cred_json) cred_def_json = await self.get_cred_def(cred['cred_def_id']) rr_id = cred['rev_reg_id'] rr_def_json = None if rr_id: await self._sync_revoc_for_proof(rr_id) rr_def_json = await self.get_rev_reg_def(rr_id) rv = await anoncreds.prover_store_credential( self.wallet.handle, None, # cred_id, let indy-sdk generate random uuid cred_req_metadata_json, cred_json, cred_def_json, rr_def_json) LOGGER.debug('HolderProver.store_cred <<< %s', rv) return rv
python
async def store_cred(self, cred_json: str, cred_req_metadata_json: str) -> str: """ Store cred in wallet as HolderProver, return its credential identifier as created in wallet. Raise AbsentTails if tails file not available for revocation registry for input credential. Raise WalletState if wallet is closed. :param cred_json: credential json as HolderProver created :param cred_req_metadata_json: credential request metadata json as HolderProver created via create_cred_req() :return: credential identifier within wallet """ LOGGER.debug( 'HolderProver.store_cred >>> cred_json: %s, cred_req_metadata_json: %s', cred_json, cred_req_metadata_json) if not self.wallet.handle: LOGGER.debug('HolderProver.store_cred <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) cred = json.loads(cred_json) cred_def_json = await self.get_cred_def(cred['cred_def_id']) rr_id = cred['rev_reg_id'] rr_def_json = None if rr_id: await self._sync_revoc_for_proof(rr_id) rr_def_json = await self.get_rev_reg_def(rr_id) rv = await anoncreds.prover_store_credential( self.wallet.handle, None, # cred_id, let indy-sdk generate random uuid cred_req_metadata_json, cred_json, cred_def_json, rr_def_json) LOGGER.debug('HolderProver.store_cred <<< %s', rv) return rv
Store cred in wallet as HolderProver, return its credential identifier as created in wallet. Raise AbsentTails if tails file not available for revocation registry for input credential. Raise WalletState if wallet is closed. :param cred_json: credential json as HolderProver created :param cred_req_metadata_json: credential request metadata json as HolderProver created via create_cred_req() :return: credential identifier within wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L459-L497
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.load_cache_for_proof
async def load_cache_for_proof(self, archive: bool = False) -> int: """ Load schema, cred def, revocation caches; optionally archive enough to go offline and be able to generate proof on all credentials in wallet. Return timestamp (epoch seconds) of cache load event, also used as subdirectory for cache archives. :param archive: True to archive now or False to demur (subclasses may still need to augment archivable caches further) :return: cache load event timestamp (epoch seconds) """ LOGGER.debug('HolderProver.load_cache_for_proof >>> archive: %s', archive) rv = int(time()) box_ids = json.loads(await self.get_box_ids_held()) for s_id in box_ids['schema_id']: with SCHEMA_CACHE.lock: await self.get_schema(s_id) for cd_id in box_ids['cred_def_id']: with CRED_DEF_CACHE.lock: await self.get_cred_def(cd_id) for rr_id in box_ids['rev_reg_id']: await self.get_rev_reg_def(rr_id) with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) if revo_cache_entry: try: await revo_cache_entry.get_delta_json(self._build_rr_delta_json, rv, rv) except ClosedPool: LOGGER.warning( 'HolderProver %s is offline from pool %s, cannot update revo cache reg delta for %s to %s', self.name, self.pool.name, rr_id, rv) except AbsentPool: LOGGER.warning( 'HolderProver %s has no pool, cannot update revo cache reg delta for %s to %s', self.name, rr_id, rv) if archive: ArchivableCaches.archive(self.dir_cache) LOGGER.debug('HolderProver.load_cache_for_proof <<< %s', rv) return rv
python
async def load_cache_for_proof(self, archive: bool = False) -> int: """ Load schema, cred def, revocation caches; optionally archive enough to go offline and be able to generate proof on all credentials in wallet. Return timestamp (epoch seconds) of cache load event, also used as subdirectory for cache archives. :param archive: True to archive now or False to demur (subclasses may still need to augment archivable caches further) :return: cache load event timestamp (epoch seconds) """ LOGGER.debug('HolderProver.load_cache_for_proof >>> archive: %s', archive) rv = int(time()) box_ids = json.loads(await self.get_box_ids_held()) for s_id in box_ids['schema_id']: with SCHEMA_CACHE.lock: await self.get_schema(s_id) for cd_id in box_ids['cred_def_id']: with CRED_DEF_CACHE.lock: await self.get_cred_def(cd_id) for rr_id in box_ids['rev_reg_id']: await self.get_rev_reg_def(rr_id) with REVO_CACHE.lock: revo_cache_entry = REVO_CACHE.get(rr_id, None) if revo_cache_entry: try: await revo_cache_entry.get_delta_json(self._build_rr_delta_json, rv, rv) except ClosedPool: LOGGER.warning( 'HolderProver %s is offline from pool %s, cannot update revo cache reg delta for %s to %s', self.name, self.pool.name, rr_id, rv) except AbsentPool: LOGGER.warning( 'HolderProver %s has no pool, cannot update revo cache reg delta for %s to %s', self.name, rr_id, rv) if archive: ArchivableCaches.archive(self.dir_cache) LOGGER.debug('HolderProver.load_cache_for_proof <<< %s', rv) return rv
Load schema, cred def, revocation caches; optionally archive enough to go offline and be able to generate proof on all credentials in wallet. Return timestamp (epoch seconds) of cache load event, also used as subdirectory for cache archives. :param archive: True to archive now or False to demur (subclasses may still need to augment archivable caches further) :return: cache load event timestamp (epoch seconds)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L499-L546
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.get_box_ids_held
async def get_box_ids_held(self) -> str: """ Return json object on lists of all unique box identifiers for credentials in wallet, as evidenced by tails directory content: * schema identifiers * credential definition identifiers * revocation registry identifiers. E.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", "9cHbp54C8n...:2:business:2.0", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", "9cHbp54C8n...:3:CL:37:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:0", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:2", ... ] } Raise WalletState if wallet is closed. :return: tuple of sets for schema ids, cred def ids, rev reg ids """ LOGGER.debug('HolderProver.get_box_ids_held >>>') if not self.wallet.handle: LOGGER.debug('HolderProver.get_box_ids_held <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rr_ids = {basename(link) for link in Tails.links(self._dir_tails)} un_rr_ids = set() for rr_id in rr_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'rev_reg_id': rr_id}), 1)): un_rr_ids.add(rr_id) rr_ids -= un_rr_ids cd_ids = {cd_id for cd_id in listdir(self._dir_tails) if isdir(join(self._dir_tails, cd_id)) and ok_cred_def_id(cd_id)} s_ids = set() for cd_id in cd_ids: s_ids.add(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id)))['id']) un_cd_ids = set() for cd_id in cd_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'cred_def_id': cd_id}), 1)): un_cd_ids.add(cd_id) cd_ids -= un_cd_ids un_s_ids = set() for s_id in s_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'schema_id': s_id}), 1)): un_s_ids.add(s_id) s_ids -= un_s_ids rv = json.dumps({ 'schema_id': list(s_ids), 'cred_def_id': list(cd_ids), 'rev_reg_id': list(rr_ids) }) 
LOGGER.debug('HolderProver.get_box_ids_held <<< %s', rv) return rv
python
async def get_box_ids_held(self) -> str: """ Return json object on lists of all unique box identifiers for credentials in wallet, as evidenced by tails directory content: * schema identifiers * credential definition identifiers * revocation registry identifiers. E.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", "9cHbp54C8n...:2:business:2.0", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", "9cHbp54C8n...:3:CL:37:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:0", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:2", ... ] } Raise WalletState if wallet is closed. :return: tuple of sets for schema ids, cred def ids, rev reg ids """ LOGGER.debug('HolderProver.get_box_ids_held >>>') if not self.wallet.handle: LOGGER.debug('HolderProver.get_box_ids_held <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rr_ids = {basename(link) for link in Tails.links(self._dir_tails)} un_rr_ids = set() for rr_id in rr_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'rev_reg_id': rr_id}), 1)): un_rr_ids.add(rr_id) rr_ids -= un_rr_ids cd_ids = {cd_id for cd_id in listdir(self._dir_tails) if isdir(join(self._dir_tails, cd_id)) and ok_cred_def_id(cd_id)} s_ids = set() for cd_id in cd_ids: s_ids.add(json.loads(await self.get_schema(cred_def_id2seq_no(cd_id)))['id']) un_cd_ids = set() for cd_id in cd_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'cred_def_id': cd_id}), 1)): un_cd_ids.add(cd_id) cd_ids -= un_cd_ids un_s_ids = set() for s_id in s_ids: if not json.loads(await self.get_cred_infos_by_q(json.dumps({'schema_id': s_id}), 1)): un_s_ids.add(s_id) s_ids -= un_s_ids rv = json.dumps({ 'schema_id': list(s_ids), 'cred_def_id': list(cd_ids), 'rev_reg_id': list(rr_ids) }) 
LOGGER.debug('HolderProver.get_box_ids_held <<< %s', rv) return rv
Return json object on lists of all unique box identifiers for credentials in wallet, as evidenced by tails directory content: * schema identifiers * credential definition identifiers * revocation registry identifiers. E.g., :: { "schema_id": [ "R17v42T4pk...:2:tombstone:1.2", "9cHbp54C8n...:2:business:2.0", ... ], "cred_def_id": [ "R17v42T4pk...:3:CL:19:tag", "9cHbp54C8n...:3:CL:37:tag", ... ] "rev_reg_id": [ "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:0", "R17v42T4pk...:4:R17v42T4pk...:3:CL:19:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:0", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:1", "9cHbp54C8n...:4:9cHbp54C8n...:3:CL:37:tag:CL_ACCUM:2", ... ] } Raise WalletState if wallet is closed. :return: tuple of sets for schema ids, cred def ids, rev reg ids
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L548-L625
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.get_cred_infos_by_q
async def get_cred_infos_by_q(self, query_json: str, limit: int = None) -> str: """ A cred-info aggregates: * its wallet cred-id (aka wallet referent) * its attribute names and values * its schema identifier * its credential definition identifier * its revocation registry identifier * its credential revocation identifier. Return list of cred-infos from wallet by input WQL query; return cred-infos for all credentials in wallet for no query. Raise WalletState if the wallet is closed. The operation supports a subset of WQL; i.e., :: query = {subquery} subquery = {subquery, ..., subquery} - WHERE subquery AND ... AND subquery subquery = $or: [{subquery},..., {subquery}] - WHERE subquery OR ... OR subquery subquery = $not: {subquery} - Where NOT (subquery) subquery = "tagName": tagValue - WHERE tagName == tagValue subquery = "tagName": {$in: [tagValue, ..., tagValue]} - WHERE tagName IN (tagValue, ..., tagValue) subquery = "tagName": {$neq: tagValue} - WHERE tagName != tagValue but not :: subquery = "tagName": {$gt: tagValue} - WHERE tagName > tagValue subquery = "tagName": {$gte: tagValue} - WHERE tagName >= tagValue subquery = "tagName": {$lt: tagValue} - WHERE tagName < tagValue subquery = "tagName": {$lte: tagValue} - WHERE tagName <= tagValue subquery = "tagName": {$like: tagValue} - WHERE tagName LIKE tagValue :param query_json: WQL query json :param limit: maximum number of results to return :return: cred-infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... 
] """ LOGGER.debug('HolderProver.get_cred_infos_by_q >>> query_json: %s, limit: %s', query_json, limit) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_infos_by_q <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) infos = [] if limit and limit < 0: limit = None (handle, cardinality) = await anoncreds.prover_search_credentials( self.wallet.handle, json.dumps(canon_cred_wql(json.loads(query_json)))) # indy-sdk requires attr name canonicalization chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK) if limit: cardinality = min(limit, cardinality) try: while len(infos) != cardinality: batch = json.loads(await anoncreds.prover_fetch_credentials(handle, chunk)) infos.extend(batch) if len(batch) < chunk: break if len(infos) != cardinality: LOGGER.warning('Credential search/limit indicated %s results but fetched %s', cardinality, len(infos)) finally: await anoncreds.prover_close_credentials_search(handle) rv_json = json.dumps(infos) LOGGER.debug('HolderProver.get_cred_infos_by_q <<< %s', rv_json) return rv_json
python
async def get_cred_infos_by_q(self, query_json: str, limit: int = None) -> str: """ A cred-info aggregates: * its wallet cred-id (aka wallet referent) * its attribute names and values * its schema identifier * its credential definition identifier * its revocation registry identifier * its credential revocation identifier. Return list of cred-infos from wallet by input WQL query; return cred-infos for all credentials in wallet for no query. Raise WalletState if the wallet is closed. The operation supports a subset of WQL; i.e., :: query = {subquery} subquery = {subquery, ..., subquery} - WHERE subquery AND ... AND subquery subquery = $or: [{subquery},..., {subquery}] - WHERE subquery OR ... OR subquery subquery = $not: {subquery} - Where NOT (subquery) subquery = "tagName": tagValue - WHERE tagName == tagValue subquery = "tagName": {$in: [tagValue, ..., tagValue]} - WHERE tagName IN (tagValue, ..., tagValue) subquery = "tagName": {$neq: tagValue} - WHERE tagName != tagValue but not :: subquery = "tagName": {$gt: tagValue} - WHERE tagName > tagValue subquery = "tagName": {$gte: tagValue} - WHERE tagName >= tagValue subquery = "tagName": {$lt: tagValue} - WHERE tagName < tagValue subquery = "tagName": {$lte: tagValue} - WHERE tagName <= tagValue subquery = "tagName": {$like: tagValue} - WHERE tagName LIKE tagValue :param query_json: WQL query json :param limit: maximum number of results to return :return: cred-infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... 
] """ LOGGER.debug('HolderProver.get_cred_infos_by_q >>> query_json: %s, limit: %s', query_json, limit) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_infos_by_q <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) infos = [] if limit and limit < 0: limit = None (handle, cardinality) = await anoncreds.prover_search_credentials( self.wallet.handle, json.dumps(canon_cred_wql(json.loads(query_json)))) # indy-sdk requires attr name canonicalization chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK) if limit: cardinality = min(limit, cardinality) try: while len(infos) != cardinality: batch = json.loads(await anoncreds.prover_fetch_credentials(handle, chunk)) infos.extend(batch) if len(batch) < chunk: break if len(infos) != cardinality: LOGGER.warning('Credential search/limit indicated %s results but fetched %s', cardinality, len(infos)) finally: await anoncreds.prover_close_credentials_search(handle) rv_json = json.dumps(infos) LOGGER.debug('HolderProver.get_cred_infos_by_q <<< %s', rv_json) return rv_json
A cred-info aggregates: * its wallet cred-id (aka wallet referent) * its attribute names and values * its schema identifier * its credential definition identifier * its revocation registry identifier * its credential revocation identifier. Return list of cred-infos from wallet by input WQL query; return cred-infos for all credentials in wallet for no query. Raise WalletState if the wallet is closed. The operation supports a subset of WQL; i.e., :: query = {subquery} subquery = {subquery, ..., subquery} - WHERE subquery AND ... AND subquery subquery = $or: [{subquery},..., {subquery}] - WHERE subquery OR ... OR subquery subquery = $not: {subquery} - Where NOT (subquery) subquery = "tagName": tagValue - WHERE tagName == tagValue subquery = "tagName": {$in: [tagValue, ..., tagValue]} - WHERE tagName IN (tagValue, ..., tagValue) subquery = "tagName": {$neq: tagValue} - WHERE tagName != tagValue but not :: subquery = "tagName": {$gt: tagValue} - WHERE tagName > tagValue subquery = "tagName": {$gte: tagValue} - WHERE tagName >= tagValue subquery = "tagName": {$lt: tagValue} - WHERE tagName < tagValue subquery = "tagName": {$lte: tagValue} - WHERE tagName <= tagValue subquery = "tagName": {$like: tagValue} - WHERE tagName LIKE tagValue :param query_json: WQL query json :param limit: maximum number of results to return :return: cred-infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... ]
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L627-L718
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.get_cred_infos_by_filter
async def get_cred_infos_by_filter(self, filt: dict = None) -> str: """ Return cred-info (json list) from wallet by input filter for schema identifier and/or credential definition identifier components; return info of all credentials for no filter. Raise WalletState if the wallet is closed. :param filt: indy-sdk filter for credentials; i.e., :: { "schema_id": string, # optional "schema_issuer_did": string, # optional "schema_name": string, # optional "schema_version": string, # optional "issuer_did": string, # optional "cred_def_id": string # optional } :return: credential infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... ] """ LOGGER.debug('HolderProver.get_cred_infos_by_filter >>> filt: %s', filt) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_infos_by_filter <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv_json = await anoncreds.prover_get_credentials(self.wallet.handle, json.dumps(filt or {})) LOGGER.debug('HolderProver.get_cred_infos_by_filter <<< %s', rv_json) return rv_json
python
async def get_cred_infos_by_filter(self, filt: dict = None) -> str: """ Return cred-info (json list) from wallet by input filter for schema identifier and/or credential definition identifier components; return info of all credentials for no filter. Raise WalletState if the wallet is closed. :param filt: indy-sdk filter for credentials; i.e., :: { "schema_id": string, # optional "schema_issuer_did": string, # optional "schema_name": string, # optional "schema_version": string, # optional "issuer_did": string, # optional "cred_def_id": string # optional } :return: credential infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... ] """ LOGGER.debug('HolderProver.get_cred_infos_by_filter >>> filt: %s', filt) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_infos_by_filter <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv_json = await anoncreds.prover_get_credentials(self.wallet.handle, json.dumps(filt or {})) LOGGER.debug('HolderProver.get_cred_infos_by_filter <<< %s', rv_json) return rv_json
Return cred-info (json list) from wallet by input filter for schema identifier and/or credential definition identifier components; return info of all credentials for no filter. Raise WalletState if the wallet is closed. :param filt: indy-sdk filter for credentials; i.e., :: { "schema_id": string, # optional "schema_issuer_did": string, # optional "schema_name": string, # optional "schema_version": string, # optional "issuer_did": string, # optional "cred_def_id": string # optional } :return: credential infos as json list; i.e., :: [ { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }, ... ]
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L720-L771
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.get_cred_info_by_id
async def get_cred_info_by_id(self, cred_id: str) -> str: """ Return cred-info json from wallet by wallet credential identifier. Raise AbsentCred for no such credential. Raise WalletState if the wallet is closed. :param cred_id: credential identifier of interest :return: json with cred for input credential identifier :return: cred-info json; i.e., :: { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> } """ LOGGER.debug('HolderProver.get_cred_info_by_id >>> cred_id: %s', cred_id) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_info_by_id <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: rv_json = await anoncreds.prover_get_credential(self.wallet.handle, cred_id) except IndyError as x_indy: # no such cred if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug( 'HolderProver.get_cred_info_by_id <!< no cred in wallet %s for cred id %s', self.name, cred_id) raise AbsentCred('No cred in wallet for {}'.format(cred_id)) LOGGER.debug( 'HolderProver.get_cred_info_by_id <!< wallet %s, cred id %s: indy error code %s', self.name, cred_id, x_indy.error_code) raise LOGGER.debug('HolderProver.get_cred_info_by_id <<< %s', rv_json) return rv_json
python
async def get_cred_info_by_id(self, cred_id: str) -> str: """ Return cred-info json from wallet by wallet credential identifier. Raise AbsentCred for no such credential. Raise WalletState if the wallet is closed. :param cred_id: credential identifier of interest :return: json with cred for input credential identifier :return: cred-info json; i.e., :: { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> } """ LOGGER.debug('HolderProver.get_cred_info_by_id >>> cred_id: %s', cred_id) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_info_by_id <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: rv_json = await anoncreds.prover_get_credential(self.wallet.handle, cred_id) except IndyError as x_indy: # no such cred if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug( 'HolderProver.get_cred_info_by_id <!< no cred in wallet %s for cred id %s', self.name, cred_id) raise AbsentCred('No cred in wallet for {}'.format(cred_id)) LOGGER.debug( 'HolderProver.get_cred_info_by_id <!< wallet %s, cred id %s: indy error code %s', self.name, cred_id, x_indy.error_code) raise LOGGER.debug('HolderProver.get_cred_info_by_id <<< %s', rv_json) return rv_json
Return cred-info json from wallet by wallet credential identifier. Raise AbsentCred for no such credential. Raise WalletState if the wallet is closed. :param cred_id: credential identifier of interest :return: json with cred for input credential identifier :return: cred-info json; i.e., :: { "referent": string, # credential identifier in the wallet "attrs": { "attr1" : {"raw": "value1", "encoded": "value1_as_int" }, "attr2" : {"raw": "value2", "encoded": "value2_as_int" }, ... } "schema_id": string, "cred_def_id": string, "rev_reg_id": Optional<string>, "cred_rev_id": Optional<string> }
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L773-L823
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.get_cred_briefs_by_proof_req_q
async def get_cred_briefs_by_proof_req_q(self, proof_req_json: str, x_queries_json: str = None) -> str: """ A cred-brief aggregates a cred-info and a non-revocation interval. A cred-brief-dict maps wallet cred-ids to their corresponding cred-briefs. Return json (cred-brief-dict) object mapping wallet credential identifiers to cred-briefs by proof request and WQL queries by proof request referent. Return empty dict on no WQL query and empty requested predicates specification within proof request. Utility util.proof_req2wql_all() builds WQL to retrieve all cred-briefs for (some or all) cred-def-ids in a proof request. For each WQL query on an item referent, indy-sdk takes the WQL and the attribute name and restrictions (e.g., cred def id, schema id, etc.) from its referent. Note that util.proof_req_attr_referents() maps cred defs and attr names to proof req item referents, bridging the gap between attribute names and their corresponding item referents. Raise WalletState if the wallet is closed. :param proof_req_json: proof request as per Verifier.build_proof_req_json(); e.g., :: { "nonce": "1532429687", "name": "proof_req", "version": "0.0", "requested_predicates": {}, "requested_attributes": { "17_name_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "name" }, "17_thing_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "thing" } } } :param x_queries_json: json list of extra queries to apply to proof request attribute and predicate referents; e.g., :: { "17_thing_uuid": { # require attr presence on name 'thing', cred def id from proof req above "$or": [ { "attr::name::value": "J.R. 
'Bob' Dobbs" }, { "attr::thing::value": "slack" }, ] }, } :return: json (cred-brief-dict) object mapping wallet cred ids to cred briefs; e.g., :: { "b42ce5bc-b690-43cd-9493-6fe86ad25e85": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "J.R. \"Bob\" Dobbs", "thing": "slack" }, "cred_rev_id": null, "referent": "b42ce5bc-b690-43cd-9493-6fe86ad25e85", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } }, "d773434a-0080-4e3e-a03b-f2033eae7d75": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "Chicken Hawk", "thing": "chicken" }, "cred_rev_id": null, "referent": "d773434a-0080-4e3e-a03b-f2033eae7d75", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } } } """ LOGGER.debug( ('HolderProver.get_cred_briefs_by_proof_req_q >>> proof_req_json: %s, x_queries_json: %s'), proof_req_json, x_queries_json) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_briefs_by_proof_req_q <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) def _pred_filter(brief): nonlocal pred_refts for attr, preds in pred_refts.get(brief['cred_info']['cred_def_id'], {}).items(): if any(Predicate.get(p[0]).value.no(brief['cred_info']['attrs'][attr], p[1]) for p in preds.values()): return False return True rv = {} item_refts = set() x_queries = json.loads(x_queries_json or '{}') for k in x_queries: x_queries[k] = canon_cred_wql(x_queries[k]) # indy-sdk requires attr name canonicalization item_refts.add(k) proof_req = json.loads(proof_req_json) item_refts.update(uuid for uuid in proof_req['requested_predicates']) if not x_queries: item_refts.update(uuid for uuid in proof_req['requested_attributes']) # get all req attrs if no extra wql handle = await anoncreds.prover_search_credentials_for_proof_req( self.wallet.handle, proof_req_json, json.dumps(x_queries) if x_queries else None) pred_refts = 
proof_req_pred_referents(proof_req) try: for item_referent in item_refts: count = Wallet.DEFAULT_CHUNK while count == Wallet.DEFAULT_CHUNK: fetched = json.loads(await anoncreds.prover_fetch_credentials_for_proof_req( handle, item_referent, Wallet.DEFAULT_CHUNK)) count = len(fetched) for brief in fetched: # apply predicates from proof req here if brief['cred_info']['referent'] not in rv and _pred_filter(brief): rv[brief['cred_info']['referent']] = brief finally: await anoncreds.prover_close_credentials_search_for_proof_req(handle) rv_json = json.dumps(rv) LOGGER.debug('HolderProver.get_cred_briefs_by_proof_req_q <<< %s', rv_json) return rv_json
python
async def get_cred_briefs_by_proof_req_q(self, proof_req_json: str, x_queries_json: str = None) -> str: """ A cred-brief aggregates a cred-info and a non-revocation interval. A cred-brief-dict maps wallet cred-ids to their corresponding cred-briefs. Return json (cred-brief-dict) object mapping wallet credential identifiers to cred-briefs by proof request and WQL queries by proof request referent. Return empty dict on no WQL query and empty requested predicates specification within proof request. Utility util.proof_req2wql_all() builds WQL to retrieve all cred-briefs for (some or all) cred-def-ids in a proof request. For each WQL query on an item referent, indy-sdk takes the WQL and the attribute name and restrictions (e.g., cred def id, schema id, etc.) from its referent. Note that util.proof_req_attr_referents() maps cred defs and attr names to proof req item referents, bridging the gap between attribute names and their corresponding item referents. Raise WalletState if the wallet is closed. :param proof_req_json: proof request as per Verifier.build_proof_req_json(); e.g., :: { "nonce": "1532429687", "name": "proof_req", "version": "0.0", "requested_predicates": {}, "requested_attributes": { "17_name_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "name" }, "17_thing_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "thing" } } } :param x_queries_json: json list of extra queries to apply to proof request attribute and predicate referents; e.g., :: { "17_thing_uuid": { # require attr presence on name 'thing', cred def id from proof req above "$or": [ { "attr::name::value": "J.R. 
'Bob' Dobbs" }, { "attr::thing::value": "slack" }, ] }, } :return: json (cred-brief-dict) object mapping wallet cred ids to cred briefs; e.g., :: { "b42ce5bc-b690-43cd-9493-6fe86ad25e85": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "J.R. \"Bob\" Dobbs", "thing": "slack" }, "cred_rev_id": null, "referent": "b42ce5bc-b690-43cd-9493-6fe86ad25e85", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } }, "d773434a-0080-4e3e-a03b-f2033eae7d75": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "Chicken Hawk", "thing": "chicken" }, "cred_rev_id": null, "referent": "d773434a-0080-4e3e-a03b-f2033eae7d75", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } } } """ LOGGER.debug( ('HolderProver.get_cred_briefs_by_proof_req_q >>> proof_req_json: %s, x_queries_json: %s'), proof_req_json, x_queries_json) if not self.wallet.handle: LOGGER.debug('HolderProver.get_cred_briefs_by_proof_req_q <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) def _pred_filter(brief): nonlocal pred_refts for attr, preds in pred_refts.get(brief['cred_info']['cred_def_id'], {}).items(): if any(Predicate.get(p[0]).value.no(brief['cred_info']['attrs'][attr], p[1]) for p in preds.values()): return False return True rv = {} item_refts = set() x_queries = json.loads(x_queries_json or '{}') for k in x_queries: x_queries[k] = canon_cred_wql(x_queries[k]) # indy-sdk requires attr name canonicalization item_refts.add(k) proof_req = json.loads(proof_req_json) item_refts.update(uuid for uuid in proof_req['requested_predicates']) if not x_queries: item_refts.update(uuid for uuid in proof_req['requested_attributes']) # get all req attrs if no extra wql handle = await anoncreds.prover_search_credentials_for_proof_req( self.wallet.handle, proof_req_json, json.dumps(x_queries) if x_queries else None) pred_refts = 
proof_req_pred_referents(proof_req) try: for item_referent in item_refts: count = Wallet.DEFAULT_CHUNK while count == Wallet.DEFAULT_CHUNK: fetched = json.loads(await anoncreds.prover_fetch_credentials_for_proof_req( handle, item_referent, Wallet.DEFAULT_CHUNK)) count = len(fetched) for brief in fetched: # apply predicates from proof req here if brief['cred_info']['referent'] not in rv and _pred_filter(brief): rv[brief['cred_info']['referent']] = brief finally: await anoncreds.prover_close_credentials_search_for_proof_req(handle) rv_json = json.dumps(rv) LOGGER.debug('HolderProver.get_cred_briefs_by_proof_req_q <<< %s', rv_json) return rv_json
A cred-brief aggregates a cred-info and a non-revocation interval. A cred-brief-dict maps wallet cred-ids to their corresponding cred-briefs. Return json (cred-brief-dict) object mapping wallet credential identifiers to cred-briefs by proof request and WQL queries by proof request referent. Return empty dict on no WQL query and empty requested predicates specification within proof request. Utility util.proof_req2wql_all() builds WQL to retrieve all cred-briefs for (some or all) cred-def-ids in a proof request. For each WQL query on an item referent, indy-sdk takes the WQL and the attribute name and restrictions (e.g., cred def id, schema id, etc.) from its referent. Note that util.proof_req_attr_referents() maps cred defs and attr names to proof req item referents, bridging the gap between attribute names and their corresponding item referents. Raise WalletState if the wallet is closed. :param proof_req_json: proof request as per Verifier.build_proof_req_json(); e.g., :: { "nonce": "1532429687", "name": "proof_req", "version": "0.0", "requested_predicates": {}, "requested_attributes": { "17_name_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "name" }, "17_thing_uuid": { "restrictions": [ { "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } ], "name": "thing" } } } :param x_queries_json: json list of extra queries to apply to proof request attribute and predicate referents; e.g., :: { "17_thing_uuid": { # require attr presence on name 'thing', cred def id from proof req above "$or": [ { "attr::name::value": "J.R. 'Bob' Dobbs" }, { "attr::thing::value": "slack" }, ] }, } :return: json (cred-brief-dict) object mapping wallet cred ids to cred briefs; e.g., :: { "b42ce5bc-b690-43cd-9493-6fe86ad25e85": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "J.R. 
\"Bob\" Dobbs", "thing": "slack" }, "cred_rev_id": null, "referent": "b42ce5bc-b690-43cd-9493-6fe86ad25e85", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } }, "d773434a-0080-4e3e-a03b-f2033eae7d75": { "interval": null, "cred_info": { "schema_id": "LjgpST2rjsoxYegQDRm7EL:2:non-revo:1.0", "rev_reg_id": null, "attrs": { "name": "Chicken Hawk", "thing": "chicken" }, "cred_rev_id": null, "referent": "d773434a-0080-4e3e-a03b-f2033eae7d75", "cred_def_id": "LjgpST2rjsoxYegQDRm7EL:3:CL:17:tag" } } }
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L825-L976
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.create_proof
async def create_proof(self, proof_req: dict, briefs: Union[dict, Sequence[dict]], requested_creds: dict) -> str: """ Create proof as HolderProver. Raise: * AbsentLinkSecret if link secret not set * CredentialFocus on attempt to create proof on no briefs or multiple briefs for a credential definition * AbsentTails if missing required tails file * | BadRevStateTime if a timestamp for a revocation registry state in the proof request | occurs before revocation registry creation * IndyError for any other indy-sdk error * AbsentInterval if briefs missing non-revocation interval, but cred def supports revocation * WalletState if the wallet is closed. :param proof_req: proof request as per Verifier.build_proof_req_json() :param briefs: cred-brief, sequence thereof, or mapping from wallet cred-id to briefs, to prove :param requested_creds: requested credentials data structure; i.e., :: { 'self_attested_attributes': {}, 'requested_attributes': { 'attr0_uuid': { 'cred_id': string, 'timestamp': integer, # for revocation state 'revealed': bool }, ... 
}, 'requested_predicates': { 'predicate0_uuid': { 'cred_id': string, 'timestamp': integer # for revocation state } } } :return: proof json """ LOGGER.debug( 'HolderProver.create_proof >>> proof_req: %s, briefs: %s, requested_creds: %s', proof_req, briefs, requested_creds) if not self.wallet.handle: LOGGER.debug('HolderProver.create_proof <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) label = await self._assert_link_secret('create_proof') cd_ids = set() x_cd_ids = set() for brief in iter_briefs(briefs): cd_id = brief['cred_info']['cred_def_id'] if cd_id in cd_ids and cd_id not in x_cd_ids: x_cd_ids.add(cd_id) cd_ids.add(cd_id) if x_cd_ids: LOGGER.debug('HolderProver.create_proof <!< briefs specification out of focus (non-uniqueness)') raise CredentialFocus('Briefs list repeats cred defs: {}'.format(x_cd_ids)) s_id2schema = {} # schema identifier to schema cd_id2cred_def = {} # credential definition identifier to credential definition rr_id2timestamp = {} # revocation registry of interest to timestamp of interest (or None) rr_id2cr_id = {} # revocation registry of interest to credential revocation identifier for brief in iter_briefs(briefs): interval = brief.get('interval', None) cred_info = brief['cred_info'] s_id = cred_info['schema_id'] if not ok_schema_id(s_id): LOGGER.debug('HolderProver.create_proof <!< Bad schema id %s', s_id) raise BadIdentifier('Bad schema id {}'.format(s_id)) if s_id not in s_id2schema: schema = json.loads(await self.get_schema(s_id)) # add to cache en passant if not schema: LOGGER.debug( 'HolderProver.create_proof <!< absent schema %s, proof req may be for another ledger', s_id) raise AbsentSchema('Absent schema {}, proof req may be for another ledger'.format(s_id)) s_id2schema[s_id] = schema cd_id = cred_info['cred_def_id'] if not ok_cred_def_id(cd_id): LOGGER.debug('HolderProver.create_proof <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) if cd_id 
not in cd_id2cred_def: cred_def = json.loads(await self.get_cred_def(cd_id)) # add to cache en passant cd_id2cred_def[cd_id] = cred_def rr_id = cred_info['rev_reg_id'] if rr_id: if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver.create_proof <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) await self._sync_revoc_for_proof(rr_id) # link tails file to its rr_id if it's new if interval: if rr_id not in rr_id2timestamp: if interval['to'] > int(time()): LOGGER.debug( 'HolderProver.create_proof <!< interval to %s for rev reg %s is in the future', interval['to'], rr_id) raise BadRevStateTime( 'Revocation registry {} timestamp {} is in the future'.format(rr_id, interval['to'])) rr_id2timestamp[rr_id] = interval['to'] elif 'revocation' in cd_id2cred_def[cd_id]['value']: LOGGER.debug( 'HolderProver.create_proof <!< brief on cred def id %s missing non-revocation interval', cd_id) raise AbsentInterval('Brief on cred def id {} missing non-revocation interval'.format(cd_id)) if rr_id in rr_id2cr_id: continue rr_id2cr_id[rr_id] = cred_info['cred_rev_id'] rr_id2rev_state = {} # revocation registry identifier to its state with REVO_CACHE.lock: for rr_id in rr_id2timestamp: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = revo_cache_entry.tails if revo_cache_entry else None if tails is None: # missing tails file LOGGER.debug('HolderProver.create_proof <!< missing tails file for rev reg id %s', rr_id) raise AbsentTails('Missing tails file for rev reg id {}'.format(rr_id)) rr_def_json = await self.get_rev_reg_def(rr_id) (rr_delta_json, ledger_timestamp) = await revo_cache_entry.get_delta_json( self._build_rr_delta_json, rr_id2timestamp[rr_id], rr_id2timestamp[rr_id]) rr_state_json = await anoncreds.create_revocation_state( tails.reader_handle, rr_def_json, rr_delta_json, ledger_timestamp, rr_id2cr_id[rr_id]) rr_id2rev_state[rr_id] = { rr_id2timestamp[rr_id]: json.loads(rr_state_json) } rv = await anoncreds.prover_create_proof( 
self.wallet.handle, json.dumps(proof_req), json.dumps(requested_creds), label, json.dumps(s_id2schema), json.dumps(cd_id2cred_def), json.dumps(rr_id2rev_state)) LOGGER.debug('HolderProver.create_proof <<< %s', rv) return rv
python
async def create_proof(self, proof_req: dict, briefs: Union[dict, Sequence[dict]], requested_creds: dict) -> str: """ Create proof as HolderProver. Raise: * AbsentLinkSecret if link secret not set * CredentialFocus on attempt to create proof on no briefs or multiple briefs for a credential definition * AbsentTails if missing required tails file * | BadRevStateTime if a timestamp for a revocation registry state in the proof request | occurs before revocation registry creation * IndyError for any other indy-sdk error * AbsentInterval if briefs missing non-revocation interval, but cred def supports revocation * WalletState if the wallet is closed. :param proof_req: proof request as per Verifier.build_proof_req_json() :param briefs: cred-brief, sequence thereof, or mapping from wallet cred-id to briefs, to prove :param requested_creds: requested credentials data structure; i.e., :: { 'self_attested_attributes': {}, 'requested_attributes': { 'attr0_uuid': { 'cred_id': string, 'timestamp': integer, # for revocation state 'revealed': bool }, ... 
}, 'requested_predicates': { 'predicate0_uuid': { 'cred_id': string, 'timestamp': integer # for revocation state } } } :return: proof json """ LOGGER.debug( 'HolderProver.create_proof >>> proof_req: %s, briefs: %s, requested_creds: %s', proof_req, briefs, requested_creds) if not self.wallet.handle: LOGGER.debug('HolderProver.create_proof <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) label = await self._assert_link_secret('create_proof') cd_ids = set() x_cd_ids = set() for brief in iter_briefs(briefs): cd_id = brief['cred_info']['cred_def_id'] if cd_id in cd_ids and cd_id not in x_cd_ids: x_cd_ids.add(cd_id) cd_ids.add(cd_id) if x_cd_ids: LOGGER.debug('HolderProver.create_proof <!< briefs specification out of focus (non-uniqueness)') raise CredentialFocus('Briefs list repeats cred defs: {}'.format(x_cd_ids)) s_id2schema = {} # schema identifier to schema cd_id2cred_def = {} # credential definition identifier to credential definition rr_id2timestamp = {} # revocation registry of interest to timestamp of interest (or None) rr_id2cr_id = {} # revocation registry of interest to credential revocation identifier for brief in iter_briefs(briefs): interval = brief.get('interval', None) cred_info = brief['cred_info'] s_id = cred_info['schema_id'] if not ok_schema_id(s_id): LOGGER.debug('HolderProver.create_proof <!< Bad schema id %s', s_id) raise BadIdentifier('Bad schema id {}'.format(s_id)) if s_id not in s_id2schema: schema = json.loads(await self.get_schema(s_id)) # add to cache en passant if not schema: LOGGER.debug( 'HolderProver.create_proof <!< absent schema %s, proof req may be for another ledger', s_id) raise AbsentSchema('Absent schema {}, proof req may be for another ledger'.format(s_id)) s_id2schema[s_id] = schema cd_id = cred_info['cred_def_id'] if not ok_cred_def_id(cd_id): LOGGER.debug('HolderProver.create_proof <!< Bad cred def id %s', cd_id) raise BadIdentifier('Bad cred def id {}'.format(cd_id)) if cd_id 
not in cd_id2cred_def: cred_def = json.loads(await self.get_cred_def(cd_id)) # add to cache en passant cd_id2cred_def[cd_id] = cred_def rr_id = cred_info['rev_reg_id'] if rr_id: if not ok_rev_reg_id(rr_id): LOGGER.debug('HolderProver.create_proof <!< Bad rev reg id %s', rr_id) raise BadIdentifier('Bad rev reg id {}'.format(rr_id)) await self._sync_revoc_for_proof(rr_id) # link tails file to its rr_id if it's new if interval: if rr_id not in rr_id2timestamp: if interval['to'] > int(time()): LOGGER.debug( 'HolderProver.create_proof <!< interval to %s for rev reg %s is in the future', interval['to'], rr_id) raise BadRevStateTime( 'Revocation registry {} timestamp {} is in the future'.format(rr_id, interval['to'])) rr_id2timestamp[rr_id] = interval['to'] elif 'revocation' in cd_id2cred_def[cd_id]['value']: LOGGER.debug( 'HolderProver.create_proof <!< brief on cred def id %s missing non-revocation interval', cd_id) raise AbsentInterval('Brief on cred def id {} missing non-revocation interval'.format(cd_id)) if rr_id in rr_id2cr_id: continue rr_id2cr_id[rr_id] = cred_info['cred_rev_id'] rr_id2rev_state = {} # revocation registry identifier to its state with REVO_CACHE.lock: for rr_id in rr_id2timestamp: revo_cache_entry = REVO_CACHE.get(rr_id, None) tails = revo_cache_entry.tails if revo_cache_entry else None if tails is None: # missing tails file LOGGER.debug('HolderProver.create_proof <!< missing tails file for rev reg id %s', rr_id) raise AbsentTails('Missing tails file for rev reg id {}'.format(rr_id)) rr_def_json = await self.get_rev_reg_def(rr_id) (rr_delta_json, ledger_timestamp) = await revo_cache_entry.get_delta_json( self._build_rr_delta_json, rr_id2timestamp[rr_id], rr_id2timestamp[rr_id]) rr_state_json = await anoncreds.create_revocation_state( tails.reader_handle, rr_def_json, rr_delta_json, ledger_timestamp, rr_id2cr_id[rr_id]) rr_id2rev_state[rr_id] = { rr_id2timestamp[rr_id]: json.loads(rr_state_json) } rv = await anoncreds.prover_create_proof( 
self.wallet.handle, json.dumps(proof_req), json.dumps(requested_creds), label, json.dumps(s_id2schema), json.dumps(cd_id2cred_def), json.dumps(rr_id2rev_state)) LOGGER.debug('HolderProver.create_proof <<< %s', rv) return rv
Create proof as HolderProver. Raise: * AbsentLinkSecret if link secret not set * CredentialFocus on attempt to create proof on no briefs or multiple briefs for a credential definition * AbsentTails if missing required tails file * | BadRevStateTime if a timestamp for a revocation registry state in the proof request | occurs before revocation registry creation * IndyError for any other indy-sdk error * AbsentInterval if briefs missing non-revocation interval, but cred def supports revocation * WalletState if the wallet is closed. :param proof_req: proof request as per Verifier.build_proof_req_json() :param briefs: cred-brief, sequence thereof, or mapping from wallet cred-id to briefs, to prove :param requested_creds: requested credentials data structure; i.e., :: { 'self_attested_attributes': {}, 'requested_attributes': { 'attr0_uuid': { 'cred_id': string, 'timestamp': integer, # for revocation state 'revealed': bool }, ... }, 'requested_predicates': { 'predicate0_uuid': { 'cred_id': string, 'timestamp': integer # for revocation state } } } :return: proof json
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L979-L1131
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/holderprover.py
HolderProver.reset_wallet
async def reset_wallet(self, seed: str = None) -> Wallet: """ Close and delete HolderProver wallet, then create and open a replacement on prior link secret. Note that this operation effectively destroys private keys for credential definitions. Its intended use is primarily for testing and demonstration. Raise AbsentLinkSecret if link secret not set. Raise WalletState if the wallet is closed. :param seed: seed to use for new wallet (default random) :return: replacement wallet """ LOGGER.debug('HolderProver.reset_wallet >>>') self.wallet = await WalletManager().reset(self.wallet, seed) rv = self.wallet LOGGER.debug('HolderProver.reset_wallet <<< %s', rv) return rv
python
async def reset_wallet(self, seed: str = None) -> Wallet: """ Close and delete HolderProver wallet, then create and open a replacement on prior link secret. Note that this operation effectively destroys private keys for credential definitions. Its intended use is primarily for testing and demonstration. Raise AbsentLinkSecret if link secret not set. Raise WalletState if the wallet is closed. :param seed: seed to use for new wallet (default random) :return: replacement wallet """ LOGGER.debug('HolderProver.reset_wallet >>>') self.wallet = await WalletManager().reset(self.wallet, seed) rv = self.wallet LOGGER.debug('HolderProver.reset_wallet <<< %s', rv) return rv
Close and delete HolderProver wallet, then create and open a replacement on prior link secret. Note that this operation effectively destroys private keys for credential definitions. Its intended use is primarily for testing and demonstration. Raise AbsentLinkSecret if link secret not set. Raise WalletState if the wallet is closed. :param seed: seed to use for new wallet (default random) :return: replacement wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/holderprover.py#L1133-L1151
praekelt/django-ultracache
ultracache/monkey.py
my_resolve_lookup
def my_resolve_lookup(self, context): """ Performs resolution of a real variable (i.e. not a literal) against the given context. As indicated by the method's name, this method is an implementation detail and shouldn"t be called by external code. Use Variable.resolve() instead. """ current = context try: # catch-all for silent variable failures for bit in self.lookups: try: # dictionary lookup current = current[bit] # ValueError/IndexError are for numpy.array lookup on # numpy < 1.9 and 1.9+ respectively except (TypeError, AttributeError, KeyError, ValueError, IndexError): try: # attribute lookup # Don"t return class attributes if the class is the context: if isinstance(current, BaseContext) and getattr(type(current), bit): raise AttributeError current = getattr(current, bit) except (TypeError, AttributeError) as e: # Reraise an AttributeError raised by a @property if (isinstance(e, AttributeError) and not isinstance(current, BaseContext) and bit in dir(current)): raise try: # list-index lookup current = current[int(bit)] except (IndexError, # list index out of range ValueError, # invalid literal for int() KeyError, # current is a dict without `int(bit)` key TypeError): # unsubscriptable object raise VariableDoesNotExist("Failed lookup for key " "[%s] in %r", (bit, current)) # missing attribute if callable(current): if getattr(current, "do_not_call_in_templates", False): pass elif getattr(current, "alters_data", False): try: current = context.template.engine.string_if_invalid except AttributeError: current = settings.TEMPLATE_STRING_IF_INVALID else: try: # method call (assuming no args required) current = current() except TypeError: try: inspect.getcallargs(current) except TypeError: # arguments *were* required current = context.template.engine.string_if_invalid # invalid method call else: raise elif isinstance(current, Model): if ("request" in context) and hasattr(context["request"], "_ultracache"): # get_for_model itself is cached ct = 
ContentType.objects.get_for_model(current.__class__) context["request"]._ultracache.append((ct.id, current.pk)) except Exception as e: template_name = getattr(context, "template_name", None) or "unknown" if logger is not None: logger.debug( "Exception while resolving variable \"%s\" in template \"%s\".", bit, template_name, exc_info=True, ) if getattr(e, "silent_variable_failure", False): current = context.template.engine.string_if_invalid else: raise return current
python
def my_resolve_lookup(self, context): """ Performs resolution of a real variable (i.e. not a literal) against the given context. As indicated by the method's name, this method is an implementation detail and shouldn"t be called by external code. Use Variable.resolve() instead. """ current = context try: # catch-all for silent variable failures for bit in self.lookups: try: # dictionary lookup current = current[bit] # ValueError/IndexError are for numpy.array lookup on # numpy < 1.9 and 1.9+ respectively except (TypeError, AttributeError, KeyError, ValueError, IndexError): try: # attribute lookup # Don"t return class attributes if the class is the context: if isinstance(current, BaseContext) and getattr(type(current), bit): raise AttributeError current = getattr(current, bit) except (TypeError, AttributeError) as e: # Reraise an AttributeError raised by a @property if (isinstance(e, AttributeError) and not isinstance(current, BaseContext) and bit in dir(current)): raise try: # list-index lookup current = current[int(bit)] except (IndexError, # list index out of range ValueError, # invalid literal for int() KeyError, # current is a dict without `int(bit)` key TypeError): # unsubscriptable object raise VariableDoesNotExist("Failed lookup for key " "[%s] in %r", (bit, current)) # missing attribute if callable(current): if getattr(current, "do_not_call_in_templates", False): pass elif getattr(current, "alters_data", False): try: current = context.template.engine.string_if_invalid except AttributeError: current = settings.TEMPLATE_STRING_IF_INVALID else: try: # method call (assuming no args required) current = current() except TypeError: try: inspect.getcallargs(current) except TypeError: # arguments *were* required current = context.template.engine.string_if_invalid # invalid method call else: raise elif isinstance(current, Model): if ("request" in context) and hasattr(context["request"], "_ultracache"): # get_for_model itself is cached ct = 
ContentType.objects.get_for_model(current.__class__) context["request"]._ultracache.append((ct.id, current.pk)) except Exception as e: template_name = getattr(context, "template_name", None) or "unknown" if logger is not None: logger.debug( "Exception while resolving variable \"%s\" in template \"%s\".", bit, template_name, exc_info=True, ) if getattr(e, "silent_variable_failure", False): current = context.template.engine.string_if_invalid else: raise return current
Performs resolution of a real variable (i.e. not a literal) against the given context. As indicated by the method's name, this method is an implementation detail and shouldn"t be called by external code. Use Variable.resolve() instead.
https://github.com/praekelt/django-ultracache/blob/8898f10e50fc8f8d0a4cb7d3fe4d945bf257bd9f/ultracache/monkey.py#L27-L102
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
SchemaCache.index
def index(self) -> dict: """ Return dict mapping content sequence numbers to schema keys. :return: dict mapping sequence numbers to schema keys """ LOGGER.debug('SchemaCache.index >>>') rv = self._seq_no2schema_key LOGGER.debug('SchemaCache.index <<< %s', rv) return rv
python
def index(self) -> dict: """ Return dict mapping content sequence numbers to schema keys. :return: dict mapping sequence numbers to schema keys """ LOGGER.debug('SchemaCache.index >>>') rv = self._seq_no2schema_key LOGGER.debug('SchemaCache.index <<< %s', rv) return rv
Return dict mapping content sequence numbers to schema keys. :return: dict mapping sequence numbers to schema keys
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L141-L152
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
SchemaCache.schema_key_for
def schema_key_for(self, seq_no: int) -> SchemaKey: """ Get schema key for schema by sequence number if known, None for no such schema in cache. :param seq_no: sequence number :return: corresponding schema key or None """ LOGGER.debug('SchemaCache.schema_key_for >>> seq_no: %s', seq_no) rv = self._seq_no2schema_key.get(seq_no, None) LOGGER.debug('SchemaCache.schema_key_for <<< %s', rv) return rv
python
def schema_key_for(self, seq_no: int) -> SchemaKey: """ Get schema key for schema by sequence number if known, None for no such schema in cache. :param seq_no: sequence number :return: corresponding schema key or None """ LOGGER.debug('SchemaCache.schema_key_for >>> seq_no: %s', seq_no) rv = self._seq_no2schema_key.get(seq_no, None) LOGGER.debug('SchemaCache.schema_key_for <<< %s', rv) return rv
Get schema key for schema by sequence number if known, None for no such schema in cache. :param seq_no: sequence number :return: corresponding schema key or None
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L154-L167
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
SchemaCache.schemata
def schemata(self) -> list: """ Return list with schemata in cache. :return: list of schemata """ LOGGER.debug('SchemaCache.schemata >>>') LOGGER.debug('SchemaCache.schemata <<<') return [self._schema_key2schema[seq_no] for seq_no in self._schema_key2schema]
python
def schemata(self) -> list: """ Return list with schemata in cache. :return: list of schemata """ LOGGER.debug('SchemaCache.schemata >>>') LOGGER.debug('SchemaCache.schemata <<<') return [self._schema_key2schema[seq_no] for seq_no in self._schema_key2schema]
Return list with schemata in cache. :return: list of schemata
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L169-L179
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
SchemaCache.clear
def clear(self) -> None: """ Clear the cache. """ LOGGER.debug('SchemaCache.clear >>>') self._schema_key2schema = {} self._seq_no2schema_key = {} LOGGER.debug('SchemaCache.clear <<<')
python
def clear(self) -> None: """ Clear the cache. """ LOGGER.debug('SchemaCache.clear >>>') self._schema_key2schema = {} self._seq_no2schema_key = {} LOGGER.debug('SchemaCache.clear <<<')
Clear the cache.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L204-L214
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
RevoCacheEntry._get_update
async def _get_update(self, rr_builder: Callable, fro: int, to: int, delta: bool) -> (str, int): """ Get rev reg delta/state json, and its timestamp on the distributed ledger, from cached rev reg delta/state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta/state in the future. Raise ClosedPool if an update requires the ledger but the node pool is closed. Issuer anchors cannot revoke retroactively. Hence, for any new request against asked-for interval (fro, to): * if the cache has a frame f on f.timestamp <= to <= f.to, > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[*********]-----[x]-----------[xx]---------> time Fro-to: ^----^ * otherwise, if there is a maximum frame f with fro <= f.to and f.timestamp <= to > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]---------> time Fro-to: ^----------------------------------^ * otherwise, if the cache has a frame f on f.timestamp < to, > check the distributed ledger for a delta to/state for the rev reg since e.timestamp; - if there is one, bake it through 'to' into a new delta/state, add new frame to cache and return rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--!--------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--[*****]--[xx]---------> time - otherwise, update the 'to' time in the frame and return the rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[**********]--[xx]---------> time * otherwise, there 
is no cache frame f on f.timestamp < to: > create new frame and add it to cache; return rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]-----> time Fro-to: ^--^ Ledger: -!------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: -[***]--[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time On return of any previously existing rev reg delta/state frame, always update its query time beforehand. :param rr_builder: callback to build rev reg delta/state if need be (specify holder-prover anchor's _build_rr_delta_json() or verifier anchor's _build_rr_state_json() as needed) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta/state timestamp :param delta: True to operate on rev reg deltas, False for states :return: rev reg delta/state json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_update >>> rr_builder: %s, fro: %s, to: %s, delta: %s', rr_builder.__name__, fro, to, delta) if fro > to: (fro, to) = (to, fro) now = int(time()) if to > now: LOGGER.debug( 'RevoCacheEntry._get_update <!< Cannot query a rev reg %s in the future (%s > %s)', 'delta' if delta else 'state', to, now) raise BadRevStateTime('Cannot query a rev reg {} in the future ({} > {})'.format( 'delta' if delta else 'state', to, now)) cache_frame = None rr_update_json = None rr_frames = self.rr_delta_frames if delta else self.rr_state_frames frames = [frame for frame in rr_frames if frame.timestamp <= to <= frame.to] if frames: cache_frame = max(frames, key=lambda f: f.timestamp) # should be unique in any case # do not update frame.to, it's already past asked-for 'to' else: frames = [frame for frame in rr_frames if (fro <= frame.to and frame.timestamp <= to)] if frames: cache_frame = max(frames, key=lambda f: f.timestamp) # do not 
update frame.to - another update might occur, but we don't care; fro < frame.to, good enough if not frames: frames = [frame for frame in rr_frames if frame.timestamp < to] # frame.to < to since not frames coming in if frames: latest_cached = max(frames, key=lambda frame: frame.timestamp) if delta: (rr_update_json, timestamp) = await rr_builder( self.rev_reg_def['id'], to=to, fro=latest_cached.timestamp, fro_delta=latest_cached.rr_update) else: (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to) if timestamp == latest_cached.timestamp: latest_cached.to = to # this timestamp now known good through more recent 'to' cache_frame = latest_cached else: (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to) if cache_frame is None: cache_frame = RevRegUpdateFrame(to, timestamp, json.loads(rr_update_json)) # sets qtime to now rr_frames.append(cache_frame) self.cull(delta) else: cache_frame.qtime = int(time()) rv = (json.dumps(cache_frame.rr_update), cache_frame.timestamp) LOGGER.debug('RevoCacheEntry._get_update <<< %s', rv) return rv
python
async def _get_update(self, rr_builder: Callable, fro: int, to: int, delta: bool) -> (str, int): """ Get rev reg delta/state json, and its timestamp on the distributed ledger, from cached rev reg delta/state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta/state in the future. Raise ClosedPool if an update requires the ledger but the node pool is closed. Issuer anchors cannot revoke retroactively. Hence, for any new request against asked-for interval (fro, to): * if the cache has a frame f on f.timestamp <= to <= f.to, > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[*********]-----[x]-----------[xx]---------> time Fro-to: ^----^ * otherwise, if there is a maximum frame f with fro <= f.to and f.timestamp <= to > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]---------> time Fro-to: ^----------------------------------^ * otherwise, if the cache has a frame f on f.timestamp < to, > check the distributed ledger for a delta to/state for the rev reg since e.timestamp; - if there is one, bake it through 'to' into a new delta/state, add new frame to cache and return rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--!--------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--[*****]--[xx]---------> time - otherwise, update the 'to' time in the frame and return the rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[**********]--[xx]---------> time * otherwise, there 
is no cache frame f on f.timestamp < to: > create new frame and add it to cache; return rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]-----> time Fro-to: ^--^ Ledger: -!------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: -[***]--[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time On return of any previously existing rev reg delta/state frame, always update its query time beforehand. :param rr_builder: callback to build rev reg delta/state if need be (specify holder-prover anchor's _build_rr_delta_json() or verifier anchor's _build_rr_state_json() as needed) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta/state timestamp :param delta: True to operate on rev reg deltas, False for states :return: rev reg delta/state json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_update >>> rr_builder: %s, fro: %s, to: %s, delta: %s', rr_builder.__name__, fro, to, delta) if fro > to: (fro, to) = (to, fro) now = int(time()) if to > now: LOGGER.debug( 'RevoCacheEntry._get_update <!< Cannot query a rev reg %s in the future (%s > %s)', 'delta' if delta else 'state', to, now) raise BadRevStateTime('Cannot query a rev reg {} in the future ({} > {})'.format( 'delta' if delta else 'state', to, now)) cache_frame = None rr_update_json = None rr_frames = self.rr_delta_frames if delta else self.rr_state_frames frames = [frame for frame in rr_frames if frame.timestamp <= to <= frame.to] if frames: cache_frame = max(frames, key=lambda f: f.timestamp) # should be unique in any case # do not update frame.to, it's already past asked-for 'to' else: frames = [frame for frame in rr_frames if (fro <= frame.to and frame.timestamp <= to)] if frames: cache_frame = max(frames, key=lambda f: f.timestamp) # do not 
update frame.to - another update might occur, but we don't care; fro < frame.to, good enough if not frames: frames = [frame for frame in rr_frames if frame.timestamp < to] # frame.to < to since not frames coming in if frames: latest_cached = max(frames, key=lambda frame: frame.timestamp) if delta: (rr_update_json, timestamp) = await rr_builder( self.rev_reg_def['id'], to=to, fro=latest_cached.timestamp, fro_delta=latest_cached.rr_update) else: (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to) if timestamp == latest_cached.timestamp: latest_cached.to = to # this timestamp now known good through more recent 'to' cache_frame = latest_cached else: (rr_update_json, timestamp) = await rr_builder(self.rev_reg_def['id'], to) if cache_frame is None: cache_frame = RevRegUpdateFrame(to, timestamp, json.loads(rr_update_json)) # sets qtime to now rr_frames.append(cache_frame) self.cull(delta) else: cache_frame.qtime = int(time()) rv = (json.dumps(cache_frame.rr_update), cache_frame.timestamp) LOGGER.debug('RevoCacheEntry._get_update <<< %s', rv) return rv
Get rev reg delta/state json, and its timestamp on the distributed ledger, from cached rev reg delta/state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta/state in the future. Raise ClosedPool if an update requires the ledger but the node pool is closed. Issuer anchors cannot revoke retroactively. Hence, for any new request against asked-for interval (fro, to): * if the cache has a frame f on f.timestamp <= to <= f.to, > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[*********]-----[x]-----------[xx]---------> time Fro-to: ^----^ * otherwise, if there is a maximum frame f with fro <= f.to and f.timestamp <= to > return its rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]---------> time Fro-to: ^----------------------------------^ * otherwise, if the cache has a frame f on f.timestamp < to, > check the distributed ledger for a delta to/state for the rev reg since e.timestamp; - if there is one, bake it through 'to' into a new delta/state, add new frame to cache and return rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--!--------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]--[*****]--[xx]---------> time - otherwise, update the 'to' time in the frame and return the rev reg delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Fro-to: ^------^ Ledger: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[**********]--[xx]---------> time * otherwise, there is no cache frame f on f.timestamp < to: > create new frame and add it to cache; return rev reg 
delta/state; e.g., starred frame below: Frames: --------[xxxxx]----[xx]-----[xxxxxxxxx]-----[*]-----------[xx]-----> time Fro-to: ^--^ Ledger: -!------[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time Update: -[***]--[xxxxx]----[xx]-----[xxxxxxxxx]-----[x]-----------[xx]---------> time On return of any previously existing rev reg delta/state frame, always update its query time beforehand. :param rr_builder: callback to build rev reg delta/state if need be (specify holder-prover anchor's _build_rr_delta_json() or verifier anchor's _build_rr_state_json() as needed) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta/state timestamp :param delta: True to operate on rev reg deltas, False for states :return: rev reg delta/state json and ledger timestamp (epoch seconds)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L442-L563
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
RevoCacheEntry.get_delta_json
async def get_delta_json( self, rr_delta_builder: Callable[['HolderProver', str, int, int, dict], Awaitable[Tuple[str, int]]], fro: int, to: int) -> (str, int): """ Get rev reg delta json, and its timestamp on the distributed ledger, from cached rev reg delta frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta to the future. On return of any previously existing rev reg delta frame, always update its query time beforehand. :param rr_delta_builder: callback to build rev reg delta if need be (specify anchor instance's _build_rr_delta()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta timestamp :return: rev reg delta json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_delta_json >>> rr_delta_builder: %s, fro: %s, to: %s', rr_delta_builder.__name__, fro, to) rv = await self._get_update(rr_delta_builder, fro, to, True) LOGGER.debug('RevoCacheEntry.get_delta_json <<< %s', rv) return rv
python
async def get_delta_json( self, rr_delta_builder: Callable[['HolderProver', str, int, int, dict], Awaitable[Tuple[str, int]]], fro: int, to: int) -> (str, int): """ Get rev reg delta json, and its timestamp on the distributed ledger, from cached rev reg delta frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta to the future. On return of any previously existing rev reg delta frame, always update its query time beforehand. :param rr_delta_builder: callback to build rev reg delta if need be (specify anchor instance's _build_rr_delta()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta timestamp :return: rev reg delta json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_delta_json >>> rr_delta_builder: %s, fro: %s, to: %s', rr_delta_builder.__name__, fro, to) rv = await self._get_update(rr_delta_builder, fro, to, True) LOGGER.debug('RevoCacheEntry.get_delta_json <<< %s', rv) return rv
Get rev reg delta json, and its timestamp on the distributed ledger, from cached rev reg delta frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a delta to the future. On return of any previously existing rev reg delta frame, always update its query time beforehand. :param rr_delta_builder: callback to build rev reg delta if need be (specify anchor instance's _build_rr_delta()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation delta timestamp :return: rev reg delta json and ledger timestamp (epoch seconds)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L565-L594
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
RevoCacheEntry.get_state_json
async def get_state_json( self, rr_state_builder: Callable[['Verifier', str, int], Awaitable[Tuple[str, int]]], fro: int, to: int) -> (str, int): """ Get rev reg state json, and its timestamp on the distributed ledger, from cached rev reg state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a state in the future. On return of any previously existing rev reg state frame, always update its query time beforehand. :param rr_state_builder: callback to build rev reg state if need be (specify anchor instance's _build_rr_state()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation state timestamp :return: rev reg state json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_state_json >>> rr_state_builder: %s, fro: %s, to: %s', rr_state_builder.__name__, fro, to) rv = await self._get_update(rr_state_builder, fro, to, False) LOGGER.debug('RevoCacheEntry.get_state_json <<< %s', rv) return rv
python
async def get_state_json( self, rr_state_builder: Callable[['Verifier', str, int], Awaitable[Tuple[str, int]]], fro: int, to: int) -> (str, int): """ Get rev reg state json, and its timestamp on the distributed ledger, from cached rev reg state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a state in the future. On return of any previously existing rev reg state frame, always update its query time beforehand. :param rr_state_builder: callback to build rev reg state if need be (specify anchor instance's _build_rr_state()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation state timestamp :return: rev reg state json and ledger timestamp (epoch seconds) """ LOGGER.debug( 'RevoCacheEntry.get_state_json >>> rr_state_builder: %s, fro: %s, to: %s', rr_state_builder.__name__, fro, to) rv = await self._get_update(rr_state_builder, fro, to, False) LOGGER.debug('RevoCacheEntry.get_state_json <<< %s', rv) return rv
Get rev reg state json, and its timestamp on the distributed ledger, from cached rev reg state frames list or distributed ledger, updating cache as necessary. Raise BadRevStateTime if caller asks for a state in the future. On return of any previously existing rev reg state frame, always update its query time beforehand. :param rr_state_builder: callback to build rev reg state if need be (specify anchor instance's _build_rr_state()) :param fro: least time (epoch seconds) of interest; lower-bounds 'to' on frame housing return data :param to: greatest time (epoch seconds) of interest; upper-bounds returned revocation state timestamp :return: rev reg state json and ledger timestamp (epoch seconds)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L596-L625
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
ArchivableCaches.clear
def clear() -> None: """ Clear all archivable caches in memory. """ LOGGER.debug('clear >>>') with SCHEMA_CACHE.lock: SCHEMA_CACHE.clear() with CRED_DEF_CACHE.lock: CRED_DEF_CACHE.clear() with REVO_CACHE.lock: REVO_CACHE.clear() LOGGER.debug('clear <<<')
python
def clear() -> None: """ Clear all archivable caches in memory. """ LOGGER.debug('clear >>>') with SCHEMA_CACHE.lock: SCHEMA_CACHE.clear() with CRED_DEF_CACHE.lock: CRED_DEF_CACHE.clear() with REVO_CACHE.lock: REVO_CACHE.clear() LOGGER.debug('clear <<<')
Clear all archivable caches in memory.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L756-L770
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
ArchivableCaches.archive
def archive(base_dir: str) -> int: """ Archive schema, cred def, revocation caches to disk as json. :param base_dir: archive base directory :return: timestamp (epoch seconds) used as subdirectory """ LOGGER.debug('archive >>> base_dir: %s', base_dir) rv = int(time()) timestamp_dir = join(base_dir, str(rv)) makedirs(timestamp_dir, exist_ok=True) with SCHEMA_CACHE.lock: with open(join(timestamp_dir, 'schema'), 'w') as archive: print(json.dumps(SCHEMA_CACHE.schemata()), file=archive) with CRED_DEF_CACHE.lock: with open(join(timestamp_dir, 'cred_def'), 'w') as archive: print(json.dumps(CRED_DEF_CACHE), file=archive) with REVO_CACHE.lock: with open(join(timestamp_dir, 'revocation'), 'w') as archive: revo_cache_dict = {} for rr_id in REVO_CACHE: revo_cache_dict[rr_id] = { 'rev_reg_def': REVO_CACHE[rr_id].rev_reg_def, 'rr_delta_frames': [vars(f) for f in REVO_CACHE[rr_id].rr_delta_frames], 'rr_state_frames': [vars(f) for f in REVO_CACHE[rr_id].rr_state_frames] } print(json.dumps(revo_cache_dict), file=archive) LOGGER.debug('archive <<< %s', rv) return rv
python
def archive(base_dir: str) -> int: """ Archive schema, cred def, revocation caches to disk as json. :param base_dir: archive base directory :return: timestamp (epoch seconds) used as subdirectory """ LOGGER.debug('archive >>> base_dir: %s', base_dir) rv = int(time()) timestamp_dir = join(base_dir, str(rv)) makedirs(timestamp_dir, exist_ok=True) with SCHEMA_CACHE.lock: with open(join(timestamp_dir, 'schema'), 'w') as archive: print(json.dumps(SCHEMA_CACHE.schemata()), file=archive) with CRED_DEF_CACHE.lock: with open(join(timestamp_dir, 'cred_def'), 'w') as archive: print(json.dumps(CRED_DEF_CACHE), file=archive) with REVO_CACHE.lock: with open(join(timestamp_dir, 'revocation'), 'w') as archive: revo_cache_dict = {} for rr_id in REVO_CACHE: revo_cache_dict[rr_id] = { 'rev_reg_def': REVO_CACHE[rr_id].rev_reg_def, 'rr_delta_frames': [vars(f) for f in REVO_CACHE[rr_id].rr_delta_frames], 'rr_state_frames': [vars(f) for f in REVO_CACHE[rr_id].rr_state_frames] } print(json.dumps(revo_cache_dict), file=archive) LOGGER.debug('archive <<< %s', rv) return rv
Archive schema, cred def, revocation caches to disk as json. :param base_dir: archive base directory :return: timestamp (epoch seconds) used as subdirectory
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L773-L807
PSPC-SPAC-buyandsell/von_anchor
von_anchor/cache.py
ArchivableCaches.purge_archives
def purge_archives(base_dir: str, retain_latest: bool = False) -> None: """ Erase all (or nearly all) cache archives. :param base_dir: archive base directory :param retain_latest: retain latest archive if present, purge all others """ LOGGER.debug('purge_archives >>> base_dir: %s, retain_latest: %s', base_dir, retain_latest) if isdir(base_dir): timestamps = sorted([int(t) for t in listdir(base_dir) if t.isdigit()]) if retain_latest and timestamps: timestamps.pop() for timestamp in timestamps: timestamp_dir = join(base_dir, str(timestamp)) rmtree(timestamp_dir) LOGGER.info('Purged archive cache directory %s', timestamp_dir) LOGGER.debug('purge_archives <<<')
python
def purge_archives(base_dir: str, retain_latest: bool = False) -> None: """ Erase all (or nearly all) cache archives. :param base_dir: archive base directory :param retain_latest: retain latest archive if present, purge all others """ LOGGER.debug('purge_archives >>> base_dir: %s, retain_latest: %s', base_dir, retain_latest) if isdir(base_dir): timestamps = sorted([int(t) for t in listdir(base_dir) if t.isdigit()]) if retain_latest and timestamps: timestamps.pop() for timestamp in timestamps: timestamp_dir = join(base_dir, str(timestamp)) rmtree(timestamp_dir) LOGGER.info('Purged archive cache directory %s', timestamp_dir) LOGGER.debug('purge_archives <<<')
Erase all (or nearly all) cache archives. :param base_dir: archive base directory :param retain_latest: retain latest archive if present, purge all others
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/cache.py#L893-L912
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/info.py
pairwise_info2tags
def pairwise_info2tags(pairwise: PairwiseInfo) -> dict: """ Given pairwise info with metadata mapping tags to values, return corresponding indy-sdk non_secrets record tags dict to store same in wallet (via non_secrets) unencrypted (for WQL search options). Canonicalize metadata values to strings via raw() for WQL fitness. Raise BadRecord if metadata does not coerce into non_secrets API tags spec of {str:str}. :param pairwise: pairwise info with metadata dict mapping tags to values :return: corresponding non_secrets tags dict marked for unencrypted storage """ rv = { canon_pairwise_tag(tag): raw(pairwise.metadata[tag]) for tag in pairwise.metadata or {} } rv['~their_did'] = pairwise.their_did rv['~their_verkey'] = pairwise.their_verkey rv['~my_did'] = pairwise.my_did rv['~my_verkey'] = pairwise.my_verkey if not StorageRecord.ok_tags(rv): raise BadRecord('Pairwise metadata {} must map strings to strings'.format(rv)) return rv
python
def pairwise_info2tags(pairwise: PairwiseInfo) -> dict: """ Given pairwise info with metadata mapping tags to values, return corresponding indy-sdk non_secrets record tags dict to store same in wallet (via non_secrets) unencrypted (for WQL search options). Canonicalize metadata values to strings via raw() for WQL fitness. Raise BadRecord if metadata does not coerce into non_secrets API tags spec of {str:str}. :param pairwise: pairwise info with metadata dict mapping tags to values :return: corresponding non_secrets tags dict marked for unencrypted storage """ rv = { canon_pairwise_tag(tag): raw(pairwise.metadata[tag]) for tag in pairwise.metadata or {} } rv['~their_did'] = pairwise.their_did rv['~their_verkey'] = pairwise.their_verkey rv['~my_did'] = pairwise.my_did rv['~my_verkey'] = pairwise.my_verkey if not StorageRecord.ok_tags(rv): raise BadRecord('Pairwise metadata {} must map strings to strings'.format(rv)) return rv
Given pairwise info with metadata mapping tags to values, return corresponding indy-sdk non_secrets record tags dict to store same in wallet (via non_secrets) unencrypted (for WQL search options). Canonicalize metadata values to strings via raw() for WQL fitness. Raise BadRecord if metadata does not coerce into non_secrets API tags spec of {str:str}. :param pairwise: pairwise info with metadata dict mapping tags to values :return: corresponding non_secrets tags dict marked for unencrypted storage
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/info.py#L354-L378
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/info.py
storage_record2pairwise_info
def storage_record2pairwise_info(storec: StorageRecord) -> PairwiseInfo: """ Given indy-sdk non_secrets implementation of pairwise storage record dict, return corresponding PairwiseInfo. :param storec: (non-secret) storage record to convert to PairwiseInfo :return: PairwiseInfo on record DIDs, verkeys, metadata """ return PairwiseInfo( storec.id, # = their did storec.value, # = their verkey storec.tags['~my_did'], storec.tags['~my_verkey'], { tag[tag.startswith('~'):]: storec.tags[tag] for tag in (storec.tags or {}) # strip any leading '~' })
python
def storage_record2pairwise_info(storec: StorageRecord) -> PairwiseInfo: """ Given indy-sdk non_secrets implementation of pairwise storage record dict, return corresponding PairwiseInfo. :param storec: (non-secret) storage record to convert to PairwiseInfo :return: PairwiseInfo on record DIDs, verkeys, metadata """ return PairwiseInfo( storec.id, # = their did storec.value, # = their verkey storec.tags['~my_did'], storec.tags['~my_verkey'], { tag[tag.startswith('~'):]: storec.tags[tag] for tag in (storec.tags or {}) # strip any leading '~' })
Given indy-sdk non_secrets implementation of pairwise storage record dict, return corresponding PairwiseInfo. :param storec: (non-secret) storage record to convert to PairwiseInfo :return: PairwiseInfo on record DIDs, verkeys, metadata
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/info.py#L381-L396
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager._config2indy
def _config2indy(self, config: dict) -> dict: """ Given a configuration dict with indy and possibly more configuration values, return the corresponding indy wallet configuration dict from current default and input values. :param config: input configuration :return: configuration dict for indy wallet """ assert {'name', 'id'} & {k for k in config} return { 'id': config.get('name', config.get('id')), 'storage_type': config.get('storage_type', self.default_storage_type), 'freshness_time': config.get('freshness_time', self.default_freshness_time) }
python
def _config2indy(self, config: dict) -> dict: """ Given a configuration dict with indy and possibly more configuration values, return the corresponding indy wallet configuration dict from current default and input values. :param config: input configuration :return: configuration dict for indy wallet """ assert {'name', 'id'} & {k for k in config} return { 'id': config.get('name', config.get('id')), 'storage_type': config.get('storage_type', self.default_storage_type), 'freshness_time': config.get('freshness_time', self.default_freshness_time) }
Given a configuration dict with indy and possibly more configuration values, return the corresponding indy wallet configuration dict from current default and input values. :param config: input configuration :return: configuration dict for indy wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L113-L127
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager._config2von
def _config2von(self, config: dict, access: str = None) -> dict: """ Given a configuration dict with indy and possibly more configuration values, return the corresponding VON wallet configuration dict from current default and input values. :param config: input configuration :param access: access credentials value :return: configuration dict for VON wallet with VON-specific entries """ rv = {k: config.get(k, self._defaults[k]) for k in ('auto_create', 'auto_remove')} rv['access'] = access or self.default_access for key in ('seed', 'did', 'link_secret_label'): if key in config: rv[key] = config[key] return rv
python
def _config2von(self, config: dict, access: str = None) -> dict: """ Given a configuration dict with indy and possibly more configuration values, return the corresponding VON wallet configuration dict from current default and input values. :param config: input configuration :param access: access credentials value :return: configuration dict for VON wallet with VON-specific entries """ rv = {k: config.get(k, self._defaults[k]) for k in ('auto_create', 'auto_remove')} rv['access'] = access or self.default_access for key in ('seed', 'did', 'link_secret_label'): if key in config: rv[key] = config[key] return rv
Given a configuration dict with indy and possibly more configuration values, return the corresponding VON wallet configuration dict from current default and input values. :param config: input configuration :param access: access credentials value :return: configuration dict for VON wallet with VON-specific entries
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L129-L144
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.create
async def create(self, config: dict = None, access: str = None, replace: bool = False) -> Wallet: """ Create wallet on input name with given configuration and access credential value. Raise ExtantWallet if wallet on input name exists already and replace parameter is False. Raise BadAccess on replacement for bad access credentials value. FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to succeed, the existing wallet must use the same access credentials that the input configuration has. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy wallet access credential ('key') value, if different than default :param replace: whether to replace old wallet if it exists :return: wallet created """ LOGGER.debug('WalletManager.create >>> config %s, access %s, replace %s', config, access, replace) assert {'name', 'id'} & {k for k in config} wallet_name = config.get('name', config.get('id')) if replace: von_wallet = self.get(config, access) if not await von_wallet.remove(): LOGGER.debug('WalletManager.create <!< Failed to remove wallet %s for replacement', wallet_name) raise ExtantWallet('Failed to remove wallet {} for replacement'.format(wallet_name)) indy_config = self._config2indy(config) von_config = self._config2von(config, access) rv = Wallet(indy_config, von_config) await rv.create() LOGGER.debug('WalletManager.create <<< %s', rv) return rv
python
async def create(self, config: dict = None, access: str = None, replace: bool = False) -> Wallet: """ Create wallet on input name with given configuration and access credential value. Raise ExtantWallet if wallet on input name exists already and replace parameter is False. Raise BadAccess on replacement for bad access credentials value. FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to succeed, the existing wallet must use the same access credentials that the input configuration has. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy wallet access credential ('key') value, if different than default :param replace: whether to replace old wallet if it exists :return: wallet created """ LOGGER.debug('WalletManager.create >>> config %s, access %s, replace %s', config, access, replace) assert {'name', 'id'} & {k for k in config} wallet_name = config.get('name', config.get('id')) if replace: von_wallet = self.get(config, access) if not await von_wallet.remove(): LOGGER.debug('WalletManager.create <!< Failed to remove wallet %s for replacement', wallet_name) raise ExtantWallet('Failed to remove wallet {} for replacement'.format(wallet_name)) indy_config = self._config2indy(config) von_config = self._config2von(config, access) rv = Wallet(indy_config, von_config) await rv.create() LOGGER.debug('WalletManager.create <<< %s', rv) return rv
Create wallet on input name with given configuration and access credential value. Raise ExtantWallet if wallet on input name exists already and replace parameter is False. Raise BadAccess on replacement for bad access credentials value. FAIR WARNING: specifying replace=True attempts to remove any matching wallet before proceeding; to succeed, the existing wallet must use the same access credentials that the input configuration has. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy wallet access credential ('key') value, if different than default :param replace: whether to replace old wallet if it exists :return: wallet created
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L146-L187
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.get
def get(self, config: dict, access: str = None) -> Wallet: """ Instantiate and return VON anchor wallet object on given configuration, respecting wallet manager default configuration values. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy access credentials value :return: VON anchor wallet """ LOGGER.debug('WalletManager.get >>> config %s, access %s', config, access) rv = Wallet( self._config2indy(config), self._config2von(config, access)) LOGGER.debug('WalletManager.get <<< %s', rv) return rv
python
def get(self, config: dict, access: str = None) -> Wallet: """ Instantiate and return VON anchor wallet object on given configuration, respecting wallet manager default configuration values. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy access credentials value :return: VON anchor wallet """ LOGGER.debug('WalletManager.get >>> config %s, access %s', config, access) rv = Wallet( self._config2indy(config), self._config2von(config, access)) LOGGER.debug('WalletManager.get <<< %s', rv) return rv
Instantiate and return VON anchor wallet object on given configuration, respecting wallet manager default configuration values. :param config: configuration data for both indy-sdk and VON anchor wallet: - 'name' or 'id': wallet name - 'storage_type': storage type - 'freshness_time': freshness time - 'did': (optional) DID to use - 'seed': (optional) seed to use - 'auto_create': whether to create the wallet on first open (persists past close, can work with auto_remove) - 'auto_remove': whether to remove the wallet on next close - 'link_secret_label': (optional) link secret label to use to create link secret :param access: indy access credentials value :return: VON anchor wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L189-L216
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.reseed_local
async def reseed_local(self, local_wallet: Wallet, next_seed: str = None) -> DIDInfo: """ Generate and apply new key, in wallet only, for local DID based on input seed (default random). Raise WalletState if wallet is closed. Note that this operation does not update the corresponding NYM on the ledger: for VON anchors anchored to the ledger, use von_anchor.BaseAnchor.reseed(). :param local_wallet: VON anchor wallet without NYM on ledger :param next_seed: incoming replacement seed (default random) :return: DIDInfo with new verification key and metadata for DID """ LOGGER.debug('WalletManager.reseed_local >>> local_wallet %s', local_wallet) await local_wallet.reseed_init(next_seed) rv = await local_wallet.reseed_apply() LOGGER.debug('WalletManager.reseed_local <<< %s', rv) return rv
python
async def reseed_local(self, local_wallet: Wallet, next_seed: str = None) -> DIDInfo: """ Generate and apply new key, in wallet only, for local DID based on input seed (default random). Raise WalletState if wallet is closed. Note that this operation does not update the corresponding NYM on the ledger: for VON anchors anchored to the ledger, use von_anchor.BaseAnchor.reseed(). :param local_wallet: VON anchor wallet without NYM on ledger :param next_seed: incoming replacement seed (default random) :return: DIDInfo with new verification key and metadata for DID """ LOGGER.debug('WalletManager.reseed_local >>> local_wallet %s', local_wallet) await local_wallet.reseed_init(next_seed) rv = await local_wallet.reseed_apply() LOGGER.debug('WalletManager.reseed_local <<< %s', rv) return rv
Generate and apply new key, in wallet only, for local DID based on input seed (default random). Raise WalletState if wallet is closed. Note that this operation does not update the corresponding NYM on the ledger: for VON anchors anchored to the ledger, use von_anchor.BaseAnchor.reseed(). :param local_wallet: VON anchor wallet without NYM on ledger :param next_seed: incoming replacement seed (default random) :return: DIDInfo with new verification key and metadata for DID
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L218-L237
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.export_wallet
async def export_wallet(self, von_wallet: Wallet, path: str) -> None: """ Export an existing VON anchor wallet. Raise WalletState if wallet is closed. :param von_wallet: open wallet :param path: path to which to export wallet """ LOGGER.debug('WalletManager.export_wallet >>> von_wallet %s, path %s', von_wallet, path) if not von_wallet.handle: LOGGER.debug('WalletManager.export_wallet <!< Wallet %s is closed', von_wallet.name) raise WalletState('Wallet {} is closed'.format(von_wallet.name)) await wallet.export_wallet( von_wallet.handle, json.dumps({ 'path': path, **von_wallet.access_creds })) LOGGER.debug('WalletManager.export_wallet <<<')
python
async def export_wallet(self, von_wallet: Wallet, path: str) -> None: """ Export an existing VON anchor wallet. Raise WalletState if wallet is closed. :param von_wallet: open wallet :param path: path to which to export wallet """ LOGGER.debug('WalletManager.export_wallet >>> von_wallet %s, path %s', von_wallet, path) if not von_wallet.handle: LOGGER.debug('WalletManager.export_wallet <!< Wallet %s is closed', von_wallet.name) raise WalletState('Wallet {} is closed'.format(von_wallet.name)) await wallet.export_wallet( von_wallet.handle, json.dumps({ 'path': path, **von_wallet.access_creds })) LOGGER.debug('WalletManager.export_wallet <<<')
Export an existing VON anchor wallet. Raise WalletState if wallet is closed. :param von_wallet: open wallet :param path: path to which to export wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L239-L260
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.import_wallet
async def import_wallet(self, indy_config: dict, path: str, access: str = None) -> None: """ Import a VON anchor wallet. Raise BadAccess on bad access credential value. :param indy_config: indy wallet configuration to use, with: - 'id' - 'storage_type' (optional) - 'storage_config' (optional) :param path: path from which to import wallet file :param access: indy access credentials value (default value from wallet manager) """ LOGGER.debug('WalletManager.import_wallet >>> indy_config %s, path: %s', indy_config, path) try: await wallet.import_wallet( json.dumps(indy_config), json.dumps({'key': access or self.default_access}), json.dumps({'path': path, 'key': access or self.default_access})) except IndyError as x_indy: if x_indy.error_code == ErrorCode.CommonInvalidStructure: # indy-sdk raises on bad access LOGGER.debug( 'WalletManager.import_wallet <!< bad access credential value for wallet %s', indy_config.get('id', '(no id)')) raise BadAccess('Bad access credential value for wallet {}'.format(indy_config.get('id', '(no id)'))) LOGGER.debug( 'WalletManager.import_wallet <!< indy error code %s on wallet %s import', x_indy.error_code, indy_config.get('id', '(no id)')) raise LOGGER.debug('WalletManager.import_wallet <<<')
python
async def import_wallet(self, indy_config: dict, path: str, access: str = None) -> None: """ Import a VON anchor wallet. Raise BadAccess on bad access credential value. :param indy_config: indy wallet configuration to use, with: - 'id' - 'storage_type' (optional) - 'storage_config' (optional) :param path: path from which to import wallet file :param access: indy access credentials value (default value from wallet manager) """ LOGGER.debug('WalletManager.import_wallet >>> indy_config %s, path: %s', indy_config, path) try: await wallet.import_wallet( json.dumps(indy_config), json.dumps({'key': access or self.default_access}), json.dumps({'path': path, 'key': access or self.default_access})) except IndyError as x_indy: if x_indy.error_code == ErrorCode.CommonInvalidStructure: # indy-sdk raises on bad access LOGGER.debug( 'WalletManager.import_wallet <!< bad access credential value for wallet %s', indy_config.get('id', '(no id)')) raise BadAccess('Bad access credential value for wallet {}'.format(indy_config.get('id', '(no id)'))) LOGGER.debug( 'WalletManager.import_wallet <!< indy error code %s on wallet %s import', x_indy.error_code, indy_config.get('id', '(no id)')) raise LOGGER.debug('WalletManager.import_wallet <<<')
Import a VON anchor wallet. Raise BadAccess on bad access credential value. :param indy_config: indy wallet configuration to use, with: - 'id' - 'storage_type' (optional) - 'storage_config' (optional) :param path: path from which to import wallet file :param access: indy access credentials value (default value from wallet manager)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L262-L295
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.reset
async def reset(self, von_wallet: Wallet, seed: str = None) -> Wallet: """ Close and delete (open) VON anchor wallet and then create, open, and return replacement on current link secret. Note that this operation effectively destroys private keys for keyed data structures such as credential offers or credential definitions. Raise WalletState if the wallet is closed. :param von_wallet: open wallet :param seed: seed to use for new wallet (default random) :return: replacement wallet """ LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet) if not von_wallet.handle: LOGGER.debug('WalletManager.reset <!< Wallet %s is closed', von_wallet.name) raise WalletState('Wallet {} is closed'.format(von_wallet.name)) w_config = von_wallet.config # wallet under reset, no need to make copy w_config['did'] = von_wallet.did w_config['seed'] = seed w_config['auto_create'] = von_wallet.auto_create # in case both auto_remove+auto_create set (create every open) w_config['auto_remove'] = von_wallet.auto_remove label = await von_wallet.get_link_secret_label() if label: w_config['link_secret_label'] = label await von_wallet.close() if not von_wallet.auto_remove: await self.remove(von_wallet) rv = await self.create(w_config, von_wallet.access) await rv.open() LOGGER.debug('WalletManager.reset <<< %s', rv) return rv
python
async def reset(self, von_wallet: Wallet, seed: str = None) -> Wallet: """ Close and delete (open) VON anchor wallet and then create, open, and return replacement on current link secret. Note that this operation effectively destroys private keys for keyed data structures such as credential offers or credential definitions. Raise WalletState if the wallet is closed. :param von_wallet: open wallet :param seed: seed to use for new wallet (default random) :return: replacement wallet """ LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet) if not von_wallet.handle: LOGGER.debug('WalletManager.reset <!< Wallet %s is closed', von_wallet.name) raise WalletState('Wallet {} is closed'.format(von_wallet.name)) w_config = von_wallet.config # wallet under reset, no need to make copy w_config['did'] = von_wallet.did w_config['seed'] = seed w_config['auto_create'] = von_wallet.auto_create # in case both auto_remove+auto_create set (create every open) w_config['auto_remove'] = von_wallet.auto_remove label = await von_wallet.get_link_secret_label() if label: w_config['link_secret_label'] = label await von_wallet.close() if not von_wallet.auto_remove: await self.remove(von_wallet) rv = await self.create(w_config, von_wallet.access) await rv.open() LOGGER.debug('WalletManager.reset <<< %s', rv) return rv
Close and delete (open) VON anchor wallet and then create, open, and return replacement on current link secret. Note that this operation effectively destroys private keys for keyed data structures such as credential offers or credential definitions. Raise WalletState if the wallet is closed. :param von_wallet: open wallet :param seed: seed to use for new wallet (default random) :return: replacement wallet
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L297-L336
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.remove
async def remove(self, von_wallet: Wallet) -> None: """ Remove serialized wallet if it exists. Raise WalletState if wallet is open. :param von_wallet: (closed) wallet to remove """ LOGGER.debug('WalletManager.remove >>> wallet %s', von_wallet) await von_wallet.remove() LOGGER.debug('WalletManager.remove <<<')
python
async def remove(self, von_wallet: Wallet) -> None: """ Remove serialized wallet if it exists. Raise WalletState if wallet is open. :param von_wallet: (closed) wallet to remove """ LOGGER.debug('WalletManager.remove >>> wallet %s', von_wallet) await von_wallet.remove() LOGGER.debug('WalletManager.remove <<<')
Remove serialized wallet if it exists. Raise WalletState if wallet is open. :param von_wallet: (closed) wallet to remove
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L338-L349
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/manager.py
WalletManager.register_storage_library
async def register_storage_library(storage_type: str, c_library: str, entry_point: str) -> None: """ Load a wallet storage plug-in. An indy-sdk wallet storage plug-in is a shared library; relying parties must explicitly load it before creating or opening a wallet with the plug-in. The implementation loads a dynamic library and calls an entry point; internally, the plug-in calls the indy-sdk wallet async def register_wallet_storage_library(storage_type: str, c_library: str, fn_pfx: str). :param storage_type: wallet storage type :param c_library: plug-in library :param entry_point: function to initialize the library """ LOGGER.debug( 'WalletManager.register_storage_library >>> storage_type %s, c_library %s, entry_point %s', storage_type, c_library, entry_point) try: stg_lib = CDLL(c_library) result = stg_lib[entry_point]() if result: LOGGER.debug( 'WalletManager.register_storage_library <!< indy error code %s on storage library entry at %s', result, entry_point) raise IndyError(result) LOGGER.info('Loaded storage library type %s (%s)', storage_type, c_library) except IndyError as x_indy: LOGGER.debug( 'WalletManager.register_storage_library <!< indy error code %s on load of storage library %s %s', x_indy.error_code, storage_type, c_library) raise LOGGER.debug('WalletManager.register_storage_library <<<')
python
async def register_storage_library(storage_type: str, c_library: str, entry_point: str) -> None: """ Load a wallet storage plug-in. An indy-sdk wallet storage plug-in is a shared library; relying parties must explicitly load it before creating or opening a wallet with the plug-in. The implementation loads a dynamic library and calls an entry point; internally, the plug-in calls the indy-sdk wallet async def register_wallet_storage_library(storage_type: str, c_library: str, fn_pfx: str). :param storage_type: wallet storage type :param c_library: plug-in library :param entry_point: function to initialize the library """ LOGGER.debug( 'WalletManager.register_storage_library >>> storage_type %s, c_library %s, entry_point %s', storage_type, c_library, entry_point) try: stg_lib = CDLL(c_library) result = stg_lib[entry_point]() if result: LOGGER.debug( 'WalletManager.register_storage_library <!< indy error code %s on storage library entry at %s', result, entry_point) raise IndyError(result) LOGGER.info('Loaded storage library type %s (%s)', storage_type, c_library) except IndyError as x_indy: LOGGER.debug( 'WalletManager.register_storage_library <!< indy error code %s on load of storage library %s %s', x_indy.error_code, storage_type, c_library) raise LOGGER.debug('WalletManager.register_storage_library <<<')
Load a wallet storage plug-in. An indy-sdk wallet storage plug-in is a shared library; relying parties must explicitly load it before creating or opening a wallet with the plug-in. The implementation loads a dynamic library and calls an entry point; internally, the plug-in calls the indy-sdk wallet async def register_wallet_storage_library(storage_type: str, c_library: str, fn_pfx: str). :param storage_type: wallet storage type :param c_library: plug-in library :param entry_point: function to initialize the library
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/manager.py#L352-L392
praekelt/django-ultracache
ultracache/signals.py
on_post_save
def on_post_save(sender, **kwargs): """Expire ultracache cache keys affected by this object """ if not invalidate: return if kwargs.get("raw", False): return if sender is MigrationRecorder.Migration: return if issubclass(sender, Model): obj = kwargs["instance"] if isinstance(obj, Model): # get_for_model itself is cached try: ct = ContentType.objects.get_for_model(sender) except RuntimeError: # This happens when ultracache is being used by another product # during a test run. return if kwargs.get("created", False): # Expire cache keys that contain objects of this content type key = "ucache-ct-%s" % ct.id to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Purge paths in reverse caching proxy that contain objects of # this content type. key = "ucache-ct-pth-%s" % ct.id if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key) else: # Expire cache keys key = "ucache-%s-%s" % (ct.id, obj.pk) to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Purge paths in reverse caching proxy key = "ucache-pth-%s-%s" % (ct.id, obj.pk) if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key)
python
def on_post_save(sender, **kwargs): """Expire ultracache cache keys affected by this object """ if not invalidate: return if kwargs.get("raw", False): return if sender is MigrationRecorder.Migration: return if issubclass(sender, Model): obj = kwargs["instance"] if isinstance(obj, Model): # get_for_model itself is cached try: ct = ContentType.objects.get_for_model(sender) except RuntimeError: # This happens when ultracache is being used by another product # during a test run. return if kwargs.get("created", False): # Expire cache keys that contain objects of this content type key = "ucache-ct-%s" % ct.id to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Purge paths in reverse caching proxy that contain objects of # this content type. key = "ucache-ct-pth-%s" % ct.id if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key) else: # Expire cache keys key = "ucache-%s-%s" % (ct.id, obj.pk) to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Purge paths in reverse caching proxy key = "ucache-pth-%s-%s" % (ct.id, obj.pk) if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key)
Expire ultracache cache keys affected by this object
https://github.com/praekelt/django-ultracache/blob/8898f10e50fc8f8d0a4cb7d3fe4d945bf257bd9f/ultracache/signals.py#L28-L85
praekelt/django-ultracache
ultracache/signals.py
on_post_delete
def on_post_delete(sender, **kwargs): """Expire ultracache cache keys affected by this object """ if not invalidate: return if kwargs.get("raw", False): return if sender is MigrationRecorder.Migration: return if issubclass(sender, Model): obj = kwargs["instance"] if isinstance(obj, Model): # get_for_model itself is cached try: ct = ContentType.objects.get_for_model(sender) except RuntimeError: # This happens when ultracache is being used by another product # during a test run. return # Expire cache keys key = "ucache-%s-%s" % (ct.id, obj.pk) to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Invalidate paths in reverse caching proxy key = "ucache-pth-%s-%s" % (ct.id, obj.pk) if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key)
python
def on_post_delete(sender, **kwargs): """Expire ultracache cache keys affected by this object """ if not invalidate: return if kwargs.get("raw", False): return if sender is MigrationRecorder.Migration: return if issubclass(sender, Model): obj = kwargs["instance"] if isinstance(obj, Model): # get_for_model itself is cached try: ct = ContentType.objects.get_for_model(sender) except RuntimeError: # This happens when ultracache is being used by another product # during a test run. return # Expire cache keys key = "ucache-%s-%s" % (ct.id, obj.pk) to_delete = cache.get(key, []) if to_delete: try: cache.delete_many(to_delete) except NotImplementedError: for k in to_delete: cache.delete(k) cache.delete(key) # Invalidate paths in reverse caching proxy key = "ucache-pth-%s-%s" % (ct.id, obj.pk) if purger is not None: for li in cache.get(key, []): purger(li[0], li[1]) cache.delete(key)
Expire ultracache cache keys affected by this object
https://github.com/praekelt/django-ultracache/blob/8898f10e50fc8f8d0a4cb7d3fe4d945bf257bd9f/ultracache/signals.py#L89-L125
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.create_signing_key
async def create_signing_key(self, seed: str = None, metadata: dict = None) -> KeyInfo: """ Create a new signing key pair. Raise WalletState if wallet is closed, ExtantRecord if verification key already exists. :param seed: optional seed allowing deterministic key creation :param metadata: optional metadata to store with key pair :return: KeyInfo for new key pair """ LOGGER.debug('Wallet.create_signing_key >>> seed: [SEED], metadata: %s', metadata) if not self.handle: LOGGER.debug('Wallet.create_signing_key <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: verkey = await crypto.create_key(self.handle, json.dumps({'seed': seed} if seed else {})) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemAlreadyExists: LOGGER.debug('Wallet.create_signing_key <!< Verification key already present in wallet %s', self.name) raise ExtantRecord('Verification key already present in wallet {}'.format(self.name)) LOGGER.debug('Wallet.create_signing_key <!< indy-sdk raised error %s', x_indy.error_code) raise await crypto.set_key_metadata(self.handle, verkey, json.dumps(metadata or {})) # coerce None to empty rv = KeyInfo(verkey, metadata or {}) LOGGER.debug('Wallet.create_signing_key <<< %s', rv) return rv
python
async def create_signing_key(self, seed: str = None, metadata: dict = None) -> KeyInfo: """ Create a new signing key pair. Raise WalletState if wallet is closed, ExtantRecord if verification key already exists. :param seed: optional seed allowing deterministic key creation :param metadata: optional metadata to store with key pair :return: KeyInfo for new key pair """ LOGGER.debug('Wallet.create_signing_key >>> seed: [SEED], metadata: %s', metadata) if not self.handle: LOGGER.debug('Wallet.create_signing_key <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: verkey = await crypto.create_key(self.handle, json.dumps({'seed': seed} if seed else {})) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemAlreadyExists: LOGGER.debug('Wallet.create_signing_key <!< Verification key already present in wallet %s', self.name) raise ExtantRecord('Verification key already present in wallet {}'.format(self.name)) LOGGER.debug('Wallet.create_signing_key <!< indy-sdk raised error %s', x_indy.error_code) raise await crypto.set_key_metadata(self.handle, verkey, json.dumps(metadata or {})) # coerce None to empty rv = KeyInfo(verkey, metadata or {}) LOGGER.debug('Wallet.create_signing_key <<< %s', rv) return rv
Create a new signing key pair. Raise WalletState if wallet is closed, ExtantRecord if verification key already exists. :param seed: optional seed allowing deterministic key creation :param metadata: optional metadata to store with key pair :return: KeyInfo for new key pair
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L233-L263
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_signing_key
async def get_signing_key(self, verkey: str) -> KeyInfo: """ Get signing key pair for input verification key. Raise WalletState if wallet is closed, AbsentRecord for no such key pair. :param verkey: verification key of key pair :return: KeyInfo for key pair """ LOGGER.debug('Wallet.get_signing_key >>> seed: [SEED], verkey: %s', verkey) if not self.handle: LOGGER.debug('Wallet.get_signing_key <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: metadata = await crypto.get_key_metadata(self.handle, verkey) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.get_signing_key <!< Verification key %s not in wallet %s', verkey, self.name) raise AbsentRecord('Verification key not in wallet {}'.format(self.name)) LOGGER.debug('Wallet.get_signing_key <!< indy-sdk raised error %s', x_indy.error_code) raise rv = KeyInfo(verkey, json.loads(metadata) if metadata else {}) LOGGER.debug('Wallet.get_signing_key <<< %s', rv) return rv
python
async def get_signing_key(self, verkey: str) -> KeyInfo: """ Get signing key pair for input verification key. Raise WalletState if wallet is closed, AbsentRecord for no such key pair. :param verkey: verification key of key pair :return: KeyInfo for key pair """ LOGGER.debug('Wallet.get_signing_key >>> seed: [SEED], verkey: %s', verkey) if not self.handle: LOGGER.debug('Wallet.get_signing_key <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: metadata = await crypto.get_key_metadata(self.handle, verkey) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.get_signing_key <!< Verification key %s not in wallet %s', verkey, self.name) raise AbsentRecord('Verification key not in wallet {}'.format(self.name)) LOGGER.debug('Wallet.get_signing_key <!< indy-sdk raised error %s', x_indy.error_code) raise rv = KeyInfo(verkey, json.loads(metadata) if metadata else {}) LOGGER.debug('Wallet.get_signing_key <<< %s', rv) return rv
Get signing key pair for input verification key. Raise WalletState if wallet is closed, AbsentRecord for no such key pair. :param verkey: verification key of key pair :return: KeyInfo for key pair
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L265-L292
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.create_local_did
async def create_local_did(self, seed: str = None, loc_did: str = None, metadata: dict = None) -> DIDInfo: """ Create and store a new local DID for use in pairwise DID relations. :param seed: seed from which to create (default random) :param loc_did: local DID value (default None to let indy-sdk generate) :param metadata: metadata to associate with the local DID (operation always sets 'since', 'modified' epoch timestamps) :return: DIDInfo for new local DID """ LOGGER.debug('Wallet.create_local_did >>> seed: [SEED] loc_did: %s metadata: %s', loc_did, metadata) cfg = {} if seed: cfg['seed'] = seed if loc_did: cfg['did'] = loc_did if not self.handle: LOGGER.debug('Wallet.create_local_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: (created_did, verkey) = await did.create_and_store_my_did(self.handle, json.dumps(cfg)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.DidAlreadyExistsError: LOGGER.debug('Wallet.create_local_did <!< DID %s already present in wallet %s', loc_did, self.name) raise ExtantRecord('Local DID {} already present in wallet {}'.format(loc_did, self.name)) LOGGER.debug('Wallet.create_local_did <!< indy-sdk raised error %s', x_indy.error_code) raise now = int(time()) loc_did_metadata = {**(metadata or {}), 'since': now, 'modified': now} await did.set_did_metadata(self.handle, created_did, json.dumps(loc_did_metadata)) rv = DIDInfo(created_did, verkey, loc_did_metadata) LOGGER.debug('Wallet.create_local_did <<< %s', rv) return rv
python
async def create_local_did(self, seed: str = None, loc_did: str = None, metadata: dict = None) -> DIDInfo: """ Create and store a new local DID for use in pairwise DID relations. :param seed: seed from which to create (default random) :param loc_did: local DID value (default None to let indy-sdk generate) :param metadata: metadata to associate with the local DID (operation always sets 'since', 'modified' epoch timestamps) :return: DIDInfo for new local DID """ LOGGER.debug('Wallet.create_local_did >>> seed: [SEED] loc_did: %s metadata: %s', loc_did, metadata) cfg = {} if seed: cfg['seed'] = seed if loc_did: cfg['did'] = loc_did if not self.handle: LOGGER.debug('Wallet.create_local_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: (created_did, verkey) = await did.create_and_store_my_did(self.handle, json.dumps(cfg)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.DidAlreadyExistsError: LOGGER.debug('Wallet.create_local_did <!< DID %s already present in wallet %s', loc_did, self.name) raise ExtantRecord('Local DID {} already present in wallet {}'.format(loc_did, self.name)) LOGGER.debug('Wallet.create_local_did <!< indy-sdk raised error %s', x_indy.error_code) raise now = int(time()) loc_did_metadata = {**(metadata or {}), 'since': now, 'modified': now} await did.set_did_metadata(self.handle, created_did, json.dumps(loc_did_metadata)) rv = DIDInfo(created_did, verkey, loc_did_metadata) LOGGER.debug('Wallet.create_local_did <<< %s', rv) return rv
Create and store a new local DID for use in pairwise DID relations. :param seed: seed from which to create (default random) :param loc_did: local DID value (default None to let indy-sdk generate) :param metadata: metadata to associate with the local DID (operation always sets 'since', 'modified' epoch timestamps) :return: DIDInfo for new local DID
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L329-L368
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.replace_local_did_metadata
async def replace_local_did_metadata(self, loc_did: str, metadata: dict) -> DIDInfo: """ Replace the metadata associated with a local DID. Raise WalletState if wallet is closed, AbsentRecord for no such local DID. :param loc_did: local DID of interest :param metadata: new metadata to store :return: DIDInfo for local DID after write """ LOGGER.debug('Wallet.replace_local_did_metadata >>> loc_did: %s, metadata: %s', loc_did, metadata) old = await self.get_local_did(loc_did) # raises exceptions if applicable now = int(time()) loc_did_metadata = {**(metadata or {}), 'since': (old.metadata or {}).get('since', now), 'modified': now} try: await did.set_did_metadata(self.handle, loc_did, json.dumps(loc_did_metadata)) except IndyError as x_indy: LOGGER.debug('Wallet.replace_local_did_metadata <!< indy-sdk raised error %s', x_indy.error_code) raise rv = await self.get_local_did(loc_did) LOGGER.debug('Wallet.replace_local_did_metadata <<< %s', rv) return rv
python
async def replace_local_did_metadata(self, loc_did: str, metadata: dict) -> DIDInfo: """ Replace the metadata associated with a local DID. Raise WalletState if wallet is closed, AbsentRecord for no such local DID. :param loc_did: local DID of interest :param metadata: new metadata to store :return: DIDInfo for local DID after write """ LOGGER.debug('Wallet.replace_local_did_metadata >>> loc_did: %s, metadata: %s', loc_did, metadata) old = await self.get_local_did(loc_did) # raises exceptions if applicable now = int(time()) loc_did_metadata = {**(metadata or {}), 'since': (old.metadata or {}).get('since', now), 'modified': now} try: await did.set_did_metadata(self.handle, loc_did, json.dumps(loc_did_metadata)) except IndyError as x_indy: LOGGER.debug('Wallet.replace_local_did_metadata <!< indy-sdk raised error %s', x_indy.error_code) raise rv = await self.get_local_did(loc_did) LOGGER.debug('Wallet.replace_local_did_metadata <<< %s', rv) return rv
Replace the metadata associated with a local DID. Raise WalletState if wallet is closed, AbsentRecord for no such local DID. :param loc_did: local DID of interest :param metadata: new metadata to store :return: DIDInfo for local DID after write
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L370-L394
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_local_dids
async def get_local_dids(self) -> Sequence[DIDInfo]: """ Get list of DIDInfos for local DIDs. :return: list of local DIDInfos """ LOGGER.debug('Wallet.get_local_dids >>>') dids_with_meta = json.loads(did.list_my_dids_with_meta(self.handle)) # list rv = [] for did_with_meta in dids_with_meta: meta = json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {} if meta.get('anchor', False): continue # exclude anchor DIDs past and present rv.append(DIDInfo(did_with_meta['did'], did_with_meta['verkey'], meta)) LOGGER.debug('Wallet.get_local_dids <<< %s', rv) return rv
python
async def get_local_dids(self) -> Sequence[DIDInfo]: """ Get list of DIDInfos for local DIDs. :return: list of local DIDInfos """ LOGGER.debug('Wallet.get_local_dids >>>') dids_with_meta = json.loads(did.list_my_dids_with_meta(self.handle)) # list rv = [] for did_with_meta in dids_with_meta: meta = json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {} if meta.get('anchor', False): continue # exclude anchor DIDs past and present rv.append(DIDInfo(did_with_meta['did'], did_with_meta['verkey'], meta)) LOGGER.debug('Wallet.get_local_dids <<< %s', rv) return rv
Get list of DIDInfos for local DIDs. :return: list of local DIDInfos
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L396-L415
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_local_did
async def get_local_did(self, loc: str) -> DIDInfo: """ Get local DID info by local DID or verification key. Raise AbsentRecord for no such local DID. :param loc: DID or verification key of interest :return: DIDInfo for local DID """ LOGGER.debug('Wallet.get_local_did >>> loc: %s', loc) if not self.handle: LOGGER.debug('Wallet.get_local_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if ok_did(loc): # it's a DID try: did_with_meta = json.loads(await did.get_my_did_with_meta(self.handle, loc)) rv = DIDInfo( did_with_meta['did'], did_with_meta['verkey'], json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {}) # nudge None to empty except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.get_local_did <!< DID %s not present in wallet %s', loc, self.name) raise AbsentRecord('Local DID {} not present in wallet {}'.format(loc, self.name)) LOGGER.debug('Wallet.get_local_did <!< indy-sdk raised error %s', x_indy.error_code) raise else: # it's a verkey dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list for did_with_meta in dids_with_meta: if did_with_meta['verkey'] == loc: rv = DIDInfo( did_with_meta['did'], did_with_meta['verkey'], json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {}) break else: LOGGER.debug('Wallet.get_local_did <!< Wallet %s has no local DID for verkey %s', self.name, loc) raise AbsentRecord('Wallet {} has no local DID for verkey {}'.format(self.name, loc)) LOGGER.debug('Wallet.get_local_did <<< %s', rv) return rv
python
async def get_local_did(self, loc: str) -> DIDInfo: """ Get local DID info by local DID or verification key. Raise AbsentRecord for no such local DID. :param loc: DID or verification key of interest :return: DIDInfo for local DID """ LOGGER.debug('Wallet.get_local_did >>> loc: %s', loc) if not self.handle: LOGGER.debug('Wallet.get_local_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if ok_did(loc): # it's a DID try: did_with_meta = json.loads(await did.get_my_did_with_meta(self.handle, loc)) rv = DIDInfo( did_with_meta['did'], did_with_meta['verkey'], json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {}) # nudge None to empty except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.get_local_did <!< DID %s not present in wallet %s', loc, self.name) raise AbsentRecord('Local DID {} not present in wallet {}'.format(loc, self.name)) LOGGER.debug('Wallet.get_local_did <!< indy-sdk raised error %s', x_indy.error_code) raise else: # it's a verkey dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list for did_with_meta in dids_with_meta: if did_with_meta['verkey'] == loc: rv = DIDInfo( did_with_meta['did'], did_with_meta['verkey'], json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {}) break else: LOGGER.debug('Wallet.get_local_did <!< Wallet %s has no local DID for verkey %s', self.name, loc) raise AbsentRecord('Wallet {} has no local DID for verkey {}'.format(self.name, loc)) LOGGER.debug('Wallet.get_local_did <<< %s', rv) return rv
Get local DID info by local DID or verification key. Raise AbsentRecord for no such local DID. :param loc: DID or verification key of interest :return: DIDInfo for local DID
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L417-L460
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_anchor_did
async def get_anchor_did(self) -> str: """ Get current anchor DID by metadata, None for not yet set. :return: DID """ LOGGER.debug('Wallet.get_anchor_did >>>') if not self.handle: LOGGER.debug('Wallet.get_anchor_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = None dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list latest = 0 for did_with_meta in dids_with_meta: try: meta = json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {} if not meta.get('anchor', False): continue if isinstance(meta, dict) and meta.get('since', -1) > latest: rv = did_with_meta.get('did') except json.decoder.JSONDecodeError: continue # it's not an anchor DID, carry on LOGGER.debug('Wallet.get_anchor_did <<< %s', rv) return rv
python
async def get_anchor_did(self) -> str: """ Get current anchor DID by metadata, None for not yet set. :return: DID """ LOGGER.debug('Wallet.get_anchor_did >>>') if not self.handle: LOGGER.debug('Wallet.get_anchor_did <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = None dids_with_meta = json.loads(await did.list_my_dids_with_meta(self.handle)) # list latest = 0 for did_with_meta in dids_with_meta: try: meta = json.loads(did_with_meta['metadata']) if did_with_meta['metadata'] else {} if not meta.get('anchor', False): continue if isinstance(meta, dict) and meta.get('since', -1) > latest: rv = did_with_meta.get('did') except json.decoder.JSONDecodeError: continue # it's not an anchor DID, carry on LOGGER.debug('Wallet.get_anchor_did <<< %s', rv) return rv
Get current anchor DID by metadata, None for not yet set. :return: DID
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L462-L490
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.create_link_secret
async def create_link_secret(self, label: str) -> None: """ Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret """ LOGGER.debug('Wallet.create_link_secret >>> label: %s', label) if not self.handle: LOGGER.debug('Wallet.create_link_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: await anoncreds.prover_create_master_secret(self.handle, label) await self._write_link_secret_label(label) except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsMasterSecretDuplicateNameError: LOGGER.warning( 'Wallet %s link secret already current: abstaining from updating label record', self.name) await self._write_link_secret_label(label) else: LOGGER.debug( 'Wallet.create_link_secret <!< cannot create link secret for wallet %s, indy error code %s', self.name, x_indy.error_code) raise LOGGER.debug('Wallet.create_link_secret <<<')
python
async def create_link_secret(self, label: str) -> None: """ Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret """ LOGGER.debug('Wallet.create_link_secret >>> label: %s', label) if not self.handle: LOGGER.debug('Wallet.create_link_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: await anoncreds.prover_create_master_secret(self.handle, label) await self._write_link_secret_label(label) except IndyError as x_indy: if x_indy.error_code == ErrorCode.AnoncredsMasterSecretDuplicateNameError: LOGGER.warning( 'Wallet %s link secret already current: abstaining from updating label record', self.name) await self._write_link_secret_label(label) else: LOGGER.debug( 'Wallet.create_link_secret <!< cannot create link secret for wallet %s, indy error code %s', self.name, x_indy.error_code) raise LOGGER.debug('Wallet.create_link_secret <<<')
Create link secret (a.k.a. master secret) used in proofs by HolderProver, if the current link secret does not already correspond to the input link secret label. Raise WalletState if wallet is closed, or any other IndyError causing failure to set link secret in wallet. :param label: label for link secret; indy-sdk uses label to generate link secret
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L492-L524
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet._write_link_secret_label
async def _write_link_secret_label(self, label) -> None: """ Update non-secret storage record with link secret label. :param label: link secret label """ LOGGER.debug('Wallet._write_link_secret_label <<< %s', label) if await self.get_link_secret_label() == label: LOGGER.info('Wallet._write_link_secret_label abstaining - already current') else: await self.write_non_secret(StorageRecord( TYPE_LINK_SECRET_LABEL, label, tags=None, ident=str(int(time())))) # indy requires str LOGGER.debug('Wallet._write_link_secret_label <<<')
python
async def _write_link_secret_label(self, label) -> None: """ Update non-secret storage record with link secret label. :param label: link secret label """ LOGGER.debug('Wallet._write_link_secret_label <<< %s', label) if await self.get_link_secret_label() == label: LOGGER.info('Wallet._write_link_secret_label abstaining - already current') else: await self.write_non_secret(StorageRecord( TYPE_LINK_SECRET_LABEL, label, tags=None, ident=str(int(time())))) # indy requires str LOGGER.debug('Wallet._write_link_secret_label <<<')
Update non-secret storage record with link secret label. :param label: link secret label
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L526-L544
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_link_secret_label
async def get_link_secret_label(self) -> str: """ Get current link secret label from non-secret storage records; return None for no match. :return: latest non-secret storage record for link secret label """ LOGGER.debug('Wallet.get_link_secret_label >>>') if not self.handle: LOGGER.debug('Wallet.get_link_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = None records = await self.get_non_secret(TYPE_LINK_SECRET_LABEL) if records: rv = records[str(max(int(k) for k in records))].value # str to int, max, and back again LOGGER.debug('Wallet.get_link_secret_label <<< %s', rv) return rv
python
async def get_link_secret_label(self) -> str: """ Get current link secret label from non-secret storage records; return None for no match. :return: latest non-secret storage record for link secret label """ LOGGER.debug('Wallet.get_link_secret_label >>>') if not self.handle: LOGGER.debug('Wallet.get_link_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = None records = await self.get_non_secret(TYPE_LINK_SECRET_LABEL) if records: rv = records[str(max(int(k) for k in records))].value # str to int, max, and back again LOGGER.debug('Wallet.get_link_secret_label <<< %s', rv) return rv
Get current link secret label from non-secret storage records; return None for no match. :return: latest non-secret storage record for link secret label
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L546-L565
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.open
async def open(self) -> 'Wallet': """ Explicit entry. Open wallet as configured, for later closure via close(). For use when keeping wallet open across multiple calls. Raise any IndyError causing failure to open wallet, WalletState if wallet already open, or AbsentWallet on attempt to enter wallet not yet created. :return: current object """ LOGGER.debug('Wallet.open >>>') created = False while True: try: self._handle = await wallet.open_wallet( json.dumps(self.config), json.dumps(self.access_creds)) LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle) break except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: if created: LOGGER.debug('Wallet.open() <!< Wallet %s not found after creation', self.name) raise AbsentWallet('Wallet {} not found after creation'.format(self.name)) if self.auto_create: await self.create() continue else: LOGGER.debug('Wallet.open() <!< Wallet %s not found', self.name) raise AbsentWallet('Wallet {} not found'.format(self.name)) elif x_indy.error_code == ErrorCode.WalletAlreadyOpenedError: LOGGER.debug('Wallet.open() <!< Wallet %s is already open', self.name) raise WalletState('Wallet {} is already open'.format(self.name)) elif x_indy.error_code == ErrorCode.WalletAccessFailed: LOGGER.debug('Wallet.open() <!< Bad access credentials value for wallet %s', self.name) raise BadAccess('Bad access credentials value for wallet {}'.format(self.name)) LOGGER.debug('Wallet %s open raised indy error %s', self.name, x_indy.error_code) raise self.did = await self.get_anchor_did() self.verkey = await did.key_for_local_did(self.handle, self.did) if self.did else None LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name, self.verkey, self.did) LOGGER.debug('Wallet.open <<<') return self
python
async def open(self) -> 'Wallet': """ Explicit entry. Open wallet as configured, for later closure via close(). For use when keeping wallet open across multiple calls. Raise any IndyError causing failure to open wallet, WalletState if wallet already open, or AbsentWallet on attempt to enter wallet not yet created. :return: current object """ LOGGER.debug('Wallet.open >>>') created = False while True: try: self._handle = await wallet.open_wallet( json.dumps(self.config), json.dumps(self.access_creds)) LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle) break except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: if created: LOGGER.debug('Wallet.open() <!< Wallet %s not found after creation', self.name) raise AbsentWallet('Wallet {} not found after creation'.format(self.name)) if self.auto_create: await self.create() continue else: LOGGER.debug('Wallet.open() <!< Wallet %s not found', self.name) raise AbsentWallet('Wallet {} not found'.format(self.name)) elif x_indy.error_code == ErrorCode.WalletAlreadyOpenedError: LOGGER.debug('Wallet.open() <!< Wallet %s is already open', self.name) raise WalletState('Wallet {} is already open'.format(self.name)) elif x_indy.error_code == ErrorCode.WalletAccessFailed: LOGGER.debug('Wallet.open() <!< Bad access credentials value for wallet %s', self.name) raise BadAccess('Bad access credentials value for wallet {}'.format(self.name)) LOGGER.debug('Wallet %s open raised indy error %s', self.name, x_indy.error_code) raise self.did = await self.get_anchor_did() self.verkey = await did.key_for_local_did(self.handle, self.did) if self.did else None LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name, self.verkey, self.did) LOGGER.debug('Wallet.open <<<') return self
Explicit entry. Open wallet as configured, for later closure via close(). For use when keeping wallet open across multiple calls. Raise any IndyError causing failure to open wallet, WalletState if wallet already open, or AbsentWallet on attempt to enter wallet not yet created. :return: current object
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L584-L631
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.create
async def create(self) -> None:
    """
    Persist the wallet. Raise ExtantWallet if it already exists.

    Actuators should prefer WalletManager.create() to calling this method directly -
    the wallet manager filters wallet configuration through preset defaults.
    """

    LOGGER.debug('Wallet.create >>>')

    try:
        await wallet.create_wallet(
            config=json.dumps(self.config),
            credentials=json.dumps(self.access_creds))
        LOGGER.info('Created wallet %s', self.name)
    except IndyError as x_indy:
        if x_indy.error_code == ErrorCode.WalletAlreadyExistsError:
            LOGGER.debug('Wallet.create <!< Wallet %s already exists', self.name)
            raise ExtantWallet('Wallet {} already exists'.format(self.name))
        LOGGER.debug(
            'Wallet.create <!< indy error code %s on creation of wallet %s',
            x_indy.error_code,
            self.name)
        raise

    # suspend any auto-removal while the context manager below opens and closes the wallet
    remembered_auto_remove = self.auto_remove
    self.auto_remove = False
    async with self:
        did_info = await self.create_local_did(
            self._von_config.get('seed', None),
            self._von_config.get('did', None),
            {'anchor': True})
        (self.did, self.verkey) = (did_info.did, did_info.verkey)
        if 'link_secret_label' in self._von_config:
            await self.create_link_secret(self._von_config['link_secret_label'])
    self.auto_remove = remembered_auto_remove

    LOGGER.debug('Wallet.create <<<')
python
async def create(self) -> None: """ Persist the wallet. Raise ExtantWallet if it already exists. Actuators should prefer WalletManager.create() to calling this method directly - the wallet manager filters wallet configuration through preset defaults. """ LOGGER.debug('Wallet.create >>>') try: await wallet.create_wallet( config=json.dumps(self.config), credentials=json.dumps(self.access_creds)) LOGGER.info('Created wallet %s', self.name) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletAlreadyExistsError: LOGGER.debug('Wallet.create <!< Wallet %s already exists', self.name) raise ExtantWallet('Wallet {} already exists'.format(self.name)) LOGGER.debug( 'Wallet.create <!< indy error code %s on creation of wallet %s', x_indy.error_code, self.name) raise auto_remove = self.auto_remove self.auto_remove = False # defer past this creation process async with self: did_info = await self.create_local_did( self._von_config.get('seed', None), self._von_config.get('did', None), {'anchor': True}) self.did = did_info.did self.verkey = did_info.verkey if 'link_secret_label' in self._von_config: await self.create_link_secret(self._von_config['link_secret_label']) self.auto_remove = auto_remove LOGGER.debug('Wallet.create <<<')
Persist the wallet. Raise ExtantWallet if it already exists. Actuators should prefer WalletManager.create() to calling this method directly - the wallet manager filters wallet configuration through preset defaults.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L633-L671
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.close
async def close(self) -> None:
    """
    Explicit exit. Close wallet (and delete if so configured).
    """

    LOGGER.debug('Wallet.close >>>')

    if self.handle:
        LOGGER.debug('Closing wallet %s', self.name)
        await wallet.close_wallet(self.handle)
        self._handle = None
        if self.auto_remove:
            LOGGER.info('Automatically removing wallet %s', self.name)
            await self.remove()
    else:
        LOGGER.warning('Abstaining from closing wallet %s: already closed', self.name)
    self._handle = None  # clear handle unconditionally

    LOGGER.debug('Wallet.close <<<')
python
async def close(self) -> None: """ Explicit exit. Close wallet (and delete if so configured). """ LOGGER.debug('Wallet.close >>>') if not self.handle: LOGGER.warning('Abstaining from closing wallet %s: already closed', self.name) else: LOGGER.debug('Closing wallet %s', self.name) await wallet.close_wallet(self.handle) self._handle = None if self.auto_remove: LOGGER.info('Automatically removing wallet %s', self.name) await self.remove() self._handle = None LOGGER.debug('Wallet.close <<<')
Explicit exit. Close wallet (and delete if so configured).
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L689-L707
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.remove
async def remove(self) -> bool:
    """
    Remove serialized wallet, best effort, if it exists. Return whether wallet absent
    after operation (removal successful or else not present a priori).

    Raise WalletState if wallet is open.

    :return: whether wallet gone from persistent storage
    """

    LOGGER.debug('Wallet.remove >>>')

    if self.handle:
        LOGGER.debug('Wallet.remove <!< Wallet %s is open', self.name)
        raise WalletState('Wallet {} is open'.format(self.name))

    gone = True
    try:
        LOGGER.info('Attempting to remove wallet: %s', self.name)
        await wallet.delete_wallet(
            json.dumps(self.config),
            json.dumps(self.access_creds))
    except IndyError as x_indy:
        if x_indy.error_code != ErrorCode.WalletNotFoundError:
            # removal failed for another reason: report failure but do not raise
            LOGGER.info('Failed wallet %s removal; indy-sdk error code %s', self.name, x_indy.error_code)
            gone = False
        else:
            LOGGER.info('Wallet %s not present; abstaining from removal', self.name)

    LOGGER.debug('Wallet.remove <<< %s', gone)
    return gone
python
async def remove(self) -> bool: """ Remove serialized wallet, best effort, if it exists. Return whether wallet absent after operation (removal successful or else not present a priori). Raise WalletState if wallet is open. :return: whether wallet gone from persistent storage """ LOGGER.debug('Wallet.remove >>>') if self.handle: LOGGER.debug('Wallet.remove <!< Wallet %s is open', self.name) raise WalletState('Wallet {} is open'.format(self.name)) rv = True try: LOGGER.info('Attempting to remove wallet: %s', self.name) await wallet.delete_wallet( json.dumps(self.config), json.dumps(self.access_creds)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletNotFoundError: LOGGER.info('Wallet %s not present; abstaining from removal', self.name) else: LOGGER.info('Failed wallet %s removal; indy-sdk error code %s', self.name, x_indy.error_code) rv = False LOGGER.debug('Wallet.remove <<< %s', rv) return rv
Remove serialized wallet, best effort, if it exists. Return whether wallet absent after operation (removal successful or else not present a priori). Raise WalletState if wallet is open. :return: whether wallet gone from persistent storage
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L709-L739
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.write_pairwise
async def write_pairwise(
        self,
        their_did: str,
        their_verkey: str = None,
        my_did: str = None,
        metadata: dict = None,
        replace_meta: bool = False) -> PairwiseInfo:
    """
    Store a pairwise DID for a secure connection. Use verification key for local DID in wallet
    if supplied; otherwise, create one first. If local DID specified but not present, raise
    AbsentRecord.

    With supplied metadata, replace or augment and overwrite any existing metadata for the
    pairwise relation if one already exists in the wallet. Always include local and remote
    DIDs and keys in metadata to allow for WQL search.

    Raise AbsentRecord on call to update a non-existent record. Raise BadRecord if metadata
    does not coerce into non-secrets API tags specification {str:str}.

    :param their_did: remote DID
    :param their_verkey: remote verification key (default None is OK if updating an existing pairwise DID)
    :param my_did: local DID
    :param metadata: metadata for pairwise connection
    :param replace_meta: whether to (True) replace or (False) augment and overwrite existing metadata
    :return: resulting PairwiseInfo
    """

    LOGGER.debug(
        'Wallet.write_pairwise >>> their_did: %s, their_verkey: %s, my_did: %s, metadata: %s, replace_meta: %s',
        their_did,
        their_verkey,
        my_did,
        metadata,
        replace_meta)

    if their_verkey is None:
        # update path: recover remote verkey from the existing pairwise record
        match = await self.get_pairwise(their_did)
        if not match:
            LOGGER.debug(
                'Wallet.write_pairwise <!< Wallet %s has no pairwise DID on %s to update',
                self.name,
                their_did)
            raise AbsentRecord('Wallet {} has no pairwise DID on {} to update'.format(self.name, their_did))
        their_verkey = [pwise for pwise in match.values()][0].their_verkey

    try:
        await did.store_their_did(self.handle, json.dumps({'did': their_did, 'verkey': their_verkey}))
    except IndyError as x_indy:
        if x_indy.error_code == ErrorCode.WalletItemAlreadyExists:
            pass  # exists already, carry on
        else:
            LOGGER.debug(
                'Wallet.write_pairwise <!< Wallet %s write of their_did %s raised indy error code %s',
                self.name,
                their_did,
                x_indy.error_code)
            raise

    if my_did:
        my_did_info = await self.get_local_did(my_did)  # raises AbsentRecord if no such local did
    else:
        # no local DID given: mint one, tagged so it is searchable by remote DID
        my_did_info = await self.create_local_did(None, None, {'pairwise_for': their_did})

    pairwise = PairwiseInfo(their_did, their_verkey, my_did_info.did, my_did_info.verkey, metadata)
    try:
        # persist as non-secret record keyed on remote DID, carrying verkey as value
        storec = await self.write_non_secret(
            StorageRecord(TYPE_PAIRWISE, their_verkey, tags=pairwise_info2tags(pairwise), ident=their_did),
            replace_meta)
    except BadRecord:
        LOGGER.debug(
            'Wallet.write_pairwise <!< Pairwise metadata %s does not coerce into flat {str:str} tags dict',
            pairwise.metadata)
        raise

    rv = storage_record2pairwise_info(storec)
    LOGGER.debug('Wallet.write_pairwise <<< %s', rv)
    return rv
python
async def write_pairwise( self, their_did: str, their_verkey: str = None, my_did: str = None, metadata: dict = None, replace_meta: bool = False) -> PairwiseInfo: """ Store a pairwise DID for a secure connection. Use verification key for local DID in wallet if supplied; otherwise, create one first. If local DID specified but not present, raise AbsentRecord. With supplied metadata, replace or augment and overwrite any existing metadata for the pairwise relation if one already exists in the wallet. Always include local and remote DIDs and keys in metadata to allow for WQL search. Raise AbsentRecord on call to update a non-existent record. Raise BadRecord if metadata does not coerce into non-secrets API tags specification {str:str}. :param their_did: remote DID :param their_verkey: remote verification key (default None is OK if updating an existing pairwise DID) :param my_did: local DID :param metadata: metadata for pairwise connection :param replace_meta: whether to (True) replace or (False) augment and overwrite existing metadata :return: resulting PairwiseInfo """ LOGGER.debug( 'Wallet.write_pairwise >>> their_did: %s, their_verkey: %s, my_did: %s, metadata: %s, replace_meta: %s', their_did, their_verkey, my_did, metadata, replace_meta) if their_verkey is None: match = await self.get_pairwise(their_did) if not match: LOGGER.debug( 'Wallet.write_pairwise <!< Wallet %s has no pairwise DID on %s to update', self.name, their_did) raise AbsentRecord('Wallet {} has no pairwise DID on {} to update'.format(self.name, their_did)) their_verkey = [pwise for pwise in match.values()][0].their_verkey try: await did.store_their_did(self.handle, json.dumps({'did': their_did, 'verkey': their_verkey})) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemAlreadyExists: pass # exists already, carry on else: LOGGER.debug( 'Wallet.write_pairwise <!< Wallet %s write of their_did %s raised indy error code %s', self.name, their_did, x_indy.error_code) raise if my_did: 
my_did_info = await self.get_local_did(my_did) # raises AbsentRecord if no such local did else: my_did_info = await self.create_local_did(None, None, {'pairwise_for': their_did}) pairwise = PairwiseInfo(their_did, their_verkey, my_did_info.did, my_did_info.verkey, metadata) try: storec = await self.write_non_secret( StorageRecord(TYPE_PAIRWISE, their_verkey, tags=pairwise_info2tags(pairwise), ident=their_did), replace_meta) except BadRecord: LOGGER.debug( 'Wallet.write_pairwise <!< Pairwise metadata %s does not coerce into flat {str:str} tags dict', pairwise.metadata) raise rv = storage_record2pairwise_info(storec) LOGGER.debug('Wallet.write_pairwise <<< %s', rv) return rv
Store a pairwise DID for a secure connection. Use verification key for local DID in wallet if supplied; otherwise, create one first. If local DID specified but not present, raise AbsentRecord. With supplied metadata, replace or augment and overwrite any existing metadata for the pairwise relation if one already exists in the wallet. Always include local and remote DIDs and keys in metadata to allow for WQL search. Raise AbsentRecord on call to update a non-existent record. Raise BadRecord if metadata does not coerce into non-secrets API tags specification {str:str}. :param their_did: remote DID :param their_verkey: remote verification key (default None is OK if updating an existing pairwise DID) :param my_did: local DID :param metadata: metadata for pairwise connection :param replace_meta: whether to (True) replace or (False) augment and overwrite existing metadata :return: resulting PairwiseInfo
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L741-L816
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.delete_pairwise
async def delete_pairwise(self, their_did: str) -> None:
    """
    Remove a pairwise DID record by its remote DID. Silently return if no such record is present.
    Raise WalletState for closed wallet, or BadIdentifier for invalid pairwise DID.

    :param their_did: remote DID marking pairwise DID to remove
    """

    LOGGER.debug('Wallet.delete_pairwise >>> their_did: %s', their_did)

    if ok_did(their_did):
        await self.delete_non_secret(TYPE_PAIRWISE, their_did)
    else:
        LOGGER.debug('Wallet.delete_pairwise <!< Bad DID %s', their_did)
        raise BadIdentifier('Bad DID {}'.format(their_did))

    LOGGER.debug('Wallet.delete_pairwise <<<')
python
async def delete_pairwise(self, their_did: str) -> None: """ Remove a pairwise DID record by its remote DID. Silently return if no such record is present. Raise WalletState for closed wallet, or BadIdentifier for invalid pairwise DID. :param their_did: remote DID marking pairwise DID to remove """ LOGGER.debug('Wallet.delete_pairwise >>> their_did: %s', their_did) if not ok_did(their_did): LOGGER.debug('Wallet.delete_pairwise <!< Bad DID %s', their_did) raise BadIdentifier('Bad DID {}'.format(their_did)) await self.delete_non_secret(TYPE_PAIRWISE, their_did) LOGGER.debug('Wallet.delete_pairwise <<<')
Remove a pairwise DID record by its remote DID. Silently return if no such record is present. Raise WalletState for closed wallet, or BadIdentifier for invalid pairwise DID. :param their_did: remote DID marking pairwise DID to remove
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L818-L834
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_pairwise
async def get_pairwise(self, pairwise_filt: str = None) -> dict:
    """
    Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or,
    for no filter specified, mapping them all. If wallet has no such item, return empty dict.

    :param pairwise_filt: remote DID of interest, or WQL json (default all)
    :return: dict mapping remote DIDs to PairwiseInfo
    """

    LOGGER.debug('Wallet.get_pairwise >>> pairwise_filt: %s', pairwise_filt)

    if not self.handle:
        LOGGER.debug('Wallet.get_pairwise <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    if pairwise_filt and not ok_did(pairwise_filt):
        filt = json.loads(pairwise_filt)  # not a DID: treat as WQL json
    else:
        filt = pairwise_filt  # remote DID for direct lookup, or None for all
    storecs = await self.get_non_secret(TYPE_PAIRWISE, filt, canon_pairwise_wql)

    # touch up tags, mute leading ~
    rv = {ident: storage_record2pairwise_info(storec) for (ident, storec) in storecs.items()}

    LOGGER.debug('Wallet.get_pairwise <<< %s', rv)
    return rv
python
async def get_pairwise(self, pairwise_filt: str = None) -> dict: """ Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or, for no filter specified, mapping them all. If wallet has no such item, return empty dict. :param pairwise_filt: remote DID of interest, or WQL json (default all) :return: dict mapping remote DIDs to PairwiseInfo """ LOGGER.debug('Wallet.get_pairwise >>> pairwise_filt: %s', pairwise_filt) if not self.handle: LOGGER.debug('Wallet.get_pairwise <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) storecs = await self.get_non_secret( TYPE_PAIRWISE, pairwise_filt if ok_did(pairwise_filt) or not pairwise_filt else json.loads(pairwise_filt), canon_pairwise_wql) rv = {k: storage_record2pairwise_info(storecs[k]) for k in storecs} # touch up tags, mute leading ~ LOGGER.debug('Wallet.get_pairwise <<< %s', rv) return rv
Return dict mapping each pairwise DID of interest in wallet to its pairwise info, or, for no filter specified, mapping them all. If wallet has no such item, return empty dict. :param pairwise_filt: remote DID of interest, or WQL json (default all) :return: dict mapping remote DIDs to PairwiseInfo
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L836-L858
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.write_non_secret
async def write_non_secret(self, storec: StorageRecord, replace_meta: bool = False) -> StorageRecord:
    """
    Add or update non-secret storage record to the wallet; return resulting wallet non-secret record.

    Raise WalletState if wallet is closed, BadRecord if record tags do not coerce into the
    non-secrets API flat {str: str} tags specification.

    :param storec: non-secret storage record
    :param replace_meta: whether to replace any existing metadata on matching record or to augment it
    :return: non-secret storage record as it appears in the wallet after write
    """

    LOGGER.debug('Wallet.write_non_secret >>> storec: %s, replace_meta: %s', storec, replace_meta)

    if not self.handle:
        LOGGER.debug('Wallet.write_non_secret <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    if not StorageRecord.ok_tags(storec.tags):
        LOGGER.debug('Wallet.write_non_secret <!< bad storage record tags %s; use flat {str: str} dict', storec)
        raise BadRecord('Bad storage record tags {}; use flat {{str:str}} dict'.format(storec))

    try:
        # look up any existing record on (type, id); update its value only if it changed
        record = json.loads(await non_secrets.get_wallet_record(
            self.handle,
            storec.type,
            storec.id,
            json.dumps({
                'retrieveType': False,
                'retrieveValue': True,
                'retrieveTags': True
            })))
        if record['value'] != storec.value:
            await non_secrets.update_wallet_record_value(
                self.handle,
                storec.type,
                storec.id,
                storec.value)
    except IndyError as x_indy:
        if x_indy.error_code == ErrorCode.WalletItemNotFound:
            # no existing record: add a fresh one with tags as given
            await non_secrets.add_wallet_record(
                self.handle,
                storec.type,
                storec.id,
                storec.value,
                json.dumps(storec.tags) if storec.tags else None)
        else:
            LOGGER.debug(
                'Wallet.write_non_secret <!< Wallet lookup raised indy error code %s',
                x_indy.error_code)
            raise
    else:
        # record existed: reconcile tags per replace_meta policy
        if (record['tags'] or None) != storec.tags:  # record maps no tags to {}, not None
            tags = (storec.tags or {}) if replace_meta else {**record['tags'], **(storec.tags or {})}
            await non_secrets.update_wallet_record_tags(
                self.handle,
                storec.type,
                storec.id,
                json.dumps(tags))  # indy-sdk takes '{}' instead of None for null tags

    # re-read to return the record exactly as the wallet now holds it
    record = json.loads(await non_secrets.get_wallet_record(
        self.handle,
        storec.type,
        storec.id,
        json.dumps({
            'retrieveType': False,
            'retrieveValue': True,
            'retrieveTags': True
        })))

    rv = StorageRecord(storec.type, record['value'], tags=record.get('tags', None), ident=record['id'])
    LOGGER.debug('Wallet.write_non_secret <<< %s', rv)
    return rv
python
async def write_non_secret(self, storec: StorageRecord, replace_meta: bool = False) -> StorageRecord: """ Add or update non-secret storage record to the wallet; return resulting wallet non-secret record. :param storec: non-secret storage record :param replace_meta: whether to replace any existing metadata on matching record or to augment it :return: non-secret storage record as it appears in the wallet after write """ LOGGER.debug('Wallet.write_non_secret >>> storec: %s, replace_meta: %s', storec, replace_meta) if not self.handle: LOGGER.debug('Wallet.write_non_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if not StorageRecord.ok_tags(storec.tags): LOGGER.debug('Wallet.write_non_secret <!< bad storage record tags %s; use flat {str: str} dict', storec) raise BadRecord('Bad storage record tags {}; use flat {{str:str}} dict'.format(storec)) try: record = json.loads(await non_secrets.get_wallet_record( self.handle, storec.type, storec.id, json.dumps({ 'retrieveType': False, 'retrieveValue': True, 'retrieveTags': True }))) if record['value'] != storec.value: await non_secrets.update_wallet_record_value( self.handle, storec.type, storec.id, storec.value) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: await non_secrets.add_wallet_record( self.handle, storec.type, storec.id, storec.value, json.dumps(storec.tags) if storec.tags else None) else: LOGGER.debug( 'Wallet.write_non_secret <!< Wallet lookup raised indy error code %s', x_indy.error_code) raise else: if (record['tags'] or None) != storec.tags: # record maps no tags to {}, not None tags = (storec.tags or {}) if replace_meta else {**record['tags'], **(storec.tags or {})} await non_secrets.update_wallet_record_tags( self.handle, storec.type, storec.id, json.dumps(tags)) # indy-sdk takes '{}' instead of None for null tags record = json.loads(await non_secrets.get_wallet_record( self.handle, storec.type, storec.id, json.dumps({ 
'retrieveType': False, 'retrieveValue': True, 'retrieveTags': True }))) rv = StorageRecord(storec.type, record['value'], tags=record.get('tags', None), ident=record['id']) LOGGER.debug('Wallet.write_non_secret <<< %s', rv) return rv
Add or update non-secret storage record to the wallet; return resulting wallet non-secret record. :param storec: non-secret storage record :param replace_meta: whether to replace any existing metadata on matching record or to augment it :return: non-secret storage record as it appears in the wallet after write
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L860-L930
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.delete_non_secret
async def delete_non_secret(self, typ: str, ident: str) -> None:
    """
    Remove a non-secret record by its type and identifier. Silently return if no such
    record is present. Raise WalletState for closed wallet.

    :param typ: non-secret storage record type
    :param ident: non-secret storage record identifier
    """

    LOGGER.debug('Wallet.delete_non_secret >>> typ: %s, ident: %s', typ, ident)

    if not self.handle:
        LOGGER.debug('Wallet.delete_non_secret <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    try:
        await non_secrets.delete_wallet_record(self.handle, typ, ident)
    except IndyError as x_indy:
        if x_indy.error_code != ErrorCode.WalletItemNotFound:
            LOGGER.debug(
                'Wallet.delete_non_secret <!< deletion of %s record on identifier %s raised indy error code %s',
                typ,
                ident,
                x_indy.error_code)
            raise
        # absent record: log and carry on silently
        LOGGER.info('Wallet.delete_non_secret <!< no record for type %s on identifier %s', typ, ident)

    LOGGER.debug('Wallet.delete_non_secret <<<')
python
async def delete_non_secret(self, typ: str, ident: str) -> None: """ Remove a non-secret record by its type and identifier. Silently return if no such record is present. Raise WalletState for closed wallet. :param typ: non-secret storage record type :param ident: non-secret storage record identifier """ LOGGER.debug('Wallet.delete_non_secret >>> typ: %s, ident: %s', typ, ident) if not self.handle: LOGGER.debug('Wallet.delete_non_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) try: await non_secrets.delete_wallet_record(self.handle, typ, ident) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.info('Wallet.delete_non_secret <!< no record for type %s on identifier %s', typ, ident) else: LOGGER.debug( 'Wallet.delete_non_secret <!< deletion of %s record on identifier %s raised indy error code %s', typ, ident, x_indy.error_code) raise LOGGER.debug('Wallet.delete_non_secret <<<')
Remove a non-secret record by its type and identifier. Silently return if no such record is present. Raise WalletState for closed wallet. :param typ: non-secret storage record type :param ident: non-secret storage record identifier
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L932-L960
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.get_non_secret
async def get_non_secret(
        self,
        typ: str,
        filt: Union[dict, str] = None,
        canon_wql: Callable[[dict], dict] = None,
        limit: int = None) -> dict:
    """
    Return dict mapping each non-secret storage record of interest by identifier or,
    for no filter specified, mapping them all. If wallet has no such item, return empty dict.

    Raise WalletState if wallet is closed.

    :param typ: non-secret storage record type
    :param filt: non-secret storage record identifier or WQL json (default all)
    :param canon_wql: WQL canonicalization function (default von_anchor.canon.canon_non_secret_wql())
    :param limit: maximum number of results to return (default no limit)
    :return: dict mapping identifiers to non-secret storage records
    """

    LOGGER.debug('Wallet.get_non_secret >>> typ: %s, filt: %s, canon_wql: %s', typ, filt, canon_wql)

    if not self.handle:
        LOGGER.debug('Wallet.get_non_secret <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    records = []
    if isinstance(filt, str):  # ordinary lookup by value
        try:
            records = [json.loads(await non_secrets.get_wallet_record(
                self.handle,
                typ,
                filt,
                json.dumps({
                    'retrieveType': False,
                    'retrieveValue': True,
                    'retrieveTags': True
                })))]
        except IndyError as x_indy:
            if x_indy.error_code == ErrorCode.WalletItemNotFound:
                pass  # absent record: fall through to empty result
            else:
                LOGGER.debug(
                    'Wallet.get_non_secret <!< Wallet %s lookup raised indy exception %s',
                    self.name,
                    x_indy.error_code)
                raise
    else:
        # WQL search path: canonicalize filter, open a search, page through results
        canon = canon_wql or canon_non_secret_wql
        s_handle = await non_secrets.open_wallet_search(
            self.handle,
            typ,
            json.dumps(canon(filt or {})),
            json.dumps({
                'retrieveRecords': True,
                'retrieveTotalCount': True,
                'retrieveType': False,
                'retrieveValue': True,
                'retrieveTags': True
            }))
        records = []
        # fetch 0 records first to obtain the total count for paging arithmetic
        cardinality = int(json.loads(
            await non_secrets.fetch_wallet_search_next_records(self.handle, s_handle, 0))['totalCount'])
        chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK)  # heuristic
        if limit:
            cardinality = min(limit, cardinality)
        try:
            while len(records) != cardinality:
                batch = json.loads(
                    await non_secrets.fetch_wallet_search_next_records(self.handle, s_handle, chunk))['records']
                records.extend(batch)
                if len(batch) < chunk:
                    break  # search exhausted before reaching expected cardinality
            if len(records) != cardinality:
                LOGGER.warning(
                    'Non-secret search/limit indicated %s results but fetched %s',
                    cardinality,
                    len(records))
        finally:
            await non_secrets.close_wallet_search(s_handle)

    rv = {record['id']: StorageRecord(typ, record['value'], record['tags'], record['id']) for record in records}
    LOGGER.debug('Wallet.get_non_secret <<< %s', rv)
    return rv
python
async def get_non_secret( self, typ: str, filt: Union[dict, str] = None, canon_wql: Callable[[dict], dict] = None, limit: int = None) -> dict: """ Return dict mapping each non-secret storage record of interest by identifier or, for no filter specified, mapping them all. If wallet has no such item, return empty dict. :param typ: non-secret storage record type :param filt: non-secret storage record identifier or WQL json (default all) :param canon_wql: WQL canonicalization function (default von_anchor.canon.canon_non_secret_wql()) :param limit: maximum number of results to return (default no limit) :return: dict mapping identifiers to non-secret storage records """ LOGGER.debug('Wallet.get_non_secret >>> typ: %s, filt: %s, canon_wql: %s', typ, filt, canon_wql) if not self.handle: LOGGER.debug('Wallet.get_non_secret <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) records = [] if isinstance(filt, str): # ordinary lookup by value try: records = [json.loads(await non_secrets.get_wallet_record( self.handle, typ, filt, json.dumps({ 'retrieveType': False, 'retrieveValue': True, 'retrieveTags': True })))] except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: pass else: LOGGER.debug( 'Wallet.get_non_secret <!< Wallet %s lookup raised indy exception %s', self.name, x_indy.error_code) raise else: canon = canon_wql or canon_non_secret_wql s_handle = await non_secrets.open_wallet_search( self.handle, typ, json.dumps(canon(filt or {})), json.dumps({ 'retrieveRecords': True, 'retrieveTotalCount': True, 'retrieveType': False, 'retrieveValue': True, 'retrieveTags': True })) records = [] cardinality = int(json.loads( await non_secrets.fetch_wallet_search_next_records(self.handle, s_handle, 0))['totalCount']) chunk = min(cardinality, limit or cardinality, Wallet.DEFAULT_CHUNK) if limit: cardinality = min(limit, cardinality) try: while len(records) != cardinality: batch = json.loads( await 
non_secrets.fetch_wallet_search_next_records(self.handle, s_handle, chunk))['records'] records.extend(batch) if len(batch) < chunk: break if len(records) != cardinality: LOGGER.warning( 'Non-secret search/limit indicated %s results but fetched %s', cardinality, len(records)) finally: await non_secrets.close_wallet_search(s_handle) rv = {record['id']: StorageRecord(typ, record['value'], record['tags'], record['id']) for record in records} LOGGER.debug('Wallet.get_non_secret <<< %s', rv) return rv
Return dict mapping each non-secret storage record of interest by identifier or, for no filter specified, mapping them all. If wallet has no such item, return empty dict. :param typ: non-secret storage record type :param filt: non-secret storage record identifier or WQL json (default all) :param canon_wql: WQL canonicalization function (default von_anchor.canon.canon_non_secret_wql()) :param limit: maximum number of results to return (default no limit) :return: dict mapping identifiers to non-secret storage records
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L962-L1043
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.encrypt
async def encrypt( self, message: bytes, authn: bool = False, to_verkey: str = None, from_verkey: str = None) -> bytes: """ Encrypt plaintext for owner of DID, anonymously or via authenticated encryption scheme. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param to_verkey: verification key of recipient, None for anchor's own :param from_verkey: verification key of sender for authenticated encryption, None for anchor's own :return: ciphertext, as bytes """ LOGGER.debug( 'Wallet.encrypt >>> message: %s, authn: %s, to_verkey: %s, from_verkey: %s', message, authn, to_verkey, from_verkey) if not message: LOGGER.debug('Wallet.encrypt <!< No message to encrypt') raise AbsentMessage('No message to encrypt') if not self.handle: LOGGER.debug('Wallet.encrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if authn: rv = await crypto.auth_crypt(self.handle, from_verkey or self.verkey, to_verkey or self.verkey, message) else: rv = await crypto.anon_crypt(to_verkey or self.verkey, message) LOGGER.debug('Wallet.auth_encrypt <<< %s', rv) return rv
python
async def encrypt( self, message: bytes, authn: bool = False, to_verkey: str = None, from_verkey: str = None) -> bytes: """ Encrypt plaintext for owner of DID, anonymously or via authenticated encryption scheme. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param to_verkey: verification key of recipient, None for anchor's own :param from_verkey: verification key of sender for authenticated encryption, None for anchor's own :return: ciphertext, as bytes """ LOGGER.debug( 'Wallet.encrypt >>> message: %s, authn: %s, to_verkey: %s, from_verkey: %s', message, authn, to_verkey, from_verkey) if not message: LOGGER.debug('Wallet.encrypt <!< No message to encrypt') raise AbsentMessage('No message to encrypt') if not self.handle: LOGGER.debug('Wallet.encrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) if authn: rv = await crypto.auth_crypt(self.handle, from_verkey or self.verkey, to_verkey or self.verkey, message) else: rv = await crypto.anon_crypt(to_verkey or self.verkey, message) LOGGER.debug('Wallet.auth_encrypt <<< %s', rv) return rv
Encrypt plaintext for owner of DID, anonymously or via authenticated encryption scheme. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: plaintext, as bytes :param authn: whether to use authenticated encryption scheme :param to_verkey: verification key of recipient, None for anchor's own :param from_verkey: verification key of sender for authenticated encryption, None for anchor's own :return: ciphertext, as bytes
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1045-L1083
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.decrypt
async def decrypt( self, ciphertext: bytes, authn_check: bool = None, to_verkey: str = None, from_verkey: str = None) -> (bytes, str): """ Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation checks and reveals sender key distinct from input sender verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param authn_check: True to authenticate and check sender verification key, False to authenticate and return sender verification key for client to decide fitness, or None to use anonymous decryption :param to_verkey: recipient verification key, default anchor's own :param from_verkey: sender verification key, ignored for anonymous decryption, default anchor's own for authenticated decryption :return: decrypted bytes and sender verification key (None for anonymous decryption) """ LOGGER.debug( 'Wallet.decrypt >>> ciphertext: %s, authn_check: %s, to_verkey: %s, from_verkey: %s', ciphertext, authn_check, to_verkey, from_verkey) if not ciphertext: LOGGER.debug('Wallet.decrypt <!< No ciphertext to decrypt') raise AbsentMessage('No ciphertext to decrypt') if not self.handle: LOGGER.debug('Wallet.decrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) sender_verkey = None if authn_check is None: plaintext = await crypto.anon_decrypt(self.handle, to_verkey or self.verkey, ciphertext) else: (sender_verkey, plaintext) = await crypto.auth_decrypt(self.handle, to_verkey or self.verkey, ciphertext) if authn_check and sender_verkey != (from_verkey or self.verkey): LOGGER.debug('Wallet.decrypt <!< Authentication revealed unexpected sender key on decryption') raise BadKey('Authentication revealed unexpected sender key on decryption') rv = (plaintext, sender_verkey) LOGGER.debug('Wallet.decrypt <<< %s', rv) return rv
python
async def decrypt( self, ciphertext: bytes, authn_check: bool = None, to_verkey: str = None, from_verkey: str = None) -> (bytes, str): """ Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation checks and reveals sender key distinct from input sender verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param authn_check: True to authenticate and check sender verification key, False to authenticate and return sender verification key for client to decide fitness, or None to use anonymous decryption :param to_verkey: recipient verification key, default anchor's own :param from_verkey: sender verification key, ignored for anonymous decryption, default anchor's own for authenticated decryption :return: decrypted bytes and sender verification key (None for anonymous decryption) """ LOGGER.debug( 'Wallet.decrypt >>> ciphertext: %s, authn_check: %s, to_verkey: %s, from_verkey: %s', ciphertext, authn_check, to_verkey, from_verkey) if not ciphertext: LOGGER.debug('Wallet.decrypt <!< No ciphertext to decrypt') raise AbsentMessage('No ciphertext to decrypt') if not self.handle: LOGGER.debug('Wallet.decrypt <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) sender_verkey = None if authn_check is None: plaintext = await crypto.anon_decrypt(self.handle, to_verkey or self.verkey, ciphertext) else: (sender_verkey, plaintext) = await crypto.auth_decrypt(self.handle, to_verkey or self.verkey, ciphertext) if authn_check and sender_verkey != (from_verkey or self.verkey): LOGGER.debug('Wallet.decrypt <!< Authentication revealed unexpected sender key on decryption') raise BadKey('Authentication revealed unexpected sender key on decryption') rv = (plaintext, sender_verkey) LOGGER.debug('Wallet.decrypt <<< %s', rv) return rv
Decrypt ciphertext and optionally authenticate sender. Raise BadKey if authentication operation checks and reveals sender key distinct from input sender verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. :param ciphertext: ciphertext, as bytes :param authn_check: True to authenticate and check sender verification key, False to authenticate and return sender verification key for client to decide fitness, or None to use anonymous decryption :param to_verkey: recipient verification key, default anchor's own :param from_verkey: sender verification key, ignored for anonymous decryption, default anchor's own for authenticated decryption :return: decrypted bytes and sender verification key (None for anonymous decryption)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1085-L1134
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.sign
async def sign(self, message: bytes, verkey: str = None) -> bytes: """ Derive signing key and Sign message; return signature. Raise WalletState if wallet is closed. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param verkey: verification key corresponding to private signing key (default anchor's own) :return: signature, as bytes """ LOGGER.debug('Wallet.sign >>> message: %s, verkey: %s', message, verkey) if not message: LOGGER.debug('Wallet.sign <!< No message to sign') raise AbsentMessage('No message to sign') if not self.handle: LOGGER.debug('Wallet.sign <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await crypto.crypto_sign(self.handle, verkey or self.verkey, message) LOGGER.debug('Wallet.sign <<< %s', rv) return rv
python
async def sign(self, message: bytes, verkey: str = None) -> bytes: """ Derive signing key and Sign message; return signature. Raise WalletState if wallet is closed. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param verkey: verification key corresponding to private signing key (default anchor's own) :return: signature, as bytes """ LOGGER.debug('Wallet.sign >>> message: %s, verkey: %s', message, verkey) if not message: LOGGER.debug('Wallet.sign <!< No message to sign') raise AbsentMessage('No message to sign') if not self.handle: LOGGER.debug('Wallet.sign <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await crypto.crypto_sign(self.handle, verkey or self.verkey, message) LOGGER.debug('Wallet.sign <<< %s', rv) return rv
Derive signing key and Sign message; return signature. Raise WalletState if wallet is closed. Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param verkey: verification key corresponding to private signing key (default anchor's own) :return: signature, as bytes
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1136-L1159
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.verify
async def verify(self, message: bytes, signature: bytes, verkey: str = None) -> bool: """ Verify signature against input signer verification key (default anchor's own). Raise AbsentMessage for missing message or signature, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param verkey: signer verification key (default for anchor's own) :return: whether signature is valid """ LOGGER.debug('Wallet.verify >>> message: %s, signature: %s, verkey: %s', message, signature, verkey) if not message: LOGGER.debug('Wallet.verify <!< No message to verify') raise AbsentMessage('No message to verify') if not signature: LOGGER.debug('Wallet.verify <!< No signature to verify') raise AbsentMessage('No signature to verify') if not self.handle: LOGGER.debug('Wallet.verify <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await crypto.crypto_verify(verkey or self.verkey, message, signature) LOGGER.debug('Wallet.verify <<< %s', rv) return rv
python
async def verify(self, message: bytes, signature: bytes, verkey: str = None) -> bool: """ Verify signature against input signer verification key (default anchor's own). Raise AbsentMessage for missing message or signature, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param verkey: signer verification key (default for anchor's own) :return: whether signature is valid """ LOGGER.debug('Wallet.verify >>> message: %s, signature: %s, verkey: %s', message, signature, verkey) if not message: LOGGER.debug('Wallet.verify <!< No message to verify') raise AbsentMessage('No message to verify') if not signature: LOGGER.debug('Wallet.verify <!< No signature to verify') raise AbsentMessage('No signature to verify') if not self.handle: LOGGER.debug('Wallet.verify <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await crypto.crypto_verify(verkey or self.verkey, message, signature) LOGGER.debug('Wallet.verify <<< %s', rv) return rv
Verify signature against input signer verification key (default anchor's own). Raise AbsentMessage for missing message or signature, or WalletState if wallet is closed. :param message: Content to sign, as bytes :param signature: signature, as bytes :param verkey: signer verification key (default for anchor's own) :return: whether signature is valid
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1161-L1189
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.pack
async def pack( self, message: str, recip_verkeys: Union[str, Sequence[str]] = None, sender_verkey: str = None) -> bytes: """ Pack a message for one or more recipients (default anchor only). Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: message to pack :param recip_verkeys: verification keys of recipients (default anchor's own, only) :param sender_verkey: sender verification key (default anonymous encryption) :return: packed message """ LOGGER.debug( 'Wallet.pack >>> message: %s, recip_verkeys: %s, sender_verkey: %s', message, recip_verkeys, sender_verkey) if message is None: LOGGER.debug('Wallet.pack <!< No message to pack') raise AbsentMessage('No message to pack') rv = await crypto.pack_message( self.handle, message, [recip_verkeys] if isinstance(recip_verkeys, str) else list(recip_verkeys or [self.verkey]), sender_verkey) LOGGER.debug('Wallet.pack <<< %s', rv) return rv
python
async def pack( self, message: str, recip_verkeys: Union[str, Sequence[str]] = None, sender_verkey: str = None) -> bytes: """ Pack a message for one or more recipients (default anchor only). Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: message to pack :param recip_verkeys: verification keys of recipients (default anchor's own, only) :param sender_verkey: sender verification key (default anonymous encryption) :return: packed message """ LOGGER.debug( 'Wallet.pack >>> message: %s, recip_verkeys: %s, sender_verkey: %s', message, recip_verkeys, sender_verkey) if message is None: LOGGER.debug('Wallet.pack <!< No message to pack') raise AbsentMessage('No message to pack') rv = await crypto.pack_message( self.handle, message, [recip_verkeys] if isinstance(recip_verkeys, str) else list(recip_verkeys or [self.verkey]), sender_verkey) LOGGER.debug('Wallet.pack <<< %s', rv) return rv
Pack a message for one or more recipients (default anchor only). Raise AbsentMessage for missing message, or WalletState if wallet is closed. :param message: message to pack :param recip_verkeys: verification keys of recipients (default anchor's own, only) :param sender_verkey: sender verification key (default anonymous encryption) :return: packed message
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1191-L1223
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.unpack
async def unpack(self, ciphertext: bytes) -> (str, str, str): """ Unpack a message. Return triple with cleartext, sender verification key, and recipient verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. Raise AbsentRecord if wallet has no key to unpack ciphertext. :param ciphertext: JWE-like formatted message as pack() produces :return: cleartext, sender verification key, recipient verification key """ LOGGER.debug('Wallet.unpack >>> ciphertext: %s', ciphertext) if not ciphertext: LOGGER.debug('Wallet.pack <!< No ciphertext to unpack') raise AbsentMessage('No ciphertext to unpack') try: unpacked = json.loads(await crypto.unpack_message(self.handle, ciphertext)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.unpack <!< Wallet %s has no local key to unpack ciphertext', self.name) raise AbsentRecord('Wallet {} has no local key to unpack ciphertext'.format(self.name)) LOGGER.debug('Wallet.unpack <!< Wallet %s unpack() raised indy error code {}', x_indy.error_code) raise rv = (unpacked['message'], unpacked.get('sender_verkey', None), unpacked.get('recipient_verkey', None)) LOGGER.debug('Wallet.unpack <<< %s', rv) return rv
python
async def unpack(self, ciphertext: bytes) -> (str, str, str): """ Unpack a message. Return triple with cleartext, sender verification key, and recipient verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. Raise AbsentRecord if wallet has no key to unpack ciphertext. :param ciphertext: JWE-like formatted message as pack() produces :return: cleartext, sender verification key, recipient verification key """ LOGGER.debug('Wallet.unpack >>> ciphertext: %s', ciphertext) if not ciphertext: LOGGER.debug('Wallet.pack <!< No ciphertext to unpack') raise AbsentMessage('No ciphertext to unpack') try: unpacked = json.loads(await crypto.unpack_message(self.handle, ciphertext)) except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletItemNotFound: LOGGER.debug('Wallet.unpack <!< Wallet %s has no local key to unpack ciphertext', self.name) raise AbsentRecord('Wallet {} has no local key to unpack ciphertext'.format(self.name)) LOGGER.debug('Wallet.unpack <!< Wallet %s unpack() raised indy error code {}', x_indy.error_code) raise rv = (unpacked['message'], unpacked.get('sender_verkey', None), unpacked.get('recipient_verkey', None)) LOGGER.debug('Wallet.unpack <<< %s', rv) return rv
Unpack a message. Return triple with cleartext, sender verification key, and recipient verification key. Raise AbsentMessage for missing ciphertext, or WalletState if wallet is closed. Raise AbsentRecord if wallet has no key to unpack ciphertext. :param ciphertext: JWE-like formatted message as pack() produces :return: cleartext, sender verification key, recipient verification key
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1225-L1252
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.reseed_init
async def reseed_init(self, next_seed: str = None) -> str: """ Begin reseed operation: generate new key. Raise WalletState if wallet is closed. :param next_seed: incoming replacement seed (default random) :return: new verification key """ LOGGER.debug('Wallet.reseed_init >>> next_seed: [SEED]') if not self.handle: LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await did.replace_keys_start(self.handle, self.did, json.dumps({'seed': next_seed} if next_seed else {})) LOGGER.debug('Wallet.reseed_init <<< %s', rv) return rv
python
async def reseed_init(self, next_seed: str = None) -> str: """ Begin reseed operation: generate new key. Raise WalletState if wallet is closed. :param next_seed: incoming replacement seed (default random) :return: new verification key """ LOGGER.debug('Wallet.reseed_init >>> next_seed: [SEED]') if not self.handle: LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) rv = await did.replace_keys_start(self.handle, self.did, json.dumps({'seed': next_seed} if next_seed else {})) LOGGER.debug('Wallet.reseed_init <<< %s', rv) return rv
Begin reseed operation: generate new key. Raise WalletState if wallet is closed. :param next_seed: incoming replacement seed (default random) :return: new verification key
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1254-L1270
PSPC-SPAC-buyandsell/von_anchor
von_anchor/wallet/wallet.py
Wallet.reseed_apply
async def reseed_apply(self) -> DIDInfo: """ Replace verification key with new verification key from reseed operation. Raise WalletState if wallet is closed. :return: DIDInfo with new verification key and metadata for DID """ LOGGER.debug('Wallet.reseed_apply >>>') if not self.handle: LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) await did.replace_keys_apply(self.handle, self.did) self.verkey = await did.key_for_local_did(self.handle, self.did) now = int(time()) rv = DIDInfo(self.did, self.verkey, {'anchor': True, 'since': now, 'modified': now}) await did.set_did_metadata(self.handle, self.did, json.dumps(rv.metadata)) LOGGER.info('Wallet %s set seed hash metadata for DID %s', self.name, self.did) LOGGER.debug('Wallet.reseed_apply <<< %s', rv) return rv
python
async def reseed_apply(self) -> DIDInfo: """ Replace verification key with new verification key from reseed operation. Raise WalletState if wallet is closed. :return: DIDInfo with new verification key and metadata for DID """ LOGGER.debug('Wallet.reseed_apply >>>') if not self.handle: LOGGER.debug('Wallet.reseed_init <!< Wallet %s is closed', self.name) raise WalletState('Wallet {} is closed'.format(self.name)) await did.replace_keys_apply(self.handle, self.did) self.verkey = await did.key_for_local_did(self.handle, self.did) now = int(time()) rv = DIDInfo(self.did, self.verkey, {'anchor': True, 'since': now, 'modified': now}) await did.set_did_metadata(self.handle, self.did, json.dumps(rv.metadata)) LOGGER.info('Wallet %s set seed hash metadata for DID %s', self.name, self.did) LOGGER.debug('Wallet.reseed_apply <<< %s', rv) return rv
Replace verification key with new verification key from reseed operation. Raise WalletState if wallet is closed. :return: DIDInfo with new verification key and metadata for DID
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/wallet/wallet.py#L1272-L1295
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/origin.py
Origin.send_schema
async def send_schema(self, schema_data_json: str) -> str: """ Send schema to ledger, then retrieve it as written to the ledger and return it. Raise BadLedgerTxn on failure. Raise BadAttribute for attribute name with spaces or reserved for indy-sdk. If schema already exists on ledger, log error and return schema. :param schema_data_json: schema data json with name, version, attribute names; e.g., :: { 'name': 'my-schema', 'version': '1.234', 'attr_names': ['favourite_drink', 'height', 'last_visit_date'] } :return: schema json as written to ledger (or existed a priori) """ LOGGER.debug('Origin.send_schema >>> schema_data_json: %s', schema_data_json) schema_data = json.loads(schema_data_json) for attr in schema_data['attr_names']: if not (re.match(r'(?=[^- ])[-_a-zA-Z0-9 ]+(?<=[^- ])$', attr)) or attr.strip().lower() == 'hash': LOGGER.debug('Origin.send_schema <!< Bad attribute name [%s]', attr) raise BadAttribute('Bad attribute name [{}]'.format(attr)) s_id = schema_id(self.did, schema_data['name'], schema_data['version']) s_key = schema_key(s_id) rv_json = None with SCHEMA_CACHE.lock: try: rv_json = await self.get_schema(s_key) LOGGER.error( 'Schema %s version %s already exists on ledger for origin-did %s: not sending', schema_data['name'], schema_data['version'], self.did) except AbsentSchema: # OK - about to create and send it (_, schema_json) = await anoncreds.issuer_create_schema( self.did, schema_data['name'], schema_data['version'], json.dumps(schema_data['attr_names'])) req_json = await ledger.build_schema_request(self.did, schema_json) await self._sign_submit(req_json) for _ in range(16): # reasonable timeout try: rv_json = await self.get_schema(s_key) # adds to cache break except AbsentSchema: await sleep(1) LOGGER.info('Sent schema %s to ledger, waiting 1s for its appearance', s_id) if not rv_json: LOGGER.debug('Origin.send_schema <!< timed out waiting on sent schema %s', s_id) raise BadLedgerTxn('Timed out waiting on sent schema {}'.format(s_id)) 
LOGGER.debug('Origin.send_schema <<< %s', rv_json) return rv_json
python
async def send_schema(self, schema_data_json: str) -> str: """ Send schema to ledger, then retrieve it as written to the ledger and return it. Raise BadLedgerTxn on failure. Raise BadAttribute for attribute name with spaces or reserved for indy-sdk. If schema already exists on ledger, log error and return schema. :param schema_data_json: schema data json with name, version, attribute names; e.g., :: { 'name': 'my-schema', 'version': '1.234', 'attr_names': ['favourite_drink', 'height', 'last_visit_date'] } :return: schema json as written to ledger (or existed a priori) """ LOGGER.debug('Origin.send_schema >>> schema_data_json: %s', schema_data_json) schema_data = json.loads(schema_data_json) for attr in schema_data['attr_names']: if not (re.match(r'(?=[^- ])[-_a-zA-Z0-9 ]+(?<=[^- ])$', attr)) or attr.strip().lower() == 'hash': LOGGER.debug('Origin.send_schema <!< Bad attribute name [%s]', attr) raise BadAttribute('Bad attribute name [{}]'.format(attr)) s_id = schema_id(self.did, schema_data['name'], schema_data['version']) s_key = schema_key(s_id) rv_json = None with SCHEMA_CACHE.lock: try: rv_json = await self.get_schema(s_key) LOGGER.error( 'Schema %s version %s already exists on ledger for origin-did %s: not sending', schema_data['name'], schema_data['version'], self.did) except AbsentSchema: # OK - about to create and send it (_, schema_json) = await anoncreds.issuer_create_schema( self.did, schema_data['name'], schema_data['version'], json.dumps(schema_data['attr_names'])) req_json = await ledger.build_schema_request(self.did, schema_json) await self._sign_submit(req_json) for _ in range(16): # reasonable timeout try: rv_json = await self.get_schema(s_key) # adds to cache break except AbsentSchema: await sleep(1) LOGGER.info('Sent schema %s to ledger, waiting 1s for its appearance', s_id) if not rv_json: LOGGER.debug('Origin.send_schema <!< timed out waiting on sent schema %s', s_id) raise BadLedgerTxn('Timed out waiting on sent schema {}'.format(s_id)) 
LOGGER.debug('Origin.send_schema <<< %s', rv_json) return rv_json
Send schema to ledger, then retrieve it as written to the ledger and return it. Raise BadLedgerTxn on failure. Raise BadAttribute for attribute name with spaces or reserved for indy-sdk. If schema already exists on ledger, log error and return schema. :param schema_data_json: schema data json with name, version, attribute names; e.g., :: { 'name': 'my-schema', 'version': '1.234', 'attr_names': ['favourite_drink', 'height', 'last_visit_date'] } :return: schema json as written to ledger (or existed a priori)
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/origin.py#L40-L102
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/demo.py
NominalAnchor.least_role
def least_role() -> Role: """ Return the indy-sdk null role for a tails sync anchor, which does not need write access. :return: USER role """ LOGGER.debug('NominalAnchor.least_role >>>') rv = Role.USER LOGGER.debug('NominalAnchor.least_role <<< %s', rv) return rv
python
def least_role() -> Role: """ Return the indy-sdk null role for a tails sync anchor, which does not need write access. :return: USER role """ LOGGER.debug('NominalAnchor.least_role >>>') rv = Role.USER LOGGER.debug('NominalAnchor.least_role <<< %s', rv) return rv
Return the indy-sdk null role for a tails sync anchor, which does not need write access. :return: USER role
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/demo.py#L46-L58
PSPC-SPAC-buyandsell/von_anchor
von_anchor/anchor/demo.py
OrgHubAnchor.close
async def close(self) -> None: """ Explicit exit. If so configured, populate cache to prove for any creds on schemata, cred defs, and rev regs marked of interest in configuration at initialization, archive cache, and purge prior cache archives. :return: current object """ LOGGER.debug('OrgHubAnchor.close >>>') archive_caches = False if self.config.get('archive-holder-prover-caches-on-close', False): archive_caches = True await self.load_cache_for_proof(False) if self.config.get('archive-verifier-caches-on-close', {}): archive_caches = True await self.load_cache_for_verification(False) if archive_caches: ArchivableCaches.archive(self.dir_cache) ArchivableCaches.purge_archives(self.dir_cache, True) # Do not close wallet independently: allow for sharing open wallet over many anchor lifetimes # await self.wallet.close() #1.7.8 # Do not close pool independently: let relying party decide when to go on-line and off-line for path_rr_id in Tails.links(self._dir_tails): rr_id = basename(path_rr_id) try: await HolderProver._sync_revoc_for_proof(self, rr_id) except ClosedPool: LOGGER.warning('OrgHubAnchor sync-revoc on close required ledger for %s but pool was closed', rr_id) LOGGER.debug('OrgHubAnchor.close <<<')
python
async def close(self) -> None: """ Explicit exit. If so configured, populate cache to prove for any creds on schemata, cred defs, and rev regs marked of interest in configuration at initialization, archive cache, and purge prior cache archives. :return: current object """ LOGGER.debug('OrgHubAnchor.close >>>') archive_caches = False if self.config.get('archive-holder-prover-caches-on-close', False): archive_caches = True await self.load_cache_for_proof(False) if self.config.get('archive-verifier-caches-on-close', {}): archive_caches = True await self.load_cache_for_verification(False) if archive_caches: ArchivableCaches.archive(self.dir_cache) ArchivableCaches.purge_archives(self.dir_cache, True) # Do not close wallet independently: allow for sharing open wallet over many anchor lifetimes # await self.wallet.close() #1.7.8 # Do not close pool independently: let relying party decide when to go on-line and off-line for path_rr_id in Tails.links(self._dir_tails): rr_id = basename(path_rr_id) try: await HolderProver._sync_revoc_for_proof(self, rr_id) except ClosedPool: LOGGER.warning('OrgHubAnchor sync-revoc on close required ledger for %s but pool was closed', rr_id) LOGGER.debug('OrgHubAnchor.close <<<')
Explicit exit. If so configured, populate cache to prove for any creds on schemata, cred defs, and rev regs marked of interest in configuration at initialization, archive cache, and purge prior cache archives. :return: current object
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/anchor/demo.py#L149-L182
PSPC-SPAC-buyandsell/von_anchor
von_anchor/canon.py
canon_cred_wql
def canon_cred_wql(query: dict) -> dict: """ Canonicalize WQL attribute marker and value keys for input to indy-sdk wallet credential filtration. Canonicalize comparison values to proper indy-sdk raw values as per raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict """ for k in [qk for qk in query]: # copy: iteration alters query keys attr_match = re.match('attr::([^:]+)::(marker|value)$', k) if isinstance(query[k], dict): # only subqueries are dicts: recurse query[k] = canon_cred_wql(query[k]) if k == '$or': if not isinstance(query[k], list): raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query))) query[k] = [canon_cred_wql(subq) for subq in query[k]] if attr_match: qkey = 'attr::{}::{}'.format(canon(attr_match.group(1)), canon(attr_match.group(2))) query[qkey] = query.pop(k) tag_value = query[qkey] if isinstance(tag_value, dict) and len(tag_value) == 1: if '$in' in tag_value: tag_value['$in'] = [raw(val) for val in tag_value.pop('$in')] else: wql_op = set(tag_value.keys()).pop() # $neq, $gt, $gte, etc. tag_value[wql_op] = raw(tag_value[wql_op]) else: # equality query[qkey] = raw(query[qkey]) return query
python
def canon_cred_wql(query: dict) -> dict: """ Canonicalize WQL attribute marker and value keys for input to indy-sdk wallet credential filtration. Canonicalize comparison values to proper indy-sdk raw values as per raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict """ for k in [qk for qk in query]: # copy: iteration alters query keys attr_match = re.match('attr::([^:]+)::(marker|value)$', k) if isinstance(query[k], dict): # only subqueries are dicts: recurse query[k] = canon_cred_wql(query[k]) if k == '$or': if not isinstance(query[k], list): raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query))) query[k] = [canon_cred_wql(subq) for subq in query[k]] if attr_match: qkey = 'attr::{}::{}'.format(canon(attr_match.group(1)), canon(attr_match.group(2))) query[qkey] = query.pop(k) tag_value = query[qkey] if isinstance(tag_value, dict) and len(tag_value) == 1: if '$in' in tag_value: tag_value['$in'] = [raw(val) for val in tag_value.pop('$in')] else: wql_op = set(tag_value.keys()).pop() # $neq, $gt, $gte, etc. tag_value[wql_op] = raw(tag_value[wql_op]) else: # equality query[qkey] = raw(query[qkey]) return query
Canonicalize WQL attribute marker and value keys for input to indy-sdk wallet credential filtration. Canonicalize comparison values to proper indy-sdk raw values as per raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/canon.py#L54-L86
PSPC-SPAC-buyandsell/von_anchor
von_anchor/canon.py
canon_pairwise_wql
def canon_pairwise_wql(query: dict = None) -> dict: """ Canonicalize WQL tags to unencrypted storage specification. Canonicalize comparison values to strings via raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict """ if not query: return { '~their_did': { '$neq': '' } } for k in [qk for qk in query]: # copy: iteration alters query keys if isinstance(query[k], dict): # only subqueries are dicts: recurse query[k] = canon_pairwise_wql(query[k]) if k == '$or': if not isinstance(query[k], list): raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query))) query[k] = [canon_pairwise_wql(subq) for subq in query[k]] elif k == '$not': query[k] = canon_pairwise_wql(query.pop(k)) elif k not in WQL_1_OPS: qkey = canon_pairwise_tag(k) query[qkey] = query.pop(k) tag_value = query[qkey] if isinstance(tag_value, dict) and len(tag_value) == 1: if '$in' in tag_value: tag_value['$in'] = [raw(val) for val in tag_value['$in']] else: wql_op = set(tag_value.keys()).pop() # $neq, $gt, $gt, etc. tag_value[wql_op] = raw(tag_value[wql_op]) else: query[qkey] = raw(query.pop(qkey)) return query
python
def canon_pairwise_wql(query: dict = None) -> dict: """ Canonicalize WQL tags to unencrypted storage specification. Canonicalize comparison values to strings via raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict """ if not query: return { '~their_did': { '$neq': '' } } for k in [qk for qk in query]: # copy: iteration alters query keys if isinstance(query[k], dict): # only subqueries are dicts: recurse query[k] = canon_pairwise_wql(query[k]) if k == '$or': if not isinstance(query[k], list): raise BadWalletQuery('Bad WQL; $or value must be a list in {}'.format(json.dumps(query))) query[k] = [canon_pairwise_wql(subq) for subq in query[k]] elif k == '$not': query[k] = canon_pairwise_wql(query.pop(k)) elif k not in WQL_1_OPS: qkey = canon_pairwise_tag(k) query[qkey] = query.pop(k) tag_value = query[qkey] if isinstance(tag_value, dict) and len(tag_value) == 1: if '$in' in tag_value: tag_value['$in'] = [raw(val) for val in tag_value['$in']] else: wql_op = set(tag_value.keys()).pop() # $neq, $gt, $gt, etc. tag_value[wql_op] = raw(tag_value[wql_op]) else: query[qkey] = raw(query.pop(qkey)) return query
Canonicalize WQL tags to unencrypted storage specification. Canonicalize comparison values to strings via raw(). Raise BadWalletQuery for WQL mapping '$or' to non-list. :param query: WQL query :return: canonicalized WQL query dict
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/canon.py#L136-L176
PSPC-SPAC-buyandsell/von_anchor
setup.py
parse_requirements
def parse_requirements(filename): """ Load requirements from a pip requirements file. :param filename: file name with requirements to parse """ try: with open(filename) as fh_req: return [line.strip() for line in fh_req if line.strip() and not line.startswith('#')] except FileNotFoundError: print('File not found: {}'.format(realpath(filename)), file=stderr) raise
python
def parse_requirements(filename): """ Load requirements from a pip requirements file. :param filename: file name with requirements to parse """ try: with open(filename) as fh_req: return [line.strip() for line in fh_req if line.strip() and not line.startswith('#')] except FileNotFoundError: print('File not found: {}'.format(realpath(filename)), file=stderr) raise
Load requirements from a pip requirements file. :param filename: file name with requirements to parse
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/setup.py#L29-L41
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/manager.py
NodePoolManager.add_config
async def add_config(self, name: str, genesis: str = None) -> None: """ Given pool name and genesis transaction path or data, add node pool configuration to indy home directory. Raise ExtantPool if node pool configuration on input name already exists. :param name: pool name :param genesis: genesis transaction path or raw data """ LOGGER.debug('NodePoolManager.__init__ >>> name: %s, genesis: %s', name, genesis) if name in await self.list(): LOGGER.debug('NodePoolManager.add_config: <!< Node pool %s configuration already present', name) raise ExtantPool('Node pool {} configuration already present'.format(name)) genesis_tmp = None path_gen = realpath(expanduser(expandvars(genesis))) try: if not isfile(path_gen): genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0, delete=False) with genesis_tmp: genesis_tmp.write(genesis.encode()) await pool.create_pool_ledger_config( name, json.dumps({ 'genesis_txn': path_gen if isfile(path_gen) else genesis_tmp.name })) finally: if genesis_tmp: remove(genesis_tmp.name) LOGGER.debug('NodePoolManager.__init__ <<<')
python
async def add_config(self, name: str, genesis: str = None) -> None: """ Given pool name and genesis transaction path or data, add node pool configuration to indy home directory. Raise ExtantPool if node pool configuration on input name already exists. :param name: pool name :param genesis: genesis transaction path or raw data """ LOGGER.debug('NodePoolManager.__init__ >>> name: %s, genesis: %s', name, genesis) if name in await self.list(): LOGGER.debug('NodePoolManager.add_config: <!< Node pool %s configuration already present', name) raise ExtantPool('Node pool {} configuration already present'.format(name)) genesis_tmp = None path_gen = realpath(expanduser(expandvars(genesis))) try: if not isfile(path_gen): genesis_tmp = NamedTemporaryFile(mode='w+b', buffering=0, delete=False) with genesis_tmp: genesis_tmp.write(genesis.encode()) await pool.create_pool_ledger_config( name, json.dumps({ 'genesis_txn': path_gen if isfile(path_gen) else genesis_tmp.name })) finally: if genesis_tmp: remove(genesis_tmp.name) LOGGER.debug('NodePoolManager.__init__ <<<')
Given pool name and genesis transaction path or data, add node pool configuration to indy home directory. Raise ExtantPool if node pool configuration on input name already exists. :param name: pool name :param genesis: genesis transaction path or raw data
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/manager.py#L73-L104
PSPC-SPAC-buyandsell/von_anchor
von_anchor/nodepool/manager.py
NodePoolManager.list
async def list(self) -> List[str]: """ Return list of pool names configured, empty list for none. :return: list of pool names. """ LOGGER.debug('NodePoolManager.list >>>') rv = [p['pool'] for p in await pool.list_pools()] LOGGER.debug('NodePoolManager.list <<< %s', rv) return rv
python
async def list(self) -> List[str]: """ Return list of pool names configured, empty list for none. :return: list of pool names. """ LOGGER.debug('NodePoolManager.list >>>') rv = [p['pool'] for p in await pool.list_pools()] LOGGER.debug('NodePoolManager.list <<< %s', rv) return rv
Return list of pool names configured, empty list for none. :return: list of pool names.
https://github.com/PSPC-SPAC-buyandsell/von_anchor/blob/78ac1de67be42a676274f4bf71fe12f66e72f309/von_anchor/nodepool/manager.py#L106-L118