repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
couchbase/couchbase-python-client
couchbase/admin.py
Admin.wait_ready
def wait_ready(self, name, timeout=5.0, sleep_interval=0.2):
    """
    Wait for a newly created bucket to be ready.

    :param string name: the name to wait for
    :param seconds timeout: the maximum amount of time to wait
    :param seconds sleep_interval: the number of time to sleep
        between each probe
    :raise: :exc:`.CouchbaseError` on internal HTTP error
    :raise: :exc:`NotReadyError` if all nodes could not be
        ready in time
    """
    deadline = time() + timeout
    while True:
        try:
            nodes = self.bucket_info(name).value['nodes']
            # Any node that is not yet healthy means the bucket is not ready
            if any(node['status'] != 'healthy' for node in nodes):
                raise NotReadyError.pyexc('Not all nodes are healthy')
            return  # No error and every node reported healthy
        except E.CouchbaseError:
            # Stop retrying once another probe would exceed the deadline
            if time() + sleep_interval > deadline:
                raise
            sleep(sleep_interval)
python
def wait_ready(self, name, timeout=5.0, sleep_interval=0.2): """ Wait for a newly created bucket to be ready. :param string name: the name to wait for :param seconds timeout: the maximum amount of time to wait :param seconds sleep_interval: the number of time to sleep between each probe :raise: :exc:`.CouchbaseError` on internal HTTP error :raise: :exc:`NotReadyError` if all nodes could not be ready in time """ end = time() + timeout while True: try: info = self.bucket_info(name).value for node in info['nodes']: if node['status'] != 'healthy': raise NotReadyError.pyexc('Not all nodes are healthy') return # No error and all OK except E.CouchbaseError: if time() + sleep_interval > end: raise sleep(sleep_interval)
[ "def", "wait_ready", "(", "self", ",", "name", ",", "timeout", "=", "5.0", ",", "sleep_interval", "=", "0.2", ")", ":", "end", "=", "time", "(", ")", "+", "timeout", "while", "True", ":", "try", ":", "info", "=", "self", ".", "bucket_info", "(", "name", ")", ".", "value", "for", "node", "in", "info", "[", "'nodes'", "]", ":", "if", "node", "[", "'status'", "]", "!=", "'healthy'", ":", "raise", "NotReadyError", ".", "pyexc", "(", "'Not all nodes are healthy'", ")", "return", "# No error and all OK", "except", "E", ".", "CouchbaseError", ":", "if", "time", "(", ")", "+", "sleep_interval", ">", "end", ":", "raise", "sleep", "(", "sleep_interval", ")" ]
Wait for a newly created bucket to be ready. :param string name: the name to wait for :param seconds timeout: the maximum amount of time to wait :param seconds sleep_interval: the number of time to sleep between each probe :raise: :exc:`.CouchbaseError` on internal HTTP error :raise: :exc:`NotReadyError` if all nodes could not be ready in time
[ "Wait", "for", "a", "newly", "created", "bucket", "to", "be", "ready", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/admin.py#L241-L264
train
couchbase/couchbase-python-client
couchbase/admin.py
Admin.bucket_update
def bucket_update(self, name, current, bucket_password=None, replicas=None,
                  ram_quota=None, flush_enabled=None):
    """
    Update an existing bucket's settings.

    :param string name: The name of the bucket to update
    :param dict current: Current state of the bucket.
        This can be retrieve from :meth:`bucket_info`
    :param str bucket_password: Change the bucket's password
    :param int replicas: The number of replicas for the bucket
    :param int ram_quota: The memory available to the bucket
        on each node.
    :param bool flush_enabled: Whether the flush API should be allowed
        from normal clients
    :return: A :class:`~.HttpResult` object
    :raise: :exc:`~.HTTPError` if the request could not be
        completed

    .. note::

        The default value for all options in this method is
        ``None``. If a value is set to something else, it will
        modify the setting.


    Change the bucket password::

        adm.bucket_update('a_bucket',
                          adm.bucket_info('a_bucket'),
                          bucket_password='n3wpassw0rd')

    Enable the flush API::

        adm.bucket_update('a_bucket',
                          adm.bucket_info('a_bucket'),
                          flush_enabled=True)
    """
    params = {}
    current = current.value

    # Merge the current settings with the requested changes; unspecified
    # options fall back to their existing server-side values.
    params['authType'] = current['authType']
    if 'saslPassword' in current:
        params['saslPassword'] = current['saslPassword']

    if bucket_password is not None:
        params['authType'] = 'sasl'
        params['saslPassword'] = bucket_password

    params['replicaNumber'] = (
        replicas if replicas is not None else current['replicaNumber'])

    if ram_quota:
        params['ramQuotaMB'] = ram_quota
    else:
        # Floor division keeps the MB value an integer on both Python 2
        # and 3; true division would produce a float (e.g. "100.0") in
        # the form payload under Python 3.
        params['ramQuotaMB'] = current['quota']['ram'] // 1024 // 1024

    if flush_enabled is not None:
        params['flushEnabled'] = int(flush_enabled)

    params['proxyPort'] = current['proxyPort']
    return self.http_request(path='/pools/default/buckets/' + name,
                             method='POST',
                             content_type='application/x-www-form-urlencoded',
                             content=self._mk_formstr(params))
python
def bucket_update(self, name, current, bucket_password=None, replicas=None, ram_quota=None, flush_enabled=None): """ Update an existing bucket's settings. :param string name: The name of the bucket to update :param dict current: Current state of the bucket. This can be retrieve from :meth:`bucket_info` :param str bucket_password: Change the bucket's password :param int replicas: The number of replicas for the bucket :param int ram_quota: The memory available to the bucket on each node. :param bool flush_enabled: Whether the flush API should be allowed from normal clients :return: A :class:`~.HttpResult` object :raise: :exc:`~.HTTPError` if the request could not be completed .. note:: The default value for all options in this method is ``None``. If a value is set to something else, it will modify the setting. Change the bucket password:: adm.bucket_update('a_bucket', adm.bucket_info('a_bucket'), bucket_password='n3wpassw0rd') Enable the flush API:: adm.bucket_update('a_bucket', adm.bucket_info('a_bucket'), flush_enabled=True) """ params = {} current = current.value # Merge params params['authType'] = current['authType'] if 'saslPassword' in current: params['saslPassword'] = current['saslPassword'] if bucket_password is not None: params['authType'] = 'sasl' params['saslPassword'] = bucket_password params['replicaNumber'] = ( replicas if replicas is not None else current['replicaNumber']) if ram_quota: params['ramQuotaMB'] = ram_quota else: params['ramQuotaMB'] = current['quota']['ram'] / 1024 / 1024 if flush_enabled is not None: params['flushEnabled'] = int(flush_enabled) params['proxyPort'] = current['proxyPort'] return self.http_request(path='/pools/default/buckets/' + name, method='POST', content_type='application/x-www-form-urlencoded', content=self._mk_formstr(params))
[ "def", "bucket_update", "(", "self", ",", "name", ",", "current", ",", "bucket_password", "=", "None", ",", "replicas", "=", "None", ",", "ram_quota", "=", "None", ",", "flush_enabled", "=", "None", ")", ":", "params", "=", "{", "}", "current", "=", "current", ".", "value", "# Merge params", "params", "[", "'authType'", "]", "=", "current", "[", "'authType'", "]", "if", "'saslPassword'", "in", "current", ":", "params", "[", "'saslPassword'", "]", "=", "current", "[", "'saslPassword'", "]", "if", "bucket_password", "is", "not", "None", ":", "params", "[", "'authType'", "]", "=", "'sasl'", "params", "[", "'saslPassword'", "]", "=", "bucket_password", "params", "[", "'replicaNumber'", "]", "=", "(", "replicas", "if", "replicas", "is", "not", "None", "else", "current", "[", "'replicaNumber'", "]", ")", "if", "ram_quota", ":", "params", "[", "'ramQuotaMB'", "]", "=", "ram_quota", "else", ":", "params", "[", "'ramQuotaMB'", "]", "=", "current", "[", "'quota'", "]", "[", "'ram'", "]", "/", "1024", "/", "1024", "if", "flush_enabled", "is", "not", "None", ":", "params", "[", "'flushEnabled'", "]", "=", "int", "(", "flush_enabled", ")", "params", "[", "'proxyPort'", "]", "=", "current", "[", "'proxyPort'", "]", "return", "self", ".", "http_request", "(", "path", "=", "'/pools/default/buckets/'", "+", "name", ",", "method", "=", "'POST'", ",", "content_type", "=", "'application/x-www-form-urlencoded'", ",", "content", "=", "self", ".", "_mk_formstr", "(", "params", ")", ")" ]
Update an existing bucket's settings. :param string name: The name of the bucket to update :param dict current: Current state of the bucket. This can be retrieve from :meth:`bucket_info` :param str bucket_password: Change the bucket's password :param int replicas: The number of replicas for the bucket :param int ram_quota: The memory available to the bucket on each node. :param bool flush_enabled: Whether the flush API should be allowed from normal clients :return: A :class:`~.HttpResult` object :raise: :exc:`~.HTTPError` if the request could not be completed .. note:: The default value for all options in this method is ``None``. If a value is set to something else, it will modify the setting. Change the bucket password:: adm.bucket_update('a_bucket', adm.bucket_info('a_bucket'), bucket_password='n3wpassw0rd') Enable the flush API:: adm.bucket_update('a_bucket', adm.bucket_info('a_bucket'), flush_enabled=True)
[ "Update", "an", "existing", "bucket", "s", "settings", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/admin.py#L266-L329
train
couchbase/couchbase-python-client
couchbase/admin.py
Admin.users_get
def users_get(self, domain):
    """
    Retrieve a list of users from the server.

    :param AuthDomain domain: The authentication domain to retrieve
        users from.
    :return: :class:`~.HttpResult`. The list of users can be obtained
        from the returned object's `value` property.
    """
    return self.http_request(path=self._get_management_path(domain),
                             method='GET')
python
def users_get(self, domain): """ Retrieve a list of users from the server. :param AuthDomain domain: The authentication domain to retrieve users from. :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property. """ path = self._get_management_path(domain) return self.http_request(path=path, method='GET')
[ "def", "users_get", "(", "self", ",", "domain", ")", ":", "path", "=", "self", ".", "_get_management_path", "(", "domain", ")", "return", "self", ".", "http_request", "(", "path", "=", "path", ",", "method", "=", "'GET'", ")" ]
Retrieve a list of users from the server. :param AuthDomain domain: The authentication domain to retrieve users from. :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property.
[ "Retrieve", "a", "list", "of", "users", "from", "the", "server", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/admin.py#L347-L357
train
couchbase/couchbase-python-client
couchbase/admin.py
Admin.user_get
def user_get(self, domain, userid):
    """
    Retrieve a user from the server

    :param AuthDomain domain: The authentication domain for the user.
    :param userid: The user ID.
    :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not
        exist.
    :return: :class:`~.HttpResult`. The user can be obtained from the
        returned object's `value` property.
    """
    return self.http_request(path=self._get_management_path(domain, userid),
                             method='GET')
python
def user_get(self, domain, userid): """ Retrieve a user from the server :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID. :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not exist. :return: :class:`~.HttpResult`. The user can be obtained from the returned object's `value` property. """ path = self._get_management_path(domain, userid) return self.http_request(path=path, method='GET')
[ "def", "user_get", "(", "self", ",", "domain", ",", "userid", ")", ":", "path", "=", "self", ".", "_get_management_path", "(", "domain", ",", "userid", ")", "return", "self", ".", "http_request", "(", "path", "=", "path", ",", "method", "=", "'GET'", ")" ]
Retrieve a user from the server :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID. :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not exist. :return: :class:`~.HttpResult`. The user can be obtained from the returned object's `value` property.
[ "Retrieve", "a", "user", "from", "the", "server" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/admin.py#L359-L371
train
couchbase/couchbase-python-client
couchbase/admin.py
Admin.user_upsert
def user_upsert(self, domain, userid, password=None, roles=None, name=None):
    """
    Upsert a user in the cluster

    :param AuthDomain domain: The authentication domain for the user.
    :param userid: The user ID
    :param password: The user password
    :param roles: A list of roles. A role can either be a simple string,
        or a list of `(role, bucket)` pairs.
    :param name: Human-readable name
    :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails.
    :return: :class:`~.HttpResult`

    Creating a new read-only admin user ::

        adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin'])

    An example of using more complex roles ::

        adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t',
                                          [('data_reader', '*'),
                                           ('data_writer', 'inbox')])


    .. warning::

       Due to the asynchronous nature of Couchbase management APIs, it may
       take a few moments for the new user settings to take effect.
    """
    if not roles or not isinstance(roles, list):
        raise E.ArgumentError("Roles must be a non-empty list")
    if password and domain == AuthDomain.External:
        raise E.ArgumentError("External domains must not have passwords")

    # Plain strings pass through; (role, bucket) pairs become "role[bucket]"
    formatted = [role if isinstance(role, basestring)
                 else '{0}[{1}]'.format(*role)
                 for role in roles]

    params = {'roles': ','.join(formatted)}
    if password:
        params['password'] = password
    if name:
        params['name'] = name

    return self.http_request(path=self._get_management_path(domain, userid),
                             method='PUT',
                             content_type='application/x-www-form-urlencoded',
                             content=self._mk_formstr(params))
python
def user_upsert(self, domain, userid, password=None, roles=None, name=None): """ Upsert a user in the cluster :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), ('data_writer', 'inbox')]) .. warning:: Due to the asynchronous nature of Couchbase management APIs, it may take a few moments for the new user settings to take effect. """ if not roles or not isinstance(roles, list): raise E.ArgumentError("Roles must be a non-empty list") if password and domain == AuthDomain.External: raise E.ArgumentError("External domains must not have passwords") tmplist = [] for role in roles: if isinstance(role, basestring): tmplist.append(role) else: tmplist.append('{0}[{1}]'.format(*role)) role_string = ','.join(tmplist) params = { 'roles': role_string, } if password: params['password'] = password if name: params['name'] = name form = self._mk_formstr(params) path = self._get_management_path(domain, userid) return self.http_request(path=path, method='PUT', content_type='application/x-www-form-urlencoded', content=form)
[ "def", "user_upsert", "(", "self", ",", "domain", ",", "userid", ",", "password", "=", "None", ",", "roles", "=", "None", ",", "name", "=", "None", ")", ":", "if", "not", "roles", "or", "not", "isinstance", "(", "roles", ",", "list", ")", ":", "raise", "E", ".", "ArgumentError", "(", "\"Roles must be a non-empty list\"", ")", "if", "password", "and", "domain", "==", "AuthDomain", ".", "External", ":", "raise", "E", ".", "ArgumentError", "(", "\"External domains must not have passwords\"", ")", "tmplist", "=", "[", "]", "for", "role", "in", "roles", ":", "if", "isinstance", "(", "role", ",", "basestring", ")", ":", "tmplist", ".", "append", "(", "role", ")", "else", ":", "tmplist", ".", "append", "(", "'{0}[{1}]'", ".", "format", "(", "*", "role", ")", ")", "role_string", "=", "','", ".", "join", "(", "tmplist", ")", "params", "=", "{", "'roles'", ":", "role_string", ",", "}", "if", "password", ":", "params", "[", "'password'", "]", "=", "password", "if", "name", ":", "params", "[", "'name'", "]", "=", "name", "form", "=", "self", ".", "_mk_formstr", "(", "params", ")", "path", "=", "self", ".", "_get_management_path", "(", "domain", ",", "userid", ")", "return", "self", ".", "http_request", "(", "path", "=", "path", ",", "method", "=", "'PUT'", ",", "content_type", "=", "'application/x-www-form-urlencoded'", ",", "content", "=", "form", ")" ]
Upsert a user in the cluster :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), ('data_writer', 'inbox')]) .. warning:: Due to the asynchronous nature of Couchbase management APIs, it may take a few moments for the new user settings to take effect.
[ "Upsert", "a", "user", "in", "the", "cluster" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/admin.py#L373-L429
train
couchbase/couchbase-python-client
couchbase/connstr.py
convert_1x_args
def convert_1x_args(bucket, **kwargs):
    """
    Converts arguments for 1.x constructors to their 2.x forms

    :param bucket: The bucket name from the 1.x constructor
    :param kwargs: Remaining 1.x keyword arguments; ``host`` and ``port``
        are consumed here, everything else is passed through untouched.
    :return: The ``kwargs`` dict, with a ``connection_string`` entry added
        unless a connection string (``connstr`` or ``connection_string``)
        was already supplied.
    """
    host = kwargs.pop('host', 'localhost')
    port = kwargs.pop('port', None)
    # Only synthesize a connection string if the caller didn't provide one
    if 'connstr' not in kwargs and 'connection_string' not in kwargs:
        kwargs['connection_string'] = _build_connstr(host, port, bucket)
    return kwargs
python
def convert_1x_args(bucket, **kwargs): """ Converts arguments for 1.x constructors to their 2.x forms """ host = kwargs.pop('host', 'localhost') port = kwargs.pop('port', None) if not 'connstr' in kwargs and 'connection_string' not in kwargs: kwargs['connection_string'] = _build_connstr(host, port, bucket) return kwargs
[ "def", "convert_1x_args", "(", "bucket", ",", "*", "*", "kwargs", ")", ":", "host", "=", "kwargs", ".", "pop", "(", "'host'", ",", "'localhost'", ")", "port", "=", "kwargs", ".", "pop", "(", "'port'", ",", "None", ")", "if", "not", "'connstr'", "in", "kwargs", "and", "'connection_string'", "not", "in", "kwargs", ":", "kwargs", "[", "'connection_string'", "]", "=", "_build_connstr", "(", "host", ",", "port", ",", "bucket", ")", "return", "kwargs" ]
Converts arguments for 1.x constructors to their 2.x forms
[ "Converts", "arguments", "for", "1", ".", "x", "constructors", "to", "their", "2", ".", "x", "forms" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/connstr.py#L173-L181
train
couchbase/couchbase-python-client
couchbase/connstr.py
ConnectionString.parse
def parse(cls, ss):
    """
    Parses an existing connection string

    This method will return a :class:`~.ConnectionString` object
    which will allow further inspection on the input parameters.

    :param string ss: The existing connection string
    :return: A new :class:`~.ConnectionString` object
    """
    parsed = urlparse(ss)
    path = parsed.path
    query = parsed.query

    # Defensively split off any query fragment urlparse left in the path
    if '?' in path:
        path, _ = parsed.path.split('?')

    # The bucket is the path component without its leading slash
    bucket = path[1:] if path.startswith('/') else path

    return cls(bucket=bucket,
               options=parse_qs(query),
               hosts=parsed.netloc.split(','),
               scheme=parsed.scheme)
python
def parse(cls, ss): """ Parses an existing connection string This method will return a :class:`~.ConnectionString` object which will allow further inspection on the input parameters. :param string ss: The existing connection string :return: A new :class:`~.ConnectionString` object """ up = urlparse(ss) path = up.path query = up.query if '?' in path: path, _ = up.path.split('?') if path.startswith('/'): path = path[1:] bucket = path options = parse_qs(query) scheme = up.scheme hosts = up.netloc.split(',') return cls(bucket=bucket, options=options, hosts=hosts, scheme=scheme)
[ "def", "parse", "(", "cls", ",", "ss", ")", ":", "up", "=", "urlparse", "(", "ss", ")", "path", "=", "up", ".", "path", "query", "=", "up", ".", "query", "if", "'?'", "in", "path", ":", "path", ",", "_", "=", "up", ".", "path", ".", "split", "(", "'?'", ")", "if", "path", ".", "startswith", "(", "'/'", ")", ":", "path", "=", "path", "[", "1", ":", "]", "bucket", "=", "path", "options", "=", "parse_qs", "(", "query", ")", "scheme", "=", "up", ".", "scheme", "hosts", "=", "up", ".", "netloc", ".", "split", "(", "','", ")", "return", "cls", "(", "bucket", "=", "bucket", ",", "options", "=", "options", ",", "hosts", "=", "hosts", ",", "scheme", "=", "scheme", ")" ]
Parses an existing connection string This method will return a :class:`~.ConnectionString` object which will allow further inspection on the input parameters. :param string ss: The existing connection string :return: A new :class:`~.ConnectionString` object
[ "Parses", "an", "existing", "connection", "string" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/connstr.py#L76-L101
train
couchbase/couchbase-python-client
couchbase/connstr.py
ConnectionString.encode
def encode(self):
    """
    Encodes the current state of the object into a string.

    :return: The encoded string
    """
    # parse_qs stores each option as a list; keep only the first value
    flat_opts = dict((k, v[0]) for k, v in self.options.items())

    out = '{0}://{1}'.format(self.scheme, ','.join(self.hosts))
    if self.bucket:
        out += '/' + self.bucket

    # URL encode options then decoded forward slash /
    out += '?' + urlencode(flat_opts).replace('%2F', '/')
    return out
python
def encode(self): """ Encodes the current state of the object into a string. :return: The encoded string """ opt_dict = {} for k, v in self.options.items(): opt_dict[k] = v[0] ss = '{0}://{1}'.format(self.scheme, ','.join(self.hosts)) if self.bucket: ss += '/' + self.bucket # URL encode options then decoded forward slash / ss += '?' + urlencode(opt_dict).replace('%2F', '/') return ss
[ "def", "encode", "(", "self", ")", ":", "opt_dict", "=", "{", "}", "for", "k", ",", "v", "in", "self", ".", "options", ".", "items", "(", ")", ":", "opt_dict", "[", "k", "]", "=", "v", "[", "0", "]", "ss", "=", "'{0}://{1}'", ".", "format", "(", "self", ".", "scheme", ",", "','", ".", "join", "(", "self", ".", "hosts", ")", ")", "if", "self", ".", "bucket", ":", "ss", "+=", "'/'", "+", "self", ".", "bucket", "# URL encode options then decoded forward slash /", "ss", "+=", "'?'", "+", "urlencode", "(", "opt_dict", ")", ".", "replace", "(", "'%2F'", ",", "'/'", ")", "return", "ss" ]
Encodes the current state of the object into a string. :return: The encoded string
[ "Encodes", "the", "current", "state", "of", "the", "object", "into", "a", "string", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/connstr.py#L126-L143
train
couchbase/couchbase-python-client
couchbase/exceptions.py
CouchbaseError.rc_to_exctype
def rc_to_exctype(cls, rc):
    """
    Map an error code to an exception

    :param int rc: The error code received for an operation

    :return: a subclass of :class:`CouchbaseError`
    """
    # Lazily create and memoize exception classes for unknown codes
    if rc not in _LCB_ERRNO_MAP:
        _LCB_ERRNO_MAP[rc] = _mk_lcberr(rc)
    return _LCB_ERRNO_MAP[rc]
python
def rc_to_exctype(cls, rc): """ Map an error code to an exception :param int rc: The error code received for an operation :return: a subclass of :class:`CouchbaseError` """ try: return _LCB_ERRNO_MAP[rc] except KeyError: newcls = _mk_lcberr(rc) _LCB_ERRNO_MAP[rc] = newcls return newcls
[ "def", "rc_to_exctype", "(", "cls", ",", "rc", ")", ":", "try", ":", "return", "_LCB_ERRNO_MAP", "[", "rc", "]", "except", "KeyError", ":", "newcls", "=", "_mk_lcberr", "(", "rc", ")", "_LCB_ERRNO_MAP", "[", "rc", "]", "=", "newcls", "return", "newcls" ]
Map an error code to an exception :param int rc: The error code received for an operation :return: a subclass of :class:`CouchbaseError`
[ "Map", "an", "error", "code", "to", "an", "exception" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/exceptions.py#L91-L104
train
couchbase/couchbase-python-client
couchbase/exceptions.py
CouchbaseError.split_results
def split_results(self):
    """
    Convenience method to separate failed and successful results.

    .. versionadded:: 2.0.0

    This function will split the results of the failed operation
    (see :attr:`.all_results`) into "good" and "bad" dictionaries.

    The intent is for the application to handle any successful
    results in a success code path, and handle any failed results
    in a "retry" code path. For example

    .. code-block:: python

        try:
            cb.add_multi(docs)
        except CouchbaseTransientError as e:
            # Temporary failure or server OOM
            _, fail = e.split_results()

            # Sleep for a bit to reduce the load on the server
            time.sleep(0.5)

            # Try to add only the failed results again
            cb.add_multi(fail)

    Of course, in the example above, the second retry may fail as
    well, and a more robust implementation is left as an exercise
    to the reader.

    :return: A tuple of ( `ok`, `bad` ) dictionaries.
    """
    succeeded, failed = {}, {}
    anon_count = 0
    # Falsy keys get synthetic names built from the lexicographically
    # greatest real key, so the generated keys cannot clash with it.
    anon_prefix = ([""] + sorted(filter(bool, self.all_results.keys())))[-1]
    for key, result in self.all_results.items():
        if not key:
            key = anon_prefix + ":nokey:" + str(anon_count)
            anon_count += 1
        target = succeeded if getattr(result, 'success', True) else failed
        target[key] = result
    return succeeded, failed
python
def split_results(self): """ Convenience method to separate failed and successful results. .. versionadded:: 2.0.0 This function will split the results of the failed operation (see :attr:`.all_results`) into "good" and "bad" dictionaries. The intent is for the application to handle any successful results in a success code path, and handle any failed results in a "retry" code path. For example .. code-block:: python try: cb.add_multi(docs) except CouchbaseTransientError as e: # Temporary failure or server OOM _, fail = e.split_results() # Sleep for a bit to reduce the load on the server time.sleep(0.5) # Try to add only the failed results again cb.add_multi(fail) Of course, in the example above, the second retry may fail as well, and a more robust implementation is left as an exercise to the reader. :return: A tuple of ( `ok`, `bad` ) dictionaries. """ ret_ok, ret_fail = {}, {} count = 0 nokey_prefix = ([""] + sorted(filter(bool, self.all_results.keys())))[-1] for key, v in self.all_results.items(): if not key: key = nokey_prefix + ":nokey:" + str(count) count += 1 success = getattr(v,'success', True) if success: ret_ok[key] = v else: ret_fail[key] = v return ret_ok, ret_fail
[ "def", "split_results", "(", "self", ")", ":", "ret_ok", ",", "ret_fail", "=", "{", "}", ",", "{", "}", "count", "=", "0", "nokey_prefix", "=", "(", "[", "\"\"", "]", "+", "sorted", "(", "filter", "(", "bool", ",", "self", ".", "all_results", ".", "keys", "(", ")", ")", ")", ")", "[", "-", "1", "]", "for", "key", ",", "v", "in", "self", ".", "all_results", ".", "items", "(", ")", ":", "if", "not", "key", ":", "key", "=", "nokey_prefix", "+", "\":nokey:\"", "+", "str", "(", "count", ")", "count", "+=", "1", "success", "=", "getattr", "(", "v", ",", "'success'", ",", "True", ")", "if", "success", ":", "ret_ok", "[", "key", "]", "=", "v", "else", ":", "ret_fail", "[", "key", "]", "=", "v", "return", "ret_ok", ",", "ret_fail" ]
Convenience method to separate failed and successful results. .. versionadded:: 2.0.0 This function will split the results of the failed operation (see :attr:`.all_results`) into "good" and "bad" dictionaries. The intent is for the application to handle any successful results in a success code path, and handle any failed results in a "retry" code path. For example .. code-block:: python try: cb.add_multi(docs) except CouchbaseTransientError as e: # Temporary failure or server OOM _, fail = e.split_results() # Sleep for a bit to reduce the load on the server time.sleep(0.5) # Try to add only the failed results again cb.add_multi(fail) Of course, in the example above, the second retry may fail as well, and a more robust implementation is left as an exercise to the reader. :return: A tuple of ( `ok`, `bad` ) dictionaries.
[ "Convenience", "method", "to", "separate", "failed", "and", "successful", "results", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/exceptions.py#L162-L209
train
couchbase/couchbase-python-client
couchbase/items.py
ItemOptionDict.add
def add(self, itm, **options):
    """
    Convenience method to add an item together with a series of options.

    :param itm: The item to add
    :param options: keyword arguments which will be placed in the item's
        option entry.

    If the item already exists, it (and its options) will be overidden. Use
    :attr:`dict` instead to update options
    """
    # An empty option set is stored as None rather than an empty dict
    self._d[itm] = options if options else None
python
def add(self, itm, **options): """ Convenience method to add an item together with a series of options. :param itm: The item to add :param options: keyword arguments which will be placed in the item's option entry. If the item already exists, it (and its options) will be overidden. Use :attr:`dict` instead to update options """ if not options: options = None self._d[itm] = options
[ "def", "add", "(", "self", ",", "itm", ",", "*", "*", "options", ")", ":", "if", "not", "options", ":", "options", "=", "None", "self", ".", "_d", "[", "itm", "]", "=", "options" ]
Convenience method to add an item together with a series of options. :param itm: The item to add :param options: keyword arguments which will be placed in the item's option entry. If the item already exists, it (and its options) will be overidden. Use :attr:`dict` instead to update options
[ "Convenience", "method", "to", "add", "an", "item", "together", "with", "a", "series", "of", "options", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/items.py#L144-L158
train
couchbase/couchbase-python-client
couchbase/deprecation.py
deprecate_module_attribute
def deprecate_module_attribute(mod, deprecated):
    """Return a wrapped object that warns about deprecated accesses"""
    deprecated = set(deprecated)

    class _DeprecationProxy(object):
        # Delegate all attribute traffic to the wrapped module, emitting
        # a warning whenever a deprecated name is read or written.
        def __getattr__(self, attr):
            if attr in deprecated:
                warnings.warn("Property %s is deprecated" % attr)
            return getattr(mod, attr)

        def __setattr__(self, attr, value):
            if attr in deprecated:
                warnings.warn("Property %s is deprecated" % attr)
            return setattr(mod, attr, value)

    return _DeprecationProxy()
python
def deprecate_module_attribute(mod, deprecated): """Return a wrapped object that warns about deprecated accesses""" deprecated = set(deprecated) class Wrapper(object): def __getattr__(self, attr): if attr in deprecated: warnings.warn("Property %s is deprecated" % attr) return getattr(mod, attr) def __setattr__(self, attr, value): if attr in deprecated: warnings.warn("Property %s is deprecated" % attr) return setattr(mod, attr, value) return Wrapper()
[ "def", "deprecate_module_attribute", "(", "mod", ",", "deprecated", ")", ":", "deprecated", "=", "set", "(", "deprecated", ")", "class", "Wrapper", "(", "object", ")", ":", "def", "__getattr__", "(", "self", ",", "attr", ")", ":", "if", "attr", "in", "deprecated", ":", "warnings", ".", "warn", "(", "\"Property %s is deprecated\"", "%", "attr", ")", "return", "getattr", "(", "mod", ",", "attr", ")", "def", "__setattr__", "(", "self", ",", "attr", ",", "value", ")", ":", "if", "attr", "in", "deprecated", ":", "warnings", ".", "warn", "(", "\"Property %s is deprecated\"", "%", "attr", ")", "return", "setattr", "(", "mod", ",", "attr", ",", "value", ")", "return", "Wrapper", "(", ")" ]
Return a wrapped object that warns about deprecated accesses
[ "Return", "a", "wrapped", "object", "that", "warns", "about", "deprecated", "accesses" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/deprecation.py#L4-L19
train
couchbase/couchbase-python-client
couchbase/result.py
SubdocResult.get
def get(self, path_or_index, default=None): """ Get details about a given result :param path_or_index: The path (or index) of the result to fetch. :param default: If the given result does not exist, return this value instead :return: A tuple of `(error, value)`. If the entry does not exist then `(err, default)` is returned, where `err` is the actual error which occurred. You can use :meth:`couchbase.exceptions.CouchbaseError.rc_to_exctype` to convert the error code to a proper exception class :raise: :exc:`IndexError` or :exc:`KeyError` if `path_or_index` is not an initially requested path. This is a programming error as opposed to a constraint error where the path is not found. """ err, value = self._resolve(path_or_index) value = default if err else value return err, value
python
def get(self, path_or_index, default=None): """ Get details about a given result :param path_or_index: The path (or index) of the result to fetch. :param default: If the given result does not exist, return this value instead :return: A tuple of `(error, value)`. If the entry does not exist then `(err, default)` is returned, where `err` is the actual error which occurred. You can use :meth:`couchbase.exceptions.CouchbaseError.rc_to_exctype` to convert the error code to a proper exception class :raise: :exc:`IndexError` or :exc:`KeyError` if `path_or_index` is not an initially requested path. This is a programming error as opposed to a constraint error where the path is not found. """ err, value = self._resolve(path_or_index) value = default if err else value return err, value
[ "def", "get", "(", "self", ",", "path_or_index", ",", "default", "=", "None", ")", ":", "err", ",", "value", "=", "self", ".", "_resolve", "(", "path_or_index", ")", "value", "=", "default", "if", "err", "else", "value", "return", "err", ",", "value" ]
Get details about a given result :param path_or_index: The path (or index) of the result to fetch. :param default: If the given result does not exist, return this value instead :return: A tuple of `(error, value)`. If the entry does not exist then `(err, default)` is returned, where `err` is the actual error which occurred. You can use :meth:`couchbase.exceptions.CouchbaseError.rc_to_exctype` to convert the error code to a proper exception class :raise: :exc:`IndexError` or :exc:`KeyError` if `path_or_index` is not an initially requested path. This is a programming error as opposed to a constraint error where the path is not found.
[ "Get", "details", "about", "a", "given", "result" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/result.py#L115-L133
train
couchbase/couchbase-python-client
couchbase/asynchronous/bucket.py
AsyncBucket.query
def query(self, *args, **kwargs): """ Reimplemented from base class. This method does not add additional functionality of the base class' :meth:`~couchbase.bucket.Bucket.query` method (all the functionality is encapsulated in the view class anyway). However it does require one additional keyword argument :param class itercls: A class used for instantiating the view object. This should be a subclass of :class:`~couchbase.asynchronous.view.AsyncViewBase`. """ if not issubclass(kwargs.get('itercls', None), AsyncViewBase): raise ArgumentError.pyexc("itercls must be defined " "and must be derived from AsyncViewBase") return super(AsyncBucket, self).query(*args, **kwargs)
python
def query(self, *args, **kwargs): """ Reimplemented from base class. This method does not add additional functionality of the base class' :meth:`~couchbase.bucket.Bucket.query` method (all the functionality is encapsulated in the view class anyway). However it does require one additional keyword argument :param class itercls: A class used for instantiating the view object. This should be a subclass of :class:`~couchbase.asynchronous.view.AsyncViewBase`. """ if not issubclass(kwargs.get('itercls', None), AsyncViewBase): raise ArgumentError.pyexc("itercls must be defined " "and must be derived from AsyncViewBase") return super(AsyncBucket, self).query(*args, **kwargs)
[ "def", "query", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "issubclass", "(", "kwargs", ".", "get", "(", "'itercls'", ",", "None", ")", ",", "AsyncViewBase", ")", ":", "raise", "ArgumentError", ".", "pyexc", "(", "\"itercls must be defined \"", "\"and must be derived from AsyncViewBase\"", ")", "return", "super", "(", "AsyncBucket", ",", "self", ")", ".", "query", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Reimplemented from base class. This method does not add additional functionality of the base class' :meth:`~couchbase.bucket.Bucket.query` method (all the functionality is encapsulated in the view class anyway). However it does require one additional keyword argument :param class itercls: A class used for instantiating the view object. This should be a subclass of :class:`~couchbase.asynchronous.view.AsyncViewBase`.
[ "Reimplemented", "from", "base", "class", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/asynchronous/bucket.py#L154-L171
train
couchbase/couchbase-python-client
couchbase/subdocument.py
_gen_3spec
def _gen_3spec(op, path, xattr=False): """ Returns a Spec tuple suitable for passing to the underlying C extension. This variant is called for operations that lack an input value. :param str path: The path to fetch :param bool xattr: Whether this is an extended attribute :return: a spec suitable for passing to the underlying C extension """ flags = 0 if xattr: flags |= _P.SDSPEC_F_XATTR return Spec(op, path, flags)
python
def _gen_3spec(op, path, xattr=False): """ Returns a Spec tuple suitable for passing to the underlying C extension. This variant is called for operations that lack an input value. :param str path: The path to fetch :param bool xattr: Whether this is an extended attribute :return: a spec suitable for passing to the underlying C extension """ flags = 0 if xattr: flags |= _P.SDSPEC_F_XATTR return Spec(op, path, flags)
[ "def", "_gen_3spec", "(", "op", ",", "path", ",", "xattr", "=", "False", ")", ":", "flags", "=", "0", "if", "xattr", ":", "flags", "|=", "_P", ".", "SDSPEC_F_XATTR", "return", "Spec", "(", "op", ",", "path", ",", "flags", ")" ]
Returns a Spec tuple suitable for passing to the underlying C extension. This variant is called for operations that lack an input value. :param str path: The path to fetch :param bool xattr: Whether this is an extended attribute :return: a spec suitable for passing to the underlying C extension
[ "Returns", "a", "Spec", "tuple", "suitable", "for", "passing", "to", "the", "underlying", "C", "extension", ".", "This", "variant", "is", "called", "for", "operations", "that", "lack", "an", "input", "value", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L34-L46
train
couchbase/couchbase-python-client
couchbase/subdocument.py
upsert
def upsert(path, value, create_parents=False, **kwargs): """ Create or replace a dictionary path. :param path: The path to modify :param value: The new value for the path. This should be a native Python object which can be encoded into JSON (the SDK will do the encoding for you). :param create_parents: Whether intermediate parents should be created. This means creating any additional levels of hierarchy not already in the document, for example: .. code-block:: python {'foo': {}} Without `create_parents`, an operation such as .. code-block:: python cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue")) would fail with :cb_exc:`SubdocPathNotFoundError` because `foo.bar` does not exist. However when using the `create_parents` option, the server creates the new `foo.bar` dictionary and then inserts the `baz` value. """ return _gen_4spec(LCB_SDCMD_DICT_UPSERT, path, value, create_path=create_parents, **kwargs)
python
def upsert(path, value, create_parents=False, **kwargs): """ Create or replace a dictionary path. :param path: The path to modify :param value: The new value for the path. This should be a native Python object which can be encoded into JSON (the SDK will do the encoding for you). :param create_parents: Whether intermediate parents should be created. This means creating any additional levels of hierarchy not already in the document, for example: .. code-block:: python {'foo': {}} Without `create_parents`, an operation such as .. code-block:: python cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue")) would fail with :cb_exc:`SubdocPathNotFoundError` because `foo.bar` does not exist. However when using the `create_parents` option, the server creates the new `foo.bar` dictionary and then inserts the `baz` value. """ return _gen_4spec(LCB_SDCMD_DICT_UPSERT, path, value, create_path=create_parents, **kwargs)
[ "def", "upsert", "(", "path", ",", "value", ",", "create_parents", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "_gen_4spec", "(", "LCB_SDCMD_DICT_UPSERT", ",", "path", ",", "value", ",", "create_path", "=", "create_parents", ",", "*", "*", "kwargs", ")" ]
Create or replace a dictionary path. :param path: The path to modify :param value: The new value for the path. This should be a native Python object which can be encoded into JSON (the SDK will do the encoding for you). :param create_parents: Whether intermediate parents should be created. This means creating any additional levels of hierarchy not already in the document, for example: .. code-block:: python {'foo': {}} Without `create_parents`, an operation such as .. code-block:: python cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue")) would fail with :cb_exc:`SubdocPathNotFoundError` because `foo.bar` does not exist. However when using the `create_parents` option, the server creates the new `foo.bar` dictionary and then inserts the `baz` value.
[ "Create", "or", "replace", "a", "dictionary", "path", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L100-L129
train
couchbase/couchbase-python-client
couchbase/subdocument.py
array_append
def array_append(path, *values, **kwargs): """ Add new values to the end of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist .. note:: Specifying multiple values in `values` is more than just syntactical sugar. It allows the server to insert the values as one single unit. If you have multiple values to append to the same array, ensure they are specified as multiple arguments to `array_append` rather than multiple `array_append` commands to :cb_bmeth:`mutate_in` This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_LAST, path, MultiValue(*values), create_path=kwargs.pop('create_parents', False), **kwargs)
python
def array_append(path, *values, **kwargs): """ Add new values to the end of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist .. note:: Specifying multiple values in `values` is more than just syntactical sugar. It allows the server to insert the values as one single unit. If you have multiple values to append to the same array, ensure they are specified as multiple arguments to `array_append` rather than multiple `array_append` commands to :cb_bmeth:`mutate_in` This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_LAST, path, MultiValue(*values), create_path=kwargs.pop('create_parents', False), **kwargs)
[ "def", "array_append", "(", "path", ",", "*", "values", ",", "*", "*", "kwargs", ")", ":", "return", "_gen_4spec", "(", "LCB_SDCMD_ARRAY_ADD_LAST", ",", "path", ",", "MultiValue", "(", "*", "values", ")", ",", "create_path", "=", "kwargs", ".", "pop", "(", "'create_parents'", ",", "False", ")", ",", "*", "*", "kwargs", ")" ]
Add new values to the end of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist .. note:: Specifying multiple values in `values` is more than just syntactical sugar. It allows the server to insert the values as one single unit. If you have multiple values to append to the same array, ensure they are specified as multiple arguments to `array_append` rather than multiple `array_append` commands to :cb_bmeth:`mutate_in` This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert`
[ "Add", "new", "values", "to", "the", "end", "of", "an", "array", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L157-L181
train
couchbase/couchbase-python-client
couchbase/subdocument.py
array_prepend
def array_prepend(path, *values, **kwargs): """ Add new values to the beginning of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_FIRST, path, MultiValue(*values), create_path=kwargs.pop('create_parents', False), **kwargs)
python
def array_prepend(path, *values, **kwargs): """ Add new values to the beginning of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_FIRST, path, MultiValue(*values), create_path=kwargs.pop('create_parents', False), **kwargs)
[ "def", "array_prepend", "(", "path", ",", "*", "values", ",", "*", "*", "kwargs", ")", ":", "return", "_gen_4spec", "(", "LCB_SDCMD_ARRAY_ADD_FIRST", ",", "path", ",", "MultiValue", "(", "*", "values", ")", ",", "create_path", "=", "kwargs", ".", "pop", "(", "'create_parents'", ",", "False", ")", ",", "*", "*", "kwargs", ")" ]
Add new values to the beginning of an array. :param path: Path to the array. The path should contain the *array itself* and not an element *within* the array :param values: one or more values to append :param create_parents: Create the array if it does not exist This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert`
[ "Add", "new", "values", "to", "the", "beginning", "of", "an", "array", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L184-L200
train
couchbase/couchbase-python-client
couchbase/subdocument.py
array_insert
def array_insert(path, *values, **kwargs): """ Insert items at a given position within an array. :param path: The path indicating where the item should be placed. The path _should_ contain the desired position :param values: Values to insert This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_INSERT, path, MultiValue(*values), **kwargs)
python
def array_insert(path, *values, **kwargs): """ Insert items at a given position within an array. :param path: The path indicating where the item should be placed. The path _should_ contain the desired position :param values: Values to insert This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_INSERT, path, MultiValue(*values), **kwargs)
[ "def", "array_insert", "(", "path", ",", "*", "values", ",", "*", "*", "kwargs", ")", ":", "return", "_gen_4spec", "(", "LCB_SDCMD_ARRAY_INSERT", ",", "path", ",", "MultiValue", "(", "*", "values", ")", ",", "*", "*", "kwargs", ")" ]
Insert items at a given position within an array. :param path: The path indicating where the item should be placed. The path _should_ contain the desired position :param values: Values to insert This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_prepend`, :func:`upsert`
[ "Insert", "items", "at", "a", "given", "position", "within", "an", "array", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L203-L216
train
couchbase/couchbase-python-client
couchbase/subdocument.py
array_addunique
def array_addunique(path, value, create_parents=False, **kwargs): """ Add a new value to an array if the value does not exist. :param path: The path to the array :param value: Value to add to the array if it does not exist. Currently the value is restricted to primitives: strings, numbers, booleans, and `None` values. :param create_parents: Create the array if it does not exist .. note:: The actual position of the new item is unspecified. This means it may be at the beginning, end, or middle of the existing array) This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_UNIQUE, path, value, create_path=create_parents, **kwargs)
python
def array_addunique(path, value, create_parents=False, **kwargs): """ Add a new value to an array if the value does not exist. :param path: The path to the array :param value: Value to add to the array if it does not exist. Currently the value is restricted to primitives: strings, numbers, booleans, and `None` values. :param create_parents: Create the array if it does not exist .. note:: The actual position of the new item is unspecified. This means it may be at the beginning, end, or middle of the existing array) This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_UNIQUE, path, value, create_path=create_parents, **kwargs)
[ "def", "array_addunique", "(", "path", ",", "value", ",", "create_parents", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "_gen_4spec", "(", "LCB_SDCMD_ARRAY_ADD_UNIQUE", ",", "path", ",", "value", ",", "create_path", "=", "create_parents", ",", "*", "*", "kwargs", ")" ]
Add a new value to an array if the value does not exist. :param path: The path to the array :param value: Value to add to the array if it does not exist. Currently the value is restricted to primitives: strings, numbers, booleans, and `None` values. :param create_parents: Create the array if it does not exist .. note:: The actual position of the new item is unspecified. This means it may be at the beginning, end, or middle of the existing array) This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`array_append`, :func:`upsert`
[ "Add", "a", "new", "value", "to", "an", "array", "if", "the", "value", "does", "not", "exist", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L219-L240
train
couchbase/couchbase-python-client
couchbase/subdocument.py
counter
def counter(path, delta, create_parents=False, **kwargs): """ Increment or decrement a counter in a document. :param path: Path to the counter :param delta: Amount by which to modify the value. The delta can be negative but not 0. It must be an integer (not a float) as well. :param create_parents: Create the counter (and apply the modification) if it does not exist .. note:: Unlike :meth:`couchbase.bucket.Bucket.counter`, there is no `initial` argument. If the counter does not exist within the document (but its parent does, or `create_parents` is true), it will be initialized with the value of the `delta`. This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`upsert`, :cb_bmeth:`counter` (in `Bucket`) """ if not delta: raise ValueError("Delta must be positive or negative!") return _gen_4spec(LCB_SDCMD_COUNTER, path, delta, create_path=create_parents, **kwargs)
python
def counter(path, delta, create_parents=False, **kwargs): """ Increment or decrement a counter in a document. :param path: Path to the counter :param delta: Amount by which to modify the value. The delta can be negative but not 0. It must be an integer (not a float) as well. :param create_parents: Create the counter (and apply the modification) if it does not exist .. note:: Unlike :meth:`couchbase.bucket.Bucket.counter`, there is no `initial` argument. If the counter does not exist within the document (but its parent does, or `create_parents` is true), it will be initialized with the value of the `delta`. This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`upsert`, :cb_bmeth:`counter` (in `Bucket`) """ if not delta: raise ValueError("Delta must be positive or negative!") return _gen_4spec(LCB_SDCMD_COUNTER, path, delta, create_path=create_parents, **kwargs)
[ "def", "counter", "(", "path", ",", "delta", ",", "create_parents", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "not", "delta", ":", "raise", "ValueError", "(", "\"Delta must be positive or negative!\"", ")", "return", "_gen_4spec", "(", "LCB_SDCMD_COUNTER", ",", "path", ",", "delta", ",", "create_path", "=", "create_parents", ",", "*", "*", "kwargs", ")" ]
Increment or decrement a counter in a document. :param path: Path to the counter :param delta: Amount by which to modify the value. The delta can be negative but not 0. It must be an integer (not a float) as well. :param create_parents: Create the counter (and apply the modification) if it does not exist .. note:: Unlike :meth:`couchbase.bucket.Bucket.counter`, there is no `initial` argument. If the counter does not exist within the document (but its parent does, or `create_parents` is true), it will be initialized with the value of the `delta`. This operation is only valid in :cb_bmeth:`mutate_in`. .. seealso:: :func:`upsert`, :cb_bmeth:`counter` (in `Bucket`)
[ "Increment", "or", "decrement", "a", "counter", "in", "a", "document", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/subdocument.py#L243-L268
train
couchbase/couchbase-python-client
couchbase/mutation_state.py
MutationState.add_results
def add_results(self, *rvs, **kwargs): """ Changes the state to reflect the mutation which yielded the given result. In order to use the result, the `fetch_mutation_tokens` option must have been specified in the connection string, _and_ the result must have been successful. :param rvs: One or more :class:`~.OperationResult` which have been returned from mutations :param quiet: Suppress errors if one of the results does not contain a convertible state. :return: `True` if the result was valid and added, `False` if not added (and `quiet` was specified :raise: :exc:`~.MissingTokenError` if `result` does not contain a valid token """ if not rvs: raise MissingTokenError.pyexc(message='No results passed') for rv in rvs: mi = rv._mutinfo if not mi: if kwargs.get('quiet'): return False raise MissingTokenError.pyexc( message='Result does not contain token') self._add_scanvec(mi) return True
python
def add_results(self, *rvs, **kwargs): """ Changes the state to reflect the mutation which yielded the given result. In order to use the result, the `fetch_mutation_tokens` option must have been specified in the connection string, _and_ the result must have been successful. :param rvs: One or more :class:`~.OperationResult` which have been returned from mutations :param quiet: Suppress errors if one of the results does not contain a convertible state. :return: `True` if the result was valid and added, `False` if not added (and `quiet` was specified :raise: :exc:`~.MissingTokenError` if `result` does not contain a valid token """ if not rvs: raise MissingTokenError.pyexc(message='No results passed') for rv in rvs: mi = rv._mutinfo if not mi: if kwargs.get('quiet'): return False raise MissingTokenError.pyexc( message='Result does not contain token') self._add_scanvec(mi) return True
[ "def", "add_results", "(", "self", ",", "*", "rvs", ",", "*", "*", "kwargs", ")", ":", "if", "not", "rvs", ":", "raise", "MissingTokenError", ".", "pyexc", "(", "message", "=", "'No results passed'", ")", "for", "rv", "in", "rvs", ":", "mi", "=", "rv", ".", "_mutinfo", "if", "not", "mi", ":", "if", "kwargs", ".", "get", "(", "'quiet'", ")", ":", "return", "False", "raise", "MissingTokenError", ".", "pyexc", "(", "message", "=", "'Result does not contain token'", ")", "self", ".", "_add_scanvec", "(", "mi", ")", "return", "True" ]
Changes the state to reflect the mutation which yielded the given result. In order to use the result, the `fetch_mutation_tokens` option must have been specified in the connection string, _and_ the result must have been successful. :param rvs: One or more :class:`~.OperationResult` which have been returned from mutations :param quiet: Suppress errors if one of the results does not contain a convertible state. :return: `True` if the result was valid and added, `False` if not added (and `quiet` was specified :raise: :exc:`~.MissingTokenError` if `result` does not contain a valid token
[ "Changes", "the", "state", "to", "reflect", "the", "mutation", "which", "yielded", "the", "given", "result", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/mutation_state.py#L111-L139
train
couchbase/couchbase-python-client
couchbase/mutation_state.py
MutationState.add_all
def add_all(self, bucket, quiet=False): """ Ensures the query result is consistent with all prior mutations performed by a given bucket. Using this function is equivalent to keeping track of all mutations performed by the given bucket, and passing them to :meth:`~add_result` :param bucket: A :class:`~couchbase.bucket.Bucket` object used for the mutations :param quiet: If the bucket contains no valid mutations, this option suppresses throwing exceptions. :return: `True` if at least one mutation was added, `False` if none were added (and `quiet` was specified) :raise: :exc:`~.MissingTokenError` if no mutations were added and `quiet` was not specified """ added = False for mt in bucket._mutinfo(): added = True self._add_scanvec(mt) if not added and not quiet: raise MissingTokenError('Bucket object contains no tokens!') return added
python
def add_all(self, bucket, quiet=False): """ Ensures the query result is consistent with all prior mutations performed by a given bucket. Using this function is equivalent to keeping track of all mutations performed by the given bucket, and passing them to :meth:`~add_result` :param bucket: A :class:`~couchbase.bucket.Bucket` object used for the mutations :param quiet: If the bucket contains no valid mutations, this option suppresses throwing exceptions. :return: `True` if at least one mutation was added, `False` if none were added (and `quiet` was specified) :raise: :exc:`~.MissingTokenError` if no mutations were added and `quiet` was not specified """ added = False for mt in bucket._mutinfo(): added = True self._add_scanvec(mt) if not added and not quiet: raise MissingTokenError('Bucket object contains no tokens!') return added
[ "def", "add_all", "(", "self", ",", "bucket", ",", "quiet", "=", "False", ")", ":", "added", "=", "False", "for", "mt", "in", "bucket", ".", "_mutinfo", "(", ")", ":", "added", "=", "True", "self", ".", "_add_scanvec", "(", "mt", ")", "if", "not", "added", "and", "not", "quiet", ":", "raise", "MissingTokenError", "(", "'Bucket object contains no tokens!'", ")", "return", "added" ]
Ensures the query result is consistent with all prior mutations performed by a given bucket. Using this function is equivalent to keeping track of all mutations performed by the given bucket, and passing them to :meth:`~add_result` :param bucket: A :class:`~couchbase.bucket.Bucket` object used for the mutations :param quiet: If the bucket contains no valid mutations, this option suppresses throwing exceptions. :return: `True` if at least one mutation was added, `False` if none were added (and `quiet` was specified) :raise: :exc:`~.MissingTokenError` if no mutations were added and `quiet` was not specified
[ "Ensures", "the", "query", "result", "is", "consistent", "with", "all", "prior", "mutations", "performed", "by", "a", "given", "bucket", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/mutation_state.py#L141-L165
train
couchbase/couchbase-python-client
couchbase/fulltext.py
_assign_kwargs
def _assign_kwargs(self, kwargs): """ Assigns all keyword arguments to a given instance, raising an exception if one of the keywords is not already the name of a property. """ for k in kwargs: if not hasattr(self, k): raise AttributeError(k, 'Not valid for', self.__class__.__name__) setattr(self, k, kwargs[k])
python
def _assign_kwargs(self, kwargs): """ Assigns all keyword arguments to a given instance, raising an exception if one of the keywords is not already the name of a property. """ for k in kwargs: if not hasattr(self, k): raise AttributeError(k, 'Not valid for', self.__class__.__name__) setattr(self, k, kwargs[k])
[ "def", "_assign_kwargs", "(", "self", ",", "kwargs", ")", ":", "for", "k", "in", "kwargs", ":", "if", "not", "hasattr", "(", "self", ",", "k", ")", ":", "raise", "AttributeError", "(", "k", ",", "'Not valid for'", ",", "self", ".", "__class__", ".", "__name__", ")", "setattr", "(", "self", ",", "k", ",", "kwargs", "[", "k", "]", ")" ]
Assigns all keyword arguments to a given instance, raising an exception if one of the keywords is not already the name of a property.
[ "Assigns", "all", "keyword", "arguments", "to", "a", "given", "instance", "raising", "an", "exception", "if", "one", "of", "the", "keywords", "is", "not", "already", "the", "name", "of", "a", "property", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/fulltext.py#L86-L94
train
couchbase/couchbase-python-client
couchbase/fulltext.py
_mk_range_bucket
def _mk_range_bucket(name, n1, n2, r1, r2): """ Create a named range specification for encoding. :param name: The name of the range as it should appear in the result :param n1: The name of the lower bound of the range specifier :param n2: The name of the upper bound of the range specified :param r1: The value of the lower bound (user value) :param r2: The value of the upper bound (user value) :return: A dictionary containing the range bounds. The upper and lower bounds are keyed under ``n1`` and ``n2``. More than just a simple wrapper, this will not include any range bound which has a user value of `None`. Likewise it will raise an exception if both range values are ``None``. """ d = {} if r1 is not None: d[n1] = r1 if r2 is not None: d[n2] = r2 if not d: raise TypeError('Must specify at least one range boundary!') d['name'] = name return d
python
def _mk_range_bucket(name, n1, n2, r1, r2): """ Create a named range specification for encoding. :param name: The name of the range as it should appear in the result :param n1: The name of the lower bound of the range specifier :param n2: The name of the upper bound of the range specified :param r1: The value of the lower bound (user value) :param r2: The value of the upper bound (user value) :return: A dictionary containing the range bounds. The upper and lower bounds are keyed under ``n1`` and ``n2``. More than just a simple wrapper, this will not include any range bound which has a user value of `None`. Likewise it will raise an exception if both range values are ``None``. """ d = {} if r1 is not None: d[n1] = r1 if r2 is not None: d[n2] = r2 if not d: raise TypeError('Must specify at least one range boundary!') d['name'] = name return d
[ "def", "_mk_range_bucket", "(", "name", ",", "n1", ",", "n2", ",", "r1", ",", "r2", ")", ":", "d", "=", "{", "}", "if", "r1", "is", "not", "None", ":", "d", "[", "n1", "]", "=", "r1", "if", "r2", "is", "not", "None", ":", "d", "[", "n2", "]", "=", "r2", "if", "not", "d", ":", "raise", "TypeError", "(", "'Must specify at least one range boundary!'", ")", "d", "[", "'name'", "]", "=", "name", "return", "d" ]
Create a named range specification for encoding. :param name: The name of the range as it should appear in the result :param n1: The name of the lower bound of the range specifier :param n2: The name of the upper bound of the range specified :param r1: The value of the lower bound (user value) :param r2: The value of the upper bound (user value) :return: A dictionary containing the range bounds. The upper and lower bounds are keyed under ``n1`` and ``n2``. More than just a simple wrapper, this will not include any range bound which has a user value of `None`. Likewise it will raise an exception if both range values are ``None``.
[ "Create", "a", "named", "range", "specification", "for", "encoding", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/fulltext.py#L133-L157
train
couchbase/couchbase-python-client
couchbase/fulltext.py
DateFacet.add_range
def add_range(self, name, start=None, end=None): """ Adds a date range to the given facet. :param str name: The name by which the results within the range can be accessed :param str start: Lower date range. Should be in RFC 3339 format :param str end: Upper date range. :return: The `DateFacet` object, so calls to this method may be chained """ self._ranges.append(_mk_range_bucket(name, 'start', 'end', start, end)) return self
python
def add_range(self, name, start=None, end=None): """ Adds a date range to the given facet. :param str name: The name by which the results within the range can be accessed :param str start: Lower date range. Should be in RFC 3339 format :param str end: Upper date range. :return: The `DateFacet` object, so calls to this method may be chained """ self._ranges.append(_mk_range_bucket(name, 'start', 'end', start, end)) return self
[ "def", "add_range", "(", "self", ",", "name", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "self", ".", "_ranges", ".", "append", "(", "_mk_range_bucket", "(", "name", ",", "'start'", ",", "'end'", ",", "start", ",", "end", ")", ")", "return", "self" ]
Adds a date range to the given facet. :param str name: The name by which the results within the range can be accessed :param str start: Lower date range. Should be in RFC 3339 format :param str end: Upper date range. :return: The `DateFacet` object, so calls to this method may be chained
[ "Adds", "a", "date", "range", "to", "the", "given", "facet", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/fulltext.py#L170-L182
train
couchbase/couchbase-python-client
couchbase/fulltext.py
NumericFacet.add_range
def add_range(self, name, min=None, max=None): """ Add a numeric range. :param str name: the name by which the range is accessed in the results :param int | float min: Lower range bound :param int | float max: Upper range bound :return: This object; suitable for method chaining """ self._ranges.append(_mk_range_bucket(name, 'min', 'max', min, max)) return self
python
def add_range(self, name, min=None, max=None): """ Add a numeric range. :param str name: the name by which the range is accessed in the results :param int | float min: Lower range bound :param int | float max: Upper range bound :return: This object; suitable for method chaining """ self._ranges.append(_mk_range_bucket(name, 'min', 'max', min, max)) return self
[ "def", "add_range", "(", "self", ",", "name", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "self", ".", "_ranges", ".", "append", "(", "_mk_range_bucket", "(", "name", ",", "'min'", ",", "'max'", ",", "min", ",", "max", ")", ")", "return", "self" ]
Add a numeric range. :param str name: the name by which the range is accessed in the results :param int | float min: Lower range bound :param int | float max: Upper range bound :return: This object; suitable for method chaining
[ "Add", "a", "numeric", "range", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/fulltext.py#L197-L208
train
couchbase/couchbase-python-client
couchbase/fulltext.py
SearchRequest.mk_kwargs
def mk_kwargs(cls, kwargs): """ Pop recognized arguments from a keyword list. """ ret = {} kws = ['row_factory', 'body', 'parent'] for k in kws: if k in kwargs: ret[k] = kwargs.pop(k) return ret
python
def mk_kwargs(cls, kwargs): """ Pop recognized arguments from a keyword list. """ ret = {} kws = ['row_factory', 'body', 'parent'] for k in kws: if k in kwargs: ret[k] = kwargs.pop(k) return ret
[ "def", "mk_kwargs", "(", "cls", ",", "kwargs", ")", ":", "ret", "=", "{", "}", "kws", "=", "[", "'row_factory'", ",", "'body'", ",", "'parent'", "]", "for", "k", "in", "kws", ":", "if", "k", "in", "kwargs", ":", "ret", "[", "k", "]", "=", "kwargs", ".", "pop", "(", "k", ")", "return", "ret" ]
Pop recognized arguments from a keyword list.
[ "Pop", "recognized", "arguments", "from", "a", "keyword", "list", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/fulltext.py#L1115-L1125
train
couchbase/couchbase-python-client
couchbase/n1ql.py
N1QLQuery._set_named_args
def _set_named_args(self, **kv): """ Set a named parameter in the query. The named field must exist in the query itself. :param kv: Key-Value pairs representing values within the query. These values should be stripped of their leading `$` identifier. """ for k in kv: self._body['${0}'.format(k)] = kv[k] return self
python
def _set_named_args(self, **kv): """ Set a named parameter in the query. The named field must exist in the query itself. :param kv: Key-Value pairs representing values within the query. These values should be stripped of their leading `$` identifier. """ for k in kv: self._body['${0}'.format(k)] = kv[k] return self
[ "def", "_set_named_args", "(", "self", ",", "*", "*", "kv", ")", ":", "for", "k", "in", "kv", ":", "self", ".", "_body", "[", "'${0}'", ".", "format", "(", "k", ")", "]", "=", "kv", "[", "k", "]", "return", "self" ]
Set a named parameter in the query. The named field must exist in the query itself. :param kv: Key-Value pairs representing values within the query. These values should be stripped of their leading `$` identifier.
[ "Set", "a", "named", "parameter", "in", "the", "query", ".", "The", "named", "field", "must", "exist", "in", "the", "query", "itself", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/n1ql.py#L134-L146
train
couchbase/couchbase-python-client
couchbase/n1ql.py
N1QLQuery.consistent_with
def consistent_with(self, state): """ Indicate that the query should be consistent with one or more mutations. :param state: The state of the mutations it should be consistent with. :type state: :class:`~.couchbase.mutation_state.MutationState` """ if self.consistency not in (UNBOUNDED, NOT_BOUNDED, 'at_plus'): raise TypeError( 'consistent_with not valid with other consistency options') if not state: raise TypeError('Passed empty or invalid state', state) self.consistency = 'at_plus' self._body['scan_vectors'] = state._sv
python
def consistent_with(self, state): """ Indicate that the query should be consistent with one or more mutations. :param state: The state of the mutations it should be consistent with. :type state: :class:`~.couchbase.mutation_state.MutationState` """ if self.consistency not in (UNBOUNDED, NOT_BOUNDED, 'at_plus'): raise TypeError( 'consistent_with not valid with other consistency options') if not state: raise TypeError('Passed empty or invalid state', state) self.consistency = 'at_plus' self._body['scan_vectors'] = state._sv
[ "def", "consistent_with", "(", "self", ",", "state", ")", ":", "if", "self", ".", "consistency", "not", "in", "(", "UNBOUNDED", ",", "NOT_BOUNDED", ",", "'at_plus'", ")", ":", "raise", "TypeError", "(", "'consistent_with not valid with other consistency options'", ")", "if", "not", "state", ":", "raise", "TypeError", "(", "'Passed empty or invalid state'", ",", "state", ")", "self", ".", "consistency", "=", "'at_plus'", "self", ".", "_body", "[", "'scan_vectors'", "]", "=", "state", ".", "_sv" ]
Indicate that the query should be consistent with one or more mutations. :param state: The state of the mutations it should be consistent with. :type state: :class:`~.couchbase.mutation_state.MutationState`
[ "Indicate", "that", "the", "query", "should", "be", "consistent", "with", "one", "or", "more", "mutations", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/n1ql.py#L194-L210
train
couchbase/couchbase-python-client
couchbase/n1ql.py
N1QLQuery.timeout
def timeout(self): """ Optional per-query timeout. If set, this will limit the amount of time in which the query can be executed and waited for. .. note:: The effective timeout for the query will be either this property or the value of :attr:`couchbase.bucket.Bucket.n1ql_timeout` property, whichever is *lower*. .. seealso:: couchbase.bucket.Bucket.n1ql_timeout """ value = self._body.get('timeout', '0s') value = value[:-1] return float(value)
python
def timeout(self): """ Optional per-query timeout. If set, this will limit the amount of time in which the query can be executed and waited for. .. note:: The effective timeout for the query will be either this property or the value of :attr:`couchbase.bucket.Bucket.n1ql_timeout` property, whichever is *lower*. .. seealso:: couchbase.bucket.Bucket.n1ql_timeout """ value = self._body.get('timeout', '0s') value = value[:-1] return float(value)
[ "def", "timeout", "(", "self", ")", ":", "value", "=", "self", ".", "_body", ".", "get", "(", "'timeout'", ",", "'0s'", ")", "value", "=", "value", "[", ":", "-", "1", "]", "return", "float", "(", "value", ")" ]
Optional per-query timeout. If set, this will limit the amount of time in which the query can be executed and waited for. .. note:: The effective timeout for the query will be either this property or the value of :attr:`couchbase.bucket.Bucket.n1ql_timeout` property, whichever is *lower*. .. seealso:: couchbase.bucket.Bucket.n1ql_timeout
[ "Optional", "per", "-", "query", "timeout", ".", "If", "set", "this", "will", "limit", "the", "amount", "of", "time", "in", "which", "the", "query", "can", "be", "executed", "and", "waited", "for", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/n1ql.py#L247-L262
train
couchbase/couchbase-python-client
couchbase/analytics.py
DeferredAnalyticsRequest._is_ready
def _is_ready(self): """ Return True if and only if final result has been received, optionally blocking until this is the case, or the timeout is exceeded. This is a synchronous implementation but an async one can be added by subclassing this. :return: True if ready, False if not """ while not self.finish_time or time.time() < self.finish_time: result=self._poll_deferred() if result=='success': return True if result=='failed': raise couchbase.exceptions.InternalError("Failed exception") time.sleep(self.interval) raise couchbase.exceptions.TimeoutError("Deferred query timed out")
python
def _is_ready(self): """ Return True if and only if final result has been received, optionally blocking until this is the case, or the timeout is exceeded. This is a synchronous implementation but an async one can be added by subclassing this. :return: True if ready, False if not """ while not self.finish_time or time.time() < self.finish_time: result=self._poll_deferred() if result=='success': return True if result=='failed': raise couchbase.exceptions.InternalError("Failed exception") time.sleep(self.interval) raise couchbase.exceptions.TimeoutError("Deferred query timed out")
[ "def", "_is_ready", "(", "self", ")", ":", "while", "not", "self", ".", "finish_time", "or", "time", ".", "time", "(", ")", "<", "self", ".", "finish_time", ":", "result", "=", "self", ".", "_poll_deferred", "(", ")", "if", "result", "==", "'success'", ":", "return", "True", "if", "result", "==", "'failed'", ":", "raise", "couchbase", ".", "exceptions", ".", "InternalError", "(", "\"Failed exception\"", ")", "time", ".", "sleep", "(", "self", ".", "interval", ")", "raise", "couchbase", ".", "exceptions", ".", "TimeoutError", "(", "\"Deferred query timed out\"", ")" ]
Return True if and only if final result has been received, optionally blocking until this is the case, or the timeout is exceeded. This is a synchronous implementation but an async one can be added by subclassing this. :return: True if ready, False if not
[ "Return", "True", "if", "and", "only", "if", "final", "result", "has", "been", "received", "optionally", "blocking", "until", "this", "is", "the", "case", "or", "the", "timeout", "is", "exceeded", "." ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/analytics.py#L199-L217
train
couchbase/couchbase-python-client
couchbase_version.py
VersionInfo.package_version
def package_version(self): """Returns the well formed PEP-440 version""" vbase = self.base_version if self.ncommits: vbase += '.dev{0}+{1}'.format(self.ncommits, self.sha) return vbase
python
def package_version(self): """Returns the well formed PEP-440 version""" vbase = self.base_version if self.ncommits: vbase += '.dev{0}+{1}'.format(self.ncommits, self.sha) return vbase
[ "def", "package_version", "(", "self", ")", ":", "vbase", "=", "self", ".", "base_version", "if", "self", ".", "ncommits", ":", "vbase", "+=", "'.dev{0}+{1}'", ".", "format", "(", "self", ".", "ncommits", ",", "self", ".", "sha", ")", "return", "vbase" ]
Returns the well formed PEP-440 version
[ "Returns", "the", "well", "formed", "PEP", "-", "440", "version" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase_version.py#L76-L81
train
couchbase/couchbase-python-client
jenkins/pycbc-winbuild.py
download_and_bootstrap
def download_and_bootstrap(src, name, prereq=None): """ Download and install something if 'prerequisite' fails """ if prereq: prereq_cmd = '{0} -c "{1}"'.format(PY_EXE, prereq) rv = os.system(prereq_cmd) if rv == 0: return ulp = urllib2.urlopen(src) fp = open(name, "wb") fp.write(ulp.read()) fp.close() cmdline = "{0} {1}".format(PY_EXE, name) rv = os.system(cmdline) assert rv == 0
python
def download_and_bootstrap(src, name, prereq=None): """ Download and install something if 'prerequisite' fails """ if prereq: prereq_cmd = '{0} -c "{1}"'.format(PY_EXE, prereq) rv = os.system(prereq_cmd) if rv == 0: return ulp = urllib2.urlopen(src) fp = open(name, "wb") fp.write(ulp.read()) fp.close() cmdline = "{0} {1}".format(PY_EXE, name) rv = os.system(cmdline) assert rv == 0
[ "def", "download_and_bootstrap", "(", "src", ",", "name", ",", "prereq", "=", "None", ")", ":", "if", "prereq", ":", "prereq_cmd", "=", "'{0} -c \"{1}\"'", ".", "format", "(", "PY_EXE", ",", "prereq", ")", "rv", "=", "os", ".", "system", "(", "prereq_cmd", ")", "if", "rv", "==", "0", ":", "return", "ulp", "=", "urllib2", ".", "urlopen", "(", "src", ")", "fp", "=", "open", "(", "name", ",", "\"wb\"", ")", "fp", ".", "write", "(", "ulp", ".", "read", "(", ")", ")", "fp", ".", "close", "(", ")", "cmdline", "=", "\"{0} {1}\"", ".", "format", "(", "PY_EXE", ",", "name", ")", "rv", "=", "os", ".", "system", "(", "cmdline", ")", "assert", "rv", "==", "0" ]
Download and install something if 'prerequisite' fails
[ "Download", "and", "install", "something", "if", "prerequisite", "fails" ]
a7bada167785bf79a29c39f820d932a433a6a535
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/jenkins/pycbc-winbuild.py#L165-L181
train
zheller/flake8-quotes
flake8_quotes/__init__.py
QuoteChecker._register_opt
def _register_opt(parser, *args, **kwargs): """ Handler to register an option for both Flake8 3.x and 2.x. This is based on: https://github.com/PyCQA/flake8/blob/3.0.0b2/docs/source/plugin-development/cross-compatibility.rst#option-handling-on-flake8-2-and-3 It only supports `parse_from_config` from the original function and it uses the `Option` object returned to get the string. """ try: # Flake8 3.x registration parser.add_option(*args, **kwargs) except (optparse.OptionError, TypeError): # Flake8 2.x registration parse_from_config = kwargs.pop('parse_from_config', False) option = parser.add_option(*args, **kwargs) if parse_from_config: parser.config_options.append(option.get_opt_string().lstrip('-'))
python
def _register_opt(parser, *args, **kwargs): """ Handler to register an option for both Flake8 3.x and 2.x. This is based on: https://github.com/PyCQA/flake8/blob/3.0.0b2/docs/source/plugin-development/cross-compatibility.rst#option-handling-on-flake8-2-and-3 It only supports `parse_from_config` from the original function and it uses the `Option` object returned to get the string. """ try: # Flake8 3.x registration parser.add_option(*args, **kwargs) except (optparse.OptionError, TypeError): # Flake8 2.x registration parse_from_config = kwargs.pop('parse_from_config', False) option = parser.add_option(*args, **kwargs) if parse_from_config: parser.config_options.append(option.get_opt_string().lstrip('-'))
[ "def", "_register_opt", "(", "parser", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "# Flake8 3.x registration", "parser", ".", "add_option", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "(", "optparse", ".", "OptionError", ",", "TypeError", ")", ":", "# Flake8 2.x registration", "parse_from_config", "=", "kwargs", ".", "pop", "(", "'parse_from_config'", ",", "False", ")", "option", "=", "parser", ".", "add_option", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "parse_from_config", ":", "parser", ".", "config_options", ".", "append", "(", "option", ".", "get_opt_string", "(", ")", ".", "lstrip", "(", "'-'", ")", ")" ]
Handler to register an option for both Flake8 3.x and 2.x. This is based on: https://github.com/PyCQA/flake8/blob/3.0.0b2/docs/source/plugin-development/cross-compatibility.rst#option-handling-on-flake8-2-and-3 It only supports `parse_from_config` from the original function and it uses the `Option` object returned to get the string.
[ "Handler", "to", "register", "an", "option", "for", "both", "Flake8", "3", ".", "x", "and", "2", ".", "x", "." ]
4afe69da02b89232cb71c57aafd384214a45a145
https://github.com/zheller/flake8-quotes/blob/4afe69da02b89232cb71c57aafd384214a45a145/flake8_quotes/__init__.py#L78-L96
train
eventbrite/pysoa
pysoa/utils.py
dict_to_hashable
def dict_to_hashable(d): """ Takes a dict and returns an immutable, hashable version of that dict that can be used as a key in dicts or as a set value. Any two dicts passed in with the same content are guaranteed to return the same value. Any two dicts passed in with different content are guaranteed to return different values. Performs comparatively to `repr`. >> %timeit repr(d1) The slowest run took 5.76 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 3.48 µs per loop >> %timeit dict_to_hashable(d1) The slowest run took 4.16 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 4.07 µs per loop :param d: The dict :return: The hashable representation of the dict """ return frozenset( (k, tuple(v) if isinstance(v, list) else (dict_to_hashable(v) if isinstance(v, dict) else v)) for k, v in six.iteritems(d) )
python
def dict_to_hashable(d): """ Takes a dict and returns an immutable, hashable version of that dict that can be used as a key in dicts or as a set value. Any two dicts passed in with the same content are guaranteed to return the same value. Any two dicts passed in with different content are guaranteed to return different values. Performs comparatively to `repr`. >> %timeit repr(d1) The slowest run took 5.76 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 3.48 µs per loop >> %timeit dict_to_hashable(d1) The slowest run took 4.16 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 4.07 µs per loop :param d: The dict :return: The hashable representation of the dict """ return frozenset( (k, tuple(v) if isinstance(v, list) else (dict_to_hashable(v) if isinstance(v, dict) else v)) for k, v in six.iteritems(d) )
[ "def", "dict_to_hashable", "(", "d", ")", ":", "return", "frozenset", "(", "(", "k", ",", "tuple", "(", "v", ")", "if", "isinstance", "(", "v", ",", "list", ")", "else", "(", "dict_to_hashable", "(", "v", ")", "if", "isinstance", "(", "v", ",", "dict", ")", "else", "v", ")", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "d", ")", ")" ]
Takes a dict and returns an immutable, hashable version of that dict that can be used as a key in dicts or as a set value. Any two dicts passed in with the same content are guaranteed to return the same value. Any two dicts passed in with different content are guaranteed to return different values. Performs comparatively to `repr`. >> %timeit repr(d1) The slowest run took 5.76 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 3.48 µs per loop >> %timeit dict_to_hashable(d1) The slowest run took 4.16 times longer than the fastest. This could mean that an intermediate result is being cached 100000 loops, best of 3: 4.07 µs per loop :param d: The dict :return: The hashable representation of the dict
[ "Takes", "a", "dict", "and", "returns", "an", "immutable", "hashable", "version", "of", "that", "dict", "that", "can", "be", "used", "as", "a", "key", "in", "dicts", "or", "as", "a", "set", "value", ".", "Any", "two", "dicts", "passed", "in", "with", "the", "same", "content", "are", "guaranteed", "to", "return", "the", "same", "value", ".", "Any", "two", "dicts", "passed", "in", "with", "different", "content", "are", "guaranteed", "to", "return", "different", "values", ".", "Performs", "comparatively", "to", "repr", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/utils.py#L12-L32
train
eventbrite/pysoa
pysoa/server/action/introspection.py
IntrospectionAction.run
def run(self, request): """ Introspects all of the actions on the server and returns their documentation. :param request: The request object :type request: EnrichedActionRequest :return: The response """ if request.body.get('action_name'): return self._get_response_for_single_action(request.body.get('action_name')) return self._get_response_for_all_actions()
python
def run(self, request): """ Introspects all of the actions on the server and returns their documentation. :param request: The request object :type request: EnrichedActionRequest :return: The response """ if request.body.get('action_name'): return self._get_response_for_single_action(request.body.get('action_name')) return self._get_response_for_all_actions()
[ "def", "run", "(", "self", ",", "request", ")", ":", "if", "request", ".", "body", ".", "get", "(", "'action_name'", ")", ":", "return", "self", ".", "_get_response_for_single_action", "(", "request", ".", "body", ".", "get", "(", "'action_name'", ")", ")", "return", "self", ".", "_get_response_for_all_actions", "(", ")" ]
Introspects all of the actions on the server and returns their documentation. :param request: The request object :type request: EnrichedActionRequest :return: The response
[ "Introspects", "all", "of", "the", "actions", "on", "the", "server", "and", "returns", "their", "documentation", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/action/introspection.py#L118-L130
train
eventbrite/pysoa
pysoa/client/client.py
ServiceHandler._make_middleware_stack
def _make_middleware_stack(middleware, base): """ Given a list of in-order middleware callables `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call. """ for ware in reversed(middleware): base = ware(base) return base
python
def _make_middleware_stack(middleware, base): """ Given a list of in-order middleware callables `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call. """ for ware in reversed(middleware): base = ware(base) return base
[ "def", "_make_middleware_stack", "(", "middleware", ",", "base", ")", ":", "for", "ware", "in", "reversed", "(", "middleware", ")", ":", "base", "=", "ware", "(", "base", ")", "return", "base" ]
Given a list of in-order middleware callables `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call.
[ "Given", "a", "list", "of", "in", "-", "order", "middleware", "callables", "middleware", "and", "a", "base", "function", "base", "chains", "them", "together", "so", "each", "middleware", "is", "fed", "the", "function", "below", "and", "returns", "the", "top", "level", "ready", "to", "call", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L71-L79
train
eventbrite/pysoa
pysoa/client/client.py
ServiceHandler.send_request
def send_request(self, job_request, message_expiry_in_seconds=None): """ Send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param job_request: The job request object to send :type job_request: JobRequest :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge """ request_id = self.request_counter self.request_counter += 1 meta = {} wrapper = self._make_middleware_stack( [m.request for m in self.middleware], self._base_send_request, ) try: with self.metrics.timer('client.send.including_middleware', resolution=TimerResolution.MICROSECONDS): wrapper(request_id, meta, job_request, message_expiry_in_seconds) return request_id finally: self.metrics.commit()
python
def send_request(self, job_request, message_expiry_in_seconds=None): """ Send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param job_request: The job request object to send :type job_request: JobRequest :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge """ request_id = self.request_counter self.request_counter += 1 meta = {} wrapper = self._make_middleware_stack( [m.request for m in self.middleware], self._base_send_request, ) try: with self.metrics.timer('client.send.including_middleware', resolution=TimerResolution.MICROSECONDS): wrapper(request_id, meta, job_request, message_expiry_in_seconds) return request_id finally: self.metrics.commit()
[ "def", "send_request", "(", "self", ",", "job_request", ",", "message_expiry_in_seconds", "=", "None", ")", ":", "request_id", "=", "self", ".", "request_counter", "self", ".", "request_counter", "+=", "1", "meta", "=", "{", "}", "wrapper", "=", "self", ".", "_make_middleware_stack", "(", "[", "m", ".", "request", "for", "m", "in", "self", ".", "middleware", "]", ",", "self", ".", "_base_send_request", ",", ")", "try", ":", "with", "self", ".", "metrics", ".", "timer", "(", "'client.send.including_middleware'", ",", "resolution", "=", "TimerResolution", ".", "MICROSECONDS", ")", ":", "wrapper", "(", "request_id", ",", "meta", ",", "job_request", ",", "message_expiry_in_seconds", ")", "return", "request_id", "finally", ":", "self", ".", "metrics", ".", "commit", "(", ")" ]
Send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param job_request: The job request object to send :type job_request: JobRequest :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge
[ "Send", "a", "JobRequest", "and", "return", "a", "request", "ID", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L87-L117
train
eventbrite/pysoa
pysoa/client/client.py
ServiceHandler.get_all_responses
def get_all_responses(self, receive_timeout_in_seconds=None): """ Receive all available responses from the transport as a generator. :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration """ wrapper = self._make_middleware_stack( [m.response for m in self.middleware], self._get_response, ) try: while True: with self.metrics.timer('client.receive.including_middleware', resolution=TimerResolution.MICROSECONDS): request_id, response = wrapper(receive_timeout_in_seconds) if response is None: break yield request_id, response finally: self.metrics.commit()
python
def get_all_responses(self, receive_timeout_in_seconds=None): """ Receive all available responses from the transport as a generator. :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration """ wrapper = self._make_middleware_stack( [m.response for m in self.middleware], self._get_response, ) try: while True: with self.metrics.timer('client.receive.including_middleware', resolution=TimerResolution.MICROSECONDS): request_id, response = wrapper(receive_timeout_in_seconds) if response is None: break yield request_id, response finally: self.metrics.commit()
[ "def", "get_all_responses", "(", "self", ",", "receive_timeout_in_seconds", "=", "None", ")", ":", "wrapper", "=", "self", ".", "_make_middleware_stack", "(", "[", "m", ".", "response", "for", "m", "in", "self", ".", "middleware", "]", ",", "self", ".", "_get_response", ",", ")", "try", ":", "while", "True", ":", "with", "self", ".", "metrics", ".", "timer", "(", "'client.receive.including_middleware'", ",", "resolution", "=", "TimerResolution", ".", "MICROSECONDS", ")", ":", "request_id", ",", "response", "=", "wrapper", "(", "receive_timeout_in_seconds", ")", "if", "response", "is", "None", ":", "break", "yield", "request_id", ",", "response", "finally", ":", "self", ".", "metrics", ".", "commit", "(", ")" ]
Receive all available responses from the transport as a generator. :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration
[ "Receive", "all", "available", "responses", "from", "the", "transport", "as", "a", "generator", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L127-L154
train
eventbrite/pysoa
pysoa/client/client.py
Client.call_action
def call_action(self, service_name, action, body=None, **kwargs): """ Build and send a single job request with one action. Returns the action response or raises an exception if the action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). :param service_name: The name of the service to call :type service_name: union[str, unicode] :param action: The name of the action to call :type action: union[str, unicode] :param body: The action request body :type body: dict :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The action response :rtype: ActionResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_action_future(service_name, action, body, **kwargs).result()
python
def call_action(self, service_name, action, body=None, **kwargs): """ Build and send a single job request with one action. Returns the action response or raises an exception if the action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). :param service_name: The name of the service to call :type service_name: union[str, unicode] :param action: The name of the action to call :type action: union[str, unicode] :param body: The action request body :type body: dict :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The action response :rtype: ActionResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_action_future(service_name, action, body, **kwargs).result()
[ "def", "call_action", "(", "self", ",", "service_name", ",", "action", ",", "body", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_action_future", "(", "service_name", ",", "action", ",", "body", ",", "*", "*", "kwargs", ")", ".", "result", "(", ")" ]
Build and send a single job request with one action. Returns the action response or raises an exception if the action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). :param service_name: The name of the service to call :type service_name: union[str, unicode] :param action: The name of the action to call :type action: union[str, unicode] :param body: The action request body :type body: dict :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. :type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The action response :rtype: ActionResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError
[ "Build", "and", "send", "a", "single", "job", "request", "with", "one", "action", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L348-L390
train
eventbrite/pysoa
pysoa/client/client.py
Client.call_actions
def call_actions( self, service_name, actions, expansions=None, raise_job_errors=True, raise_action_errors=True, timeout=None, **kwargs ): """ Build and send a single job request with one or more actions. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: JobResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_actions_future( service_name, actions, expansions, raise_job_errors, raise_action_errors, timeout, **kwargs ).result()
python
def call_actions( self, service_name, actions, expansions=None, raise_job_errors=True, raise_action_errors=True, timeout=None, **kwargs ): """ Build and send a single job request with one or more actions. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: JobResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_actions_future( service_name, actions, expansions, raise_job_errors, raise_action_errors, timeout, **kwargs ).result()
[ "def", "call_actions", "(", "self", ",", "service_name", ",", "actions", ",", "expansions", "=", "None", ",", "raise_job_errors", "=", "True", ",", "raise_action_errors", "=", "True", ",", "timeout", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_actions_future", "(", "service_name", ",", "actions", ",", "expansions", ",", "raise_job_errors", ",", "raise_action_errors", ",", "timeout", ",", "*", "*", "kwargs", ")", ".", "result", "(", ")" ]
Build and send a single job request with one or more actions. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if the job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if the job response contains errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: JobResponse :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError
[ "Build", "and", "send", "a", "single", "job", "request", "with", "one", "or", "more", "actions", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L392-L451
train
eventbrite/pysoa
pysoa/client/client.py
Client.call_actions_parallel
def call_actions_parallel(self, service_name, actions, **kwargs): """ Build and send multiple job requests to one service, each job with one action, to be executed in parallel, and return once all responses have been received. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if any job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: A generator of action responses :rtype: Generator[ActionResponse] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_actions_parallel_future(service_name, actions, **kwargs).result()
python
def call_actions_parallel(self, service_name, actions, **kwargs): """ Build and send multiple job requests to one service, each job with one action, to be executed in parallel, and return once all responses have been received. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if any job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: A generator of action responses :rtype: Generator[ActionResponse] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_actions_parallel_future(service_name, actions, **kwargs).result()
[ "def", "call_actions_parallel", "(", "self", ",", "service_name", ",", "actions", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_actions_parallel_future", "(", "service_name", ",", "actions", ",", "*", "*", "kwargs", ")", ".", "result", "(", ")" ]
Build and send multiple job requests to one service, each job with one action, to be executed in parallel, and return once all responses have been received. Returns a list of action responses, one for each action in the same order as provided, or raises an exception if any action response is an error (unless `raise_action_errors` is passed as `False`) or if any job response is an error (unless `raise_job_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param service_name: The name of the service to call :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type actions: iterable[union[ActionRequest, dict]] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. 
:type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: A generator of action responses :rtype: Generator[ActionResponse] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError
[ "Build", "and", "send", "multiple", "job", "requests", "to", "one", "service", "each", "job", "with", "one", "action", "to", "be", "executed", "in", "parallel", "and", "return", "once", "all", "responses", "have", "been", "received", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L453-L494
train
eventbrite/pysoa
pysoa/client/client.py
Client.call_jobs_parallel
def call_jobs_parallel( self, jobs, expansions=None, raise_job_errors=True, raise_action_errors=True, catch_transport_errors=False, timeout=None, **kwargs ): """ Build and send multiple job requests to one or more services, each with one or more actions, to be executed in parallel, and return once all responses have been received. Returns a list of job responses, one for each job in the same order as provided, or raises an exception if any job response is an error (unless `raise_job_errors` is passed as `False`) or if any action response is an error (unless `raise_action_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param jobs: A list of job request dicts, each containing `service_name` and `actions`, where `actions` is a list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type jobs: iterable[dict(service_name=union[str, unicode], actions=list[union[ActionRequest, dict]])] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if any job responses contain errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param catch_transport_errors: Whether to catch transport errors and return them instead of letting them propagate. By default (`False`), the errors `ConnectionError`, `InvalidMessageError`, `MessageReceiveError`, `MessageReceiveTimeout`, `MessageSendError`, `MessageSendTimeout`, and `MessageTooLarge`, when raised by the transport, cause the entire process to terminate, potentially losing responses. If this argument is set to `True`, those errors are, instead, caught, and they are returned in place of their corresponding responses in the returned list of job responses. 
:type catch_transport_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. :type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: list[union(JobResponse, Exception)] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_jobs_parallel_future( jobs, expansions=expansions, raise_job_errors=raise_job_errors, raise_action_errors=raise_action_errors, catch_transport_errors=catch_transport_errors, timeout=timeout, **kwargs ).result()
python
def call_jobs_parallel( self, jobs, expansions=None, raise_job_errors=True, raise_action_errors=True, catch_transport_errors=False, timeout=None, **kwargs ): """ Build and send multiple job requests to one or more services, each with one or more actions, to be executed in parallel, and return once all responses have been received. Returns a list of job responses, one for each job in the same order as provided, or raises an exception if any job response is an error (unless `raise_job_errors` is passed as `False`) or if any action response is an error (unless `raise_action_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param jobs: A list of job request dicts, each containing `service_name` and `actions`, where `actions` is a list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type jobs: iterable[dict(service_name=union[str, unicode], actions=list[union[ActionRequest, dict]])] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if any job responses contain errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param catch_transport_errors: Whether to catch transport errors and return them instead of letting them propagate. By default (`False`), the errors `ConnectionError`, `InvalidMessageError`, `MessageReceiveError`, `MessageReceiveTimeout`, `MessageSendError`, `MessageSendTimeout`, and `MessageTooLarge`, when raised by the transport, cause the entire process to terminate, potentially losing responses. If this argument is set to `True`, those errors are, instead, caught, and they are returned in place of their corresponding responses in the returned list of job responses. 
:type catch_transport_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. :type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: list[union(JobResponse, Exception)] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError """ return self.call_jobs_parallel_future( jobs, expansions=expansions, raise_job_errors=raise_job_errors, raise_action_errors=raise_action_errors, catch_transport_errors=catch_transport_errors, timeout=timeout, **kwargs ).result()
[ "def", "call_jobs_parallel", "(", "self", ",", "jobs", ",", "expansions", "=", "None", ",", "raise_job_errors", "=", "True", ",", "raise_action_errors", "=", "True", ",", "catch_transport_errors", "=", "False", ",", "timeout", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "call_jobs_parallel_future", "(", "jobs", ",", "expansions", "=", "expansions", ",", "raise_job_errors", "=", "raise_job_errors", ",", "raise_action_errors", "=", "raise_action_errors", ",", "catch_transport_errors", "=", "catch_transport_errors", ",", "timeout", "=", "timeout", ",", "*", "*", "kwargs", ")", ".", "result", "(", ")" ]
Build and send multiple job requests to one or more services, each with one or more actions, to be executed in parallel, and return once all responses have been received. Returns a list of job responses, one for each job in the same order as provided, or raises an exception if any job response is an error (unless `raise_job_errors` is passed as `False`) or if any action response is an error (unless `raise_action_errors` is passed as `False`). This method performs expansions if the Client is configured with an expansion converter. :param jobs: A list of job request dicts, each containing `service_name` and `actions`, where `actions` is a list of `ActionRequest` objects and/or dicts that can be converted to `ActionRequest` objects :type jobs: iterable[dict(service_name=union[str, unicode], actions=list[union[ActionRequest, dict]])] :param expansions: A dictionary representing the expansions to perform :type expansions: dict :param raise_job_errors: Whether to raise a JobError if any job responses contain errors (defaults to `True`) :type raise_job_errors: bool :param raise_action_errors: Whether to raise a CallActionError if any action responses contain errors (defaults to `True`) :type raise_action_errors: bool :param catch_transport_errors: Whether to catch transport errors and return them instead of letting them propagate. By default (`False`), the errors `ConnectionError`, `InvalidMessageError`, `MessageReceiveError`, `MessageReceiveTimeout`, `MessageSendError`, `MessageSendTimeout`, and `MessageTooLarge`, when raised by the transport, cause the entire process to terminate, potentially losing responses. If this argument is set to `True`, those errors are, instead, caught, and they are returned in place of their corresponding responses in the returned list of job responses. 
:type catch_transport_errors: bool :param timeout: If provided, this will override the default transport timeout values to; requests will expire after this number of seconds plus some buffer defined by the transport, and the client will not block waiting for a response for longer than this amount of time. :type timeout: int :param switches: A list of switch value integers :type switches: list :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :return: The job response :rtype: list[union(JobResponse, Exception)] :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, JobError, CallActionError
[ "Build", "and", "send", "multiple", "job", "requests", "to", "one", "or", "more", "services", "each", "with", "one", "or", "more", "actions", "to", "be", "executed", "in", "parallel", "and", "return", "once", "all", "responses", "have", "been", "received", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L496-L564
train
eventbrite/pysoa
pysoa/client/client.py
Client.send_request
def send_request( self, service_name, actions, switches=None, correlation_id=None, continue_on_error=False, context=None, control_extra=None, message_expiry_in_seconds=None, suppress_response=False, ): """ Build and send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects :type actions: list :param switches: A list of switch value integers :type switches: union[list, set] :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :param suppress_response: If `True`, the service will process the request normally but omit the step of sending a response back to the client (use this feature to implement send-and-forget patterns for asynchronous execution) :type suppress_response: bool :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge """ control_extra = control_extra.copy() if control_extra else {} if message_expiry_in_seconds and 'timeout' not in control_extra: control_extra['timeout'] = message_expiry_in_seconds handler = self._get_handler(service_name) control = self._make_control_header( continue_on_error=continue_on_error, 
control_extra=control_extra, suppress_response=suppress_response, ) context = self._make_context_header( switches=switches, correlation_id=correlation_id, context_extra=context, ) job_request = JobRequest(actions=actions, control=control, context=context or {}) return handler.send_request(job_request, message_expiry_in_seconds)
python
def send_request( self, service_name, actions, switches=None, correlation_id=None, continue_on_error=False, context=None, control_extra=None, message_expiry_in_seconds=None, suppress_response=False, ): """ Build and send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects :type actions: list :param switches: A list of switch value integers :type switches: union[list, set] :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :param suppress_response: If `True`, the service will process the request normally but omit the step of sending a response back to the client (use this feature to implement send-and-forget patterns for asynchronous execution) :type suppress_response: bool :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge """ control_extra = control_extra.copy() if control_extra else {} if message_expiry_in_seconds and 'timeout' not in control_extra: control_extra['timeout'] = message_expiry_in_seconds handler = self._get_handler(service_name) control = self._make_control_header( continue_on_error=continue_on_error, 
control_extra=control_extra, suppress_response=suppress_response, ) context = self._make_context_header( switches=switches, correlation_id=correlation_id, context_extra=context, ) job_request = JobRequest(actions=actions, control=control, context=context or {}) return handler.send_request(job_request, message_expiry_in_seconds)
[ "def", "send_request", "(", "self", ",", "service_name", ",", "actions", ",", "switches", "=", "None", ",", "correlation_id", "=", "None", ",", "continue_on_error", "=", "False", ",", "context", "=", "None", ",", "control_extra", "=", "None", ",", "message_expiry_in_seconds", "=", "None", ",", "suppress_response", "=", "False", ",", ")", ":", "control_extra", "=", "control_extra", ".", "copy", "(", ")", "if", "control_extra", "else", "{", "}", "if", "message_expiry_in_seconds", "and", "'timeout'", "not", "in", "control_extra", ":", "control_extra", "[", "'timeout'", "]", "=", "message_expiry_in_seconds", "handler", "=", "self", ".", "_get_handler", "(", "service_name", ")", "control", "=", "self", ".", "_make_control_header", "(", "continue_on_error", "=", "continue_on_error", ",", "control_extra", "=", "control_extra", ",", "suppress_response", "=", "suppress_response", ",", ")", "context", "=", "self", ".", "_make_context_header", "(", "switches", "=", "switches", ",", "correlation_id", "=", "correlation_id", ",", "context_extra", "=", "context", ",", ")", "job_request", "=", "JobRequest", "(", "actions", "=", "actions", ",", "control", "=", "control", ",", "context", "=", "context", "or", "{", "}", ")", "return", "handler", ".", "send_request", "(", "job_request", ",", "message_expiry_in_seconds", ")" ]
Build and send a JobRequest, and return a request ID. The context and control_extra arguments may be used to include extra values in the context and control headers, respectively. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param actions: A list of `ActionRequest` objects :type actions: list :param switches: A list of switch value integers :type switches: union[list, set] :param correlation_id: The request correlation ID :type correlation_id: union[str, unicode] :param continue_on_error: Whether to continue executing further actions once one action has returned errors :type continue_on_error: bool :param context: A dictionary of extra values to include in the context header :type context: dict :param control_extra: A dictionary of extra values to include in the control header :type control_extra: dict :param message_expiry_in_seconds: How soon the message will expire if not received by a server (defaults to sixty seconds unless the settings are otherwise) :type message_expiry_in_seconds: int :param suppress_response: If `True`, the service will process the request normally but omit the step of sending a response back to the client (use this feature to implement send-and-forget patterns for asynchronous execution) :type suppress_response: bool :return: The request ID :rtype: int :raise: ConnectionError, InvalidField, MessageSendError, MessageSendTimeout, MessageTooLarge
[ "Build", "and", "send", "a", "JobRequest", "and", "return", "a", "request", "ID", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L804-L866
train
eventbrite/pysoa
pysoa/client/client.py
Client.get_all_responses
def get_all_responses(self, service_name, receive_timeout_in_seconds=None): """ Receive all available responses from the service as a generator. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration """ handler = self._get_handler(service_name) return handler.get_all_responses(receive_timeout_in_seconds)
python
def get_all_responses(self, service_name, receive_timeout_in_seconds=None): """ Receive all available responses from the service as a generator. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration """ handler = self._get_handler(service_name) return handler.get_all_responses(receive_timeout_in_seconds)
[ "def", "get_all_responses", "(", "self", ",", "service_name", ",", "receive_timeout_in_seconds", "=", "None", ")", ":", "handler", "=", "self", ".", "_get_handler", "(", "service_name", ")", "return", "handler", ".", "get_all_responses", "(", "receive_timeout_in_seconds", ")" ]
Receive all available responses from the service as a generator. :param service_name: The name of the service from which to receive responses :type service_name: union[str, unicode] :param receive_timeout_in_seconds: How long to block without receiving a message before raising `MessageReceiveTimeout` (defaults to five seconds unless the settings are otherwise). :type receive_timeout_in_seconds: int :return: A generator that yields (request ID, job response) :rtype: generator :raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout, InvalidMessage, StopIteration
[ "Receive", "all", "available", "responses", "from", "the", "service", "as", "a", "generator", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/client.py#L868-L886
train
eventbrite/pysoa
pysoa/server/autoreload.py
get_reloader
def get_reloader(main_module_name, watch_modules, signal_forks=False): """ Don't instantiate a reloader directly. Instead, call this method to get a reloader, and then call `main` on that reloader. See the documentation for `AbstractReloader.main` above to see how to call it. :param main_module_name: The main module name (such as "example_service.standalone"). It should be the value that was passed to the `-m` parameter when starting the Python executable, or `None` if the `-m` parameter was not used. :param watch_modules: If passed an iterable/generator of module names, file watching will be limited to modules whose names start with one of these names (including their submodules). For example, if passed `['example', 'pysoa']`, it will monitor all of PySOA's modules and submodules and all of `example_service`'s modules and submodules, as well as any other modules that start with `example`. If `None`, all files from all modules in all libraries, including Python, will be watched. :param signal_forks: If `True`, this means the server process is actually multiprocessing/forking and its child processes are the actual server processes. In this case, the file watcher also sends `SIGHUP` in addition to `SIGTERM` to the clone process, and the clone process receives this and knows to send `SIGTERM` to all of its forked child processes. :return: a new reloader instance. """ if USE_PY_INOTIFY: return _PyInotifyReloader(main_module_name, watch_modules, signal_forks) return _PollingReloader(main_module_name, watch_modules, signal_forks)
python
def get_reloader(main_module_name, watch_modules, signal_forks=False): """ Don't instantiate a reloader directly. Instead, call this method to get a reloader, and then call `main` on that reloader. See the documentation for `AbstractReloader.main` above to see how to call it. :param main_module_name: The main module name (such as "example_service.standalone"). It should be the value that was passed to the `-m` parameter when starting the Python executable, or `None` if the `-m` parameter was not used. :param watch_modules: If passed an iterable/generator of module names, file watching will be limited to modules whose names start with one of these names (including their submodules). For example, if passed `['example', 'pysoa']`, it will monitor all of PySOA's modules and submodules and all of `example_service`'s modules and submodules, as well as any other modules that start with `example`. If `None`, all files from all modules in all libraries, including Python, will be watched. :param signal_forks: If `True`, this means the server process is actually multiprocessing/forking and its child processes are the actual server processes. In this case, the file watcher also sends `SIGHUP` in addition to `SIGTERM` to the clone process, and the clone process receives this and knows to send `SIGTERM` to all of its forked child processes. :return: a new reloader instance. """ if USE_PY_INOTIFY: return _PyInotifyReloader(main_module_name, watch_modules, signal_forks) return _PollingReloader(main_module_name, watch_modules, signal_forks)
[ "def", "get_reloader", "(", "main_module_name", ",", "watch_modules", ",", "signal_forks", "=", "False", ")", ":", "if", "USE_PY_INOTIFY", ":", "return", "_PyInotifyReloader", "(", "main_module_name", ",", "watch_modules", ",", "signal_forks", ")", "return", "_PollingReloader", "(", "main_module_name", ",", "watch_modules", ",", "signal_forks", ")" ]
Don't instantiate a reloader directly. Instead, call this method to get a reloader, and then call `main` on that reloader. See the documentation for `AbstractReloader.main` above to see how to call it. :param main_module_name: The main module name (such as "example_service.standalone"). It should be the value that was passed to the `-m` parameter when starting the Python executable, or `None` if the `-m` parameter was not used. :param watch_modules: If passed an iterable/generator of module names, file watching will be limited to modules whose names start with one of these names (including their submodules). For example, if passed `['example', 'pysoa']`, it will monitor all of PySOA's modules and submodules and all of `example_service`'s modules and submodules, as well as any other modules that start with `example`. If `None`, all files from all modules in all libraries, including Python, will be watched. :param signal_forks: If `True`, this means the server process is actually multiprocessing/forking and its child processes are the actual server processes. In this case, the file watcher also sends `SIGHUP` in addition to `SIGTERM` to the clone process, and the clone process receives this and knows to send `SIGTERM` to all of its forked child processes. :return: a new reloader instance.
[ "Don", "t", "instantiate", "a", "reloader", "directly", ".", "Instead", "call", "this", "method", "to", "get", "a", "reloader", "and", "then", "call", "main", "on", "that", "reloader", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/autoreload.py#L414-L438
train
eventbrite/pysoa
pysoa/common/serializer/msgpack_serializer.py
MsgpackSerializer.ext_hook
def ext_hook(self, code, data): """ Decodes our custom extension types """ if code == self.EXT_DATETIME: # Unpack datetime object from a big-endian signed 64-bit integer. microseconds = self.STRUCT_DATETIME.unpack(data)[0] return datetime.datetime.utcfromtimestamp(microseconds / 1000000.0) elif code == self.EXT_DATE: # Unpack local-date object from a big-endian unsigned short and two big-endian unsigned chars return datetime.date(*self.STRUCT_DATE.unpack(data)) elif code == self.EXT_TIME: # Unpack a dateless-time object from three big-endian unsigned chars and a big-endian unsigned # 32-bit integer. return datetime.time(*self.STRUCT_TIME.unpack(data)) elif code == self.EXT_DECIMAL: obj_len = self.STRUCT_DECIMAL_LENGTH.unpack(data[:2])[0] obj_decoder = struct.Struct(str('!{}s'.format(obj_len))) return decimal.Decimal(obj_decoder.unpack(data[2:])[0].decode('utf-8')) elif code == self.EXT_CURRINT: # Unpack Amount object into (code, minor) from a 3-char ASCII string and a signed 64-bit integer. code, minor_value = self.STRUCT_CURRINT.unpack(data) return currint.Amount.from_code_and_minor(code.decode('ascii'), minor_value) else: raise TypeError('Cannot decode unknown extension type {} from MessagePack'.format(code))
python
def ext_hook(self, code, data): """ Decodes our custom extension types """ if code == self.EXT_DATETIME: # Unpack datetime object from a big-endian signed 64-bit integer. microseconds = self.STRUCT_DATETIME.unpack(data)[0] return datetime.datetime.utcfromtimestamp(microseconds / 1000000.0) elif code == self.EXT_DATE: # Unpack local-date object from a big-endian unsigned short and two big-endian unsigned chars return datetime.date(*self.STRUCT_DATE.unpack(data)) elif code == self.EXT_TIME: # Unpack a dateless-time object from three big-endian unsigned chars and a big-endian unsigned # 32-bit integer. return datetime.time(*self.STRUCT_TIME.unpack(data)) elif code == self.EXT_DECIMAL: obj_len = self.STRUCT_DECIMAL_LENGTH.unpack(data[:2])[0] obj_decoder = struct.Struct(str('!{}s'.format(obj_len))) return decimal.Decimal(obj_decoder.unpack(data[2:])[0].decode('utf-8')) elif code == self.EXT_CURRINT: # Unpack Amount object into (code, minor) from a 3-char ASCII string and a signed 64-bit integer. code, minor_value = self.STRUCT_CURRINT.unpack(data) return currint.Amount.from_code_and_minor(code.decode('ascii'), minor_value) else: raise TypeError('Cannot decode unknown extension type {} from MessagePack'.format(code))
[ "def", "ext_hook", "(", "self", ",", "code", ",", "data", ")", ":", "if", "code", "==", "self", ".", "EXT_DATETIME", ":", "# Unpack datetime object from a big-endian signed 64-bit integer.", "microseconds", "=", "self", ".", "STRUCT_DATETIME", ".", "unpack", "(", "data", ")", "[", "0", "]", "return", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "microseconds", "/", "1000000.0", ")", "elif", "code", "==", "self", ".", "EXT_DATE", ":", "# Unpack local-date object from a big-endian unsigned short and two big-endian unsigned chars", "return", "datetime", ".", "date", "(", "*", "self", ".", "STRUCT_DATE", ".", "unpack", "(", "data", ")", ")", "elif", "code", "==", "self", ".", "EXT_TIME", ":", "# Unpack a dateless-time object from three big-endian unsigned chars and a big-endian unsigned", "# 32-bit integer.", "return", "datetime", ".", "time", "(", "*", "self", ".", "STRUCT_TIME", ".", "unpack", "(", "data", ")", ")", "elif", "code", "==", "self", ".", "EXT_DECIMAL", ":", "obj_len", "=", "self", ".", "STRUCT_DECIMAL_LENGTH", ".", "unpack", "(", "data", "[", ":", "2", "]", ")", "[", "0", "]", "obj_decoder", "=", "struct", ".", "Struct", "(", "str", "(", "'!{}s'", ".", "format", "(", "obj_len", ")", ")", ")", "return", "decimal", ".", "Decimal", "(", "obj_decoder", ".", "unpack", "(", "data", "[", "2", ":", "]", ")", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", ")", "elif", "code", "==", "self", ".", "EXT_CURRINT", ":", "# Unpack Amount object into (code, minor) from a 3-char ASCII string and a signed 64-bit integer.", "code", ",", "minor_value", "=", "self", ".", "STRUCT_CURRINT", ".", "unpack", "(", "data", ")", "return", "currint", ".", "Amount", ".", "from_code_and_minor", "(", "code", ".", "decode", "(", "'ascii'", ")", ",", "minor_value", ")", "else", ":", "raise", "TypeError", "(", "'Cannot decode unknown extension type {} from MessagePack'", ".", "format", "(", "code", ")", ")" ]
Decodes our custom extension types
[ "Decodes", "our", "custom", "extension", "types" ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/common/serializer/msgpack_serializer.py#L139-L163
train
eventbrite/pysoa
pysoa/common/transport/local.py
LocalClientTransport.send_request_message
def send_request_message(self, request_id, meta, body, _=None): """ Receives a request from the client and handles and dispatches in in-thread. `message_expiry_in_seconds` is not supported. Messages do not expire, as the server handles the request immediately in the same thread before this method returns. This method blocks until the server has completed handling the request. """ self._current_request = (request_id, meta, body) try: self.server.handle_next_request() finally: self._current_request = None
python
def send_request_message(self, request_id, meta, body, _=None): """ Receives a request from the client and handles and dispatches in in-thread. `message_expiry_in_seconds` is not supported. Messages do not expire, as the server handles the request immediately in the same thread before this method returns. This method blocks until the server has completed handling the request. """ self._current_request = (request_id, meta, body) try: self.server.handle_next_request() finally: self._current_request = None
[ "def", "send_request_message", "(", "self", ",", "request_id", ",", "meta", ",", "body", ",", "_", "=", "None", ")", ":", "self", ".", "_current_request", "=", "(", "request_id", ",", "meta", ",", "body", ")", "try", ":", "self", ".", "server", ".", "handle_next_request", "(", ")", "finally", ":", "self", ".", "_current_request", "=", "None" ]
Receives a request from the client and handles and dispatches in in-thread. `message_expiry_in_seconds` is not supported. Messages do not expire, as the server handles the request immediately in the same thread before this method returns. This method blocks until the server has completed handling the request.
[ "Receives", "a", "request", "from", "the", "client", "and", "handles", "and", "dispatches", "in", "in", "-", "thread", ".", "message_expiry_in_seconds", "is", "not", "supported", ".", "Messages", "do", "not", "expire", "as", "the", "server", "handles", "the", "request", "immediately", "in", "the", "same", "thread", "before", "this", "method", "returns", ".", "This", "method", "blocks", "until", "the", "server", "has", "completed", "handling", "the", "request", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/common/transport/local.py#L78-L88
train
eventbrite/pysoa
pysoa/common/transport/local.py
LocalClientTransport.send_response_message
def send_response_message(self, request_id, meta, body): """ Add the response to the deque. """ self.response_messages.append((request_id, meta, body))
python
def send_response_message(self, request_id, meta, body): """ Add the response to the deque. """ self.response_messages.append((request_id, meta, body))
[ "def", "send_response_message", "(", "self", ",", "request_id", ",", "meta", ",", "body", ")", ":", "self", ".", "response_messages", ".", "append", "(", "(", "request_id", ",", "meta", ",", "body", ")", ")" ]
Add the response to the deque.
[ "Add", "the", "response", "to", "the", "deque", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/common/transport/local.py#L103-L107
train
eventbrite/pysoa
pysoa/server/action/status.py
StatusActionFactory
def StatusActionFactory(version, build=None, base_class=BaseStatusAction): # noqa """ A factory for creating a new status action class specific to a service. :param version: The service version :type version: union[str, unicode] :param build: The optional service build identifier :type build: union[str, unicode] :param base_class: The optional base class, to override `BaseStatusAction` as the base class :type base_class: BaseStatusAction :return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters :rtype: class """ return type( str('StatusAction'), (base_class, ), {str('_version'): version, str('_build'): build}, )
python
def StatusActionFactory(version, build=None, base_class=BaseStatusAction): # noqa """ A factory for creating a new status action class specific to a service. :param version: The service version :type version: union[str, unicode] :param build: The optional service build identifier :type build: union[str, unicode] :param base_class: The optional base class, to override `BaseStatusAction` as the base class :type base_class: BaseStatusAction :return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters :rtype: class """ return type( str('StatusAction'), (base_class, ), {str('_version'): version, str('_build'): build}, )
[ "def", "StatusActionFactory", "(", "version", ",", "build", "=", "None", ",", "base_class", "=", "BaseStatusAction", ")", ":", "# noqa", "return", "type", "(", "str", "(", "'StatusAction'", ")", ",", "(", "base_class", ",", ")", ",", "{", "str", "(", "'_version'", ")", ":", "version", ",", "str", "(", "'_build'", ")", ":", "build", "}", ",", ")" ]
A factory for creating a new status action class specific to a service. :param version: The service version :type version: union[str, unicode] :param build: The optional service build identifier :type build: union[str, unicode] :param base_class: The optional base class, to override `BaseStatusAction` as the base class :type base_class: BaseStatusAction :return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters :rtype: class
[ "A", "factory", "for", "creating", "a", "new", "status", "action", "class", "specific", "to", "a", "service", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/action/status.py#L235-L253
train
eventbrite/pysoa
pysoa/server/server.py
Server.make_middleware_stack
def make_middleware_stack(middleware, base): """ Given a list of in-order middleware callable objects `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call. :param middleware: The middleware stack :type middleware: iterable[callable] :param base: The base callable that the lowest-order middleware wraps :type base: callable :return: The topmost middleware, which calls the next middleware ... which calls the lowest-order middleware, which calls the `base` callable. :rtype: callable """ for ware in reversed(middleware): base = ware(base) return base
python
def make_middleware_stack(middleware, base): """ Given a list of in-order middleware callable objects `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call. :param middleware: The middleware stack :type middleware: iterable[callable] :param base: The base callable that the lowest-order middleware wraps :type base: callable :return: The topmost middleware, which calls the next middleware ... which calls the lowest-order middleware, which calls the `base` callable. :rtype: callable """ for ware in reversed(middleware): base = ware(base) return base
[ "def", "make_middleware_stack", "(", "middleware", ",", "base", ")", ":", "for", "ware", "in", "reversed", "(", "middleware", ")", ":", "base", "=", "ware", "(", "base", ")", "return", "base" ]
Given a list of in-order middleware callable objects `middleware` and a base function `base`, chains them together so each middleware is fed the function below, and returns the top level ready to call. :param middleware: The middleware stack :type middleware: iterable[callable] :param base: The base callable that the lowest-order middleware wraps :type base: callable :return: The topmost middleware, which calls the next middleware ... which calls the lowest-order middleware, which calls the `base` callable. :rtype: callable
[ "Given", "a", "list", "of", "in", "-", "order", "middleware", "callable", "objects", "middleware", "and", "a", "base", "function", "base", "chains", "them", "together", "so", "each", "middleware", "is", "fed", "the", "function", "below", "and", "returns", "the", "top", "level", "ready", "to", "call", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L266-L282
train
eventbrite/pysoa
pysoa/server/server.py
Server.process_job
def process_job(self, job_request): """ Validate, execute, and run the job request, wrapping it with any applicable job middleware. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse :raise: JobError """ try: # Validate JobRequest message validation_errors = [ Error( code=error.code, message=error.message, field=error.pointer, ) for error in (JobRequestSchema.errors(job_request) or []) ] if validation_errors: raise JobError(errors=validation_errors) # Add the client object in case a middleware wishes to use it job_request['client'] = self.make_client(job_request['context']) # Add the async event loop in case a middleware wishes to use it job_request['async_event_loop'] = self._async_event_loop if hasattr(self, '_async_event_loop_thread'): job_request['run_coroutine'] = self._async_event_loop_thread.run_coroutine else: job_request['run_coroutine'] = None # Build set of middleware + job handler, then run job wrapper = self.make_middleware_stack( [m.job for m in self.middleware], self.execute_job, ) job_response = wrapper(job_request) if 'correlation_id' in job_request['context']: job_response.context['correlation_id'] = job_request['context']['correlation_id'] except JobError as e: self.metrics.counter('server.error.job_error').increment() job_response = JobResponse( errors=e.errors, ) except Exception as e: # Send an error response if no middleware caught this. # Formatting the error might itself error, so try to catch that self.metrics.counter('server.error.unhandled_error').increment() return self.handle_job_exception(e) return job_response
python
def process_job(self, job_request): """ Validate, execute, and run the job request, wrapping it with any applicable job middleware. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse :raise: JobError """ try: # Validate JobRequest message validation_errors = [ Error( code=error.code, message=error.message, field=error.pointer, ) for error in (JobRequestSchema.errors(job_request) or []) ] if validation_errors: raise JobError(errors=validation_errors) # Add the client object in case a middleware wishes to use it job_request['client'] = self.make_client(job_request['context']) # Add the async event loop in case a middleware wishes to use it job_request['async_event_loop'] = self._async_event_loop if hasattr(self, '_async_event_loop_thread'): job_request['run_coroutine'] = self._async_event_loop_thread.run_coroutine else: job_request['run_coroutine'] = None # Build set of middleware + job handler, then run job wrapper = self.make_middleware_stack( [m.job for m in self.middleware], self.execute_job, ) job_response = wrapper(job_request) if 'correlation_id' in job_request['context']: job_response.context['correlation_id'] = job_request['context']['correlation_id'] except JobError as e: self.metrics.counter('server.error.job_error').increment() job_response = JobResponse( errors=e.errors, ) except Exception as e: # Send an error response if no middleware caught this. # Formatting the error might itself error, so try to catch that self.metrics.counter('server.error.unhandled_error').increment() return self.handle_job_exception(e) return job_response
[ "def", "process_job", "(", "self", ",", "job_request", ")", ":", "try", ":", "# Validate JobRequest message", "validation_errors", "=", "[", "Error", "(", "code", "=", "error", ".", "code", ",", "message", "=", "error", ".", "message", ",", "field", "=", "error", ".", "pointer", ",", ")", "for", "error", "in", "(", "JobRequestSchema", ".", "errors", "(", "job_request", ")", "or", "[", "]", ")", "]", "if", "validation_errors", ":", "raise", "JobError", "(", "errors", "=", "validation_errors", ")", "# Add the client object in case a middleware wishes to use it", "job_request", "[", "'client'", "]", "=", "self", ".", "make_client", "(", "job_request", "[", "'context'", "]", ")", "# Add the async event loop in case a middleware wishes to use it", "job_request", "[", "'async_event_loop'", "]", "=", "self", ".", "_async_event_loop", "if", "hasattr", "(", "self", ",", "'_async_event_loop_thread'", ")", ":", "job_request", "[", "'run_coroutine'", "]", "=", "self", ".", "_async_event_loop_thread", ".", "run_coroutine", "else", ":", "job_request", "[", "'run_coroutine'", "]", "=", "None", "# Build set of middleware + job handler, then run job", "wrapper", "=", "self", ".", "make_middleware_stack", "(", "[", "m", ".", "job", "for", "m", "in", "self", ".", "middleware", "]", ",", "self", ".", "execute_job", ",", ")", "job_response", "=", "wrapper", "(", "job_request", ")", "if", "'correlation_id'", "in", "job_request", "[", "'context'", "]", ":", "job_response", ".", "context", "[", "'correlation_id'", "]", "=", "job_request", "[", "'context'", "]", "[", "'correlation_id'", "]", "except", "JobError", "as", "e", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.job_error'", ")", ".", "increment", "(", ")", "job_response", "=", "JobResponse", "(", "errors", "=", "e", ".", "errors", ",", ")", "except", "Exception", "as", "e", ":", "# Send an error response if no middleware caught this.", "# Formatting the error might itself error, so try to catch that", "self", 
".", "metrics", ".", "counter", "(", "'server.error.unhandled_error'", ")", ".", "increment", "(", ")", "return", "self", ".", "handle_job_exception", "(", "e", ")", "return", "job_response" ]
Validate, execute, and run the job request, wrapping it with any applicable job middleware. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse :raise: JobError
[ "Validate", "execute", "and", "run", "the", "job", "request", "wrapping", "it", "with", "any", "applicable", "job", "middleware", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L284-L339
train
eventbrite/pysoa
pysoa/server/server.py
Server.handle_job_exception
def handle_job_exception(self, exception, variables=None): """ Makes and returns a last-ditch error response. :param exception: The exception that happened :type exception: Exception :param variables: A dictionary of context-relevant variables to include in the error response :type variables: dict :return: A `JobResponse` object :rtype: JobResponse """ # Get the error and traceback if we can # noinspection PyBroadException try: error_str, traceback_str = six.text_type(exception), traceback.format_exc() except Exception: self.metrics.counter('server.error.error_formatting_failure').increment() error_str, traceback_str = 'Error formatting error', traceback.format_exc() # Log what happened self.logger.exception(exception) if not isinstance(traceback_str, six.text_type): try: # Try to traceback_str = traceback_str.decode('utf-8') except UnicodeDecodeError: traceback_str = 'UnicodeDecodeError: Traceback could not be decoded' # Make a bare bones job response error_dict = { 'code': ERROR_CODE_SERVER_ERROR, 'message': 'Internal server error: %s' % error_str, 'traceback': traceback_str, } if variables is not None: # noinspection PyBroadException try: error_dict['variables'] = {key: repr(value) for key, value in variables.items()} except Exception: self.metrics.counter('server.error.variable_formatting_failure').increment() error_dict['variables'] = 'Error formatting variables' return JobResponse(errors=[error_dict])
python
def handle_job_exception(self, exception, variables=None): """ Makes and returns a last-ditch error response. :param exception: The exception that happened :type exception: Exception :param variables: A dictionary of context-relevant variables to include in the error response :type variables: dict :return: A `JobResponse` object :rtype: JobResponse """ # Get the error and traceback if we can # noinspection PyBroadException try: error_str, traceback_str = six.text_type(exception), traceback.format_exc() except Exception: self.metrics.counter('server.error.error_formatting_failure').increment() error_str, traceback_str = 'Error formatting error', traceback.format_exc() # Log what happened self.logger.exception(exception) if not isinstance(traceback_str, six.text_type): try: # Try to traceback_str = traceback_str.decode('utf-8') except UnicodeDecodeError: traceback_str = 'UnicodeDecodeError: Traceback could not be decoded' # Make a bare bones job response error_dict = { 'code': ERROR_CODE_SERVER_ERROR, 'message': 'Internal server error: %s' % error_str, 'traceback': traceback_str, } if variables is not None: # noinspection PyBroadException try: error_dict['variables'] = {key: repr(value) for key, value in variables.items()} except Exception: self.metrics.counter('server.error.variable_formatting_failure').increment() error_dict['variables'] = 'Error formatting variables' return JobResponse(errors=[error_dict])
[ "def", "handle_job_exception", "(", "self", ",", "exception", ",", "variables", "=", "None", ")", ":", "# Get the error and traceback if we can", "# noinspection PyBroadException", "try", ":", "error_str", ",", "traceback_str", "=", "six", ".", "text_type", "(", "exception", ")", ",", "traceback", ".", "format_exc", "(", ")", "except", "Exception", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.error_formatting_failure'", ")", ".", "increment", "(", ")", "error_str", ",", "traceback_str", "=", "'Error formatting error'", ",", "traceback", ".", "format_exc", "(", ")", "# Log what happened", "self", ".", "logger", ".", "exception", "(", "exception", ")", "if", "not", "isinstance", "(", "traceback_str", ",", "six", ".", "text_type", ")", ":", "try", ":", "# Try to", "traceback_str", "=", "traceback_str", ".", "decode", "(", "'utf-8'", ")", "except", "UnicodeDecodeError", ":", "traceback_str", "=", "'UnicodeDecodeError: Traceback could not be decoded'", "# Make a bare bones job response", "error_dict", "=", "{", "'code'", ":", "ERROR_CODE_SERVER_ERROR", ",", "'message'", ":", "'Internal server error: %s'", "%", "error_str", ",", "'traceback'", ":", "traceback_str", ",", "}", "if", "variables", "is", "not", "None", ":", "# noinspection PyBroadException", "try", ":", "error_dict", "[", "'variables'", "]", "=", "{", "key", ":", "repr", "(", "value", ")", "for", "key", ",", "value", "in", "variables", ".", "items", "(", ")", "}", "except", "Exception", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.variable_formatting_failure'", ")", ".", "increment", "(", ")", "error_dict", "[", "'variables'", "]", "=", "'Error formatting variables'", "return", "JobResponse", "(", "errors", "=", "[", "error_dict", "]", ")" ]
Makes and returns a last-ditch error response. :param exception: The exception that happened :type exception: Exception :param variables: A dictionary of context-relevant variables to include in the error response :type variables: dict :return: A `JobResponse` object :rtype: JobResponse
[ "Makes", "and", "returns", "a", "last", "-", "ditch", "error", "response", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L341-L383
train
eventbrite/pysoa
pysoa/server/server.py
Server.execute_job
def execute_job(self, job_request): """ Processes and runs the action requests contained in the job and returns a `JobResponse`. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse """ # Run the Job's Actions job_response = JobResponse() job_switches = RequestSwitchSet(job_request['context']['switches']) for i, raw_action_request in enumerate(job_request['actions']): action_request = EnrichedActionRequest( action=raw_action_request['action'], body=raw_action_request.get('body', None), switches=job_switches, context=job_request['context'], control=job_request['control'], client=job_request['client'], async_event_loop=job_request['async_event_loop'], run_coroutine=job_request['run_coroutine'], ) action_in_class_map = action_request.action in self.action_class_map if action_in_class_map or action_request.action in ('status', 'introspect'): # Get action to run if action_in_class_map: action = self.action_class_map[action_request.action](self.settings) elif action_request.action == 'introspect': from pysoa.server.action.introspection import IntrospectionAction action = IntrospectionAction(server=self) else: if not self._default_status_action_class: from pysoa.server.action.status import make_default_status_action_class self._default_status_action_class = make_default_status_action_class(self.__class__) action = self._default_status_action_class(self.settings) # Wrap it in middleware wrapper = self.make_middleware_stack( [m.action for m in self.middleware], action, ) # Execute the middleware stack try: action_response = wrapper(action_request) except ActionError as e: # Error: an error was thrown while running the Action (or Action middleware) action_response = ActionResponse( action=action_request.action, errors=e.errors, ) else: # Error: Action not found. 
action_response = ActionResponse( action=action_request.action, errors=[Error( code=ERROR_CODE_UNKNOWN, message='The action "{}" was not found on this server.'.format(action_request.action), field='action', )], ) job_response.actions.append(action_response) if ( action_response.errors and not job_request['control'].get('continue_on_error', False) ): # Quit running Actions if an error occurred and continue_on_error is False break return job_response
python
def execute_job(self, job_request): """ Processes and runs the action requests contained in the job and returns a `JobResponse`. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse """ # Run the Job's Actions job_response = JobResponse() job_switches = RequestSwitchSet(job_request['context']['switches']) for i, raw_action_request in enumerate(job_request['actions']): action_request = EnrichedActionRequest( action=raw_action_request['action'], body=raw_action_request.get('body', None), switches=job_switches, context=job_request['context'], control=job_request['control'], client=job_request['client'], async_event_loop=job_request['async_event_loop'], run_coroutine=job_request['run_coroutine'], ) action_in_class_map = action_request.action in self.action_class_map if action_in_class_map or action_request.action in ('status', 'introspect'): # Get action to run if action_in_class_map: action = self.action_class_map[action_request.action](self.settings) elif action_request.action == 'introspect': from pysoa.server.action.introspection import IntrospectionAction action = IntrospectionAction(server=self) else: if not self._default_status_action_class: from pysoa.server.action.status import make_default_status_action_class self._default_status_action_class = make_default_status_action_class(self.__class__) action = self._default_status_action_class(self.settings) # Wrap it in middleware wrapper = self.make_middleware_stack( [m.action for m in self.middleware], action, ) # Execute the middleware stack try: action_response = wrapper(action_request) except ActionError as e: # Error: an error was thrown while running the Action (or Action middleware) action_response = ActionResponse( action=action_request.action, errors=e.errors, ) else: # Error: Action not found. 
action_response = ActionResponse( action=action_request.action, errors=[Error( code=ERROR_CODE_UNKNOWN, message='The action "{}" was not found on this server.'.format(action_request.action), field='action', )], ) job_response.actions.append(action_response) if ( action_response.errors and not job_request['control'].get('continue_on_error', False) ): # Quit running Actions if an error occurred and continue_on_error is False break return job_response
[ "def", "execute_job", "(", "self", ",", "job_request", ")", ":", "# Run the Job's Actions", "job_response", "=", "JobResponse", "(", ")", "job_switches", "=", "RequestSwitchSet", "(", "job_request", "[", "'context'", "]", "[", "'switches'", "]", ")", "for", "i", ",", "raw_action_request", "in", "enumerate", "(", "job_request", "[", "'actions'", "]", ")", ":", "action_request", "=", "EnrichedActionRequest", "(", "action", "=", "raw_action_request", "[", "'action'", "]", ",", "body", "=", "raw_action_request", ".", "get", "(", "'body'", ",", "None", ")", ",", "switches", "=", "job_switches", ",", "context", "=", "job_request", "[", "'context'", "]", ",", "control", "=", "job_request", "[", "'control'", "]", ",", "client", "=", "job_request", "[", "'client'", "]", ",", "async_event_loop", "=", "job_request", "[", "'async_event_loop'", "]", ",", "run_coroutine", "=", "job_request", "[", "'run_coroutine'", "]", ",", ")", "action_in_class_map", "=", "action_request", ".", "action", "in", "self", ".", "action_class_map", "if", "action_in_class_map", "or", "action_request", ".", "action", "in", "(", "'status'", ",", "'introspect'", ")", ":", "# Get action to run", "if", "action_in_class_map", ":", "action", "=", "self", ".", "action_class_map", "[", "action_request", ".", "action", "]", "(", "self", ".", "settings", ")", "elif", "action_request", ".", "action", "==", "'introspect'", ":", "from", "pysoa", ".", "server", ".", "action", ".", "introspection", "import", "IntrospectionAction", "action", "=", "IntrospectionAction", "(", "server", "=", "self", ")", "else", ":", "if", "not", "self", ".", "_default_status_action_class", ":", "from", "pysoa", ".", "server", ".", "action", ".", "status", "import", "make_default_status_action_class", "self", ".", "_default_status_action_class", "=", "make_default_status_action_class", "(", "self", ".", "__class__", ")", "action", "=", "self", ".", "_default_status_action_class", "(", "self", ".", "settings", ")", "# Wrap it in 
middleware", "wrapper", "=", "self", ".", "make_middleware_stack", "(", "[", "m", ".", "action", "for", "m", "in", "self", ".", "middleware", "]", ",", "action", ",", ")", "# Execute the middleware stack", "try", ":", "action_response", "=", "wrapper", "(", "action_request", ")", "except", "ActionError", "as", "e", ":", "# Error: an error was thrown while running the Action (or Action middleware)", "action_response", "=", "ActionResponse", "(", "action", "=", "action_request", ".", "action", ",", "errors", "=", "e", ".", "errors", ",", ")", "else", ":", "# Error: Action not found.", "action_response", "=", "ActionResponse", "(", "action", "=", "action_request", ".", "action", ",", "errors", "=", "[", "Error", "(", "code", "=", "ERROR_CODE_UNKNOWN", ",", "message", "=", "'The action \"{}\" was not found on this server.'", ".", "format", "(", "action_request", ".", "action", ")", ",", "field", "=", "'action'", ",", ")", "]", ",", ")", "job_response", ".", "actions", ".", "append", "(", "action_response", ")", "if", "(", "action_response", ".", "errors", "and", "not", "job_request", "[", "'control'", "]", ".", "get", "(", "'continue_on_error'", ",", "False", ")", ")", ":", "# Quit running Actions if an error occurred and continue_on_error is False", "break", "return", "job_response" ]
Processes and runs the action requests contained in the job and returns a `JobResponse`. :param job_request: The job request :type job_request: dict :return: A `JobResponse` object :rtype: JobResponse
[ "Processes", "and", "runs", "the", "action", "requests", "contained", "in", "the", "job", "and", "returns", "a", "JobResponse", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L397-L467
train
eventbrite/pysoa
pysoa/server/server.py
Server.handle_shutdown_signal
def handle_shutdown_signal(self, *_): """ Handles the reception of a shutdown signal. """ if self.shutting_down: self.logger.warning('Received double interrupt, forcing shutdown') sys.exit(1) else: self.logger.warning('Received interrupt, initiating shutdown') self.shutting_down = True
python
def handle_shutdown_signal(self, *_): """ Handles the reception of a shutdown signal. """ if self.shutting_down: self.logger.warning('Received double interrupt, forcing shutdown') sys.exit(1) else: self.logger.warning('Received interrupt, initiating shutdown') self.shutting_down = True
[ "def", "handle_shutdown_signal", "(", "self", ",", "*", "_", ")", ":", "if", "self", ".", "shutting_down", ":", "self", ".", "logger", ".", "warning", "(", "'Received double interrupt, forcing shutdown'", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "self", ".", "logger", ".", "warning", "(", "'Received interrupt, initiating shutdown'", ")", "self", ".", "shutting_down", "=", "True" ]
Handles the reception of a shutdown signal.
[ "Handles", "the", "reception", "of", "a", "shutdown", "signal", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L469-L478
train
eventbrite/pysoa
pysoa/server/server.py
Server.harakiri
def harakiri(self, *_): """ Handles the reception of a timeout signal indicating that a request has been processing for too long, as defined by the Harakiri settings. """ if self.shutting_down: self.logger.warning('Graceful shutdown failed after {}s. Exiting now!'.format( self.settings['harakiri']['shutdown_grace'] )) sys.exit(1) else: self.logger.warning('No activity during {}s, triggering harakiri with grace {}s'.format( self.settings['harakiri']['timeout'], self.settings['harakiri']['shutdown_grace'], )) self.shutting_down = True signal.alarm(self.settings['harakiri']['shutdown_grace'])
python
def harakiri(self, *_): """ Handles the reception of a timeout signal indicating that a request has been processing for too long, as defined by the Harakiri settings. """ if self.shutting_down: self.logger.warning('Graceful shutdown failed after {}s. Exiting now!'.format( self.settings['harakiri']['shutdown_grace'] )) sys.exit(1) else: self.logger.warning('No activity during {}s, triggering harakiri with grace {}s'.format( self.settings['harakiri']['timeout'], self.settings['harakiri']['shutdown_grace'], )) self.shutting_down = True signal.alarm(self.settings['harakiri']['shutdown_grace'])
[ "def", "harakiri", "(", "self", ",", "*", "_", ")", ":", "if", "self", ".", "shutting_down", ":", "self", ".", "logger", ".", "warning", "(", "'Graceful shutdown failed after {}s. Exiting now!'", ".", "format", "(", "self", ".", "settings", "[", "'harakiri'", "]", "[", "'shutdown_grace'", "]", ")", ")", "sys", ".", "exit", "(", "1", ")", "else", ":", "self", ".", "logger", ".", "warning", "(", "'No activity during {}s, triggering harakiri with grace {}s'", ".", "format", "(", "self", ".", "settings", "[", "'harakiri'", "]", "[", "'timeout'", "]", ",", "self", ".", "settings", "[", "'harakiri'", "]", "[", "'shutdown_grace'", "]", ",", ")", ")", "self", ".", "shutting_down", "=", "True", "signal", ".", "alarm", "(", "self", ".", "settings", "[", "'harakiri'", "]", "[", "'shutdown_grace'", "]", ")" ]
Handles the reception of a timeout signal indicating that a request has been processing for too long, as defined by the Harakiri settings.
[ "Handles", "the", "reception", "of", "a", "timeout", "signal", "indicating", "that", "a", "request", "has", "been", "processing", "for", "too", "long", "as", "defined", "by", "the", "Harakiri", "settings", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L480-L496
train
eventbrite/pysoa
pysoa/server/server.py
Server.run
def run(self): """ Starts the server run loop and returns after the server shuts down due to a shutdown-request, Harakiri signal, or unhandled exception. See the documentation for `Server.main` for full details on the chain of `Server` method calls. """ self.logger.info( 'Service "{service}" server starting up, pysoa version {pysoa}, listening on transport {transport}.'.format( service=self.service_name, pysoa=pysoa.version.__version__, transport=self.transport, ) ) self.setup() self.metrics.commit() if self._async_event_loop_thread: self._async_event_loop_thread.start() self._create_heartbeat_file() signal.signal(signal.SIGINT, self.handle_shutdown_signal) signal.signal(signal.SIGTERM, self.handle_shutdown_signal) signal.signal(signal.SIGALRM, self.harakiri) # noinspection PyBroadException try: while not self.shutting_down: # reset harakiri timeout signal.alarm(self.settings['harakiri']['timeout']) # Get, process, and execute the next JobRequest self.handle_next_request() self.metrics.commit() except MessageReceiveError: self.logger.exception('Error receiving message from transport; shutting down') except Exception: self.metrics.counter('server.error.unknown').increment() self.logger.exception('Unhandled server error; shutting down') finally: self.metrics.commit() self.logger.info('Server shutting down') if self._async_event_loop_thread: self._async_event_loop_thread.join() self._close_django_caches(shutdown=True) self._delete_heartbeat_file() self.logger.info('Server shutdown complete')
python
def run(self): """ Starts the server run loop and returns after the server shuts down due to a shutdown-request, Harakiri signal, or unhandled exception. See the documentation for `Server.main` for full details on the chain of `Server` method calls. """ self.logger.info( 'Service "{service}" server starting up, pysoa version {pysoa}, listening on transport {transport}.'.format( service=self.service_name, pysoa=pysoa.version.__version__, transport=self.transport, ) ) self.setup() self.metrics.commit() if self._async_event_loop_thread: self._async_event_loop_thread.start() self._create_heartbeat_file() signal.signal(signal.SIGINT, self.handle_shutdown_signal) signal.signal(signal.SIGTERM, self.handle_shutdown_signal) signal.signal(signal.SIGALRM, self.harakiri) # noinspection PyBroadException try: while not self.shutting_down: # reset harakiri timeout signal.alarm(self.settings['harakiri']['timeout']) # Get, process, and execute the next JobRequest self.handle_next_request() self.metrics.commit() except MessageReceiveError: self.logger.exception('Error receiving message from transport; shutting down') except Exception: self.metrics.counter('server.error.unknown').increment() self.logger.exception('Unhandled server error; shutting down') finally: self.metrics.commit() self.logger.info('Server shutting down') if self._async_event_loop_thread: self._async_event_loop_thread.join() self._close_django_caches(shutdown=True) self._delete_heartbeat_file() self.logger.info('Server shutdown complete')
[ "def", "run", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "'Service \"{service}\" server starting up, pysoa version {pysoa}, listening on transport {transport}.'", ".", "format", "(", "service", "=", "self", ".", "service_name", ",", "pysoa", "=", "pysoa", ".", "version", ".", "__version__", ",", "transport", "=", "self", ".", "transport", ",", ")", ")", "self", ".", "setup", "(", ")", "self", ".", "metrics", ".", "commit", "(", ")", "if", "self", ".", "_async_event_loop_thread", ":", "self", ".", "_async_event_loop_thread", ".", "start", "(", ")", "self", ".", "_create_heartbeat_file", "(", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "self", ".", "handle_shutdown_signal", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "self", ".", "handle_shutdown_signal", ")", "signal", ".", "signal", "(", "signal", ".", "SIGALRM", ",", "self", ".", "harakiri", ")", "# noinspection PyBroadException", "try", ":", "while", "not", "self", ".", "shutting_down", ":", "# reset harakiri timeout", "signal", ".", "alarm", "(", "self", ".", "settings", "[", "'harakiri'", "]", "[", "'timeout'", "]", ")", "# Get, process, and execute the next JobRequest", "self", ".", "handle_next_request", "(", ")", "self", ".", "metrics", ".", "commit", "(", ")", "except", "MessageReceiveError", ":", "self", ".", "logger", ".", "exception", "(", "'Error receiving message from transport; shutting down'", ")", "except", "Exception", ":", "self", ".", "metrics", ".", "counter", "(", "'server.error.unknown'", ")", ".", "increment", "(", ")", "self", ".", "logger", ".", "exception", "(", "'Unhandled server error; shutting down'", ")", "finally", ":", "self", ".", "metrics", ".", "commit", "(", ")", "self", ".", "logger", ".", "info", "(", "'Server shutting down'", ")", "if", "self", ".", "_async_event_loop_thread", ":", "self", ".", "_async_event_loop_thread", ".", "join", "(", ")", "self", ".", "_close_django_caches", "(", "shutdown", "=", "True", 
")", "self", ".", "_delete_heartbeat_file", "(", ")", "self", ".", "logger", ".", "info", "(", "'Server shutdown complete'", ")" ]
Starts the server run loop and returns after the server shuts down due to a shutdown-request, Harakiri signal, or unhandled exception. See the documentation for `Server.main` for full details on the chain of `Server` method calls.
[ "Starts", "the", "server", "run", "loop", "and", "returns", "after", "the", "server", "shuts", "down", "due", "to", "a", "shutdown", "-", "request", "Harakiri", "signal", "or", "unhandled", "exception", ".", "See", "the", "documentation", "for", "Server", ".", "main", "for", "full", "details", "on", "the", "chain", "of", "Server", "method", "calls", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/server/server.py#L611-L658
train
eventbrite/pysoa
pysoa/common/logging.py
SyslogHandler.emit
def emit(self, record): """ Emits a record. The record is sent carefully, according to the following rules, to ensure that data is not lost by exceeding the MTU of the connection. - If the byte-encoded record length plus prefix length plus suffix length plus priority length is less than the maximum allowed length, then a single packet is sent, containing the priority, prefix, full record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "truncate," the record is cleanly truncated (being careful not to split in the middle of a multi-byte character), and then a single packet is sent, containing the priority, prefix, truncated record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "fragment," the record preamble (things like file name, logger name, correlation ID, etc.) is extracted from the start of the record to calculate a new chunk length. The remainder of the record (which should just be the true message and any exception info) is then chunked (being careful not to split in the middle of a multi-byte character) into lengths less than or equal to the chunk length, and then the record is sent as multiple packets, each packet containing the priority, prefix, record preamble, message chunk, and suffix, in that order. 
""" # noinspection PyBroadException try: formatted_message = self.format(record) encoded_message = formatted_message.encode('utf-8') prefix = suffix = b'' if getattr(self, 'ident', False): prefix = self.ident.encode('utf-8') if isinstance(self.ident, six.text_type) else self.ident if getattr(self, 'append_nul', True): suffix = '\000'.encode('utf-8') priority = '<{:d}>'.format( self.encodePriority(self.facility, self.mapPriority(record.levelname)) ).encode('utf-8') message_length = len(encoded_message) message_length_limit = self.maximum_length - len(prefix) - len(suffix) - len(priority) if message_length < message_length_limit: parts = [priority + prefix + encoded_message + suffix] elif self.overflow == self.OVERFLOW_BEHAVIOR_TRUNCATE: truncated_message, _ = self._cleanly_slice_encoded_string(encoded_message, message_length_limit) parts = [priority + prefix + truncated_message + suffix] else: # This can't work perfectly, but it's pretty unusual for a message to go before machine-parseable parts # in the formatted record. So we split the record on the message part. Everything before the split # becomes the preamble and gets repeated every packet. Everything after the split gets chunked. There's # no reason to match on more than the first 40 characters of the message--the chances of that matching # the wrong part of the record are astronomical. try: index = formatted_message.index(record.getMessage()[:40]) start_of_message, to_chunk = formatted_message[:index], formatted_message[index:] except (TypeError, ValueError): # We can't locate the message in the formatted record? That's unfortunate. Let's make something up. start_of_message, to_chunk = '{} '.format(formatted_message[:30]), formatted_message[30:] start_of_message = start_of_message.encode('utf-8') to_chunk = to_chunk.encode('utf-8') # 12 is the length of "... 
(cont'd)" in bytes chunk_length_limit = message_length_limit - len(start_of_message) - 12 i = 1 parts = [] remaining_message = to_chunk while remaining_message: message_id = b'' subtractor = 0 if i > 1: # If this is not the first message, we determine message # so that we can subtract that length message_id = '{}'.format(i).encode('utf-8') # 14 is the length of "(cont'd #) ..." in bytes subtractor = 14 + len(message_id) chunk, remaining_message = self._cleanly_slice_encoded_string( remaining_message, chunk_length_limit - subtractor, ) if i > 1: # If this is not the first message, we prepend the chunk to indicate continuation chunk = b"(cont'd #" + message_id + b') ...' + chunk i += 1 if remaining_message: # If this is not the last message, we append the chunk to indicate continuation chunk = chunk + b"... (cont'd)" parts.append(priority + prefix + start_of_message + chunk + suffix) self._send(parts) except Exception: self.handleError(record)
python
def emit(self, record): """ Emits a record. The record is sent carefully, according to the following rules, to ensure that data is not lost by exceeding the MTU of the connection. - If the byte-encoded record length plus prefix length plus suffix length plus priority length is less than the maximum allowed length, then a single packet is sent, containing the priority, prefix, full record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "truncate," the record is cleanly truncated (being careful not to split in the middle of a multi-byte character), and then a single packet is sent, containing the priority, prefix, truncated record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "fragment," the record preamble (things like file name, logger name, correlation ID, etc.) is extracted from the start of the record to calculate a new chunk length. The remainder of the record (which should just be the true message and any exception info) is then chunked (being careful not to split in the middle of a multi-byte character) into lengths less than or equal to the chunk length, and then the record is sent as multiple packets, each packet containing the priority, prefix, record preamble, message chunk, and suffix, in that order. 
""" # noinspection PyBroadException try: formatted_message = self.format(record) encoded_message = formatted_message.encode('utf-8') prefix = suffix = b'' if getattr(self, 'ident', False): prefix = self.ident.encode('utf-8') if isinstance(self.ident, six.text_type) else self.ident if getattr(self, 'append_nul', True): suffix = '\000'.encode('utf-8') priority = '<{:d}>'.format( self.encodePriority(self.facility, self.mapPriority(record.levelname)) ).encode('utf-8') message_length = len(encoded_message) message_length_limit = self.maximum_length - len(prefix) - len(suffix) - len(priority) if message_length < message_length_limit: parts = [priority + prefix + encoded_message + suffix] elif self.overflow == self.OVERFLOW_BEHAVIOR_TRUNCATE: truncated_message, _ = self._cleanly_slice_encoded_string(encoded_message, message_length_limit) parts = [priority + prefix + truncated_message + suffix] else: # This can't work perfectly, but it's pretty unusual for a message to go before machine-parseable parts # in the formatted record. So we split the record on the message part. Everything before the split # becomes the preamble and gets repeated every packet. Everything after the split gets chunked. There's # no reason to match on more than the first 40 characters of the message--the chances of that matching # the wrong part of the record are astronomical. try: index = formatted_message.index(record.getMessage()[:40]) start_of_message, to_chunk = formatted_message[:index], formatted_message[index:] except (TypeError, ValueError): # We can't locate the message in the formatted record? That's unfortunate. Let's make something up. start_of_message, to_chunk = '{} '.format(formatted_message[:30]), formatted_message[30:] start_of_message = start_of_message.encode('utf-8') to_chunk = to_chunk.encode('utf-8') # 12 is the length of "... 
(cont'd)" in bytes chunk_length_limit = message_length_limit - len(start_of_message) - 12 i = 1 parts = [] remaining_message = to_chunk while remaining_message: message_id = b'' subtractor = 0 if i > 1: # If this is not the first message, we determine message # so that we can subtract that length message_id = '{}'.format(i).encode('utf-8') # 14 is the length of "(cont'd #) ..." in bytes subtractor = 14 + len(message_id) chunk, remaining_message = self._cleanly_slice_encoded_string( remaining_message, chunk_length_limit - subtractor, ) if i > 1: # If this is not the first message, we prepend the chunk to indicate continuation chunk = b"(cont'd #" + message_id + b') ...' + chunk i += 1 if remaining_message: # If this is not the last message, we append the chunk to indicate continuation chunk = chunk + b"... (cont'd)" parts.append(priority + prefix + start_of_message + chunk + suffix) self._send(parts) except Exception: self.handleError(record)
[ "def", "emit", "(", "self", ",", "record", ")", ":", "# noinspection PyBroadException", "try", ":", "formatted_message", "=", "self", ".", "format", "(", "record", ")", "encoded_message", "=", "formatted_message", ".", "encode", "(", "'utf-8'", ")", "prefix", "=", "suffix", "=", "b''", "if", "getattr", "(", "self", ",", "'ident'", ",", "False", ")", ":", "prefix", "=", "self", ".", "ident", ".", "encode", "(", "'utf-8'", ")", "if", "isinstance", "(", "self", ".", "ident", ",", "six", ".", "text_type", ")", "else", "self", ".", "ident", "if", "getattr", "(", "self", ",", "'append_nul'", ",", "True", ")", ":", "suffix", "=", "'\\000'", ".", "encode", "(", "'utf-8'", ")", "priority", "=", "'<{:d}>'", ".", "format", "(", "self", ".", "encodePriority", "(", "self", ".", "facility", ",", "self", ".", "mapPriority", "(", "record", ".", "levelname", ")", ")", ")", ".", "encode", "(", "'utf-8'", ")", "message_length", "=", "len", "(", "encoded_message", ")", "message_length_limit", "=", "self", ".", "maximum_length", "-", "len", "(", "prefix", ")", "-", "len", "(", "suffix", ")", "-", "len", "(", "priority", ")", "if", "message_length", "<", "message_length_limit", ":", "parts", "=", "[", "priority", "+", "prefix", "+", "encoded_message", "+", "suffix", "]", "elif", "self", ".", "overflow", "==", "self", ".", "OVERFLOW_BEHAVIOR_TRUNCATE", ":", "truncated_message", ",", "_", "=", "self", ".", "_cleanly_slice_encoded_string", "(", "encoded_message", ",", "message_length_limit", ")", "parts", "=", "[", "priority", "+", "prefix", "+", "truncated_message", "+", "suffix", "]", "else", ":", "# This can't work perfectly, but it's pretty unusual for a message to go before machine-parseable parts", "# in the formatted record. So we split the record on the message part. Everything before the split", "# becomes the preamble and gets repeated every packet. Everything after the split gets chunked. 
There's", "# no reason to match on more than the first 40 characters of the message--the chances of that matching", "# the wrong part of the record are astronomical.", "try", ":", "index", "=", "formatted_message", ".", "index", "(", "record", ".", "getMessage", "(", ")", "[", ":", "40", "]", ")", "start_of_message", ",", "to_chunk", "=", "formatted_message", "[", ":", "index", "]", ",", "formatted_message", "[", "index", ":", "]", "except", "(", "TypeError", ",", "ValueError", ")", ":", "# We can't locate the message in the formatted record? That's unfortunate. Let's make something up.", "start_of_message", ",", "to_chunk", "=", "'{} '", ".", "format", "(", "formatted_message", "[", ":", "30", "]", ")", ",", "formatted_message", "[", "30", ":", "]", "start_of_message", "=", "start_of_message", ".", "encode", "(", "'utf-8'", ")", "to_chunk", "=", "to_chunk", ".", "encode", "(", "'utf-8'", ")", "# 12 is the length of \"... (cont'd)\" in bytes", "chunk_length_limit", "=", "message_length_limit", "-", "len", "(", "start_of_message", ")", "-", "12", "i", "=", "1", "parts", "=", "[", "]", "remaining_message", "=", "to_chunk", "while", "remaining_message", ":", "message_id", "=", "b''", "subtractor", "=", "0", "if", "i", ">", "1", ":", "# If this is not the first message, we determine message # so that we can subtract that length", "message_id", "=", "'{}'", ".", "format", "(", "i", ")", ".", "encode", "(", "'utf-8'", ")", "# 14 is the length of \"(cont'd #) ...\" in bytes", "subtractor", "=", "14", "+", "len", "(", "message_id", ")", "chunk", ",", "remaining_message", "=", "self", ".", "_cleanly_slice_encoded_string", "(", "remaining_message", ",", "chunk_length_limit", "-", "subtractor", ",", ")", "if", "i", ">", "1", ":", "# If this is not the first message, we prepend the chunk to indicate continuation", "chunk", "=", "b\"(cont'd #\"", "+", "message_id", "+", "b') ...'", "+", "chunk", "i", "+=", "1", "if", "remaining_message", ":", "# If this is not the last message, 
we append the chunk to indicate continuation", "chunk", "=", "chunk", "+", "b\"... (cont'd)\"", "parts", ".", "append", "(", "priority", "+", "prefix", "+", "start_of_message", "+", "chunk", "+", "suffix", ")", "self", ".", "_send", "(", "parts", ")", "except", "Exception", ":", "self", ".", "handleError", "(", "record", ")" ]
Emits a record. The record is sent carefully, according to the following rules, to ensure that data is not lost by exceeding the MTU of the connection. - If the byte-encoded record length plus prefix length plus suffix length plus priority length is less than the maximum allowed length, then a single packet is sent, containing the priority, prefix, full record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "truncate," the record is cleanly truncated (being careful not to split in the middle of a multi-byte character), and then a single packet is sent, containing the priority, prefix, truncated record, and suffix, in that order. - If it's greater than or equal to the maximum allowed length and the overflow behavior is set to "fragment," the record preamble (things like file name, logger name, correlation ID, etc.) is extracted from the start of the record to calculate a new chunk length. The remainder of the record (which should just be the true message and any exception info) is then chunked (being careful not to split in the middle of a multi-byte character) into lengths less than or equal to the chunk length, and then the record is sent as multiple packets, each packet containing the priority, prefix, record preamble, message chunk, and suffix, in that order.
[ "Emits", "a", "record", ".", "The", "record", "is", "sent", "carefully", "according", "to", "the", "following", "rules", "to", "ensure", "that", "data", "is", "not", "lost", "by", "exceeding", "the", "MTU", "of", "the", "connection", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/common/logging.py#L270-L359
train
eventbrite/pysoa
pysoa/client/expander.py
TypeNode.add_expansion
def add_expansion(self, expansion_node): """ Add a child expansion node to the type node's expansions. If an expansion node with the same name is already present in type node's expansions, the new and existing expansion node's children are merged. :param expansion_node: The expansion node to add :type expansion_node: ExpansionNode """ # Check for existing expansion node with the same name existing_expansion_node = self.get_expansion(expansion_node.name) if existing_expansion_node: # Expansion node exists with the same name, merge child expansions. for child_expansion in expansion_node.expansions: existing_expansion_node.add_expansion(child_expansion) else: # Add the expansion node. self._expansions[expansion_node.name] = expansion_node
python
def add_expansion(self, expansion_node): """ Add a child expansion node to the type node's expansions. If an expansion node with the same name is already present in type node's expansions, the new and existing expansion node's children are merged. :param expansion_node: The expansion node to add :type expansion_node: ExpansionNode """ # Check for existing expansion node with the same name existing_expansion_node = self.get_expansion(expansion_node.name) if existing_expansion_node: # Expansion node exists with the same name, merge child expansions. for child_expansion in expansion_node.expansions: existing_expansion_node.add_expansion(child_expansion) else: # Add the expansion node. self._expansions[expansion_node.name] = expansion_node
[ "def", "add_expansion", "(", "self", ",", "expansion_node", ")", ":", "# Check for existing expansion node with the same name", "existing_expansion_node", "=", "self", ".", "get_expansion", "(", "expansion_node", ".", "name", ")", "if", "existing_expansion_node", ":", "# Expansion node exists with the same name, merge child expansions.", "for", "child_expansion", "in", "expansion_node", ".", "expansions", ":", "existing_expansion_node", ".", "add_expansion", "(", "child_expansion", ")", "else", ":", "# Add the expansion node.", "self", ".", "_expansions", "[", "expansion_node", ".", "name", "]", "=", "expansion_node" ]
Add a child expansion node to the type node's expansions. If an expansion node with the same name is already present in type node's expansions, the new and existing expansion node's children are merged. :param expansion_node: The expansion node to add :type expansion_node: ExpansionNode
[ "Add", "a", "child", "expansion", "node", "to", "the", "type", "node", "s", "expansions", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L114-L132
train
eventbrite/pysoa
pysoa/client/expander.py
TypeNode.find_objects
def find_objects(self, obj): """ Find all objects in obj that match the type of the type node. :param obj: A dictionary or list of dictionaries to search, recursively :type obj: union[dict, list[dict]] :return: a list of dictionary objects that have a "_type" key value that matches the type of this node. :rtype: list[dict] """ objects = [] if isinstance(obj, dict): # obj is a dictionary, so it is a potential match... object_type = obj.get('_type') if object_type == self.type: # Found a match! objects.append(obj) else: # Not a match. Check each value of the dictionary for matches. for sub_object in six.itervalues(obj): objects.extend(self.find_objects(sub_object)) elif isinstance(obj, list): # obj is a list. Check each element of the list for matches. for sub_object in obj: objects.extend(self.find_objects(sub_object)) return objects
python
def find_objects(self, obj): """ Find all objects in obj that match the type of the type node. :param obj: A dictionary or list of dictionaries to search, recursively :type obj: union[dict, list[dict]] :return: a list of dictionary objects that have a "_type" key value that matches the type of this node. :rtype: list[dict] """ objects = [] if isinstance(obj, dict): # obj is a dictionary, so it is a potential match... object_type = obj.get('_type') if object_type == self.type: # Found a match! objects.append(obj) else: # Not a match. Check each value of the dictionary for matches. for sub_object in six.itervalues(obj): objects.extend(self.find_objects(sub_object)) elif isinstance(obj, list): # obj is a list. Check each element of the list for matches. for sub_object in obj: objects.extend(self.find_objects(sub_object)) return objects
[ "def", "find_objects", "(", "self", ",", "obj", ")", ":", "objects", "=", "[", "]", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "# obj is a dictionary, so it is a potential match...", "object_type", "=", "obj", ".", "get", "(", "'_type'", ")", "if", "object_type", "==", "self", ".", "type", ":", "# Found a match!", "objects", ".", "append", "(", "obj", ")", "else", ":", "# Not a match. Check each value of the dictionary for matches.", "for", "sub_object", "in", "six", ".", "itervalues", "(", "obj", ")", ":", "objects", ".", "extend", "(", "self", ".", "find_objects", "(", "sub_object", ")", ")", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "# obj is a list. Check each element of the list for matches.", "for", "sub_object", "in", "obj", ":", "objects", ".", "extend", "(", "self", ".", "find_objects", "(", "sub_object", ")", ")", "return", "objects" ]
Find all objects in obj that match the type of the type node. :param obj: A dictionary or list of dictionaries to search, recursively :type obj: union[dict, list[dict]] :return: a list of dictionary objects that have a "_type" key value that matches the type of this node. :rtype: list[dict]
[ "Find", "all", "objects", "in", "obj", "that", "match", "the", "type", "of", "the", "type", "node", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L147-L174
train
eventbrite/pysoa
pysoa/client/expander.py
TypeNode.to_dict
def to_dict(self): """ Convert the tree node to its dictionary representation. :return: an expansion dictionary that represents the type and expansions of this tree node. :rtype dict[list[union[str, unicode]]] """ expansion_strings = [] for expansion in self.expansions: expansion_strings.extend(expansion.to_strings()) return { self.type: expansion_strings, }
python
def to_dict(self): """ Convert the tree node to its dictionary representation. :return: an expansion dictionary that represents the type and expansions of this tree node. :rtype dict[list[union[str, unicode]]] """ expansion_strings = [] for expansion in self.expansions: expansion_strings.extend(expansion.to_strings()) return { self.type: expansion_strings, }
[ "def", "to_dict", "(", "self", ")", ":", "expansion_strings", "=", "[", "]", "for", "expansion", "in", "self", ".", "expansions", ":", "expansion_strings", ".", "extend", "(", "expansion", ".", "to_strings", "(", ")", ")", "return", "{", "self", ".", "type", ":", "expansion_strings", ",", "}" ]
Convert the tree node to its dictionary representation. :return: an expansion dictionary that represents the type and expansions of this tree node. :rtype dict[list[union[str, unicode]]]
[ "Convert", "the", "tree", "node", "to", "its", "dictionary", "representation", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L183-L197
train
eventbrite/pysoa
pysoa/client/expander.py
ExpansionNode.to_strings
def to_strings(self): """ Convert the expansion node to a list of expansion strings. :return: a list of expansion strings that represent the leaf nodes of the expansion tree. :rtype: list[union[str, unicode]] """ result = [] if not self.expansions: result.append(self.name) else: for expansion in self.expansions: result.extend('{}.{}'.format(self.name, es) for es in expansion.to_strings()) return result
python
def to_strings(self): """ Convert the expansion node to a list of expansion strings. :return: a list of expansion strings that represent the leaf nodes of the expansion tree. :rtype: list[union[str, unicode]] """ result = [] if not self.expansions: result.append(self.name) else: for expansion in self.expansions: result.extend('{}.{}'.format(self.name, es) for es in expansion.to_strings()) return result
[ "def", "to_strings", "(", "self", ")", ":", "result", "=", "[", "]", "if", "not", "self", ".", "expansions", ":", "result", ".", "append", "(", "self", ".", "name", ")", "else", ":", "for", "expansion", "in", "self", ".", "expansions", ":", "result", ".", "extend", "(", "'{}.{}'", ".", "format", "(", "self", ".", "name", ",", "es", ")", "for", "es", "in", "expansion", ".", "to_strings", "(", ")", ")", "return", "result" ]
Convert the expansion node to a list of expansion strings. :return: a list of expansion strings that represent the leaf nodes of the expansion tree. :rtype: list[union[str, unicode]]
[ "Convert", "the", "expansion", "node", "to", "a", "list", "of", "expansion", "strings", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L252-L267
train
eventbrite/pysoa
pysoa/client/expander.py
ExpansionConverter.dict_to_trees
def dict_to_trees(self, expansion_dict): """ Convert an expansion dictionary to a list of expansion trees. :param expansion_dict: An expansion dictionary (see below) :type expansion_dict: dict :return: a list of expansion trees (`TreeNode` instances). :rtype: list[TreeNode] Expansion Dictionary Format: { "<type>": ["<expansion string>", ...], ... } <type> is the type of object to expand. <expansion string> is a string with the following format: <expansion string> => <expansion name>[.<expansion string>] """ trees = [] for node_type, expansion_list in six.iteritems(expansion_dict): type_node = TypeNode(node_type=node_type) for expansion_string in expansion_list: expansion_node = type_node for expansion_name in expansion_string.split('.'): child_expansion_node = expansion_node.get_expansion(expansion_name) if not child_expansion_node: type_expansion = self.type_expansions[expansion_node.type][expansion_name] type_route = self.type_routes[type_expansion['route']] if type_expansion['destination_field'] == type_expansion['source_field']: raise ValueError( 'Expansion configuration destination_field error: ' 'destination_field can not have the same name as the source_field: ' '{}'.format(type_expansion['source_field']) ) child_expansion_node = ExpansionNode( node_type=type_expansion['type'], name=expansion_name, source_field=type_expansion['source_field'], destination_field=type_expansion['destination_field'], service=type_route['service'], action=type_route['action'], request_field=type_route['request_field'], response_field=type_route['response_field'], raise_action_errors=type_expansion.get('raise_action_errors', False), ) expansion_node.add_expansion(child_expansion_node) expansion_node = child_expansion_node trees.append(type_node) return trees
python
def dict_to_trees(self, expansion_dict): """ Convert an expansion dictionary to a list of expansion trees. :param expansion_dict: An expansion dictionary (see below) :type expansion_dict: dict :return: a list of expansion trees (`TreeNode` instances). :rtype: list[TreeNode] Expansion Dictionary Format: { "<type>": ["<expansion string>", ...], ... } <type> is the type of object to expand. <expansion string> is a string with the following format: <expansion string> => <expansion name>[.<expansion string>] """ trees = [] for node_type, expansion_list in six.iteritems(expansion_dict): type_node = TypeNode(node_type=node_type) for expansion_string in expansion_list: expansion_node = type_node for expansion_name in expansion_string.split('.'): child_expansion_node = expansion_node.get_expansion(expansion_name) if not child_expansion_node: type_expansion = self.type_expansions[expansion_node.type][expansion_name] type_route = self.type_routes[type_expansion['route']] if type_expansion['destination_field'] == type_expansion['source_field']: raise ValueError( 'Expansion configuration destination_field error: ' 'destination_field can not have the same name as the source_field: ' '{}'.format(type_expansion['source_field']) ) child_expansion_node = ExpansionNode( node_type=type_expansion['type'], name=expansion_name, source_field=type_expansion['source_field'], destination_field=type_expansion['destination_field'], service=type_route['service'], action=type_route['action'], request_field=type_route['request_field'], response_field=type_route['response_field'], raise_action_errors=type_expansion.get('raise_action_errors', False), ) expansion_node.add_expansion(child_expansion_node) expansion_node = child_expansion_node trees.append(type_node) return trees
[ "def", "dict_to_trees", "(", "self", ",", "expansion_dict", ")", ":", "trees", "=", "[", "]", "for", "node_type", ",", "expansion_list", "in", "six", ".", "iteritems", "(", "expansion_dict", ")", ":", "type_node", "=", "TypeNode", "(", "node_type", "=", "node_type", ")", "for", "expansion_string", "in", "expansion_list", ":", "expansion_node", "=", "type_node", "for", "expansion_name", "in", "expansion_string", ".", "split", "(", "'.'", ")", ":", "child_expansion_node", "=", "expansion_node", ".", "get_expansion", "(", "expansion_name", ")", "if", "not", "child_expansion_node", ":", "type_expansion", "=", "self", ".", "type_expansions", "[", "expansion_node", ".", "type", "]", "[", "expansion_name", "]", "type_route", "=", "self", ".", "type_routes", "[", "type_expansion", "[", "'route'", "]", "]", "if", "type_expansion", "[", "'destination_field'", "]", "==", "type_expansion", "[", "'source_field'", "]", ":", "raise", "ValueError", "(", "'Expansion configuration destination_field error: '", "'destination_field can not have the same name as the source_field: '", "'{}'", ".", "format", "(", "type_expansion", "[", "'source_field'", "]", ")", ")", "child_expansion_node", "=", "ExpansionNode", "(", "node_type", "=", "type_expansion", "[", "'type'", "]", ",", "name", "=", "expansion_name", ",", "source_field", "=", "type_expansion", "[", "'source_field'", "]", ",", "destination_field", "=", "type_expansion", "[", "'destination_field'", "]", ",", "service", "=", "type_route", "[", "'service'", "]", ",", "action", "=", "type_route", "[", "'action'", "]", ",", "request_field", "=", "type_route", "[", "'request_field'", "]", ",", "response_field", "=", "type_route", "[", "'response_field'", "]", ",", "raise_action_errors", "=", "type_expansion", ".", "get", "(", "'raise_action_errors'", ",", "False", ")", ",", ")", "expansion_node", ".", "add_expansion", "(", "child_expansion_node", ")", "expansion_node", "=", "child_expansion_node", "trees", ".", "append", "(", 
"type_node", ")", "return", "trees" ]
Convert an expansion dictionary to a list of expansion trees. :param expansion_dict: An expansion dictionary (see below) :type expansion_dict: dict :return: a list of expansion trees (`TreeNode` instances). :rtype: list[TreeNode] Expansion Dictionary Format: { "<type>": ["<expansion string>", ...], ... } <type> is the type of object to expand. <expansion string> is a string with the following format: <expansion string> => <expansion name>[.<expansion string>]
[ "Convert", "an", "expansion", "dictionary", "to", "a", "list", "of", "expansion", "trees", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L344-L401
train
eventbrite/pysoa
pysoa/client/expander.py
ExpansionConverter.trees_to_dict
def trees_to_dict(trees_list): """ Convert a list of `TreeNode`s to an expansion dictionary. :param trees_list: A list of `TreeNode` instances :type trees_list: list[TreeNode] :return: An expansion dictionary that represents the expansions detailed in the provided expansions tree nodes :rtype: dict[union[str, unicode]] """ result = {} for tree in trees_list: result.update(tree.to_dict()) return result
python
def trees_to_dict(trees_list): """ Convert a list of `TreeNode`s to an expansion dictionary. :param trees_list: A list of `TreeNode` instances :type trees_list: list[TreeNode] :return: An expansion dictionary that represents the expansions detailed in the provided expansions tree nodes :rtype: dict[union[str, unicode]] """ result = {} for tree in trees_list: result.update(tree.to_dict()) return result
[ "def", "trees_to_dict", "(", "trees_list", ")", ":", "result", "=", "{", "}", "for", "tree", "in", "trees_list", ":", "result", ".", "update", "(", "tree", ".", "to_dict", "(", ")", ")", "return", "result" ]
Convert a list of `TreeNode`s to an expansion dictionary. :param trees_list: A list of `TreeNode` instances :type trees_list: list[TreeNode] :return: An expansion dictionary that represents the expansions detailed in the provided expansions tree nodes :rtype: dict[union[str, unicode]]
[ "Convert", "a", "list", "of", "TreeNode", "s", "to", "an", "expansion", "dictionary", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/client/expander.py#L404-L419
train
eventbrite/pysoa
pysoa/common/transport/redis_gateway/backend/sentinel.py
SentinelRedisClient._get_service_names
def _get_service_names(self): """ Get a list of service names from Sentinel. Tries Sentinel hosts until one succeeds; if none succeed, raises a ConnectionError. :return: the list of service names from Sentinel. """ master_info = None connection_errors = [] for sentinel in self._sentinel.sentinels: # Unfortunately, redis.sentinel.Sentinel does not support sentinel_masters, so we have to step # through all of its connections manually try: master_info = sentinel.sentinel_masters() break except (redis.ConnectionError, redis.TimeoutError) as e: connection_errors.append('Failed to connect to {} due to error: "{}".'.format(sentinel, e)) continue if master_info is None: raise redis.ConnectionError( 'Could not get master info from Sentinel\n{}:'.format('\n'.join(connection_errors)) ) return list(master_info.keys())
python
def _get_service_names(self): """ Get a list of service names from Sentinel. Tries Sentinel hosts until one succeeds; if none succeed, raises a ConnectionError. :return: the list of service names from Sentinel. """ master_info = None connection_errors = [] for sentinel in self._sentinel.sentinels: # Unfortunately, redis.sentinel.Sentinel does not support sentinel_masters, so we have to step # through all of its connections manually try: master_info = sentinel.sentinel_masters() break except (redis.ConnectionError, redis.TimeoutError) as e: connection_errors.append('Failed to connect to {} due to error: "{}".'.format(sentinel, e)) continue if master_info is None: raise redis.ConnectionError( 'Could not get master info from Sentinel\n{}:'.format('\n'.join(connection_errors)) ) return list(master_info.keys())
[ "def", "_get_service_names", "(", "self", ")", ":", "master_info", "=", "None", "connection_errors", "=", "[", "]", "for", "sentinel", "in", "self", ".", "_sentinel", ".", "sentinels", ":", "# Unfortunately, redis.sentinel.Sentinel does not support sentinel_masters, so we have to step", "# through all of its connections manually", "try", ":", "master_info", "=", "sentinel", ".", "sentinel_masters", "(", ")", "break", "except", "(", "redis", ".", "ConnectionError", ",", "redis", ".", "TimeoutError", ")", "as", "e", ":", "connection_errors", ".", "append", "(", "'Failed to connect to {} due to error: \"{}\".'", ".", "format", "(", "sentinel", ",", "e", ")", ")", "continue", "if", "master_info", "is", "None", ":", "raise", "redis", ".", "ConnectionError", "(", "'Could not get master info from Sentinel\\n{}:'", ".", "format", "(", "'\\n'", ".", "join", "(", "connection_errors", ")", ")", ")", "return", "list", "(", "master_info", ".", "keys", "(", ")", ")" ]
Get a list of service names from Sentinel. Tries Sentinel hosts until one succeeds; if none succeed, raises a ConnectionError. :return: the list of service names from Sentinel.
[ "Get", "a", "list", "of", "service", "names", "from", "Sentinel", ".", "Tries", "Sentinel", "hosts", "until", "one", "succeeds", ";", "if", "none", "succeed", "raises", "a", "ConnectionError", "." ]
9c052cae2397d13de3df8ae2c790846a70b53f18
https://github.com/eventbrite/pysoa/blob/9c052cae2397d13de3df8ae2c790846a70b53f18/pysoa/common/transport/redis_gateway/backend/sentinel.py#L92-L114
train
Yelp/venv-update
venv_update.py
timid_relpath
def timid_relpath(arg): """convert an argument to a relative path, carefully""" # TODO-TEST: unit tests from os.path import isabs, relpath, sep if isabs(arg): result = relpath(arg) if result.count(sep) + 1 < arg.count(sep): return result return arg
python
def timid_relpath(arg): """convert an argument to a relative path, carefully""" # TODO-TEST: unit tests from os.path import isabs, relpath, sep if isabs(arg): result = relpath(arg) if result.count(sep) + 1 < arg.count(sep): return result return arg
[ "def", "timid_relpath", "(", "arg", ")", ":", "# TODO-TEST: unit tests", "from", "os", ".", "path", "import", "isabs", ",", "relpath", ",", "sep", "if", "isabs", "(", "arg", ")", ":", "result", "=", "relpath", "(", "arg", ")", "if", "result", ".", "count", "(", "sep", ")", "+", "1", "<", "arg", ".", "count", "(", "sep", ")", ":", "return", "result", "return", "arg" ]
convert an argument to a relative path, carefully
[ "convert", "an", "argument", "to", "a", "relative", "path", "carefully" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/venv_update.py#L104-L113
train
Yelp/venv-update
venv_update.py
ensure_virtualenv
def ensure_virtualenv(args, return_values): """Ensure we have a valid virtualenv.""" def adjust_options(options, args): # TODO-TEST: proper error message with no arguments venv_path = return_values.venv_path = args[0] if venv_path == DEFAULT_VIRTUALENV_PATH or options.prompt == '<dirname>': from os.path import abspath, basename, dirname options.prompt = '(%s)' % basename(dirname(abspath(venv_path))) # end of option munging. # there are two python interpreters involved here: # 1) the interpreter we're instructing virtualenv to copy if options.python is None: source_python = None else: source_python = virtualenv.resolve_interpreter(options.python) # 2) the interpreter virtualenv will create destination_python = venv_python(venv_path) if exists(destination_python): reason = invalid_virtualenv_reason(venv_path, source_python, destination_python, options) if reason: info('Removing invalidated virtualenv. (%s)' % reason) run(('rm', '-rf', venv_path)) else: info('Keeping valid virtualenv from previous run.') raise SystemExit(0) # looks good! we're done here. # this is actually a documented extension point: # http://virtualenv.readthedocs.org/en/latest/reference.html#adjust_options import virtualenv virtualenv.adjust_options = adjust_options from sys import argv argv[:] = ('virtualenv',) + args info(colorize(argv)) raise_on_failure(virtualenv.main) # There might not be a venv_path if doing something like "venv= --version" # and not actually asking virtualenv to make a venv. if return_values.venv_path is not None: run(('rm', '-rf', join(return_values.venv_path, 'local')))
python
def ensure_virtualenv(args, return_values): """Ensure we have a valid virtualenv.""" def adjust_options(options, args): # TODO-TEST: proper error message with no arguments venv_path = return_values.venv_path = args[0] if venv_path == DEFAULT_VIRTUALENV_PATH or options.prompt == '<dirname>': from os.path import abspath, basename, dirname options.prompt = '(%s)' % basename(dirname(abspath(venv_path))) # end of option munging. # there are two python interpreters involved here: # 1) the interpreter we're instructing virtualenv to copy if options.python is None: source_python = None else: source_python = virtualenv.resolve_interpreter(options.python) # 2) the interpreter virtualenv will create destination_python = venv_python(venv_path) if exists(destination_python): reason = invalid_virtualenv_reason(venv_path, source_python, destination_python, options) if reason: info('Removing invalidated virtualenv. (%s)' % reason) run(('rm', '-rf', venv_path)) else: info('Keeping valid virtualenv from previous run.') raise SystemExit(0) # looks good! we're done here. # this is actually a documented extension point: # http://virtualenv.readthedocs.org/en/latest/reference.html#adjust_options import virtualenv virtualenv.adjust_options = adjust_options from sys import argv argv[:] = ('virtualenv',) + args info(colorize(argv)) raise_on_failure(virtualenv.main) # There might not be a venv_path if doing something like "venv= --version" # and not actually asking virtualenv to make a venv. if return_values.venv_path is not None: run(('rm', '-rf', join(return_values.venv_path, 'local')))
[ "def", "ensure_virtualenv", "(", "args", ",", "return_values", ")", ":", "def", "adjust_options", "(", "options", ",", "args", ")", ":", "# TODO-TEST: proper error message with no arguments", "venv_path", "=", "return_values", ".", "venv_path", "=", "args", "[", "0", "]", "if", "venv_path", "==", "DEFAULT_VIRTUALENV_PATH", "or", "options", ".", "prompt", "==", "'<dirname>'", ":", "from", "os", ".", "path", "import", "abspath", ",", "basename", ",", "dirname", "options", ".", "prompt", "=", "'(%s)'", "%", "basename", "(", "dirname", "(", "abspath", "(", "venv_path", ")", ")", ")", "# end of option munging.", "# there are two python interpreters involved here:", "# 1) the interpreter we're instructing virtualenv to copy", "if", "options", ".", "python", "is", "None", ":", "source_python", "=", "None", "else", ":", "source_python", "=", "virtualenv", ".", "resolve_interpreter", "(", "options", ".", "python", ")", "# 2) the interpreter virtualenv will create", "destination_python", "=", "venv_python", "(", "venv_path", ")", "if", "exists", "(", "destination_python", ")", ":", "reason", "=", "invalid_virtualenv_reason", "(", "venv_path", ",", "source_python", ",", "destination_python", ",", "options", ")", "if", "reason", ":", "info", "(", "'Removing invalidated virtualenv. (%s)'", "%", "reason", ")", "run", "(", "(", "'rm'", ",", "'-rf'", ",", "venv_path", ")", ")", "else", ":", "info", "(", "'Keeping valid virtualenv from previous run.'", ")", "raise", "SystemExit", "(", "0", ")", "# looks good! 
we're done here.", "# this is actually a documented extension point:", "# http://virtualenv.readthedocs.org/en/latest/reference.html#adjust_options", "import", "virtualenv", "virtualenv", ".", "adjust_options", "=", "adjust_options", "from", "sys", "import", "argv", "argv", "[", ":", "]", "=", "(", "'virtualenv'", ",", ")", "+", "args", "info", "(", "colorize", "(", "argv", ")", ")", "raise_on_failure", "(", "virtualenv", ".", "main", ")", "# There might not be a venv_path if doing something like \"venv= --version\"", "# and not actually asking virtualenv to make a venv.", "if", "return_values", ".", "venv_path", "is", "not", "None", ":", "run", "(", "(", "'rm'", ",", "'-rf'", ",", "join", "(", "return_values", ".", "venv_path", ",", "'local'", ")", ")", ")" ]
Ensure we have a valid virtualenv.
[ "Ensure", "we", "have", "a", "valid", "virtualenv", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/venv_update.py#L272-L313
train
Yelp/venv-update
venv_update.py
touch
def touch(filename, timestamp): """set the mtime of a file""" if timestamp is not None: timestamp = (timestamp, timestamp) # atime, mtime from os import utime utime(filename, timestamp)
python
def touch(filename, timestamp): """set the mtime of a file""" if timestamp is not None: timestamp = (timestamp, timestamp) # atime, mtime from os import utime utime(filename, timestamp)
[ "def", "touch", "(", "filename", ",", "timestamp", ")", ":", "if", "timestamp", "is", "not", "None", ":", "timestamp", "=", "(", "timestamp", ",", "timestamp", ")", "# atime, mtime", "from", "os", "import", "utime", "utime", "(", "filename", ",", "timestamp", ")" ]
set the mtime of a file
[ "set", "the", "mtime", "of", "a", "file" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/venv_update.py#L328-L334
train
Yelp/venv-update
venv_update.py
pip_faster
def pip_faster(venv_path, pip_command, install, bootstrap_deps): """install and run pip-faster""" # activate the virtualenv execfile_(venv_executable(venv_path, 'activate_this.py')) # disable a useless warning # FIXME: ensure a "true SSLContext" is available from os import environ environ['PIP_DISABLE_PIP_VERSION_CHECK'] = '1' # we always have to run the bootstrap, because the presense of an # executable doesn't imply the right version. pip is able to validate the # version in the fastpath case quickly anyway. run(('pip', 'install') + bootstrap_deps) run(pip_command + install)
python
def pip_faster(venv_path, pip_command, install, bootstrap_deps): """install and run pip-faster""" # activate the virtualenv execfile_(venv_executable(venv_path, 'activate_this.py')) # disable a useless warning # FIXME: ensure a "true SSLContext" is available from os import environ environ['PIP_DISABLE_PIP_VERSION_CHECK'] = '1' # we always have to run the bootstrap, because the presense of an # executable doesn't imply the right version. pip is able to validate the # version in the fastpath case quickly anyway. run(('pip', 'install') + bootstrap_deps) run(pip_command + install)
[ "def", "pip_faster", "(", "venv_path", ",", "pip_command", ",", "install", ",", "bootstrap_deps", ")", ":", "# activate the virtualenv", "execfile_", "(", "venv_executable", "(", "venv_path", ",", "'activate_this.py'", ")", ")", "# disable a useless warning", "# FIXME: ensure a \"true SSLContext\" is available", "from", "os", "import", "environ", "environ", "[", "'PIP_DISABLE_PIP_VERSION_CHECK'", "]", "=", "'1'", "# we always have to run the bootstrap, because the presense of an", "# executable doesn't imply the right version. pip is able to validate the", "# version in the fastpath case quickly anyway.", "run", "(", "(", "'pip'", ",", "'install'", ")", "+", "bootstrap_deps", ")", "run", "(", "pip_command", "+", "install", ")" ]
install and run pip-faster
[ "install", "and", "run", "pip", "-", "faster" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/venv_update.py#L408-L423
train
Yelp/venv-update
venv_update.py
raise_on_failure
def raise_on_failure(mainfunc): """raise if and only if mainfunc fails""" try: errors = mainfunc() if errors: exit(errors) except CalledProcessError as error: exit(error.returncode) except SystemExit as error: if error.code: raise except KeyboardInterrupt: # I don't plan to test-cover this. :pragma:nocover: exit(1)
python
def raise_on_failure(mainfunc): """raise if and only if mainfunc fails""" try: errors = mainfunc() if errors: exit(errors) except CalledProcessError as error: exit(error.returncode) except SystemExit as error: if error.code: raise except KeyboardInterrupt: # I don't plan to test-cover this. :pragma:nocover: exit(1)
[ "def", "raise_on_failure", "(", "mainfunc", ")", ":", "try", ":", "errors", "=", "mainfunc", "(", ")", "if", "errors", ":", "exit", "(", "errors", ")", "except", "CalledProcessError", "as", "error", ":", "exit", "(", "error", ".", "returncode", ")", "except", "SystemExit", "as", "error", ":", "if", "error", ".", "code", ":", "raise", "except", "KeyboardInterrupt", ":", "# I don't plan to test-cover this. :pragma:nocover:", "exit", "(", "1", ")" ]
raise if and only if mainfunc fails
[ "raise", "if", "and", "only", "if", "mainfunc", "fails" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/venv_update.py#L426-L438
train
Yelp/venv-update
pip_faster.py
cache_installed_wheels
def cache_installed_wheels(index_url, installed_packages): """After installation, pip tells us what it installed and from where. We build a structure that looks like .cache/pip-faster/wheelhouse/$index_url/$wheel """ for installed_package in installed_packages: if not _can_be_cached(installed_package): continue _store_wheel_in_cache(installed_package.link.path, index_url)
python
def cache_installed_wheels(index_url, installed_packages): """After installation, pip tells us what it installed and from where. We build a structure that looks like .cache/pip-faster/wheelhouse/$index_url/$wheel """ for installed_package in installed_packages: if not _can_be_cached(installed_package): continue _store_wheel_in_cache(installed_package.link.path, index_url)
[ "def", "cache_installed_wheels", "(", "index_url", ",", "installed_packages", ")", ":", "for", "installed_package", "in", "installed_packages", ":", "if", "not", "_can_be_cached", "(", "installed_package", ")", ":", "continue", "_store_wheel_in_cache", "(", "installed_package", ".", "link", ".", "path", ",", "index_url", ")" ]
After installation, pip tells us what it installed and from where. We build a structure that looks like .cache/pip-faster/wheelhouse/$index_url/$wheel
[ "After", "installation", "pip", "tells", "us", "what", "it", "installed", "and", "from", "where", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L171-L181
train
Yelp/venv-update
pip_faster.py
pip
def pip(args): """Run pip, in-process.""" from sys import stdout stdout.write(colorize(('pip',) + args)) stdout.write('\n') stdout.flush() return pipmodule._internal.main(list(args))
python
def pip(args): """Run pip, in-process.""" from sys import stdout stdout.write(colorize(('pip',) + args)) stdout.write('\n') stdout.flush() return pipmodule._internal.main(list(args))
[ "def", "pip", "(", "args", ")", ":", "from", "sys", "import", "stdout", "stdout", ".", "write", "(", "colorize", "(", "(", "'pip'", ",", ")", "+", "args", ")", ")", "stdout", ".", "write", "(", "'\\n'", ")", "stdout", ".", "flush", "(", ")", "return", "pipmodule", ".", "_internal", ".", "main", "(", "list", "(", "args", ")", ")" ]
Run pip, in-process.
[ "Run", "pip", "in", "-", "process", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L204-L211
train
Yelp/venv-update
pip_faster.py
dist_to_req
def dist_to_req(dist): """Make a pip.FrozenRequirement from a pkg_resources distribution object""" try: # :pragma:nocover: (pip>=10) from pip._internal.operations.freeze import FrozenRequirement except ImportError: # :pragma:nocover: (pip<10) from pip import FrozenRequirement # normalize the casing, dashes in the req name orig_name, dist.project_name = dist.project_name, dist.key result = FrozenRequirement.from_dist(dist, []) # put things back the way we found it. dist.project_name = orig_name return result
python
def dist_to_req(dist): """Make a pip.FrozenRequirement from a pkg_resources distribution object""" try: # :pragma:nocover: (pip>=10) from pip._internal.operations.freeze import FrozenRequirement except ImportError: # :pragma:nocover: (pip<10) from pip import FrozenRequirement # normalize the casing, dashes in the req name orig_name, dist.project_name = dist.project_name, dist.key result = FrozenRequirement.from_dist(dist, []) # put things back the way we found it. dist.project_name = orig_name return result
[ "def", "dist_to_req", "(", "dist", ")", ":", "try", ":", "# :pragma:nocover: (pip>=10)", "from", "pip", ".", "_internal", ".", "operations", ".", "freeze", "import", "FrozenRequirement", "except", "ImportError", ":", "# :pragma:nocover: (pip<10)", "from", "pip", "import", "FrozenRequirement", "# normalize the casing, dashes in the req name", "orig_name", ",", "dist", ".", "project_name", "=", "dist", ".", "project_name", ",", "dist", ".", "key", "result", "=", "FrozenRequirement", ".", "from_dist", "(", "dist", ",", "[", "]", ")", "# put things back the way we found it.", "dist", ".", "project_name", "=", "orig_name", "return", "result" ]
Make a pip.FrozenRequirement from a pkg_resources distribution object
[ "Make", "a", "pip", ".", "FrozenRequirement", "from", "a", "pkg_resources", "distribution", "object" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L214-L227
train
Yelp/venv-update
pip_faster.py
req_cycle
def req_cycle(req): """is this requirement cyclic?""" cls = req.__class__ seen = {req.name} while isinstance(req.comes_from, cls): req = req.comes_from if req.name in seen: return True else: seen.add(req.name) return False
python
def req_cycle(req): """is this requirement cyclic?""" cls = req.__class__ seen = {req.name} while isinstance(req.comes_from, cls): req = req.comes_from if req.name in seen: return True else: seen.add(req.name) return False
[ "def", "req_cycle", "(", "req", ")", ":", "cls", "=", "req", ".", "__class__", "seen", "=", "{", "req", ".", "name", "}", "while", "isinstance", "(", "req", ".", "comes_from", ",", "cls", ")", ":", "req", "=", "req", ".", "comes_from", "if", "req", ".", "name", "in", "seen", ":", "return", "True", "else", ":", "seen", ".", "add", "(", "req", ".", "name", ")", "return", "False" ]
is this requirement cyclic?
[ "is", "this", "requirement", "cyclic?" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L283-L293
train
Yelp/venv-update
pip_faster.py
pretty_req
def pretty_req(req): """ return a copy of a pip requirement that is a bit more readable, at the expense of removing some of its data """ from copy import copy req = copy(req) req.link = None req.satisfied_by = None return req
python
def pretty_req(req): """ return a copy of a pip requirement that is a bit more readable, at the expense of removing some of its data """ from copy import copy req = copy(req) req.link = None req.satisfied_by = None return req
[ "def", "pretty_req", "(", "req", ")", ":", "from", "copy", "import", "copy", "req", "=", "copy", "(", "req", ")", "req", ".", "link", "=", "None", "req", ".", "satisfied_by", "=", "None", "return", "req" ]
return a copy of a pip requirement that is a bit more readable, at the expense of removing some of its data
[ "return", "a", "copy", "of", "a", "pip", "requirement", "that", "is", "a", "bit", "more", "readable", "at", "the", "expense", "of", "removing", "some", "of", "its", "data" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L296-L305
train
Yelp/venv-update
pip_faster.py
trace_requirements
def trace_requirements(requirements): """given an iterable of pip InstallRequirements, return the set of required packages, given their transitive requirements. """ requirements = tuple(pretty_req(r) for r in requirements) working_set = fresh_working_set() # breadth-first traversal: from collections import deque queue = deque(requirements) queued = {_package_req_to_pkg_resources_req(req.req) for req in queue} errors = [] result = [] while queue: req = queue.popleft() logger.debug('tracing: %s', req) try: dist = working_set.find_normalized(_package_req_to_pkg_resources_req(req.req)) except pkg_resources.VersionConflict as conflict: dist = conflict.args[0] errors.append('Error: version conflict: {} ({}) <-> {}'.format( dist, timid_relpath(dist.location), req )) assert dist is not None, 'Should be unreachable in pip8+' result.append(dist_to_req(dist)) # TODO: pip does no validation of extras. should we? extras = [extra for extra in req.extras if extra in dist.extras] for sub_req in sorted(dist.requires(extras=extras), key=lambda req: req.key): sub_req = InstallRequirement(sub_req, req) if req_cycle(sub_req): logger.warning('Circular dependency! %s', sub_req) continue elif sub_req.req in queued: logger.debug('already queued: %s', sub_req) continue else: logger.debug('adding sub-requirement %s', sub_req) queue.append(sub_req) queued.add(sub_req.req) if errors: raise InstallationError('\n'.join(errors)) return result
python
def trace_requirements(requirements): """given an iterable of pip InstallRequirements, return the set of required packages, given their transitive requirements. """ requirements = tuple(pretty_req(r) for r in requirements) working_set = fresh_working_set() # breadth-first traversal: from collections import deque queue = deque(requirements) queued = {_package_req_to_pkg_resources_req(req.req) for req in queue} errors = [] result = [] while queue: req = queue.popleft() logger.debug('tracing: %s', req) try: dist = working_set.find_normalized(_package_req_to_pkg_resources_req(req.req)) except pkg_resources.VersionConflict as conflict: dist = conflict.args[0] errors.append('Error: version conflict: {} ({}) <-> {}'.format( dist, timid_relpath(dist.location), req )) assert dist is not None, 'Should be unreachable in pip8+' result.append(dist_to_req(dist)) # TODO: pip does no validation of extras. should we? extras = [extra for extra in req.extras if extra in dist.extras] for sub_req in sorted(dist.requires(extras=extras), key=lambda req: req.key): sub_req = InstallRequirement(sub_req, req) if req_cycle(sub_req): logger.warning('Circular dependency! %s', sub_req) continue elif sub_req.req in queued: logger.debug('already queued: %s', sub_req) continue else: logger.debug('adding sub-requirement %s', sub_req) queue.append(sub_req) queued.add(sub_req.req) if errors: raise InstallationError('\n'.join(errors)) return result
[ "def", "trace_requirements", "(", "requirements", ")", ":", "requirements", "=", "tuple", "(", "pretty_req", "(", "r", ")", "for", "r", "in", "requirements", ")", "working_set", "=", "fresh_working_set", "(", ")", "# breadth-first traversal:", "from", "collections", "import", "deque", "queue", "=", "deque", "(", "requirements", ")", "queued", "=", "{", "_package_req_to_pkg_resources_req", "(", "req", ".", "req", ")", "for", "req", "in", "queue", "}", "errors", "=", "[", "]", "result", "=", "[", "]", "while", "queue", ":", "req", "=", "queue", ".", "popleft", "(", ")", "logger", ".", "debug", "(", "'tracing: %s'", ",", "req", ")", "try", ":", "dist", "=", "working_set", ".", "find_normalized", "(", "_package_req_to_pkg_resources_req", "(", "req", ".", "req", ")", ")", "except", "pkg_resources", ".", "VersionConflict", "as", "conflict", ":", "dist", "=", "conflict", ".", "args", "[", "0", "]", "errors", ".", "append", "(", "'Error: version conflict: {} ({}) <-> {}'", ".", "format", "(", "dist", ",", "timid_relpath", "(", "dist", ".", "location", ")", ",", "req", ")", ")", "assert", "dist", "is", "not", "None", ",", "'Should be unreachable in pip8+'", "result", ".", "append", "(", "dist_to_req", "(", "dist", ")", ")", "# TODO: pip does no validation of extras. should we?", "extras", "=", "[", "extra", "for", "extra", "in", "req", ".", "extras", "if", "extra", "in", "dist", ".", "extras", "]", "for", "sub_req", "in", "sorted", "(", "dist", ".", "requires", "(", "extras", "=", "extras", ")", ",", "key", "=", "lambda", "req", ":", "req", ".", "key", ")", ":", "sub_req", "=", "InstallRequirement", "(", "sub_req", ",", "req", ")", "if", "req_cycle", "(", "sub_req", ")", ":", "logger", ".", "warning", "(", "'Circular dependency! 
%s'", ",", "sub_req", ")", "continue", "elif", "sub_req", ".", "req", "in", "queued", ":", "logger", ".", "debug", "(", "'already queued: %s'", ",", "sub_req", ")", "continue", "else", ":", "logger", ".", "debug", "(", "'adding sub-requirement %s'", ",", "sub_req", ")", "queue", ".", "append", "(", "sub_req", ")", "queued", ".", "add", "(", "sub_req", ".", "req", ")", "if", "errors", ":", "raise", "InstallationError", "(", "'\\n'", ".", "join", "(", "errors", ")", ")", "return", "result" ]
given an iterable of pip InstallRequirements, return the set of required packages, given their transitive requirements.
[ "given", "an", "iterable", "of", "pip", "InstallRequirements", "return", "the", "set", "of", "required", "packages", "given", "their", "transitive", "requirements", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L312-L359
train
Yelp/venv-update
pip_faster.py
patch
def patch(attrs, updates): """Perform a set of updates to a attribute dictionary, return the original values.""" orig = {} for attr, value in updates: orig[attr] = attrs[attr] attrs[attr] = value return orig
python
def patch(attrs, updates): """Perform a set of updates to a attribute dictionary, return the original values.""" orig = {} for attr, value in updates: orig[attr] = attrs[attr] attrs[attr] = value return orig
[ "def", "patch", "(", "attrs", ",", "updates", ")", ":", "orig", "=", "{", "}", "for", "attr", ",", "value", "in", "updates", ":", "orig", "[", "attr", "]", "=", "attrs", "[", "attr", "]", "attrs", "[", "attr", "]", "=", "value", "return", "orig" ]
Perform a set of updates to a attribute dictionary, return the original values.
[ "Perform", "a", "set", "of", "updates", "to", "a", "attribute", "dictionary", "return", "the", "original", "values", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L430-L436
train
Yelp/venv-update
pip_faster.py
patched
def patched(attrs, updates): """A context in which some attributes temporarily have a modified value.""" orig = patch(attrs, updates.items()) try: yield orig finally: patch(attrs, orig.items())
python
def patched(attrs, updates): """A context in which some attributes temporarily have a modified value.""" orig = patch(attrs, updates.items()) try: yield orig finally: patch(attrs, orig.items())
[ "def", "patched", "(", "attrs", ",", "updates", ")", ":", "orig", "=", "patch", "(", "attrs", ",", "updates", ".", "items", "(", ")", ")", "try", ":", "yield", "orig", "finally", ":", "patch", "(", "attrs", ",", "orig", ".", "items", "(", ")", ")" ]
A context in which some attributes temporarily have a modified value.
[ "A", "context", "in", "which", "some", "attributes", "temporarily", "have", "a", "modified", "value", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L440-L446
train
Yelp/venv-update
pip_faster.py
pipfaster_packagefinder
def pipfaster_packagefinder(): """Provide a short-circuited search when the requirement is pinned and appears on disk. Suggested upstream at: https://github.com/pypa/pip/pull/2114 """ # A poor man's dependency injection: monkeypatch :( try: # :pragma:nocover: pip>=18.1 from pip._internal.cli import base_command except ImportError: # :pragma:nocover: pip<18.1 from pip._internal import basecommand as base_command return patched(vars(base_command), {'PackageFinder': FasterPackageFinder})
python
def pipfaster_packagefinder(): """Provide a short-circuited search when the requirement is pinned and appears on disk. Suggested upstream at: https://github.com/pypa/pip/pull/2114 """ # A poor man's dependency injection: monkeypatch :( try: # :pragma:nocover: pip>=18.1 from pip._internal.cli import base_command except ImportError: # :pragma:nocover: pip<18.1 from pip._internal import basecommand as base_command return patched(vars(base_command), {'PackageFinder': FasterPackageFinder})
[ "def", "pipfaster_packagefinder", "(", ")", ":", "# A poor man's dependency injection: monkeypatch :(", "try", ":", "# :pragma:nocover: pip>=18.1", "from", "pip", ".", "_internal", ".", "cli", "import", "base_command", "except", "ImportError", ":", "# :pragma:nocover: pip<18.1", "from", "pip", ".", "_internal", "import", "basecommand", "as", "base_command", "return", "patched", "(", "vars", "(", "base_command", ")", ",", "{", "'PackageFinder'", ":", "FasterPackageFinder", "}", ")" ]
Provide a short-circuited search when the requirement is pinned and appears on disk. Suggested upstream at: https://github.com/pypa/pip/pull/2114
[ "Provide", "a", "short", "-", "circuited", "search", "when", "the", "requirement", "is", "pinned", "and", "appears", "on", "disk", "." ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L454-L464
train
Yelp/venv-update
pip_faster.py
pipfaster_download_cacher
def pipfaster_download_cacher(index_urls): """vanilla pip stores a cache of the http session in its cache and not the wheel files. We intercept the download and save those files into our cache """ from pip._internal import download orig = download._download_http_url patched_fn = get_patched_download_http_url(orig, index_urls) return patched(vars(download), {'_download_http_url': patched_fn})
python
def pipfaster_download_cacher(index_urls): """vanilla pip stores a cache of the http session in its cache and not the wheel files. We intercept the download and save those files into our cache """ from pip._internal import download orig = download._download_http_url patched_fn = get_patched_download_http_url(orig, index_urls) return patched(vars(download), {'_download_http_url': patched_fn})
[ "def", "pipfaster_download_cacher", "(", "index_urls", ")", ":", "from", "pip", ".", "_internal", "import", "download", "orig", "=", "download", ".", "_download_http_url", "patched_fn", "=", "get_patched_download_http_url", "(", "orig", ",", "index_urls", ")", "return", "patched", "(", "vars", "(", "download", ")", ",", "{", "'_download_http_url'", ":", "patched_fn", "}", ")" ]
vanilla pip stores a cache of the http session in its cache and not the wheel files. We intercept the download and save those files into our cache
[ "vanilla", "pip", "stores", "a", "cache", "of", "the", "http", "session", "in", "its", "cache", "and", "not", "the", "wheel", "files", ".", "We", "intercept", "the", "download", "and", "save", "those", "files", "into", "our", "cache" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L467-L475
train
Yelp/venv-update
pip_faster.py
FasterInstallCommand.run
def run(self, options, args): """update install options with caching values""" if options.prune: previously_installed = pip_get_installed() index_urls = [options.index_url] + options.extra_index_urls with pipfaster_download_cacher(index_urls): requirement_set = super(FasterInstallCommand, self).run( options, args, ) required = requirement_set.requirements.values() # With extra_index_urls we don't know where the wheel is from if not options.extra_index_urls: cache_installed_wheels(options.index_url, requirement_set.successfully_downloaded) if not options.ignore_dependencies: # transitive requirements, previously installed, are also required # this has a side-effect of finding any missing / conflicting requirements required = trace_requirements(required) if not options.prune: return requirement_set extraneous = ( reqnames(previously_installed) - reqnames(required) - # the stage1 bootstrap packages reqnames(trace_requirements([install_req_from_line('venv-update')])) - # See #186 frozenset(('pkg-resources',)) ) if extraneous: extraneous = sorted(extraneous) pip(('uninstall', '--yes') + tuple(extraneous))
python
def run(self, options, args): """update install options with caching values""" if options.prune: previously_installed = pip_get_installed() index_urls = [options.index_url] + options.extra_index_urls with pipfaster_download_cacher(index_urls): requirement_set = super(FasterInstallCommand, self).run( options, args, ) required = requirement_set.requirements.values() # With extra_index_urls we don't know where the wheel is from if not options.extra_index_urls: cache_installed_wheels(options.index_url, requirement_set.successfully_downloaded) if not options.ignore_dependencies: # transitive requirements, previously installed, are also required # this has a side-effect of finding any missing / conflicting requirements required = trace_requirements(required) if not options.prune: return requirement_set extraneous = ( reqnames(previously_installed) - reqnames(required) - # the stage1 bootstrap packages reqnames(trace_requirements([install_req_from_line('venv-update')])) - # See #186 frozenset(('pkg-resources',)) ) if extraneous: extraneous = sorted(extraneous) pip(('uninstall', '--yes') + tuple(extraneous))
[ "def", "run", "(", "self", ",", "options", ",", "args", ")", ":", "if", "options", ".", "prune", ":", "previously_installed", "=", "pip_get_installed", "(", ")", "index_urls", "=", "[", "options", ".", "index_url", "]", "+", "options", ".", "extra_index_urls", "with", "pipfaster_download_cacher", "(", "index_urls", ")", ":", "requirement_set", "=", "super", "(", "FasterInstallCommand", ",", "self", ")", ".", "run", "(", "options", ",", "args", ",", ")", "required", "=", "requirement_set", ".", "requirements", ".", "values", "(", ")", "# With extra_index_urls we don't know where the wheel is from", "if", "not", "options", ".", "extra_index_urls", ":", "cache_installed_wheels", "(", "options", ".", "index_url", ",", "requirement_set", ".", "successfully_downloaded", ")", "if", "not", "options", ".", "ignore_dependencies", ":", "# transitive requirements, previously installed, are also required", "# this has a side-effect of finding any missing / conflicting requirements", "required", "=", "trace_requirements", "(", "required", ")", "if", "not", "options", ".", "prune", ":", "return", "requirement_set", "extraneous", "=", "(", "reqnames", "(", "previously_installed", ")", "-", "reqnames", "(", "required", ")", "-", "# the stage1 bootstrap packages", "reqnames", "(", "trace_requirements", "(", "[", "install_req_from_line", "(", "'venv-update'", ")", "]", ")", ")", "-", "# See #186", "frozenset", "(", "(", "'pkg-resources'", ",", ")", ")", ")", "if", "extraneous", ":", "extraneous", "=", "sorted", "(", "extraneous", ")", "pip", "(", "(", "'uninstall'", ",", "'--yes'", ")", "+", "tuple", "(", "extraneous", ")", ")" ]
update install options with caching values
[ "update", "install", "options", "with", "caching", "values" ]
6feae7ab09ee870c582b97443cfa8f0dc8626ba7
https://github.com/Yelp/venv-update/blob/6feae7ab09ee870c582b97443cfa8f0dc8626ba7/pip_faster.py#L387-L423
train
RedisJSON/rejson-py
rejson/client.py
Client.setEncoder
def setEncoder(self, encoder): """ Sets the client's encoder ``encoder`` should be an instance of a ``json.JSONEncoder`` class """ if not encoder: self._encoder = json.JSONEncoder() else: self._encoder = encoder self._encode = self._encoder.encode
python
def setEncoder(self, encoder): """ Sets the client's encoder ``encoder`` should be an instance of a ``json.JSONEncoder`` class """ if not encoder: self._encoder = json.JSONEncoder() else: self._encoder = encoder self._encode = self._encoder.encode
[ "def", "setEncoder", "(", "self", ",", "encoder", ")", ":", "if", "not", "encoder", ":", "self", ".", "_encoder", "=", "json", ".", "JSONEncoder", "(", ")", "else", ":", "self", ".", "_encoder", "=", "encoder", "self", ".", "_encode", "=", "self", ".", "_encoder", ".", "encode" ]
Sets the client's encoder ``encoder`` should be an instance of a ``json.JSONEncoder`` class
[ "Sets", "the", "client", "s", "encoder", "encoder", "should", "be", "an", "instance", "of", "a", "json", ".", "JSONEncoder", "class" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L78-L87
train
RedisJSON/rejson-py
rejson/client.py
Client.setDecoder
def setDecoder(self, decoder): """ Sets the client's decoder ``decoder`` should be an instance of a ``json.JSONDecoder`` class """ if not decoder: self._decoder = json.JSONDecoder() else: self._decoder = decoder self._decode = self._decoder.decode
python
def setDecoder(self, decoder): """ Sets the client's decoder ``decoder`` should be an instance of a ``json.JSONDecoder`` class """ if not decoder: self._decoder = json.JSONDecoder() else: self._decoder = decoder self._decode = self._decoder.decode
[ "def", "setDecoder", "(", "self", ",", "decoder", ")", ":", "if", "not", "decoder", ":", "self", ".", "_decoder", "=", "json", ".", "JSONDecoder", "(", ")", "else", ":", "self", ".", "_decoder", "=", "decoder", "self", ".", "_decode", "=", "self", ".", "_decoder", ".", "decode" ]
Sets the client's decoder ``decoder`` should be an instance of a ``json.JSONDecoder`` class
[ "Sets", "the", "client", "s", "decoder", "decoder", "should", "be", "an", "instance", "of", "a", "json", ".", "JSONDecoder", "class" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L89-L98
train
RedisJSON/rejson-py
rejson/client.py
Client.jsondel
def jsondel(self, name, path=Path.rootPath()): """ Deletes the JSON value stored at key ``name`` under ``path`` """ return self.execute_command('JSON.DEL', name, str_path(path))
python
def jsondel(self, name, path=Path.rootPath()): """ Deletes the JSON value stored at key ``name`` under ``path`` """ return self.execute_command('JSON.DEL', name, str_path(path))
[ "def", "jsondel", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.DEL'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Deletes the JSON value stored at key ``name`` under ``path``
[ "Deletes", "the", "JSON", "value", "stored", "at", "key", "name", "under", "path" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L100-L104
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonget
def jsonget(self, name, *args): """ Get the object stored as a JSON value at key ``name`` ``args`` is zero or more paths, and defaults to root path """ pieces = [name] if len(args) == 0: pieces.append(Path.rootPath()) else: for p in args: pieces.append(str_path(p)) # Handle case where key doesn't exist. The JSONDecoder would raise a # TypeError exception since it can't decode None try: return self.execute_command('JSON.GET', *pieces) except TypeError: return None
python
def jsonget(self, name, *args): """ Get the object stored as a JSON value at key ``name`` ``args`` is zero or more paths, and defaults to root path """ pieces = [name] if len(args) == 0: pieces.append(Path.rootPath()) else: for p in args: pieces.append(str_path(p)) # Handle case where key doesn't exist. The JSONDecoder would raise a # TypeError exception since it can't decode None try: return self.execute_command('JSON.GET', *pieces) except TypeError: return None
[ "def", "jsonget", "(", "self", ",", "name", ",", "*", "args", ")", ":", "pieces", "=", "[", "name", "]", "if", "len", "(", "args", ")", "==", "0", ":", "pieces", ".", "append", "(", "Path", ".", "rootPath", "(", ")", ")", "else", ":", "for", "p", "in", "args", ":", "pieces", ".", "append", "(", "str_path", "(", "p", ")", ")", "# Handle case where key doesn't exist. The JSONDecoder would raise a", "# TypeError exception since it can't decode None", "try", ":", "return", "self", ".", "execute_command", "(", "'JSON.GET'", ",", "*", "pieces", ")", "except", "TypeError", ":", "return", "None" ]
Get the object stored as a JSON value at key ``name`` ``args`` is zero or more paths, and defaults to root path
[ "Get", "the", "object", "stored", "as", "a", "JSON", "value", "at", "key", "name", "args", "is", "zero", "or", "more", "paths", "and", "defaults", "to", "root", "path" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L106-L123
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonmget
def jsonmget(self, path, *args): """ Gets the objects stored as a JSON values under ``path`` from keys ``args`` """ pieces = [] pieces.extend(args) pieces.append(str_path(path)) return self.execute_command('JSON.MGET', *pieces)
python
def jsonmget(self, path, *args): """ Gets the objects stored as a JSON values under ``path`` from keys ``args`` """ pieces = [] pieces.extend(args) pieces.append(str_path(path)) return self.execute_command('JSON.MGET', *pieces)
[ "def", "jsonmget", "(", "self", ",", "path", ",", "*", "args", ")", ":", "pieces", "=", "[", "]", "pieces", ".", "extend", "(", "args", ")", "pieces", ".", "append", "(", "str_path", "(", "path", ")", ")", "return", "self", ".", "execute_command", "(", "'JSON.MGET'", ",", "*", "pieces", ")" ]
Gets the objects stored as a JSON values under ``path`` from keys ``args``
[ "Gets", "the", "objects", "stored", "as", "a", "JSON", "values", "under", "path", "from", "keys", "args" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L125-L133
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonset
def jsonset(self, name, path, obj, nx=False, xx=False): """ Set the JSON value at key ``name`` under the ``path`` to ``obj`` ``nx`` if set to True, set ``value`` only if it does not exist ``xx`` if set to True, set ``value`` only if it exists """ pieces = [name, str_path(path), self._encode(obj)] # Handle existential modifiers if nx and xx: raise Exception('nx and xx are mutually exclusive: use one, the ' 'other or neither - but not both') elif nx: pieces.append('NX') elif xx: pieces.append('XX') return self.execute_command('JSON.SET', *pieces)
python
def jsonset(self, name, path, obj, nx=False, xx=False): """ Set the JSON value at key ``name`` under the ``path`` to ``obj`` ``nx`` if set to True, set ``value`` only if it does not exist ``xx`` if set to True, set ``value`` only if it exists """ pieces = [name, str_path(path), self._encode(obj)] # Handle existential modifiers if nx and xx: raise Exception('nx and xx are mutually exclusive: use one, the ' 'other or neither - but not both') elif nx: pieces.append('NX') elif xx: pieces.append('XX') return self.execute_command('JSON.SET', *pieces)
[ "def", "jsonset", "(", "self", ",", "name", ",", "path", ",", "obj", ",", "nx", "=", "False", ",", "xx", "=", "False", ")", ":", "pieces", "=", "[", "name", ",", "str_path", "(", "path", ")", ",", "self", ".", "_encode", "(", "obj", ")", "]", "# Handle existential modifiers", "if", "nx", "and", "xx", ":", "raise", "Exception", "(", "'nx and xx are mutually exclusive: use one, the '", "'other or neither - but not both'", ")", "elif", "nx", ":", "pieces", ".", "append", "(", "'NX'", ")", "elif", "xx", ":", "pieces", ".", "append", "(", "'XX'", ")", "return", "self", ".", "execute_command", "(", "'JSON.SET'", ",", "*", "pieces", ")" ]
Set the JSON value at key ``name`` under the ``path`` to ``obj`` ``nx`` if set to True, set ``value`` only if it does not exist ``xx`` if set to True, set ``value`` only if it exists
[ "Set", "the", "JSON", "value", "at", "key", "name", "under", "the", "path", "to", "obj", "nx", "if", "set", "to", "True", "set", "value", "only", "if", "it", "does", "not", "exist", "xx", "if", "set", "to", "True", "set", "value", "only", "if", "it", "exists" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L135-L151
train
RedisJSON/rejson-py
rejson/client.py
Client.jsontype
def jsontype(self, name, path=Path.rootPath()): """ Gets the type of the JSON value under ``path`` from key ``name`` """ return self.execute_command('JSON.TYPE', name, str_path(path))
python
def jsontype(self, name, path=Path.rootPath()): """ Gets the type of the JSON value under ``path`` from key ``name`` """ return self.execute_command('JSON.TYPE', name, str_path(path))
[ "def", "jsontype", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.TYPE'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Gets the type of the JSON value under ``path`` from key ``name``
[ "Gets", "the", "type", "of", "the", "JSON", "value", "under", "path", "from", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L153-L157
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonstrappend
def jsonstrappend(self, name, string, path=Path.rootPath()): """ Appends to the string JSON value under ``path`` at key ``name`` the provided ``string`` """ return self.execute_command('JSON.STRAPPEND', name, str_path(path), self._encode(string))
python
def jsonstrappend(self, name, string, path=Path.rootPath()): """ Appends to the string JSON value under ``path`` at key ``name`` the provided ``string`` """ return self.execute_command('JSON.STRAPPEND', name, str_path(path), self._encode(string))
[ "def", "jsonstrappend", "(", "self", ",", "name", ",", "string", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.STRAPPEND'", ",", "name", ",", "str_path", "(", "path", ")", ",", "self", ".", "_encode", "(", "string", ")", ")" ]
Appends to the string JSON value under ``path`` at key ``name`` the provided ``string``
[ "Appends", "to", "the", "string", "JSON", "value", "under", "path", "at", "key", "name", "the", "provided", "string" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L173-L178
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonstrlen
def jsonstrlen(self, name, path=Path.rootPath()): """ Returns the length of the string JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.STRLEN', name, str_path(path))
python
def jsonstrlen(self, name, path=Path.rootPath()): """ Returns the length of the string JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.STRLEN', name, str_path(path))
[ "def", "jsonstrlen", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.STRLEN'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Returns the length of the string JSON value under ``path`` at key ``name``
[ "Returns", "the", "length", "of", "the", "string", "JSON", "value", "under", "path", "at", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L180-L185
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrappend
def jsonarrappend(self, name, path=Path.rootPath(), *args): """ Appends the objects ``args`` to the array under the ``path` in key ``name`` """ pieces = [name, str_path(path)] for o in args: pieces.append(self._encode(o)) return self.execute_command('JSON.ARRAPPEND', *pieces)
python
def jsonarrappend(self, name, path=Path.rootPath(), *args): """ Appends the objects ``args`` to the array under the ``path` in key ``name`` """ pieces = [name, str_path(path)] for o in args: pieces.append(self._encode(o)) return self.execute_command('JSON.ARRAPPEND', *pieces)
[ "def", "jsonarrappend", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ",", "*", "args", ")", ":", "pieces", "=", "[", "name", ",", "str_path", "(", "path", ")", "]", "for", "o", "in", "args", ":", "pieces", ".", "append", "(", "self", ".", "_encode", "(", "o", ")", ")", "return", "self", ".", "execute_command", "(", "'JSON.ARRAPPEND'", ",", "*", "pieces", ")" ]
Appends the objects ``args`` to the array under the ``path` in key ``name``
[ "Appends", "the", "objects", "args", "to", "the", "array", "under", "the", "path", "in", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L187-L195
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrindex
def jsonarrindex(self, name, path, scalar, start=0, stop=-1): """ Returns the index of ``scalar`` in the JSON array under ``path`` at key ``name``. The search can be limited using the optional inclusive ``start`` and exclusive ``stop`` indices. """ return self.execute_command('JSON.ARRINDEX', name, str_path(path), self._encode(scalar), start, stop)
python
def jsonarrindex(self, name, path, scalar, start=0, stop=-1): """ Returns the index of ``scalar`` in the JSON array under ``path`` at key ``name``. The search can be limited using the optional inclusive ``start`` and exclusive ``stop`` indices. """ return self.execute_command('JSON.ARRINDEX', name, str_path(path), self._encode(scalar), start, stop)
[ "def", "jsonarrindex", "(", "self", ",", "name", ",", "path", ",", "scalar", ",", "start", "=", "0", ",", "stop", "=", "-", "1", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.ARRINDEX'", ",", "name", ",", "str_path", "(", "path", ")", ",", "self", ".", "_encode", "(", "scalar", ")", ",", "start", ",", "stop", ")" ]
Returns the index of ``scalar`` in the JSON array under ``path`` at key ``name``. The search can be limited using the optional inclusive ``start`` and exclusive ``stop`` indices.
[ "Returns", "the", "index", "of", "scalar", "in", "the", "JSON", "array", "under", "path", "at", "key", "name", ".", "The", "search", "can", "be", "limited", "using", "the", "optional", "inclusive", "start", "and", "exclusive", "stop", "indices", "." ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L197-L203
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrinsert
def jsonarrinsert(self, name, path, index, *args): """ Inserts the objects ``args`` to the array at index ``index`` under the ``path` in key ``name`` """ pieces = [name, str_path(path), index] for o in args: pieces.append(self._encode(o)) return self.execute_command('JSON.ARRINSERT', *pieces)
python
def jsonarrinsert(self, name, path, index, *args): """ Inserts the objects ``args`` to the array at index ``index`` under the ``path` in key ``name`` """ pieces = [name, str_path(path), index] for o in args: pieces.append(self._encode(o)) return self.execute_command('JSON.ARRINSERT', *pieces)
[ "def", "jsonarrinsert", "(", "self", ",", "name", ",", "path", ",", "index", ",", "*", "args", ")", ":", "pieces", "=", "[", "name", ",", "str_path", "(", "path", ")", ",", "index", "]", "for", "o", "in", "args", ":", "pieces", ".", "append", "(", "self", ".", "_encode", "(", "o", ")", ")", "return", "self", ".", "execute_command", "(", "'JSON.ARRINSERT'", ",", "*", "pieces", ")" ]
Inserts the objects ``args`` to the array at index ``index`` under the ``path` in key ``name``
[ "Inserts", "the", "objects", "args", "to", "the", "array", "at", "index", "index", "under", "the", "path", "in", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L205-L213
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrlen
def jsonarrlen(self, name, path=Path.rootPath()): """ Returns the length of the array JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.ARRLEN', name, str_path(path))
python
def jsonarrlen(self, name, path=Path.rootPath()): """ Returns the length of the array JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.ARRLEN', name, str_path(path))
[ "def", "jsonarrlen", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.ARRLEN'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Returns the length of the array JSON value under ``path`` at key ``name``
[ "Returns", "the", "length", "of", "the", "array", "JSON", "value", "under", "path", "at", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L215-L220
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrpop
def jsonarrpop(self, name, path=Path.rootPath(), index=-1): """ Pops the element at ``index`` in the array JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.ARRPOP', name, str_path(path), index)
python
def jsonarrpop(self, name, path=Path.rootPath(), index=-1): """ Pops the element at ``index`` in the array JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.ARRPOP', name, str_path(path), index)
[ "def", "jsonarrpop", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ",", "index", "=", "-", "1", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.ARRPOP'", ",", "name", ",", "str_path", "(", "path", ")", ",", "index", ")" ]
Pops the element at ``index`` in the array JSON value under ``path`` at key ``name``
[ "Pops", "the", "element", "at", "index", "in", "the", "array", "JSON", "value", "under", "path", "at", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L222-L227
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonarrtrim
def jsonarrtrim(self, name, path, start, stop): """ Trim the array JSON value under ``path`` at key ``name`` to the inclusive range given by ``start`` and ``stop`` """ return self.execute_command('JSON.ARRTRIM', name, str_path(path), start, stop)
python
def jsonarrtrim(self, name, path, start, stop): """ Trim the array JSON value under ``path`` at key ``name`` to the inclusive range given by ``start`` and ``stop`` """ return self.execute_command('JSON.ARRTRIM', name, str_path(path), start, stop)
[ "def", "jsonarrtrim", "(", "self", ",", "name", ",", "path", ",", "start", ",", "stop", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.ARRTRIM'", ",", "name", ",", "str_path", "(", "path", ")", ",", "start", ",", "stop", ")" ]
Trim the array JSON value under ``path`` at key ``name`` to the inclusive range given by ``start`` and ``stop``
[ "Trim", "the", "array", "JSON", "value", "under", "path", "at", "key", "name", "to", "the", "inclusive", "range", "given", "by", "start", "and", "stop" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L229-L234
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonobjkeys
def jsonobjkeys(self, name, path=Path.rootPath()): """ Returns the key names in the dictionary JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.OBJKEYS', name, str_path(path))
python
def jsonobjkeys(self, name, path=Path.rootPath()): """ Returns the key names in the dictionary JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.OBJKEYS', name, str_path(path))
[ "def", "jsonobjkeys", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.OBJKEYS'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Returns the key names in the dictionary JSON value under ``path`` at key ``name``
[ "Returns", "the", "key", "names", "in", "the", "dictionary", "JSON", "value", "under", "path", "at", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L236-L241
train
RedisJSON/rejson-py
rejson/client.py
Client.jsonobjlen
def jsonobjlen(self, name, path=Path.rootPath()): """ Returns the length of the dictionary JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.OBJLEN', name, str_path(path))
python
def jsonobjlen(self, name, path=Path.rootPath()): """ Returns the length of the dictionary JSON value under ``path`` at key ``name`` """ return self.execute_command('JSON.OBJLEN', name, str_path(path))
[ "def", "jsonobjlen", "(", "self", ",", "name", ",", "path", "=", "Path", ".", "rootPath", "(", ")", ")", ":", "return", "self", ".", "execute_command", "(", "'JSON.OBJLEN'", ",", "name", ",", "str_path", "(", "path", ")", ")" ]
Returns the length of the dictionary JSON value under ``path`` at key ``name``
[ "Returns", "the", "length", "of", "the", "dictionary", "JSON", "value", "under", "path", "at", "key", "name" ]
55f0adf3adc40f5a769e28e541dbbf5377b90ec6
https://github.com/RedisJSON/rejson-py/blob/55f0adf3adc40f5a769e28e541dbbf5377b90ec6/rejson/client.py#L243-L248
train
mitodl/django-server-status
server_status/views.py
get_pg_info
def get_pg_info(): """Check PostgreSQL connection.""" from psycopg2 import connect, OperationalError log.debug("entered get_pg_info") try: conf = settings.DATABASES['default'] database = conf["NAME"] user = conf["USER"] host = conf["HOST"] port = conf["PORT"] password = conf["PASSWORD"] except (AttributeError, KeyError): log.error("No PostgreSQL connection info found in settings.") return {"status": NO_CONFIG} except TypeError: return {"status": DOWN} log.debug("got past getting conf") try: start = datetime.now() connection = connect( database=database, user=user, host=host, port=port, password=password, connect_timeout=TIMEOUT_SECONDS, ) log.debug("at end of context manager") micro = (datetime.now() - start).microseconds connection.close() except (OperationalError, KeyError) as ex: log.error("No PostgreSQL connection info found in settings. %s Error: %s", conf, ex) return {"status": DOWN} log.debug("got to end of postgres check successfully") return {"status": UP, "response_microseconds": micro}
python
def get_pg_info(): """Check PostgreSQL connection.""" from psycopg2 import connect, OperationalError log.debug("entered get_pg_info") try: conf = settings.DATABASES['default'] database = conf["NAME"] user = conf["USER"] host = conf["HOST"] port = conf["PORT"] password = conf["PASSWORD"] except (AttributeError, KeyError): log.error("No PostgreSQL connection info found in settings.") return {"status": NO_CONFIG} except TypeError: return {"status": DOWN} log.debug("got past getting conf") try: start = datetime.now() connection = connect( database=database, user=user, host=host, port=port, password=password, connect_timeout=TIMEOUT_SECONDS, ) log.debug("at end of context manager") micro = (datetime.now() - start).microseconds connection.close() except (OperationalError, KeyError) as ex: log.error("No PostgreSQL connection info found in settings. %s Error: %s", conf, ex) return {"status": DOWN} log.debug("got to end of postgres check successfully") return {"status": UP, "response_microseconds": micro}
[ "def", "get_pg_info", "(", ")", ":", "from", "psycopg2", "import", "connect", ",", "OperationalError", "log", ".", "debug", "(", "\"entered get_pg_info\"", ")", "try", ":", "conf", "=", "settings", ".", "DATABASES", "[", "'default'", "]", "database", "=", "conf", "[", "\"NAME\"", "]", "user", "=", "conf", "[", "\"USER\"", "]", "host", "=", "conf", "[", "\"HOST\"", "]", "port", "=", "conf", "[", "\"PORT\"", "]", "password", "=", "conf", "[", "\"PASSWORD\"", "]", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "log", ".", "error", "(", "\"No PostgreSQL connection info found in settings.\"", ")", "return", "{", "\"status\"", ":", "NO_CONFIG", "}", "except", "TypeError", ":", "return", "{", "\"status\"", ":", "DOWN", "}", "log", ".", "debug", "(", "\"got past getting conf\"", ")", "try", ":", "start", "=", "datetime", ".", "now", "(", ")", "connection", "=", "connect", "(", "database", "=", "database", ",", "user", "=", "user", ",", "host", "=", "host", ",", "port", "=", "port", ",", "password", "=", "password", ",", "connect_timeout", "=", "TIMEOUT_SECONDS", ",", ")", "log", ".", "debug", "(", "\"at end of context manager\"", ")", "micro", "=", "(", "datetime", ".", "now", "(", ")", "-", "start", ")", ".", "microseconds", "connection", ".", "close", "(", ")", "except", "(", "OperationalError", ",", "KeyError", ")", "as", "ex", ":", "log", ".", "error", "(", "\"No PostgreSQL connection info found in settings. %s Error: %s\"", ",", "conf", ",", "ex", ")", "return", "{", "\"status\"", ":", "DOWN", "}", "log", ".", "debug", "(", "\"got to end of postgres check successfully\"", ")", "return", "{", "\"status\"", ":", "UP", ",", "\"response_microseconds\"", ":", "micro", "}" ]
Check PostgreSQL connection.
[ "Check", "PostgreSQL", "connection", "." ]
99bd29343138f94a08718fdbd9285e551751777b
https://github.com/mitodl/django-server-status/blob/99bd29343138f94a08718fdbd9285e551751777b/server_status/views.py#L30-L61
train