Dataset columns: body (string) | body_hash (int64) | docstring (string) | path (string) | name (string) | repository_name (string) | lang (single class: python) | body_without_docstring (string)

# option_changed | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def option_changed(self, option, value):
    """Option has changed"""
    setattr(self, to_text_string(option), value)
    if not self.is_internal_shell:
        settings = self.get_view_settings()
        communicate(self._get_sock(), 'set_remote_view_settings()',
                    settings=[settings])

# visibility_changed | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def visibility_changed(self, enable):
    """Notify the widget whether its container (the namespace browser
    plugin) is visible or not"""
    self.is_visible = enable
    if enable:
        self.refresh_table()

# toggle_auto_refresh | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
@Slot(bool)
def toggle_auto_refresh(self, state):
    """Toggle auto refresh state"""
    self.autorefresh = state
    if not self.setup_in_progress and not self.is_internal_shell:
        communicate(self._get_sock(), 'set_monitor_auto_refresh(%r)' % state)

# _get_sock | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def _get_sock(self):
    """Return socket connection"""
    return self.shellwidget.introspection_socket

# get_internal_shell_filter | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def get_internal_shell_filter(self, mode, check_all=None):
    """
    Return internal shell data types filter:
        * check_all: check all elements data types for sequences
          (dict, list, tuple)
        * mode (string): 'editable' or 'picklable'
    """
    assert mode in list(SUPPORTED_TYPES.keys())
    if check_all is None:
        check_all = self.check_all
    def wsfilter(input_dict, check_all=check_all,
                 filters=tuple(SUPPORTED_TYPES[mode])):
        """Keep only objects that can be pickled"""
        return globalsfilter(
            input_dict, check_all=check_all, filters=filters,
            exclude_private=self.exclude_private,
            exclude_uppercase=self.exclude_uppercase,
            exclude_capitalized=self.exclude_capitalized,
            exclude_unsupported=self.exclude_unsupported,
            excluded_names=self.excluded_names)
    return wsfilter

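The returned filter is a plain callable over a namespace dict. A minimal usage sketch (hypothetical: `browser` stands for an already-configured namespace-browser instance):

# Hypothetical usage of the filter returned above.
wsfilter = browser.get_internal_shell_filter('editable')
namespace = {'x': 1, '_hidden': 2}
visible = wsfilter(namespace)  # keeps only entries passing the configured exclusions
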
# get_view_settings | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def get_view_settings(self):
    """Return dict editor view settings"""
    settings = {}
    for name in REMOTE_SETTINGS:
        settings[name] = getattr(self, name)
    return settings

# refresh_table | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
@Slot()
def refresh_table(self):
    """Refresh variable table"""
    if self.is_visible and self.isVisible():
        if self.is_internal_shell:
            wsfilter = self.get_internal_shell_filter('editable')
            self.editor.set_filter(wsfilter)
            interpreter = self.shellwidget.interpreter
            if interpreter is not None:
                self.editor.set_data(interpreter.namespace)
                self.editor.adjust_columns()
        elif self.shellwidget.is_running():
            sock = self._get_sock()
            if sock is None:
                return
            try:
                communicate(sock, 'refresh()')
            except socket.error:
                pass

# process_remote_view | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def process_remote_view(self, remote_view):
    """Process remote view"""
    if remote_view is not None:
        self.set_data(remote_view)

# is_list | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_list(self, name):
    """Return True if variable is a list or a tuple"""
    return communicate(self._get_sock(), 'isinstance(%s, (tuple, list))' % name)

# is_dict | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_dict(self, name):
    """Return True if variable is a dictionary"""
    return communicate(self._get_sock(), 'isinstance(%s, dict)' % name)

# get_len | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def get_len(self, name):
    """Return sequence length"""
    return communicate(self._get_sock(), 'len(%s)' % name)

# is_array | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_array(self, name):
    """Return True if variable is a NumPy array"""
    return communicate(self._get_sock(), 'is_array("%s")' % name)

# is_image | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_image(self, name):
    """Return True if variable is a PIL.Image image"""
    return communicate(self._get_sock(), 'is_image("%s")' % name)

# is_data_frame | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_data_frame(self, name):
    """Return True if variable is a DataFrame"""
    return communicate(self._get_sock(),
                       "isinstance(globals()['%s'], DataFrame)" % name)

# is_time_series | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def is_time_series(self, name):
    """Return True if variable is a TimeSeries"""
    return communicate(self._get_sock(),
                       "isinstance(globals()['%s'], TimeSeries)" % name)

# get_array_shape | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def get_array_shape(self, name):
    """Return array's shape"""
    return communicate(self._get_sock(), '%s.shape' % name)

# get_array_ndim | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def get_array_ndim(self, name):
    """Return array's ndim"""
    return communicate(self._get_sock(), '%s.ndim' % name)

# set_data | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def set_data(self, data):
    """Set data"""
    if data != self.editor.model.get_data():
        self.editor.set_data(data)
        self.editor.adjust_columns()

# collapse | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def collapse(self):
    """Collapse"""
    self.sig_collapse.emit()

# import_data | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
@Slot(list)
def import_data(self, filenames=None):
    """Import data from text file"""
    title = _("Import data")
    if filenames is None:
        if self.filename is None:
            basedir = getcwd()
        else:
            basedir = osp.dirname(self.filename)
        filenames, _selfilter = getopenfilenames(self, title, basedir,
                                                 iofunctions.load_filters)
        if not filenames:
            return
    elif is_text_string(filenames):
        filenames = [filenames]
    for filename in filenames:
        self.filename = to_text_string(filename)
        ext = osp.splitext(self.filename)[1].lower()
        if ext not in iofunctions.load_funcs:
            buttons = QMessageBox.Yes | QMessageBox.Cancel
            answer = QMessageBox.question(
                self, title,
                _("<b>Unsupported file extension '%s'</b><br><br>"
                  "Would you like to import it anyway "
                  "(by selecting a known file format)?") % ext,
                buttons)
            if answer == QMessageBox.Cancel:
                return
            formats = list(iofunctions.load_extensions.keys())
            item, ok = QInputDialog.getItem(self, title, _('Open file as:'),
                                            formats, 0, False)
            if ok:
                ext = iofunctions.load_extensions[to_text_string(item)]
            else:
                return
        load_func = iofunctions.load_funcs[ext]
        if is_text_string(load_func):
            # A string load_func refers to an import wizard rather than a
            # direct loader function.
            error_message = None
            try:
                text, _encoding = encoding.read(self.filename)
                if self.is_internal_shell:
                    self.editor.import_from_string(text)
                else:
                    base_name = osp.basename(self.filename)
                    editor = ImportWizard(
                        self, text, title=base_name,
                        varname=fix_reference_name(base_name))
                    if editor.exec_():
                        var_name, clip_data = editor.get_data()
                        monitor_set_global(self._get_sock(), var_name,
                                           clip_data)
            except Exception as error:
                error_message = str(error)
        else:
            QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
            QApplication.processEvents()
            if self.is_internal_shell:
                namespace, error_message = load_func(self.filename)
                interpreter = self.shellwidget.interpreter
                for key in list(namespace.keys()):
                    new_key = fix_reference_name(
                        key, blacklist=list(interpreter.namespace.keys()))
                    if new_key != key:
                        namespace[new_key] = namespace.pop(key)
                if error_message is None:
                    interpreter.namespace.update(namespace)
            else:
                error_message = monitor_load_globals(self._get_sock(),
                                                     self.filename, ext)
            QApplication.restoreOverrideCursor()
            QApplication.processEvents()
        if error_message is not None:
            QMessageBox.critical(
                self, title,
                _("<b>Unable to load '%s'</b><br><br>Error message:<br>%s")
                % (self.filename, error_message))
        self.refresh_table()

# save_data | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
@Slot()
def save_data(self, filename=None):
    """Save data"""
    if filename is None:
        filename = self.filename
        if filename is None:
            filename = getcwd()
        filename, _selfilter = getsavefilename(self, _("Save data"),
                                               filename,
                                               iofunctions.save_filters)
        if filename:
            self.filename = filename
        else:
            return False
    QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
    QApplication.processEvents()
    if self.is_internal_shell:
        wsfilter = self.get_internal_shell_filter('picklable',
                                                  check_all=True)
        namespace = wsfilter(self.shellwidget.interpreter.namespace).copy()
        error_message = iofunctions.save(namespace, filename)
    else:
        settings = self.get_view_settings()
        error_message = monitor_save_globals(self._get_sock(), settings,
                                             filename)
    QApplication.restoreOverrideCursor()
    QApplication.processEvents()
    if error_message is not None:
        QMessageBox.critical(
            self, _("Save data"),
            _("<b>Unable to save current workspace</b><br><br>"
              "Error message:<br>%s") % error_message)
    self.save_button.setEnabled(self.filename is not None)

# wsfilter | spyderlib/widgets/externalshell/namespacebrowser.py | junglefunkyman/spectracer | python
def wsfilter(input_dict, check_all=check_all,
             filters=tuple(SUPPORTED_TYPES[mode])):
    """Keep only objects that can be pickled"""
    return globalsfilter(
        input_dict, check_all=check_all, filters=filters,
        exclude_private=self.exclude_private,
        exclude_uppercase=self.exclude_uppercase,
        exclude_capitalized=self.exclude_capitalized,
        exclude_unsupported=self.exclude_unsupported,
        excluded_names=self.excluded_names)

# create_genesis_or_zero_coin_checker | inan/wallet/puzzles/genesis_by_coin_id_with_0.py | inan0812/Inans-blockchain | python
def create_genesis_or_zero_coin_checker(genesis_coin_id: bytes32) -> Program:
    """
    Given a specific genesis coin id, create a `genesis_coin_mod` that allows
    both that coin id to issue a cc, or anyone to create a cc with amount 0.
    """
    genesis_coin_mod = MOD
    return genesis_coin_mod.curry(genesis_coin_id)

# genesis_coin_id_for_genesis_coin_checker | inan/wallet/puzzles/genesis_by_coin_id_with_0.py | inan0812/Inans-blockchain | python
def genesis_coin_id_for_genesis_coin_checker(genesis_coin_checker: Program) -> Optional[bytes32]:
    """
    Given a `genesis_coin_checker` program, pull out the genesis coin id.
    """
    r = genesis_coin_checker.uncurry()
    if r is None:
        return r
    f, args = r
    if f != MOD:
        return None
    return args.first().as_atom()

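These two helpers are inverses over the curried argument: one curries a coin id into MOD, the other uncurries it back out. A minimal round-trip sketch (hypothetical coin id value; assumes `bytes32` and the module's `MOD` are importable):

# Hypothetical round trip: curry a coin id in, then pull it back out.
coin_id = bytes32(b'\x01' * 32)  # placeholder 32-byte value
checker = create_genesis_or_zero_coin_checker(coin_id)
assert genesis_coin_id_for_genesis_coin_checker(checker) == coin_id
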
# initialize | edward/inferences/sghmc.py | mmargenot/edward | python
def initialize(self, step_size=0.25, friction=0.1, *args, **kwargs):
    """Initialize inference algorithm.

    Args:
      step_size: float, optional.
        Constant scale factor of learning rate.
      friction: float, optional.
        Constant scale on the friction term in the Hamiltonian system.
    """
    self.step_size = step_size
    self.friction = friction
    self.v = {z: tf.Variable(tf.zeros(qz.params.shape[1:]))
              for z, qz in six.iteritems(self.latent_vars)}
    return super(SGHMC, self).initialize(*args, **kwargs)

# build_update | edward/inferences/sghmc.py | mmargenot/edward | python
def build_update(self):
    """Simulate Hamiltonian dynamics with friction using a discretized
    integrator. Its discretization error goes to zero as the learning
    rate decreases.

    Implements the update equations from (15) of Chen et al. (2014).
    """
    old_sample = {z: tf.gather(qz.params, tf.maximum(self.t - 1, 0))
                  for z, qz in six.iteritems(self.latent_vars)}
    old_v_sample = {z: v for z, v in six.iteritems(self.v)}

    # Simulate the discretized dynamics for one step.
    friction = tf.constant(self.friction, dtype=tf.float32)
    learning_rate = tf.constant(self.step_size * 0.01, dtype=tf.float32)
    grad_log_joint = tf.gradients(self._log_joint(old_sample),
                                  list(six.itervalues(old_sample)))

    sample = {}
    v_sample = {}
    for z, grad_log_p in zip(six.iterkeys(old_sample), grad_log_joint):
        qz = self.latent_vars[z]
        event_shape = qz.event_shape
        normal = Normal(loc=tf.zeros(event_shape),
                        scale=(tf.sqrt(learning_rate * friction) *
                               tf.ones(event_shape)))
        sample[z] = old_sample[z] + old_v_sample[z]
        v_sample[z] = ((1.0 - 0.5 * friction) * old_v_sample[z] +
                       learning_rate * tf.convert_to_tensor(grad_log_p) +
                       normal.sample())

    # Assign the new samples and velocities.
    assign_ops = []
    for z, qz in six.iteritems(self.latent_vars):
        variable = qz.get_variables()[0]
        assign_ops.append(tf.scatter_update(variable, self.t, sample[z]))
        assign_ops.append(tf.assign(self.v[z], v_sample[z]).op)

    assign_ops.append(self.n_accept.assign_add(1))
    return tf.group(*assign_ops)

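Read directly off the loop above, one step of the discretized dynamics for each latent variable theta is, with epsilon the scaled learning rate (0.01 * step_size) and alpha the friction constant:

\theta_{t+1} = \theta_t + v_t
v_{t+1} = \left(1 - \tfrac{\alpha}{2}\right) v_t
          + \epsilon \, \nabla_\theta \log p(x, \theta_t)
          + \mathcal{N}(0, \epsilon \alpha)
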
# _log_joint | edward/inferences/sghmc.py | mmargenot/edward | python
def _log_joint(self, z_sample):
    """Utility function to calculate model's log joint density,
    log p(x, z), for inputs z (and fixed data x).

    Args:
      z_sample: dict.
        Latent variable keys to samples.
    """
    scope = tf.get_default_graph().unique_name("inference")
    dict_swap = z_sample.copy()
    for x, qx in six.iteritems(self.data):
        if isinstance(x, RandomVariable):
            if isinstance(qx, RandomVariable):
                qx_copy = copy(qx, scope=scope)
                dict_swap[x] = qx_copy.value()
            else:
                dict_swap[x] = qx

    log_joint = 0.0
    for z in six.iterkeys(self.latent_vars):
        z_copy = copy(z, dict_swap, scope=scope)
        log_joint += tf.reduce_sum(
            self.scale.get(z, 1.0) * z_copy.log_prob(dict_swap[z]))

    for x in six.iterkeys(self.data):
        if isinstance(x, RandomVariable):
            x_copy = copy(x, dict_swap, scope=scope)
            log_joint += tf.reduce_sum(
                self.scale.get(x, 1.0) * x_copy.log_prob(dict_swap[x]))

    return log_joint

# select_action | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def select_action(self, observation: types.NestedArray) -> types.NestedArray:
    """Samples from the policy and returns an action."""

# observe_first | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def observe_first(self, timestep: dm_env.TimeStep):
    """Make a first observation from the environment.

    Note that this need not be an initial state, it is merely beginning the
    recording of a trajectory.

    Args:
      timestep: first timestep.
    """

# observe | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def observe(self, action: types.NestedArray, next_timestep: dm_env.TimeStep):
    """Make an observation of timestep data from the environment.

    Args:
      action: action taken in the environment.
      next_timestep: timestep produced by the environment given the action.
    """

# update | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def update(self, wait: bool = False):
    """Perform an update of the actor parameters from past observations.

    Args:
      wait: if True, the update will be blocking.
    """

# get_variables | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def get_variables(self, names: Sequence[str]) -> List[types.NestedArray]:
    """Return the named variables as a collection of (nested) numpy arrays.

    Args:
      names: args where each name is a string identifying a predefined subset
        of the variables.

    Returns:
      A list of (nested) numpy arrays `variables` such that `variables[i]`
      corresponds to the collection named by `names[i]`.
    """

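Taken together, the abstract methods above define an actor-style interface: act, record transitions, and pull fresh parameters. A minimal concrete sketch (hypothetical: a uniform-random actor with no state; it only mirrors the interface shape rather than subclassing any acme base class):

import numpy as np

# Hypothetical minimal actor: uniform-random policy, no learning.
class RandomActor:
    def __init__(self, num_actions: int):
        self._num_actions = num_actions

    def select_action(self, observation):
        return np.random.randint(self._num_actions)

    def observe_first(self, timestep):
        pass  # nothing to record

    def observe(self, action, next_timestep):
        pass  # nothing to record

    def update(self, wait=False):
        pass  # no parameters to update
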
# run | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def run(self):
    """Runs the worker."""

# save | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def save(self) -> T:
    """Returns the state from the object to be saved."""

# restore | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def restore(self, state: T):
    """Given the state, restores the object."""

# step | acme/core.py | Idate96/acme | python
@abc.abstractmethod
def step(self):
    """Perform an update step of the learner's parameters."""

# run | acme/core.py | Idate96/acme | python
def run(self, num_steps: Optional[int] = None) -> None:
    """Run the update loop; typically an infinite loop which calls step."""
    iterator = range(num_steps) if num_steps is not None else itertools.count()
    for _ in iterator:
        self.step()

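A minimal sketch of how `step`, `run`, `save`, and `restore` compose (hypothetical `CountingLearner` whose only state is an update counter):

import itertools

# Hypothetical learner: `step` performs a trivial "update"; the counter is
# the checkpointable state round-tripped by save/restore.
class CountingLearner:
    def __init__(self):
        self._num_updates = 0

    def step(self):
        self._num_updates += 1  # a real learner would apply a gradient step

    def run(self, num_steps=None):
        iterator = range(num_steps) if num_steps is not None else itertools.count()
        for _ in iterator:
            self.step()

    def save(self):
        return self._num_updates

    def restore(self, state):
        self._num_updates = state

learner = CountingLearner()
learner.run(num_steps=10)
assert learner.save() == 10
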
# generate | nas/random.py | Beomi/autokeras | python
def generate(self, multiprocessing_queue):
    """Generate the next neural architecture.

    Args:
        multiprocessing_queue: the Queue for multiprocessing return value.

    Returns:
        list of 2-element tuples: generated_graph and other_info,
        for random searcher the length of list is 1.
        generated_graph: An instance of Graph.
        other_info: Anything to be saved in the training queue together
            with the architecture.
    """
    random_index = randrange(len(self.history))
    model_id = self.history[random_index]['model_id']
    graph = self.load_model_by_id(model_id)
    new_father_id = None
    generated_graph = None
    for temp_graph in transform(graph):
        if not contain(self.descriptors, temp_graph.extract_descriptor()):
            new_father_id = model_id
            generated_graph = temp_graph
            break
    if new_father_id is None:
        # No unseen mutation of the sampled model was found: fall back to a
        # fresh architecture from the default generator.
        new_father_id = 0
        generated_graph = self.generators[0](
            self.n_classes, self.input_shape).generate(
            self.default_model_len, self.default_model_width)
    return [(generated_graph, new_father_id)]

# update | nas/random.py | Beomi/autokeras | python
def update(self, other_info, model_id, *args):
    """Update the controller with evaluation result of a neural architecture.

    Args:
        other_info: Anything. In our case it is the father ID in the search tree.
        model_id: An integer.
    """
    father_id = other_info
    self.search_tree.add_child(father_id, model_id)

# __init__ | util/video_writer.py | Lai-YT/webcam-applications | python
def __init__(self, video_path: str, fps: float = 10.0) -> None:
    """
    Arguments:
        video_path: The path to output the video.
        fps: If higher than the writing rate, the video will be fast-forwarded.
    """
    fourcc: int = cv2.VideoWriter_fourcc(*'mp4v')
    self._video_writer = cv2.VideoWriter(video_path + '.mp4', fourcc, fps,
                                         (640, 480))

# write | util/video_writer.py | Lai-YT/webcam-applications | python
def write(self, image: ColorImage) -> None:
    """Writes the next video frame."""
    self._video_writer.write(image)

# is_opened | util/video_writer.py | Lai-YT/webcam-applications | python
def is_opened(self) -> bool:
    """Returns True if video writer has been successfully initialized."""
    return self._video_writer.isOpened()

# release | util/video_writer.py | Lai-YT/webcam-applications | python
def release(self) -> None:
    """Closes the video writer."""
    self._video_writer.release()

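A short usage sketch of this writer (hypothetical: the enclosing class is assumed to be named `VideoWriter`; frames come from a webcam via OpenCV and are resized to the hard-coded 640x480):

import cv2

# Hypothetical usage: record 100 webcam frames to "out.mp4".
writer = VideoWriter('out', fps=10.0)
capture = cv2.VideoCapture(0)
for _ in range(100):
    ok, frame = capture.read()
    if not ok:
        break
    frame = cv2.resize(frame, (640, 480))  # the writer is fixed at 640x480
    writer.write(frame)
capture.release()
writer.release()
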
# parseString | wrap/gtwrap/interface_parser/module.py | BaiLiping/gtsam | python
@staticmethod
def parseString(s: str) -> ParseResults:
    """Parse the source string and apply the rules."""
    return Module.rule.parseString(s)[0]

# handle_answer_request | src/handle_answer_request.py | irlrobot/memory_loss | python
def handle_answer_request(player_answer, session):
    """check if the answer is right, adjust score, and continue"""
    print("=====handle_answer_request fired...")
    attributes = {}
    should_end_session = False
    print("=====answer heard was: " + player_answer)
    current_question = session['attributes']['question']
    correct_answer = current_question['answer']
    shuffle(QUESTIONS)
    next_question = choice(QUESTIONS)
    if correct_answer == player_answer:
        answered_correctly = True
    else:
        log_wrong_answer(current_question['question'], player_answer,
                         correct_answer)
        answered_correctly = False
    next_tts = "Next question in 3... 2... 1... " + next_question['question']
    attributes = {'question': next_question, 'game_status': 'in_progress'}
    if answered_correctly:
        speech_output = "Correct!" + next_tts
        card_title = "Correct!"
    else:
        speech_output = "Wrong!" + next_tts
        card_title = "Wrong!"
    card_text = "The question was:\n" + current_question['question']
    return speech_with_card(speech_output, attributes, should_end_session,
                            card_title, card_text, answered_correctly)

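The shape of `session` this handler expects can be read off its lookups. A minimal illustrative call (hypothetical question record; entries in `QUESTIONS` presumably share this shape):

# Hypothetical session payload matching the lookups in the handler.
session = {
    'attributes': {
        'question': {
            'question': 'What is the capital of France?',
            'answer': 'paris',
        }
    }
}
response = handle_answer_request('paris', session)  # scores as correct
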
# log_wrong_answer | src/handle_answer_request.py | irlrobot/memory_loss | python
def log_wrong_answer(question, answer, correct_answer):
    """log all questions answered incorrectly so i can analyze later"""
    print("[WRONG ANSWER]:" + question + ":" + answer + ":" + correct_answer)

# get_transport_class | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
def get_transport_class(cls, label: str = None) -> Type[JobServiceTransport]:
    """Return an appropriate transport class.

    Args:
        label: The name of the desired transport. If none is
            provided, then the first transport in the registry is used.

    Returns:
        The transport class to use.
    """
    if label:
        return cls._transport_registry[label]
    return next(iter(cls._transport_registry.values()))

# _get_default_mtls_endpoint | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
    """Convert api endpoint to mTLS endpoint.

    Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
    "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.

    Args:
        api_endpoint (Optional[str]): the api endpoint to convert.
    Returns:
        str: converted mTLS api endpoint.
    """
    if not api_endpoint:
        return api_endpoint

    mtls_endpoint_re = re.compile(
        r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
    )

    m = mtls_endpoint_re.match(api_endpoint)
    name, mtls, sandbox, googledomain = m.groups()
    if mtls or not googledomain:
        return api_endpoint

    if sandbox:
        return api_endpoint.replace(
            "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
        )

    return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

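Tracing the branches above on a few representative inputs:

# Behavior of _get_default_mtls_endpoint, read off the code above:
#   "jobs.googleapis.com"          -> "jobs.mtls.googleapis.com"
#   "jobs.sandbox.googleapis.com"  -> "jobs.mtls.sandbox.googleapis.com"
#   "jobs.mtls.googleapis.com"     -> unchanged (already an mTLS endpoint)
#   "example.com"                  -> unchanged (not a googleapis domain)
#   "" or None                     -> returned as-is
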
# from_service_account_file | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
    """Creates an instance of this client using the provided credentials
    file.

    Args:
        filename (str): The path to the service account private key json
            file.
        args: Additional arguments to pass to the constructor.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        {@api.name}: The constructed client.
    """
    credentials = service_account.Credentials.from_service_account_file(filename)
    kwargs["credentials"] = credentials
    return cls(*args, **kwargs)

# job_path | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
@staticmethod
def job_path(project: str, tenant: str, job: str) -> str:
    """Return a fully-qualified job string."""
    return "projects/{project}/tenants/{tenant}/jobs/{job}".format(
        project=project, tenant=tenant, job=job
    )

# parse_job_path | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
@staticmethod
def parse_job_path(path: str) -> Dict[str, str]:
    """Parse a job path into its component segments."""
    m = re.match(
        r"^projects/(?P<project>.+?)/tenants/(?P<tenant>.+?)/jobs/(?P<job>.+?)$",
        path,
    )
    return m.groupdict() if m else {}

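`job_path` and `parse_job_path` round-trip each other (assuming the enclosing class is named `JobServiceClient`, per the module path; ids here are placeholders):

# Round trip between the two path helpers:
path = JobServiceClient.job_path("my-project", "my-tenant", "my-job")
# path == "projects/my-project/tenants/my-tenant/jobs/my-job"
assert JobServiceClient.parse_job_path(path) == {
    "project": "my-project", "tenant": "my-tenant", "job": "my-job",
}
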
# __init__ | google/cloud/talent_v4beta1/services/job_service/client.py | busunkim96/python-talent | python
def __init__(
    self,
    *,
    credentials: credentials.Credentials = None,
    transport: Union[str, JobServiceTransport] = None,
    client_options: ClientOptions = None,
) -> None:
    """Instantiate the job service client.

    Args:
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify the application to the service; if none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
        transport (Union[str, ~.JobServiceTransport]): The
            transport to use. If set to None, a transport is chosen
            automatically.
        client_options (ClientOptions): Custom options for the client. It
            won't take effect if a ``transport`` instance is provided.
            (1) The ``api_endpoint`` property can be used to override the
            default endpoint provided by the client. GOOGLE_API_USE_MTLS
            environment variable can also be used to override the endpoint:
            "always" (always use the default mTLS endpoint), "never" (always
            use the default regular endpoint, this is the default value for
            the environment variable) and "auto" (auto switch to the default
            mTLS endpoint if client SSL credentials is present). However,
            the ``api_endpoint`` property takes precedence if provided.
            (2) The ``client_cert_source`` property is used to provide client
            SSL credentials for mutual TLS transport. If not provided, the
            default SSL credentials will be used if present.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
            creation failed for any reason.
    """
    if isinstance(client_options, dict):
        client_options = ClientOptions.from_dict(client_options)
    if client_options is None:
        client_options = ClientOptions.ClientOptions()

    if client_options.api_endpoint is None:
        use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
        if use_mtls_env == "never":
            client_options.api_endpoint = self.DEFAULT_ENDPOINT
        elif use_mtls_env == "always":
            client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
        elif use_mtls_env == "auto":
            has_client_cert_source = (
                client_options.client_cert_source is not None
                or mtls.has_default_client_cert_source()
            )
            client_options.api_endpoint = (
                self.DEFAULT_MTLS_ENDPOINT
                if has_client_cert_source
                else self.DEFAULT_ENDPOINT
            )
        else:
            raise MutualTLSChannelError(
                "Unsupported GOOGLE_API_USE_MTLS value. "
                "Accepted values: never, auto, always"
            )

    # Save or instantiate the transport. A transport instance must not be
    # combined with explicit credentials or scopes.
    if isinstance(transport, JobServiceTransport):
        if credentials or client_options.credentials_file:
            raise ValueError(
                "When providing a transport instance, provide its credentials directly."
            )
        if client_options.scopes:
            raise ValueError(
                "When providing a transport instance, provide its scopes directly."
            )
        self._transport = transport
    else:
        Transport = type(self).get_transport_class(transport)
        self._transport = Transport(
            credentials=credentials,
            credentials_file=client_options.credentials_file,
            host=client_options.api_endpoint,
            scopes=client_options.scopes,
            api_mtls_endpoint=client_options.api_endpoint,
            client_cert_source=client_options.client_cert_source,
            quota_project_id=client_options.quota_project_id,
        )

def create_job(self, request: job_service.CreateJobRequest=None, *, parent: str=None, job: gct_job.Job=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gct_job.Job:
'Creates a new job.\n Typically, the job becomes searchable within 10 seconds,\n but it may take up to 5 minutes.\n\n Args:\n request (:class:`~.job_service.CreateJobRequest`):\n The request object. Create job request.\n parent (:class:`str`):\n Required. The resource name of the tenant under which\n the job is created.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}". For\n example, "projects/foo/tenant/bar". If tenant id is\n unspecified a default tenant is created. For example,\n "projects/foo".\n This corresponds to the ``parent`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n job (:class:`~.gct_job.Job`):\n Required. The Job to be created.\n This corresponds to the ``job`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.gct_job.Job:\n A Job resource represents a job posting (also referred\n to as a "job listing" or "job requisition"). A job\n belongs to a\n [Company][google.cloud.talent.v4beta1.Company], which is\n the hiring entity responsible for the job.\n\n '
has_flattened_params = any([parent, job])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.CreateJobRequest)):
request = job_service.CreateJobRequest(request)
if (parent is not None):
request.parent = parent
if (job is not None):
request.job = job
rpc = self._transport._wrapped_methods[self._transport.create_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response | -5,979,966,913,168,546,000 | Creates a new job.
Typically, the job becomes searchable within 10 seconds,
but it may take up to 5 minutes.
Args:
request (:class:`~.job_service.CreateJobRequest`):
The request object. Create job request.
parent (:class:`str`):
Required. The resource name of the tenant under which
the job is created.
The format is
"projects/{project_id}/tenants/{tenant_id}". For
example, "projects/foo/tenant/bar". If tenant id is
unspecified a default tenant is created. For example,
"projects/foo".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
job (:class:`~.gct_job.Job`):
Required. The Job to be created.
This corresponds to the ``job`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.gct_job.Job:
A Job resource represents a job posting (also referred
to as a "job listing" or "job requisition"). A job
belongs to a
[Company][google.cloud.talent.v4beta1.Company], which is
the hiring entity responsible for the job. | google/cloud/talent_v4beta1/services/job_service/client.py | create_job | busunkim96/python-talent | python | def create_job(self, request: job_service.CreateJobRequest=None, *, parent: str=None, job: gct_job.Job=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gct_job.Job:
has_flattened_params = any([parent, job])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.CreateJobRequest)):
request = job_service.CreateJobRequest(request)
if (parent is not None):
request.parent = parent
if (job is not None):
request.job = job
rpc = self._transport._wrapped_methods[self._transport.create_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response |
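A hedged usage sketch for the create_job record above; the project, tenant, and company identifiers are hypothetical placeholders:

from google.cloud import talent_v4beta1

client = talent_v4beta1.JobServiceClient()
parent = "projects/my-project/tenants/my-tenant"  # hypothetical tenant
job = talent_v4beta1.Job(
    company="projects/my-project/tenants/my-tenant/companies/my-company",
    requisition_id="req-1",
    title="Software Engineer",
    description="Designs and maintains backend services.",
)
created = client.create_job(parent=parent, job=job)
print(created.name)  # the job becomes searchable within ~10 seconds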
def batch_create_jobs(self, request: job_service.BatchCreateJobsRequest=None, *, parent: str=None, jobs: Sequence[job.Job]=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
'Begins executing a batch create jobs operation.\n\n Args:\n request (:class:`~.job_service.BatchCreateJobsRequest`):\n The request object. Request to create a batch of jobs.\n parent (:class:`str`):\n Required. The resource name of the tenant under which\n the job is created.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}". For\n example, "projects/foo/tenant/bar". If tenant id is\n unspecified, a default tenant is created. For example,\n "projects/foo".\n This corresponds to the ``parent`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n jobs (:class:`Sequence[~.job.Job]`):\n Required. The jobs to be created.\n This corresponds to the ``jobs`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.operation.Operation:\n An object representing a long-running operation.\n\n The result type for the operation will be\n :class:``~.job_service.JobOperationResult``: The result\n of\n [JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs]\n or\n [JobService.BatchUpdateJobs][google.cloud.talent.v4beta1.JobService.BatchUpdateJobs]\n APIs. It\'s used to replace\n [google.longrunning.Operation.response][google.longrunning.Operation.response]\n in case of success.\n\n '
has_flattened_params = any([parent, jobs])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchCreateJobsRequest)):
request = job_service.BatchCreateJobsRequest(request)
if (parent is not None):
request.parent = parent
if (jobs is not None):
request.jobs = jobs
rpc = self._transport._wrapped_methods[self._transport.batch_create_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = operation.from_gapic(response, self._transport.operations_client, job_service.JobOperationResult, metadata_type=common.BatchOperationMetadata)
return response | 6,053,910,170,900,765,000 | Begins executing a batch create jobs operation.
Args:
request (:class:`~.job_service.BatchCreateJobsRequest`):
The request object. Request to create a batch of jobs.
parent (:class:`str`):
Required. The resource name of the tenant under which
the job is created.
The format is
"projects/{project_id}/tenants/{tenant_id}". For
example, "projects/foo/tenant/bar". If tenant id is
unspecified, a default tenant is created. For example,
"projects/foo".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
jobs (:class:`Sequence[~.job.Job]`):
Required. The jobs to be created.
This corresponds to the ``jobs`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`~.job_service.JobOperationResult`: The result
of
[JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs]
or
[JobService.BatchUpdateJobs][google.cloud.talent.v4beta1.JobService.BatchUpdateJobs]
APIs. It's used to replace
[google.longrunning.Operation.response][google.longrunning.Operation.response]
in case of success. | google/cloud/talent_v4beta1/services/job_service/client.py | batch_create_jobs | busunkim96/python-talent | python | def batch_create_jobs(self, request: job_service.BatchCreateJobsRequest=None, *, parent: str=None, jobs: Sequence[job.Job]=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
has_flattened_params = any([parent, jobs])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchCreateJobsRequest)):
request = job_service.BatchCreateJobsRequest(request)
if (parent is not None):
request.parent = parent
if (jobs is not None):
request.jobs = jobs
rpc = self._transport._wrapped_methods[self._transport.batch_create_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = operation.from_gapic(response, self._transport.operations_client, job_service.JobOperationResult, metadata_type=common.BatchOperationMetadata)
return response |
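batch_create_jobs returns a long-running operation that resolves to a JobOperationResult. A short sketch, reusing the hypothetical client, parent, and Job construction from the previous example (job_a and job_b are assumed to be Job messages built the same way):

op = client.batch_create_jobs(parent=parent, jobs=[job_a, job_b])
result = op.result(timeout=300)  # blocks until the batch finishes
for jr in result.job_results:
    print(jr.job.requisition_id, jr.status.code)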
def get_job(self, request: job_service.GetJobRequest=None, *, name: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> job.Job:
'Retrieves the specified job, whose status is OPEN or\n recently EXPIRED within the last 90 days.\n\n Args:\n request (:class:`~.job_service.GetJobRequest`):\n The request object. Get job request.\n name (:class:`str`):\n Required. The resource name of the job to retrieve.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}".\n For example, "projects/foo/tenants/bar/jobs/baz".\n\n If tenant id is unspecified, the default tenant is used.\n For example, "projects/foo/jobs/bar".\n This corresponds to the ``name`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.job.Job:\n A Job resource represents a job posting (also referred\n to as a "job listing" or "job requisition"). A job\n belongs to a\n [Company][google.cloud.talent.v4beta1.Company], which is\n the hiring entity responsible for the job.\n\n '
has_flattened_params = any([name])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.GetJobRequest)):
request = job_service.GetJobRequest(request)
if (name is not None):
request.name = name
rpc = self._transport._wrapped_methods[self._transport.get_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('name', request.name),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response | -2,709,083,377,417,962,500 | Retrieves the specified job, whose status is OPEN or
recently EXPIRED within the last 90 days.
Args:
request (:class:`~.job_service.GetJobRequest`):
The request object. Get job request.
name (:class:`str`):
Required. The resource name of the job to retrieve.
The format is
"projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}".
For example, "projects/foo/tenants/bar/jobs/baz".
If tenant id is unspecified, the default tenant is used.
For example, "projects/foo/jobs/bar".
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.job.Job:
A Job resource represents a job posting (also referred
to as a "job listing" or "job requisition"). A job
belongs to a
[Company][google.cloud.talent.v4beta1.Company], which is
the hiring entity responsible for the job. | google/cloud/talent_v4beta1/services/job_service/client.py | get_job | busunkim96/python-talent | python | def get_job(self, request: job_service.GetJobRequest=None, *, name: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> job.Job:
has_flattened_params = any([name])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.GetJobRequest)):
request = job_service.GetJobRequest(request)
if (name is not None):
request.name = name
rpc = self._transport._wrapped_methods[self._transport.get_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('name', request.name),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response |
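A one-call sketch for get_job; the fully qualified job name is hypothetical:

job = client.get_job(name="projects/my-project/tenants/my-tenant/jobs/123")
print(job.title, job.posting_publish_time)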
def update_job(self, request: job_service.UpdateJobRequest=None, *, job: gct_job.Job=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gct_job.Job:
'Updates specified job.\n Typically, updated contents become visible in search\n results within 10 seconds, but it may take up to 5\n minutes.\n\n Args:\n request (:class:`~.job_service.UpdateJobRequest`):\n The request object. Update job request.\n job (:class:`~.gct_job.Job`):\n Required. The Job to be updated.\n This corresponds to the ``job`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.gct_job.Job:\n A Job resource represents a job posting (also referred\n to as a "job listing" or "job requisition"). A job\n belongs to a\n [Company][google.cloud.talent.v4beta1.Company], which is\n the hiring entity responsible for the job.\n\n '
has_flattened_params = any([job])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.UpdateJobRequest)):
request = job_service.UpdateJobRequest(request)
if (job is not None):
request.job = job
rpc = self._transport._wrapped_methods[self._transport.update_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('job.name', request.job.name),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response | -7,455,544,025,035,734,000 | Updates specified job.
Typically, updated contents become visible in search
results within 10 seconds, but it may take up to 5
minutes.
Args:
request (:class:`~.job_service.UpdateJobRequest`):
The request object. Update job request.
job (:class:`~.gct_job.Job`):
Required. The Job to be updated.
This corresponds to the ``job`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.gct_job.Job:
A Job resource represents a job posting (also referred
to as a "job listing" or "job requisition"). A job
belongs to a
[Company][google.cloud.talent.v4beta1.Company], which is
the hiring entity responsible for the job. | google/cloud/talent_v4beta1/services/job_service/client.py | update_job | busunkim96/python-talent | python | def update_job(self, request: job_service.UpdateJobRequest=None, *, job: gct_job.Job=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> gct_job.Job:
has_flattened_params = any([job])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.UpdateJobRequest)):
request = job_service.UpdateJobRequest(request)
if (job is not None):
request.job = job
rpc = self._transport._wrapped_methods[self._transport.update_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('job.name', request.job.name),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
return response |
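Because the flattened update_job signature takes only the Job, a read-modify-write pattern is the simplest sketch (job name hypothetical); to send a partial update_mask, pass a full UpdateJobRequest instead:

job = client.get_job(name="projects/my-project/tenants/my-tenant/jobs/123")
job.title = "Senior Software Engineer"
updated = client.update_job(job=job)  # visible in search within ~10 seconds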
def batch_update_jobs(self, request: job_service.BatchUpdateJobsRequest=None, *, parent: str=None, jobs: Sequence[job.Job]=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
'Begins executing a batch update jobs operation.\n\n Args:\n request (:class:`~.job_service.BatchUpdateJobsRequest`):\n The request object. Request to update a batch of jobs.\n parent (:class:`str`):\n Required. The resource name of the tenant under which\n the job is created.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}". For\n example, "projects/foo/tenant/bar". If tenant id is\n unspecified, a default tenant is created. For example,\n "projects/foo".\n This corresponds to the ``parent`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n jobs (:class:`Sequence[~.job.Job]`):\n Required. The jobs to be updated.\n This corresponds to the ``jobs`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.operation.Operation:\n An object representing a long-running operation.\n\n The result type for the operation will be\n :class:``~.job_service.JobOperationResult``: The result\n of\n [JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs]\n or\n [JobService.BatchUpdateJobs][google.cloud.talent.v4beta1.JobService.BatchUpdateJobs]\n APIs. It\'s used to replace\n [google.longrunning.Operation.response][google.longrunning.Operation.response]\n in case of success.\n\n '
has_flattened_params = any([parent, jobs])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchUpdateJobsRequest)):
request = job_service.BatchUpdateJobsRequest(request)
if (parent is not None):
request.parent = parent
if (jobs is not None):
request.jobs = jobs
rpc = self._transport._wrapped_methods[self._transport.batch_update_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = operation.from_gapic(response, self._transport.operations_client, job_service.JobOperationResult, metadata_type=common.BatchOperationMetadata)
return response | 4,255,394,085,127,359,500 | Begins executing a batch update jobs operation.
Args:
request (:class:`~.job_service.BatchUpdateJobsRequest`):
The request object. Request to update a batch of jobs.
parent (:class:`str`):
Required. The resource name of the tenant under which
the job is created.
The format is
"projects/{project_id}/tenants/{tenant_id}". For
example, "projects/foo/tenant/bar". If tenant id is
unspecified, a default tenant is created. For example,
"projects/foo".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
jobs (:class:`Sequence[~.job.Job]`):
Required. The jobs to be updated.
This corresponds to the ``jobs`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.operation.Operation:
An object representing a long-running operation.
The result type for the operation will be
:class:`~.job_service.JobOperationResult`: The result
of
[JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs]
or
[JobService.BatchUpdateJobs][google.cloud.talent.v4beta1.JobService.BatchUpdateJobs]
APIs. It's used to replace
[google.longrunning.Operation.response][google.longrunning.Operation.response]
in case of success. | google/cloud/talent_v4beta1/services/job_service/client.py | batch_update_jobs | busunkim96/python-talent | python | def batch_update_jobs(self, request: job_service.BatchUpdateJobsRequest=None, *, parent: str=None, jobs: Sequence[job.Job]=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> operation.Operation:
has_flattened_params = any([parent, jobs])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchUpdateJobsRequest)):
request = job_service.BatchUpdateJobsRequest(request)
if (parent is not None):
request.parent = parent
if (jobs is not None):
request.jobs = jobs
rpc = self._transport._wrapped_methods[self._transport.batch_update_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = operation.from_gapic(response, self._transport.operations_client, job_service.JobOperationResult, metadata_type=common.BatchOperationMetadata)
return response |
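batch_update_jobs mirrors batch_create_jobs and also resolves to a JobOperationResult; a sketch under the same hypothetical setup:

op = client.batch_update_jobs(parent=parent, jobs=[job_a, job_b])
result = op.result(timeout=300)  # inspect result.job_results as above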
def delete_job(self, request: job_service.DeleteJobRequest=None, *, name: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> None:
'Deletes the specified job.\n Typically, the job becomes unsearchable within 10\n seconds, but it may take up to 5 minutes.\n\n Args:\n request (:class:`~.job_service.DeleteJobRequest`):\n The request object. Delete job request.\n name (:class:`str`):\n Required. The resource name of the job to be deleted.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}".\n For example, "projects/foo/tenants/bar/jobs/baz".\n\n If tenant id is unspecified, the default tenant is used.\n For example, "projects/foo/jobs/bar".\n This corresponds to the ``name`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n '
has_flattened_params = any([name])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.DeleteJobRequest)):
request = job_service.DeleteJobRequest(request)
if (name is not None):
request.name = name
rpc = self._transport._wrapped_methods[self._transport.delete_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('name', request.name),)),))
rpc(request, retry=retry, timeout=timeout, metadata=metadata) | -8,331,857,933,890,689,000 | Deletes the specified job.
Typically, the job becomes unsearchable within 10
seconds, but it may take up to 5 minutes.
Args:
request (:class:`~.job_service.DeleteJobRequest`):
The request object. Delete job request.
name (:class:`str`):
Required. The resource name of the job to be deleted.
The format is
"projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}".
For example, "projects/foo/tenants/bar/jobs/baz".
If tenant id is unspecified, the default tenant is used.
For example, "projects/foo/jobs/bar".
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata. | google/cloud/talent_v4beta1/services/job_service/client.py | delete_job | busunkim96/python-talent | python | def delete_job(self, request: job_service.DeleteJobRequest=None, *, name: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> None:
has_flattened_params = any([name])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.DeleteJobRequest)):
request = job_service.DeleteJobRequest(request)
if (name is not None):
request.name = name
rpc = self._transport._wrapped_methods[self._transport.delete_job]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('name', request.name),)),))
rpc(request, retry=retry, timeout=timeout, metadata=metadata) |
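delete_job returns None on success; a sketch with a hypothetical job name:

client.delete_job(name="projects/my-project/tenants/my-tenant/jobs/123")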
def batch_delete_jobs(self, request: job_service.BatchDeleteJobsRequest=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> None:
'Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by\n filter.\n\n Args:\n request (:class:`~.job_service.BatchDeleteJobsRequest`):\n The request object. Batch delete jobs request.\n parent (:class:`str`):\n Required. The resource name of the tenant under which\n the job is created.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}". For\n example, "projects/foo/tenant/bar". If tenant id is\n unspecified, a default tenant is created. For example,\n "projects/foo".\n This corresponds to the ``parent`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n filter (:class:`str`):\n Required. The filter string specifies the jobs to be\n deleted.\n\n Supported operator: =, AND\n\n The fields eligible for filtering are:\n\n - ``companyName`` (Required)\n - ``requisitionId`` (Required)\n\n Sample Query: companyName = "projects/foo/companies/bar"\n AND requisitionId = "req-1".\n This corresponds to the ``filter`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n '
has_flattened_params = any([parent, filter])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchDeleteJobsRequest)):
request = job_service.BatchDeleteJobsRequest(request)
if (parent is not None):
request.parent = parent
if (filter is not None):
request.filter = filter
rpc = self._transport._wrapped_methods[self._transport.batch_delete_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
rpc(request, retry=retry, timeout=timeout, metadata=metadata) | 2,415,183,544,373,126,000 | Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by
filter.
Args:
request (:class:`~.job_service.BatchDeleteJobsRequest`):
The request object. Batch delete jobs request.
parent (:class:`str`):
Required. The resource name of the tenant under which
the job is created.
The format is
"projects/{project_id}/tenants/{tenant_id}". For
example, "projects/foo/tenant/bar". If tenant id is
unspecified, a default tenant is created. For example,
"projects/foo".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (:class:`str`):
Required. The filter string specifies the jobs to be
deleted.
Supported operator: =, AND
The fields eligible for filtering are:
- ``companyName`` (Required)
- ``requisitionId`` (Required)
Sample Query: companyName = "projects/foo/companies/bar"
AND requisitionId = "req-1".
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata. | google/cloud/talent_v4beta1/services/job_service/client.py | batch_delete_jobs | busunkim96/python-talent | python | def batch_delete_jobs(self, request: job_service.BatchDeleteJobsRequest=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> None:
has_flattened_params = any([parent, filter])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.BatchDeleteJobsRequest)):
request = job_service.BatchDeleteJobsRequest(request)
if (parent is not None):
request.parent = parent
if (filter is not None):
request.filter = filter
rpc = self._transport._wrapped_methods[self._transport.batch_delete_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
rpc(request, retry=retry, timeout=timeout, metadata=metadata) |
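batch_delete_jobs selects jobs by filter rather than by name. A sketch using the filter grammar from the docstring above (company and requisition IDs hypothetical):

filter_ = ('companyName = "projects/my-project/companies/my-company" '
           'AND requisitionId = "req-1"')
client.batch_delete_jobs(parent=parent, filter=filter_)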
def list_jobs(self, request: job_service.ListJobsRequest=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.ListJobsPager:
'Lists jobs by filter.\n\n Args:\n request (:class:`~.job_service.ListJobsRequest`):\n The request object. List jobs request.\n parent (:class:`str`):\n Required. The resource name of the tenant under which\n the job is created.\n\n The format is\n "projects/{project_id}/tenants/{tenant_id}". For\n example, "projects/foo/tenant/bar". If tenant id is\n unspecified, a default tenant is created. For example,\n "projects/foo".\n This corresponds to the ``parent`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n filter (:class:`str`):\n Required. The filter string specifies the jobs to be\n enumerated.\n\n Supported operator: =, AND\n\n The fields eligible for filtering are:\n\n - ``companyName`` (Required)\n - ``requisitionId``\n - ``status`` Available values: OPEN, EXPIRED, ALL.\n Defaults to OPEN if no value is specified.\n\n Sample Query:\n\n - companyName =\n "projects/foo/tenants/bar/companies/baz"\n - companyName =\n "projects/foo/tenants/bar/companies/baz" AND\n requisitionId = "req-1"\n - companyName =\n "projects/foo/tenants/bar/companies/baz" AND status =\n "EXPIRED".\n This corresponds to the ``filter`` field\n on the ``request`` instance; if ``request`` is provided, this\n should not be set.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.pagers.ListJobsPager:\n List jobs response.\n Iterating over this object will yield\n results and resolve additional pages\n automatically.\n\n '
has_flattened_params = any([parent, filter])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.ListJobsRequest)):
request = job_service.ListJobsRequest(request)
if (parent is not None):
request.parent = parent
if (filter is not None):
request.filter = filter
rpc = self._transport._wrapped_methods[self._transport.list_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.ListJobsPager(method=rpc, request=request, response=response, metadata=metadata)
return response | -455,659,918,997,943,040 | Lists jobs by filter.
Args:
request (:class:`~.job_service.ListJobsRequest`):
The request object. List jobs request.
parent (:class:`str`):
Required. The resource name of the tenant under which
the job is created.
The format is
"projects/{project_id}/tenants/{tenant_id}". For
example, "projects/foo/tenant/bar". If tenant id is
unspecified, a default tenant is created. For example,
"projects/foo".
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
filter (:class:`str`):
Required. The filter string specifies the jobs to be
enumerated.
Supported operator: =, AND
The fields eligible for filtering are:
- ``companyName`` (Required)
- ``requisitionId``
- ``status`` Available values: OPEN, EXPIRED, ALL.
Defaults to OPEN if no value is specified.
Sample Query:
- companyName =
"projects/foo/tenants/bar/companies/baz"
- companyName =
"projects/foo/tenants/bar/companies/baz" AND
requisitionId = "req-1"
- companyName =
"projects/foo/tenants/bar/companies/baz" AND status =
"EXPIRED".
This corresponds to the ``filter`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.pagers.ListJobsPager:
List jobs response.
Iterating over this object will yield
results and resolve additional pages
automatically. | google/cloud/talent_v4beta1/services/job_service/client.py | list_jobs | busunkim96/python-talent | python | def list_jobs(self, request: job_service.ListJobsRequest=None, *, parent: str=None, filter: str=None, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.ListJobsPager:
has_flattened_params = any([parent, filter])
if ((request is not None) and has_flattened_params):
raise ValueError('If the `request` argument is set, then none of the individual field arguments should be set.')
if (not isinstance(request, job_service.ListJobsRequest)):
request = job_service.ListJobsRequest(request)
if (parent is not None):
request.parent = parent
if (filter is not None):
request.filter = filter
rpc = self._transport._wrapped_methods[self._transport.list_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.ListJobsPager(method=rpc, request=request, response=response, metadata=metadata)
return response |
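list_jobs returns a pager that fetches further pages lazily; a sketch with a hypothetical company filter:

pager = client.list_jobs(
    parent=parent,
    filter='companyName = "projects/my-project/tenants/my-tenant/companies/my-company"',
)
for job in pager:
    print(job.requisition_id, job.title)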
def search_jobs(self, request: job_service.SearchJobsRequest=None, *, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.SearchJobsPager:
'Searches for jobs using the provided\n [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest].\n\n This call constrains the\n [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs\n present in the database, and only returns jobs that the caller\n has permission to search against.\n\n Args:\n request (:class:`~.job_service.SearchJobsRequest`):\n The request object. The Request body of the `SearchJobs`\n call.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.pagers.SearchJobsPager:\n Response for SearchJob method.\n Iterating over this object will yield\n results and resolve additional pages\n automatically.\n\n '
if (not isinstance(request, job_service.SearchJobsRequest)):
request = job_service.SearchJobsRequest(request)
rpc = self._transport._wrapped_methods[self._transport.search_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.SearchJobsPager(method=rpc, request=request, response=response, metadata=metadata)
return response | -3,449,814,602,899,575,300 | Searches for jobs using the provided
[SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest].
This call constrains the
[visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs
present in the database, and only returns jobs that the caller
has permission to search against.
Args:
request (:class:`~.job_service.SearchJobsRequest`):
The request object. The Request body of the `SearchJobs`
call.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.pagers.SearchJobsPager:
Response for SearchJob method.
Iterating over this object will yield
results and resolve additional pages
automatically. | google/cloud/talent_v4beta1/services/job_service/client.py | search_jobs | busunkim96/python-talent | python | def search_jobs(self, request: job_service.SearchJobsRequest=None, *, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.SearchJobsPager:
if (not isinstance(request, job_service.SearchJobsRequest)):
request = job_service.SearchJobsRequest(request)
rpc = self._transport._wrapped_methods[self._transport.search_jobs]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.SearchJobsPager(method=rpc, request=request, response=response, metadata=metadata)
return response |
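search_jobs has no flattened fields, so the request must be built explicitly; RequestMetadata is required by the API. A sketch in which the domain, session, and user identifiers are hypothetical, assuming the pager yields MatchingJob entries:

request = talent_v4beta1.SearchJobsRequest(
    parent=parent,
    request_metadata=talent_v4beta1.RequestMetadata(
        domain="example.com", session_id="s-1", user_id="u-1",
    ),
    job_query=talent_v4beta1.JobQuery(query="software engineer"),
)
for matching_job in client.search_jobs(request=request):
    print(matching_job.job.title)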
def search_jobs_for_alert(self, request: job_service.SearchJobsRequest=None, *, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.SearchJobsForAlertPager:
'Searches for jobs using the provided\n [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest].\n\n This API call is intended for the use case of targeting passive\n job seekers (for example, job seekers who have signed up to\n receive email alerts about potential job opportunities), and has\n different algorithmic adjustments that are targeted to passive\n job seekers.\n\n This call constrains the\n [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs\n present in the database, and only returns jobs the caller has\n permission to search against.\n\n Args:\n request (:class:`~.job_service.SearchJobsRequest`):\n The request object. The Request body of the `SearchJobs`\n call.\n\n retry (google.api_core.retry.Retry): Designation of what errors, if any,\n should be retried.\n timeout (float): The timeout for this request.\n metadata (Sequence[Tuple[str, str]]): Strings which should be\n sent along with the request as metadata.\n\n Returns:\n ~.pagers.SearchJobsForAlertPager:\n Response for SearchJob method.\n Iterating over this object will yield\n results and resolve additional pages\n automatically.\n\n '
if (not isinstance(request, job_service.SearchJobsRequest)):
request = job_service.SearchJobsRequest(request)
rpc = self._transport._wrapped_methods[self._transport.search_jobs_for_alert]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.SearchJobsForAlertPager(method=rpc, request=request, response=response, metadata=metadata)
return response | -3,885,907,973,043,638,300 | Searches for jobs using the provided
[SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest].
This API call is intended for the use case of targeting passive
job seekers (for example, job seekers who have signed up to
receive email alerts about potential job opportunities), and has
different algorithmic adjustments that are targeted to passive
job seekers.
This call constrains the
[visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs
present in the database, and only returns jobs the caller has
permission to search against.
Args:
request (:class:`~.job_service.SearchJobsRequest`):
The request object. The Request body of the `SearchJobs`
call.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.pagers.SearchJobsForAlertPager:
Response for SearchJob method.
Iterating over this object will yield
results and resolve additional pages
automatically. | google/cloud/talent_v4beta1/services/job_service/client.py | search_jobs_for_alert | busunkim96/python-talent | python | def search_jobs_for_alert(self, request: job_service.SearchJobsRequest=None, *, retry: retries.Retry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[(str, str)]]=()) -> pagers.SearchJobsForAlertPager:
if (not isinstance(request, job_service.SearchJobsRequest)):
request = job_service.SearchJobsRequest(request)
rpc = self._transport._wrapped_methods[self._transport.search_jobs_for_alert]
metadata = (tuple(metadata) + (gapic_v1.routing_header.to_grpc_metadata((('parent', request.parent),)),))
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
response = pagers.SearchJobsForAlertPager(method=rpc, request=request, response=response, metadata=metadata)
return response |
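search_jobs_for_alert accepts the same SearchJobsRequest; a sketch reusing the request built in the previous example:

for matching_job in client.search_jobs_for_alert(request=request):
    print(matching_job.job.name)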
def replace_material_pyleecan_obj(obj, mat1, mat2, comp_name_path=True):
'\n replace first material by the second in the object\n\n Parameters\n ----------\n obj: Pyleecan object\n mat1: Material\n material to replace\n mat2: Material\n new material\n comp_name_path: bool\n replace strictly mat1 or replace materials without comparing mat1.path and mat1.name\n\n Returns\n -------\n is_change: bool\n True if a material has been replaced\n '
is_change = False
obj_dict = obj.as_dict()
if comp_name_path:
for (key, val) in obj_dict.items():
if (isinstance(getattr(obj, key), Material) and (getattr(obj, key) == mat1)):
setattr(obj, key, mat2)
is_change = True
elif isinstance(val, dict):
is_change_recurs = replace_material_pyleecan_obj(getattr(obj, key), mat1, mat2, comp_name_path)
if (not is_change):
is_change = is_change_recurs
else:
for (key, val) in obj_dict.items():
if (isinstance(getattr(obj, key), Material) and compare_material(getattr(obj, key), mat1)):
setattr(obj, key, mat2)
is_change = True
elif isinstance(val, dict):
is_change_recurs = replace_material_pyleecan_obj(getattr(obj, key), mat1, mat2, comp_name_path)
if (not is_change):
is_change = is_change_recurs
return is_change | -4,199,062,306,853,896,700 | replace first material by the second in the object
Parameters
----------
obj: Pyleecan object
mat1: Material
material to replace
mat2: Material
new material
comp_name_path: bool
if True, replace only materials strictly equal to mat1; if False, compare materials without checking mat1.path and mat1.name
Returns
-------
is_change: bool
True if a material has been replaced | pyleecan/Functions/Material/replace_material_pyleecan_obj.py | replace_material_pyleecan_obj | BonneelP/pyleecan | python | def replace_material_pyleecan_obj(obj, mat1, mat2, comp_name_path=True):
is_change = False
obj_dict = obj.as_dict()
if comp_name_path:
for (key, val) in obj_dict.items():
if (isinstance(getattr(obj, key), Material) and (getattr(obj, key) == mat1)):
setattr(obj, key, mat2)
is_change = True
elif isinstance(val, dict):
is_change_recurs = replace_material_pyleecan_obj(getattr(obj, key), mat1, mat2, comp_name_path)
if (not is_change):
is_change = is_change_recurs
else:
for (key, val) in obj_dict.items():
if (isinstance(getattr(obj, key), Material) and compare_material(getattr(obj, key), mat1)):
setattr(obj, key, mat2)
is_change = True
elif isinstance(val, dict):
is_change_recurs = replace_material_pyleecan_obj(getattr(obj, key), mat1, mat2, comp_name_path)
if (not is_change):
is_change = is_change_recurs
return is_change |
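A hedged sketch for replace_material_pyleecan_obj; `machine` stands for a previously loaded pyleecan object, and the attribute path holding the old material is hypothetical:

from pyleecan.Classes.Material import Material

old_mat = machine.stator.mat_type  # hypothetical location of the material to swap
new_mat = Material(name="M330-35A")
changed = replace_material_pyleecan_obj(machine, old_mat, new_mat)
print(changed)  # True if at least one material was replaced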
def pad_gt(gt_labels, gt_bboxes, gt_scores=None):
' Pad 0 in gt_labels and gt_bboxes.\n Args:\n gt_labels (Tensor|List[Tensor], int64): Label of gt_bboxes,\n shape is [B, n, 1] or [[n_1, 1], [n_2, 1], ...], here n = sum(n_i)\n gt_bboxes (Tensor|List[Tensor], float32): Ground truth bboxes,\n shape is [B, n, 4] or [[n_1, 4], [n_2, 4], ...], here n = sum(n_i)\n gt_scores (Tensor|List[Tensor]|None, float32): Score of gt_bboxes,\n shape is [B, n, 1] or [[n_1, 4], [n_2, 4], ...], here n = sum(n_i)\n Returns:\n pad_gt_labels (Tensor, int64): shape[B, n, 1]\n pad_gt_bboxes (Tensor, float32): shape[B, n, 4]\n pad_gt_scores (Tensor, float32): shape[B, n, 1]\n pad_gt_mask (Tensor, float32): shape[B, n, 1], 1 means bbox, 0 means no bbox\n '
if (isinstance(gt_labels, paddle.Tensor) and isinstance(gt_bboxes, paddle.Tensor)):
assert ((gt_labels.ndim == gt_bboxes.ndim) and (gt_bboxes.ndim == 3))
pad_gt_mask = (gt_bboxes.sum(axis=(- 1), keepdim=True) > 0).astype(gt_bboxes.dtype)
if (gt_scores is None):
gt_scores = pad_gt_mask.clone()
assert (gt_labels.ndim == gt_scores.ndim)
return (gt_labels, gt_bboxes, gt_scores, pad_gt_mask)
elif (isinstance(gt_labels, list) and isinstance(gt_bboxes, list)):
assert (len(gt_labels) == len(gt_bboxes)), 'The number of `gt_labels` and `gt_bboxes` is not equal. '
num_max_boxes = max([len(a) for a in gt_bboxes])
batch_size = len(gt_bboxes)
pad_gt_labels = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_labels[0].dtype)
pad_gt_bboxes = paddle.zeros([batch_size, num_max_boxes, 4], dtype=gt_bboxes[0].dtype)
pad_gt_scores = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_bboxes[0].dtype)
pad_gt_mask = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_bboxes[0].dtype)
for (i, (label, bbox)) in enumerate(zip(gt_labels, gt_bboxes)):
if ((len(label) > 0) and (len(bbox) > 0)):
pad_gt_labels[i, :len(label)] = label
pad_gt_bboxes[i, :len(bbox)] = bbox
pad_gt_mask[i, :len(bbox)] = 1.0
if (gt_scores is not None):
pad_gt_scores[i, :len(gt_scores[i])] = gt_scores[i]
if (gt_scores is None):
pad_gt_scores = pad_gt_mask.clone()
return (pad_gt_labels, pad_gt_bboxes, pad_gt_scores, pad_gt_mask)
else:
raise ValueError('The input `gt_labels` or `gt_bboxes` is invalid! ') | 6,308,231,961,735,706,000 | Pad 0 in gt_labels and gt_bboxes.
Args:
gt_labels (Tensor|List[Tensor], int64): Label of gt_bboxes,
shape is [B, n, 1] or [[n_1, 1], [n_2, 1], ...], here n = sum(n_i)
gt_bboxes (Tensor|List[Tensor], float32): Ground truth bboxes,
shape is [B, n, 4] or [[n_1, 4], [n_2, 4], ...], here n = sum(n_i)
gt_scores (Tensor|List[Tensor]|None, float32): Score of gt_bboxes,
            shape is [B, n, 1] or [[n_1, 1], [n_2, 1], ...], here n = sum(n_i)
Returns:
pad_gt_labels (Tensor, int64): shape[B, n, 1]
pad_gt_bboxes (Tensor, float32): shape[B, n, 4]
pad_gt_scores (Tensor, float32): shape[B, n, 1]
pad_gt_mask (Tensor, float32): shape[B, n, 1], 1 means bbox, 0 means no bbox | paddlex/ppdet/modeling/assigners/utils.py | pad_gt | 17729703508/PaddleX | python | def pad_gt(gt_labels, gt_bboxes, gt_scores=None):
' Pad 0 in gt_labels and gt_bboxes.\n Args:\n gt_labels (Tensor|List[Tensor], int64): Label of gt_bboxes,\n shape is [B, n, 1] or [[n_1, 1], [n_2, 1], ...], here n = sum(n_i)\n gt_bboxes (Tensor|List[Tensor], float32): Ground truth bboxes,\n shape is [B, n, 4] or [[n_1, 4], [n_2, 4], ...], here n = sum(n_i)\n gt_scores (Tensor|List[Tensor]|None, float32): Score of gt_bboxes,\n shape is [B, n, 1] or [[n_1, 4], [n_2, 4], ...], here n = sum(n_i)\n Returns:\n pad_gt_labels (Tensor, int64): shape[B, n, 1]\n pad_gt_bboxes (Tensor, float32): shape[B, n, 4]\n pad_gt_scores (Tensor, float32): shape[B, n, 1]\n pad_gt_mask (Tensor, float32): shape[B, n, 1], 1 means bbox, 0 means no bbox\n '
if (isinstance(gt_labels, paddle.Tensor) and isinstance(gt_bboxes, paddle.Tensor)):
assert ((gt_labels.ndim == gt_bboxes.ndim) and (gt_bboxes.ndim == 3))
pad_gt_mask = (gt_bboxes.sum(axis=(- 1), keepdim=True) > 0).astype(gt_bboxes.dtype)
if (gt_scores is None):
gt_scores = pad_gt_mask.clone()
assert (gt_labels.ndim == gt_scores.ndim)
return (gt_labels, gt_bboxes, gt_scores, pad_gt_mask)
elif (isinstance(gt_labels, list) and isinstance(gt_bboxes, list)):
assert (len(gt_labels) == len(gt_bboxes)), 'The number of `gt_labels` and `gt_bboxes` is not equal. '
num_max_boxes = max([len(a) for a in gt_bboxes])
batch_size = len(gt_bboxes)
pad_gt_labels = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_labels[0].dtype)
pad_gt_bboxes = paddle.zeros([batch_size, num_max_boxes, 4], dtype=gt_bboxes[0].dtype)
pad_gt_scores = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_bboxes[0].dtype)
pad_gt_mask = paddle.zeros([batch_size, num_max_boxes, 1], dtype=gt_bboxes[0].dtype)
for (i, (label, bbox)) in enumerate(zip(gt_labels, gt_bboxes)):
if ((len(label) > 0) and (len(bbox) > 0)):
pad_gt_labels[i, :len(label)] = label
pad_gt_bboxes[i, :len(bbox)] = bbox
pad_gt_mask[i, :len(bbox)] = 1.0
if (gt_scores is not None):
pad_gt_scores[i, :len(gt_scores[i])] = gt_scores[i]
if (gt_scores is None):
pad_gt_scores = pad_gt_mask.clone()
return (pad_gt_labels, pad_gt_bboxes, pad_gt_scores, pad_gt_mask)
else:
raise ValueError('The input `gt_labels` or `gt_bboxes` is invalid! ') |
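
A hedged usage sketch for the list-input path of pad_gt above, assuming a working PaddlePaddle install; the box counts are invented for illustration:

import paddle

gt_labels = [paddle.to_tensor([[1], [3]], dtype='int64'),   # image 0: 2 boxes
             paddle.to_tensor([[2]], dtype='int64')]        # image 1: 1 box
gt_bboxes = [paddle.rand([2, 4]), paddle.rand([1, 4])]
labels, bboxes, scores, mask = pad_gt(gt_labels, gt_bboxes)
print(labels.shape, bboxes.shape, mask.shape)   # [2, 2, 1] [2, 2, 4] [2, 2, 1]
print(mask.squeeze(-1))                         # [[1., 1.], [1., 0.]]: last slot of image 1 is padding
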
def gather_topk_anchors(metrics, topk, largest=True, topk_mask=None, eps=1e-09):
'\n Args:\n metrics (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n topk (int): The number of top elements to look for along the axis.\n largest (bool) : largest is a flag, if set to true,\n algorithm will sort by descending order, otherwise sort by\n ascending order. Default: True\n topk_mask (Tensor, bool|None): shape[B, n, topk], ignore bbox mask,\n Default: None\n eps (float): Default: 1e-9\n Returns:\n is_in_topk (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_anchors = metrics.shape[(- 1)]
(topk_metrics, topk_idxs) = paddle.topk(metrics, topk, axis=(- 1), largest=largest)
if (topk_mask is None):
topk_mask = (topk_metrics.max(axis=(- 1), keepdim=True) > eps).tile([1, 1, topk])
topk_idxs = paddle.where(topk_mask, topk_idxs, paddle.zeros_like(topk_idxs))
is_in_topk = F.one_hot(topk_idxs, num_anchors).sum(axis=(- 2))
is_in_topk = paddle.where((is_in_topk > 1), paddle.zeros_like(is_in_topk), is_in_topk)
return is_in_topk.astype(metrics.dtype) | 621,021,089,504,764,000 | Args:
metrics (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors
topk (int): The number of top elements to look for along the axis.
largest (bool) : largest is a flag, if set to true,
algorithm will sort by descending order, otherwise sort by
ascending order. Default: True
topk_mask (Tensor, bool|None): shape[B, n, topk], ignore bbox mask,
Default: None
eps (float): Default: 1e-9
Returns:
is_in_topk (Tensor, float32): shape[B, n, L], value=1. means selected | paddlex/ppdet/modeling/assigners/utils.py | gather_topk_anchors | 17729703508/PaddleX | python | def gather_topk_anchors(metrics, topk, largest=True, topk_mask=None, eps=1e-09):
'\n Args:\n metrics (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n topk (int): The number of top elements to look for along the axis.\n largest (bool) : largest is a flag, if set to true,\n algorithm will sort by descending order, otherwise sort by\n ascending order. Default: True\n topk_mask (Tensor, bool|None): shape[B, n, topk], ignore bbox mask,\n Default: None\n eps (float): Default: 1e-9\n Returns:\n is_in_topk (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_anchors = metrics.shape[(- 1)]
(topk_metrics, topk_idxs) = paddle.topk(metrics, topk, axis=(- 1), largest=largest)
if (topk_mask is None):
topk_mask = (topk_metrics.max(axis=(- 1), keepdim=True) > eps).tile([1, 1, topk])
topk_idxs = paddle.where(topk_mask, topk_idxs, paddle.zeros_like(topk_idxs))
is_in_topk = F.one_hot(topk_idxs, num_anchors).sum(axis=(- 2))
is_in_topk = paddle.where((is_in_topk > 1), paddle.zeros_like(is_in_topk), is_in_topk)
return is_in_topk.astype(metrics.dtype) |
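
A small sketch of gather_topk_anchors on random metrics (PaddlePaddle assumed installed; shapes invented):

import paddle

metrics = paddle.rand([2, 3, 8])        # B=2 images, n=3 gts, L=8 anchors
is_in_topk = gather_topk_anchors(metrics, topk=4)
print(is_in_topk.shape)                 # [2, 3, 8]
print(is_in_topk.sum(axis=-1))          # 4.0 per gt: exactly topk anchors kept
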
def check_points_inside_bboxes(points, bboxes, eps=1e-09):
'\n Args:\n points (Tensor, float32): shape[L, 2], "xy" format, L: num_anchors\n bboxes (Tensor, float32): shape[B, n, 4], "xmin, ymin, xmax, ymax" format\n eps (float): Default: 1e-9\n Returns:\n is_in_bboxes (Tensor, float32): shape[B, n, L], value=1. means selected\n '
points = points.unsqueeze([0, 1])
(x, y) = points.chunk(2, axis=(- 1))
(xmin, ymin, xmax, ymax) = bboxes.unsqueeze(2).chunk(4, axis=(- 1))
l = (x - xmin)
t = (y - ymin)
r = (xmax - x)
b = (ymax - y)
bbox_ltrb = paddle.concat([l, t, r, b], axis=(- 1))
return (bbox_ltrb.min(axis=(- 1)) > eps).astype(bboxes.dtype) | 7,214,536,391,338,231,000 | Args:
points (Tensor, float32): shape[L, 2], "xy" format, L: num_anchors
bboxes (Tensor, float32): shape[B, n, 4], "xmin, ymin, xmax, ymax" format
eps (float): Default: 1e-9
Returns:
is_in_bboxes (Tensor, float32): shape[B, n, L], value=1. means selected | paddlex/ppdet/modeling/assigners/utils.py | check_points_inside_bboxes | 17729703508/PaddleX | python | def check_points_inside_bboxes(points, bboxes, eps=1e-09):
'\n Args:\n points (Tensor, float32): shape[L, 2], "xy" format, L: num_anchors\n bboxes (Tensor, float32): shape[B, n, 4], "xmin, ymin, xmax, ymax" format\n eps (float): Default: 1e-9\n Returns:\n is_in_bboxes (Tensor, float32): shape[B, n, L], value=1. means selected\n '
points = points.unsqueeze([0, 1])
(x, y) = points.chunk(2, axis=(- 1))
(xmin, ymin, xmax, ymax) = bboxes.unsqueeze(2).chunk(4, axis=(- 1))
l = (x - xmin)
t = (y - ymin)
r = (xmax - x)
b = (ymax - y)
bbox_ltrb = paddle.concat([l, t, r, b], axis=(- 1))
return (bbox_ltrb.min(axis=(- 1)) > eps).astype(bboxes.dtype) |
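
A worked example for check_points_inside_bboxes: of two candidate points, only the first falls inside the unit box (shapes invented):

import paddle

points = paddle.to_tensor([[0.5, 0.5], [5.0, 5.0]])   # L=2 anchor centers
bboxes = paddle.to_tensor([[[0.0, 0.0, 1.0, 1.0]]])   # B=1, n=1 box
print(check_points_inside_bboxes(points, bboxes))     # [[[1., 0.]]]
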
def compute_max_iou_anchor(ious):
'\n For each anchor, find the GT with the largest IOU.\n Args:\n ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n Returns:\n is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_max_boxes = ious.shape[(- 2)]
max_iou_index = ious.argmax(axis=(- 2))
is_max_iou = F.one_hot(max_iou_index, num_max_boxes).transpose([0, 2, 1])
return is_max_iou.astype(ious.dtype) | 2,584,670,708,681,284,000 | For each anchor, find the GT with the largest IOU.
Args:
ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors
Returns:
is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected | paddlex/ppdet/modeling/assigners/utils.py | compute_max_iou_anchor | 17729703508/PaddleX | python | def compute_max_iou_anchor(ious):
'\n For each anchor, find the GT with the largest IOU.\n Args:\n ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n Returns:\n is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_max_boxes = ious.shape[(- 2)]
max_iou_index = ious.argmax(axis=(- 2))
is_max_iou = F.one_hot(max_iou_index, num_max_boxes).transpose([0, 2, 1])
return is_max_iou.astype(ious.dtype) |
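
A quick sketch of the anchor-side tie-break on random IOUs (PaddlePaddle assumed installed):

import paddle

ious = paddle.rand([2, 3, 8])           # B=2, n=3 gts, L=8 anchors
is_max = compute_max_iou_anchor(ious)
print(is_max.sum(axis=-2))              # all ones: each anchor keeps exactly one gt
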
def compute_max_iou_gt(ious):
'\n For each GT, find the anchor with the largest IOU.\n Args:\n ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n Returns:\n is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_anchors = ious.shape[(- 1)]
max_iou_index = ious.argmax(axis=(- 1))
is_max_iou = F.one_hot(max_iou_index, num_anchors)
return is_max_iou.astype(ious.dtype) | -6,144,532,771,873,281,000 | For each GT, find the anchor with the largest IOU.
Args:
ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors
Returns:
is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected | paddlex/ppdet/modeling/assigners/utils.py | compute_max_iou_gt | 17729703508/PaddleX | python | def compute_max_iou_gt(ious):
'\n For each GT, find the anchor with the largest IOU.\n Args:\n ious (Tensor, float32): shape[B, n, L], n: num_gts, L: num_anchors\n Returns:\n is_max_iou (Tensor, float32): shape[B, n, L], value=1. means selected\n '
num_anchors = ious.shape[(- 1)]
max_iou_index = ious.argmax(axis=(- 1))
is_max_iou = F.one_hot(max_iou_index, num_anchors)
return is_max_iou.astype(ious.dtype) |
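
The GT-side counterpart is symmetric; continuing the sketch above:

is_max_gt = compute_max_iou_gt(ious)    # reuses `ious` from the previous sketch
print(is_max_gt.sum(axis=-1))           # all ones: each gt keeps exactly one anchor
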
def generate_anchors_for_grid_cell(feats, fpn_strides, grid_cell_size=5.0, grid_cell_offset=0.5):
'\n Like ATSS, generate anchors based on grid size.\n Args:\n feats (List[Tensor]): shape[s, (b, c, h, w)]\n fpn_strides (tuple|list): shape[s], stride for each scale feature\n grid_cell_size (float): anchor size\n grid_cell_offset (float): The range is between 0 and 1.\n Returns:\n anchors (List[Tensor]): shape[s, (l, 4)]\n num_anchors_list (List[int]): shape[s]\n stride_tensor_list (List[Tensor]): shape[s, (l, 1)]\n '
assert (len(feats) == len(fpn_strides))
anchors = []
num_anchors_list = []
stride_tensor_list = []
for (feat, stride) in zip(feats, fpn_strides):
(_, _, h, w) = feat.shape
cell_half_size = ((grid_cell_size * stride) * 0.5)
shift_x = ((paddle.arange(end=w) + grid_cell_offset) * stride)
shift_y = ((paddle.arange(end=h) + grid_cell_offset) * stride)
(shift_y, shift_x) = paddle.meshgrid(shift_y, shift_x)
anchor = paddle.stack([(shift_x - cell_half_size), (shift_y - cell_half_size), (shift_x + cell_half_size), (shift_y + cell_half_size)], axis=(- 1)).astype(feat.dtype)
anchors.append(anchor.reshape([(- 1), 4]))
num_anchors_list.append(len(anchors[(- 1)]))
stride_tensor_list.append(paddle.full([num_anchors_list[(- 1)], 1], stride))
return (anchors, num_anchors_list, stride_tensor_list) | 6,568,202,103,311,855,000 | Like ATSS, generate anchors based on grid size.
Args:
feats (List[Tensor]): shape[s, (b, c, h, w)]
fpn_strides (tuple|list): shape[s], stride for each scale feature
grid_cell_size (float): anchor size
grid_cell_offset (float): The range is between 0 and 1.
Returns:
anchors (List[Tensor]): shape[s, (l, 4)]
num_anchors_list (List[int]): shape[s]
stride_tensor_list (List[Tensor]): shape[s, (l, 1)] | paddlex/ppdet/modeling/assigners/utils.py | generate_anchors_for_grid_cell | 17729703508/PaddleX | python | def generate_anchors_for_grid_cell(feats, fpn_strides, grid_cell_size=5.0, grid_cell_offset=0.5):
'\n Like ATSS, generate anchors based on grid size.\n Args:\n feats (List[Tensor]): shape[s, (b, c, h, w)]\n fpn_strides (tuple|list): shape[s], stride for each scale feature\n grid_cell_size (float): anchor size\n grid_cell_offset (float): The range is between 0 and 1.\n Returns:\n anchors (List[Tensor]): shape[s, (l, 4)]\n num_anchors_list (List[int]): shape[s]\n stride_tensor_list (List[Tensor]): shape[s, (l, 1)]\n '
assert (len(feats) == len(fpn_strides))
anchors = []
num_anchors_list = []
stride_tensor_list = []
for (feat, stride) in zip(feats, fpn_strides):
(_, _, h, w) = feat.shape
cell_half_size = ((grid_cell_size * stride) * 0.5)
shift_x = ((paddle.arange(end=w) + grid_cell_offset) * stride)
shift_y = ((paddle.arange(end=h) + grid_cell_offset) * stride)
(shift_y, shift_x) = paddle.meshgrid(shift_y, shift_x)
anchor = paddle.stack([(shift_x - cell_half_size), (shift_y - cell_half_size), (shift_x + cell_half_size), (shift_y + cell_half_size)], axis=(- 1)).astype(feat.dtype)
anchors.append(anchor.reshape([(- 1), 4]))
num_anchors_list.append(len(anchors[(- 1)]))
stride_tensor_list.append(paddle.full([num_anchors_list[(- 1)], 1], stride))
return (anchors, num_anchors_list, stride_tensor_list) |
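
A hedged sketch with two dummy FPN levels; batch size, channel count, and strides are arbitrary here:

import paddle

feats = [paddle.rand([1, 32, 8, 8]), paddle.rand([1, 32, 4, 4])]
anchors, nums, strides = generate_anchors_for_grid_cell(feats, fpn_strides=[8, 16])
print(nums)                                 # [64, 16]
print(anchors[0].shape, strides[0].shape)   # [64, 4] [64, 1]
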
def suite():
'Return a test suite consisting of all the test cases in the module.'
theSuite = common.unittest.TestSuite()
theSuite.addTest(common.unittest.makeSuite(GarbageTestCase))
return theSuite | 7,385,886,178,908,180,000 | Return a test suite consisting of all the test cases in the module. | tables/tests/test_garbage.py | suite | Daybreak2019/PyTables | python | def suite():
theSuite = common.unittest.TestSuite()
theSuite.addTest(common.unittest.makeSuite(GarbageTestCase))
return theSuite |
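
A sketch of driving the suite with the plain stdlib runner (PyTables' own test driver does extra setup; this is just the minimal path):

import unittest

unittest.TextTestRunner(verbosity=2).run(suite())
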
def test00(self):
'Checking for uncollectable garbage.'
garbageLen = len(gc.garbage)
if (garbageLen == 0):
return
if common.verbose:
classCount = {}
for obj in gc.garbage:
objClass = obj.__class__.__name__
if (objClass in classCount):
classCount[objClass] += 1
else:
classCount[objClass] = 1
incidence = [('``%s``: %d' % (cls, cnt)) for (cls, cnt) in classCount.items()]
print('Class incidence:', ', '.join(incidence))
self.fail(('Possible leak: %d uncollected objects.' % garbageLen)) | 5,119,149,170,739,987,000 | Checking for uncollectable garbage. | tables/tests/test_garbage.py | test00 | Daybreak2019/PyTables | python | def test00(self):
garbageLen = len(gc.garbage)
if (garbageLen == 0):
return
if common.verbose:
classCount = {}
for obj in gc.garbage:
objClass = obj.__class__.__name__
if (objClass in classCount):
classCount[objClass] += 1
else:
classCount[objClass] = 1
incidence = [('``%s``: %d' % (cls, cnt)) for (cls, cnt) in classCount.items()]
print('Class incidence:', ', '.join(incidence))
self.fail(('Possible leak: %d uncollected objects.' % garbageLen)) |
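
A standalone sketch of the same gc.garbage inspection: with DEBUG_SAVEALL enabled, collected cycles are retained in gc.garbage instead of being freed, so the counter comes out non-zero:

import gc

gc.set_debug(gc.DEBUG_SAVEALL)

class Node:
    pass

a, b = Node(), Node()
a.other, b.other = b, a                 # build a reference cycle
del a, b
gc.collect()
print(len(gc.garbage), 'objects retained')   # > 0 under DEBUG_SAVEALL
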
def _invalid(message, location=None):
' A helper for validating policies. '
if (location is None):
raise PolicyValidationError(f'{message}.')
raise PolicyValidationError(f'{message} in {location}.') | 2,284,822,228,579,378,200 | A helper for validating policies. | starbelly/policy.py | _invalid | HyperionGray/starbelly | python | def _invalid(message, location=None):
' '
if (location is None):
raise PolicyValidationError(f'{message}.')
raise PolicyValidationError(f'{message} in {location}.') |
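
Call shapes for the helper above; PolicyValidationError is starbelly's own exception class, defined elsewhere in the project:

try:
    _invalid('Pattern is required', 'proxy rule #1')
except Exception as exc:                # PolicyValidationError in starbelly
    print(exc)                          # Pattern is required in proxy rule #1.
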
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert policy from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.Policy\n '
if ('id' in doc):
pb.policy_id = UUID(doc['id']).bytes
pb.name = doc['name']
pb.created_at = doc['created_at'].isoformat()
pb.updated_at = doc['updated_at'].isoformat()
PolicyAuthentication.convert_doc_to_pb(doc.get('authentication', dict()), pb.authentication)
if (doc.get('captcha_solver_id') is not None):
pb.captcha_solver_id = UUID(doc['captcha_solver_id']).bytes
PolicyLimits.convert_doc_to_pb(doc.get('limits', dict()), pb.limits)
PolicyMimeTypeRules.convert_doc_to_pb(doc.get('mime_type_rules', list()), pb.mime_type_rules)
PolicyProxyRules.convert_doc_to_pb(doc.get('proxy_rules', list()), pb.proxy_rules)
PolicyRobotsTxt.convert_doc_to_pb(doc.get('robots_txt', dict()), pb.robots_txt)
PolicyUrlNormalization.convert_doc_to_pb(doc.get('url_normalization', dict()), pb.url_normalization)
PolicyUrlRules.convert_doc_to_pb(doc.get('url_rules', list()), pb.url_rules)
PolicyUserAgents.convert_doc_to_pb(doc.get('user_agents', list()), pb.user_agents) | 2,858,774,557,939,773,400 | Convert policy from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.Policy | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert policy from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.Policy\n '
if ('id' in doc):
pb.policy_id = UUID(doc['id']).bytes
pb.name = doc['name']
pb.created_at = doc['created_at'].isoformat()
pb.updated_at = doc['updated_at'].isoformat()
PolicyAuthentication.convert_doc_to_pb(doc.get('authentication', dict()), pb.authentication)
if (doc.get('captcha_solver_id') is not None):
pb.captcha_solver_id = UUID(doc['captcha_solver_id']).bytes
PolicyLimits.convert_doc_to_pb(doc.get('limits', dict()), pb.limits)
PolicyMimeTypeRules.convert_doc_to_pb(doc.get('mime_type_rules', list()), pb.mime_type_rules)
PolicyProxyRules.convert_doc_to_pb(doc.get('proxy_rules', list()), pb.proxy_rules)
PolicyRobotsTxt.convert_doc_to_pb(doc.get('robots_txt', dict()), pb.robots_txt)
PolicyUrlNormalization.convert_doc_to_pb(doc.get('url_normalization', dict()), pb.url_normalization)
PolicyUrlRules.convert_doc_to_pb(doc.get('url_rules', list()), pb.url_rules)
PolicyUserAgents.convert_doc_to_pb(doc.get('user_agents', list()), pb.user_agents) |
@staticmethod
def convert_pb_to_doc(pb):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.Policy.\n :returns: Database document.\n :rtype: dict\n '
doc = {'name': pb.name, 'authentication': dict(), 'limits': dict(), 'mime_type_rules': list(), 'proxy_rules': list(), 'robots_txt': dict(), 'url_normalization': dict(), 'url_rules': list(), 'user_agents': list()}
if pb.HasField('policy_id'):
doc['id'] = str(UUID(bytes=pb.policy_id))
if pb.HasField('created_at'):
doc['created_at'] = dateutil.parser.parse(pb.created_at)
if pb.HasField('updated_at'):
doc['updated_at'] = dateutil.parser.parse(pb.updated_at)
PolicyAuthentication.convert_pb_to_doc(pb.authentication, doc['authentication'])
if pb.HasField('captcha_solver_id'):
doc['captcha_solver_id'] = str(UUID(bytes=pb.captcha_solver_id))
else:
doc['captcha_solver_id'] = None
PolicyLimits.convert_pb_to_doc(pb.limits, doc['limits'])
PolicyMimeTypeRules.convert_pb_to_doc(pb.mime_type_rules, doc['mime_type_rules'])
PolicyProxyRules.convert_pb_to_doc(pb.proxy_rules, doc['proxy_rules'])
PolicyRobotsTxt.convert_pb_to_doc(pb.robots_txt, doc['robots_txt'])
PolicyUrlNormalization.convert_pb_to_doc(pb.url_normalization, doc['url_normalization'])
PolicyUrlRules.convert_pb_to_doc(pb.url_rules, doc['url_rules'])
PolicyUserAgents.convert_pb_to_doc(pb.user_agents, doc['user_agents'])
return doc | 1,428,939,199,741,155,800 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.Policy.
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.Policy.\n :returns: Database document.\n :rtype: dict\n '
doc = {'name': pb.name, 'authentication': dict(), 'limits': dict(), 'mime_type_rules': list(), 'proxy_rules': list(), 'robots_txt': dict(), 'url_normalization': dict(), 'url_rules': list(), 'user_agents': list()}
if pb.HasField('policy_id'):
doc['id'] = str(UUID(bytes=pb.policy_id))
if pb.HasField('created_at'):
doc['created_at'] = dateutil.parser.parse(pb.created_at)
if pb.HasField('updated_at'):
doc['updated_at'] = dateutil.parser.parse(pb.updated_at)
PolicyAuthentication.convert_pb_to_doc(pb.authentication, doc['authentication'])
if pb.HasField('captcha_solver_id'):
doc['captcha_solver_id'] = str(UUID(bytes=pb.captcha_solver_id))
else:
doc['captcha_solver_id'] = None
PolicyLimits.convert_pb_to_doc(pb.limits, doc['limits'])
PolicyMimeTypeRules.convert_pb_to_doc(pb.mime_type_rules, doc['mime_type_rules'])
PolicyProxyRules.convert_pb_to_doc(pb.proxy_rules, doc['proxy_rules'])
PolicyRobotsTxt.convert_pb_to_doc(pb.robots_txt, doc['robots_txt'])
PolicyUrlNormalization.convert_pb_to_doc(pb.url_normalization, doc['url_normalization'])
PolicyUrlRules.convert_pb_to_doc(pb.url_rules, doc['url_rules'])
PolicyUserAgents.convert_pb_to_doc(pb.user_agents, doc['user_agents'])
return doc |
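
A hedged round-trip sketch for the two converters above. The protobuf class path comes from the docstrings; the 'authentication' and 'robots_txt' sub-documents are the only ones the converters read unconditionally, and 'OBEY' is one of the usage values named later in these records:

from datetime import datetime, timezone
from starbelly.starbelly_pb2 import Policy as PolicyPb   # module named in the docstrings

now = datetime.now(timezone.utc)
doc = {
    'name': 'Example',
    'created_at': now,
    'updated_at': now,
    'authentication': {'enabled': False},
    'robots_txt': {'usage': 'OBEY'},
}
pb = PolicyPb()
Policy.convert_doc_to_pb(doc, pb)
round_trip = Policy.convert_pb_to_doc(pb)
print(round_trip['name'], round_trip['robots_txt'])      # Example {'usage': 'OBEY'}
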
def __init__(self, doc, version, seeds):
'\n Initialize a policy object from its database document.\n\n :param dict doc: A database document.\n :param str version: The version number of Starbelly that created the\n policy.\n :param list seeds: A list of seed URLs, used for computing costs for\n crawled links.\n '
if (doc['name'].strip() == ''):
_invalid('Policy name cannot be blank')
self.authentication = PolicyAuthentication(doc['authentication'])
if ('captcha_solver' in doc):
self.captcha_solver = CaptchaSolver(doc['captcha_solver'])
else:
self.captcha_solver = None
self.limits = PolicyLimits(doc['limits'])
self.mime_type_rules = PolicyMimeTypeRules(doc['mime_type_rules'])
self.proxy_rules = PolicyProxyRules(doc['proxy_rules'])
self.robots_txt = PolicyRobotsTxt(doc['robots_txt'])
self.url_normalization = PolicyUrlNormalization(doc['url_normalization'])
self.url_rules = PolicyUrlRules(doc['url_rules'], seeds)
self.user_agents = PolicyUserAgents(doc['user_agents'], version) | 8,305,825,290,544,075,000 | Initialize a policy object from its database document.
:param dict doc: A database document.
:param str version: The version number of Starbelly that created the
policy.
:param list seeds: A list of seed URLs, used for computing costs for
crawled links. | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, doc, version, seeds):
'\n Initialize a policy object from its database document.\n\n :param dict doc: A database document.\n :param str version: The version number of Starbelly that created the\n policy.\n :param list seeds: A list of seed URLs, used for computing costs for\n crawled links.\n '
        if (doc['name'].strip() == ''):
_invalid('Policy name cannot be blank')
self.authentication = PolicyAuthentication(doc['authentication'])
if ('captcha_solver' in doc):
self.captcha_solver = CaptchaSolver(doc['captcha_solver'])
else:
self.captcha_solver = None
self.limits = PolicyLimits(doc['limits'])
self.mime_type_rules = PolicyMimeTypeRules(doc['mime_type_rules'])
self.proxy_rules = PolicyProxyRules(doc['proxy_rules'])
self.robots_txt = PolicyRobotsTxt(doc['robots_txt'])
self.url_normalization = PolicyUrlNormalization(doc['url_normalization'])
self.url_rules = PolicyUrlRules(doc['url_rules'], seeds)
self.user_agents = PolicyUserAgents(doc['user_agents'], version) |
def replace_mime_type_rules(self, rules):
'\n Return a shallow copy of this policy with new MIME type rules from\n ``doc``.\n\n :param list rules: MIME type rules in database document form.\n :returns: A new policy.\n :rtype: Policy\n '
policy = Policy.__new__(Policy)
policy.authentication = self.authentication
policy.captcha_solver = self.captcha_solver
policy.limits = self.limits
policy.mime_type_rules = PolicyMimeTypeRules(rules)
policy.proxy_rules = self.proxy_rules
policy.robots_txt = self.robots_txt
policy.url_normalization = self.url_normalization
policy.url_rules = self.url_rules
policy.user_agents = self.user_agents
return policy | 4,880,489,040,282,076,000 | Return a shallow copy of this policy with new MIME type rules from
``doc``.
:param list rules: MIME type rules in database document form.
:returns: A new policy.
:rtype: Policy | starbelly/policy.py | replace_mime_type_rules | HyperionGray/starbelly | python | def replace_mime_type_rules(self, rules):
'\n Return a shallow copy of this policy with new MIME type rules from\n ``doc``.\n\n :param list rules: MIME type rules in database document form.\n :returns: A new policy.\n :rtype: Policy\n '
policy = Policy.__new__(Policy)
policy.authentication = self.authentication
policy.captcha_solver = self.captcha_solver
policy.limits = self.limits
policy.mime_type_rules = PolicyMimeTypeRules(rules)
policy.proxy_rules = self.proxy_rules
policy.robots_txt = self.robots_txt
policy.url_normalization = self.url_normalization
policy.url_rules = self.url_rules
policy.user_agents = self.user_agents
return policy |
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyAuthentication\n '
pb.enabled = doc['enabled'] | 3,645,022,146,247,596,000 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyAuthentication | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyAuthentication\n '
pb.enabled = doc['enabled'] |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyAuthentication\n :returns: Database document.\n :rtype: dict\n '
doc['enabled'] = pb.enabled | -4,715,012,268,751,988,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyAuthentication
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyAuthentication\n :returns: Database document.\n :rtype: dict\n '
doc['enabled'] = pb.enabled |
def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
self._enabled = doc.get('enabled', False) | 4,257,061,601,009,672,000 | Initialize from a database document.
:param dict doc: A database document. | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
self._enabled = doc.get('enabled', False) |
def is_enabled(self):
'\n Return True if authentication is enabled.\n\n :rtype: bool\n '
return self._enabled | -6,463,759,857,772,971,000 | Return True if authentication is enabled.
:rtype: bool | starbelly/policy.py | is_enabled | HyperionGray/starbelly | python | def is_enabled(self):
'\n Return True if authentication is enabled.\n\n :rtype: bool\n '
return self._enabled |
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyLimits\n '
if (doc.get('max_cost') is not None):
pb.max_cost = doc['max_cost']
if (doc.get('max_duration') is not None):
pb.max_duration = doc['max_duration']
if (doc.get('max_items') is not None):
pb.max_items = doc['max_items'] | -5,416,938,138,696,050,000 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyLimits | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyLimits\n '
if (doc.get('max_cost') is not None):
pb.max_cost = doc['max_cost']
if (doc.get('max_duration') is not None):
pb.max_duration = doc['max_duration']
if (doc.get('max_items') is not None):
pb.max_items = doc['max_items'] |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyLimits\n :returns: Database document.\n :rtype: dict\n '
doc['max_cost'] = (pb.max_cost if pb.HasField('max_cost') else None)
doc['max_duration'] = (pb.max_duration if pb.HasField('max_duration') else None)
doc['max_items'] = (pb.max_items if pb.HasField('max_items') else None) | 5,201,411,884,564,760,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyLimits
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyLimits\n :returns: Database document.\n :rtype: dict\n '
doc['max_cost'] = (pb.max_cost if pb.HasField('max_cost') else None)
doc['max_duration'] = (pb.max_duration if pb.HasField('max_duration') else None)
doc['max_items'] = (pb.max_items if pb.HasField('max_items') else None) |
def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
self._max_cost = doc.get('max_cost')
self._max_duration = doc.get('max_duration')
self._max_items = doc.get('max_items')
if ((self._max_duration is not None) and (self._max_duration < 0)):
_invalid('Max duration must be ≥0')
if ((self._max_items is not None) and (self._max_items < 0)):
_invalid('Max items must be ≥0') | 2,002,096,448,386,624,500 | Initialize from a database document.
:param dict doc: A database document. | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
self._max_cost = doc.get('max_cost')
self._max_duration = doc.get('max_duration')
self._max_items = doc.get('max_items')
if ((self._max_duration is not None) and (self._max_duration < 0)):
_invalid('Max duration must be ≥0')
if ((self._max_items is not None) and (self._max_items < 0)):
_invalid('Max items must be ≥0') |
@property
def max_duration(self):
'\n The maximum duration that a crawl is allowed to run.\n\n :rtype: float or None\n '
return self._max_duration | -5,000,819,079,214,965,000 | The maximum duration that a crawl is allowed to run.
:rtype: float or None | starbelly/policy.py | max_duration | HyperionGray/starbelly | python | @property
def max_duration(self):
'\n The maximum duration that a crawl is allowed to run.\n\n :rtype: float or None\n '
return self._max_duration |
def met_item_limit(self, items):
"\n Return true if ``items`` is greater than or equal to the policy's max\n item count.\n\n :param int items:\n :rtype: bool\n "
return ((self._max_items is not None) and (items >= self._max_items)) | 2,740,182,675,464,997,400 | Return true if ``items`` is greater than or equal to the policy's max
item count.
:param int items:
:rtype: bool | starbelly/policy.py | met_item_limit | HyperionGray/starbelly | python | def met_item_limit(self, items):
"\n Return true if ``items`` is greater than or equal to the policy's max\n item count.\n\n :param int items:\n :rtype: bool\n "
return ((self._max_items is not None) and (items >= self._max_items)) |
def exceeds_max_cost(self, cost):
"\n Return true if ``cost`` is greater than the policy's max cost.\n\n :param float cost:\n :rtype: bool\n "
return ((self._max_cost is not None) and (cost > self._max_cost)) | -2,219,242,196,667,622,000 | Return true if ``cost`` is greater than the policy's max cost.
:param float cost:
:rtype: bool | starbelly/policy.py | exceeds_max_cost | HyperionGray/starbelly | python | def exceeds_max_cost(self, cost):
"\n Return true if ``cost`` is greater than the policy's max cost.\n\n :param float cost:\n :rtype: bool\n "
return ((self._max_cost is not None) and (cost > self._max_cost)) |
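
A quick sketch of the limit checks, assuming the full PolicyLimits class from starbelly/policy.py is in scope:

limits = PolicyLimits({'max_cost': 10.0, 'max_items': 100})
print(limits.exceeds_max_cost(10.5))    # True: strictly greater than max_cost
print(limits.exceeds_max_cost(10.0))    # False
print(limits.met_item_limit(100))       # True: greater than or equal to max_items
print(limits.max_duration)              # None: no duration cap configured
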
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules\n '
for doc_mime in doc:
pb_mime = pb.add()
if ('pattern' in doc_mime):
pb_mime.pattern = doc_mime['pattern']
if ('match' in doc_mime):
pb_mime.match = MATCH_ENUM.Value(doc_mime['match'])
if ('save' in doc_mime):
pb_mime.save = doc_mime['save'] | -6,404,686,155,729,991,000 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules\n '
for doc_mime in doc:
pb_mime = pb.add()
if ('pattern' in doc_mime):
pb_mime.pattern = doc_mime['pattern']
if ('match' in doc_mime):
pb_mime.match = MATCH_ENUM.Value(doc_mime['match'])
if ('save' in doc_mime):
pb_mime.save = doc_mime['save'] |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules\n :returns: Database document.\n :rtype: dict\n '
for pb_mime in pb:
doc_mime = dict()
if pb_mime.HasField('pattern'):
doc_mime['pattern'] = pb_mime.pattern
if pb_mime.HasField('match'):
doc_mime['match'] = MATCH_ENUM.Name(pb_mime.match)
if pb_mime.HasField('save'):
doc_mime['save'] = pb_mime.save
doc.append(doc_mime) | 5,497,932,197,362,549,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyMimeTypeRules\n :returns: Database document.\n :rtype: dict\n '
for pb_mime in pb:
doc_mime = dict()
if pb_mime.HasField('pattern'):
doc_mime['pattern'] = pb_mime.pattern
if pb_mime.HasField('match'):
doc_mime['match'] = MATCH_ENUM.Name(pb_mime.match)
if pb_mime.HasField('save'):
doc_mime['save'] = pb_mime.save
doc.append(doc_mime) |
def __init__(self, docs):
'\n Initialize from database documents.\n\n :param docs: Database document.\n :type docs: list[dict]\n '
if (not docs):
_invalid('At least one MIME type rule is required')
self._rules = list()
max_index = (len(docs) - 1)
for (index, mime_type_rule) in enumerate(docs):
if (index < max_index):
location = 'MIME type rule #{}'.format((index + 1))
if (mime_type_rule.get('pattern', '').strip() == ''):
_invalid('Pattern is required', location)
if ('save' not in mime_type_rule):
_invalid('Save selector is required', location)
if ('match' not in mime_type_rule):
_invalid('Match selector is required', location)
try:
pattern_re = re.compile(mime_type_rule['pattern'])
except:
_invalid('Invalid regular expression', location)
self._rules.append((pattern_re, mime_type_rule['match'], mime_type_rule['save']))
else:
location = 'last MIME type rule'
if ('save' not in mime_type_rule):
_invalid('Save selector is required', location)
if ('pattern' in mime_type_rule):
_invalid('Pattern is not allowed', location)
if ('match' in mime_type_rule):
_invalid('Match selector is not allowed', location)
self._rules.append((None, None, mime_type_rule['save'])) | 4,335,722,909,998,780,400 | Initialize from database documents.
:param docs: Database document.
:type docs: list[dict] | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, docs):
'\n Initialize from database documents.\n\n :param docs: Database document.\n :type docs: list[dict]\n '
if (not docs):
_invalid('At least one MIME type rule is required')
self._rules = list()
max_index = (len(docs) - 1)
for (index, mime_type_rule) in enumerate(docs):
if (index < max_index):
location = 'MIME type rule #{}'.format((index + 1))
            if (mime_type_rule.get('pattern', '').strip() == ''):
_invalid('Pattern is required', location)
if ('save' not in mime_type_rule):
_invalid('Save selector is required', location)
if ('match' not in mime_type_rule):
_invalid('Match selector is required', location)
try:
pattern_re = re.compile(mime_type_rule['pattern'])
except:
_invalid('Invalid regular expression', location)
self._rules.append((pattern_re, mime_type_rule['match'], mime_type_rule['save']))
else:
location = 'last MIME type rule'
if ('save' not in mime_type_rule):
_invalid('Save selector is required', location)
if ('pattern' in mime_type_rule):
_invalid('Pattern is not allowed', location)
if ('match' in mime_type_rule):
_invalid('Match selector is not allowed', location)
self._rules.append((None, None, mime_type_rule['save'])) |
def should_save(self, mime_type):
'\n Returns True if ``mime_type`` is approved by this policy.\n\n If rules are valid, this method always returns True or False.\n\n :param str mime_type:\n :rtype: bool\n '
should_save = False
for (pattern, match, save) in self._rules:
if (pattern is None):
should_save = save
break
mimecheck = (pattern.search(mime_type) is not None)
if (match == 'DOES_NOT_MATCH'):
mimecheck = (not mimecheck)
if mimecheck:
should_save = save
break
return should_save | -1,038,911,491,049,067,500 | Returns True if ``mime_type`` is approved by this policy.
If rules are valid, this method always returns True or False.
:param str mime_type:
:rtype: bool | starbelly/policy.py | should_save | HyperionGray/starbelly | python | def should_save(self, mime_type):
'\n Returns True if ``mime_type`` is approved by this policy.\n\n If rules are valid, this method always returns True or False.\n\n :param str mime_type:\n :rtype: bool\n '
should_save = False
for (pattern, match, save) in self._rules:
if (pattern is None):
should_save = save
break
mimecheck = (pattern.search(mime_type) is not None)
if (match == 'DOES_NOT_MATCH'):
mimecheck = (not mimecheck)
if mimecheck:
should_save = save
break
return should_save |
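
A sketch of rule evaluation, assuming the full PolicyMimeTypeRules class is in scope; note the required pattern-less catch-all as the final rule:

rules = PolicyMimeTypeRules([
    {'pattern': '^text/', 'match': 'MATCHES', 'save': True},
    {'save': False},                    # final rule: pattern/match not allowed
])
print(rules.should_save('text/html'))   # True
print(rules.should_save('image/png'))   # False: falls through to the catch-all
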
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyProxyRules\n '
for doc_proxy in doc:
pb_proxy = pb.add()
if ('pattern' in doc_proxy):
pb_proxy.pattern = doc_proxy['pattern']
if ('match' in doc_proxy):
pb_proxy.match = MATCH_ENUM.Value(doc_proxy['match'])
if ('proxy_url' in doc_proxy):
pb_proxy.proxy_url = doc_proxy['proxy_url'] | -4,435,190,086,674,675,000 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyProxyRules | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyProxyRules\n '
for doc_proxy in doc:
pb_proxy = pb.add()
if ('pattern' in doc_proxy):
pb_proxy.pattern = doc_proxy['pattern']
if ('match' in doc_proxy):
pb_proxy.match = MATCH_ENUM.Value(doc_proxy['match'])
if ('proxy_url' in doc_proxy):
pb_proxy.proxy_url = doc_proxy['proxy_url'] |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyProxyRules\n :returns: Database document.\n :rtype: dict\n '
for pb_proxy in pb:
doc_proxy = dict()
if pb_proxy.HasField('pattern'):
doc_proxy['pattern'] = pb_proxy.pattern
if pb_proxy.HasField('match'):
doc_proxy['match'] = MATCH_ENUM.Name(pb_proxy.match)
if pb_proxy.HasField('proxy_url'):
doc_proxy['proxy_url'] = pb_proxy.proxy_url
doc.append(doc_proxy) | -3,380,929,956,643,027,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyProxyRules
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyProxyRules\n :returns: Database document.\n :rtype: dict\n '
for pb_proxy in pb:
doc_proxy = dict()
if pb_proxy.HasField('pattern'):
doc_proxy['pattern'] = pb_proxy.pattern
if pb_proxy.HasField('match'):
doc_proxy['match'] = MATCH_ENUM.Name(pb_proxy.match)
if pb_proxy.HasField('proxy_url'):
doc_proxy['proxy_url'] = pb_proxy.proxy_url
doc.append(doc_proxy) |
def __init__(self, docs):
'\n Initialize from database documents.\n\n :param docs: Database document.\n :type docs: list[dict]\n '
self._rules = list()
max_index = (len(docs) - 1)
for (index, proxy_rule) in enumerate(docs):
if (index < max_index):
location = 'proxy rule #{}'.format((index + 1))
if (proxy_rule.get('pattern', '').strip() == ''):
_invalid('Pattern is required', location)
try:
pattern_re = re.compile(proxy_rule['pattern'])
except:
_invalid('Invalid regular expression', location)
try:
match = (proxy_rule['match'] == 'MATCHES')
except KeyError:
_invalid('Match selector is required', location)
proxy_url = proxy_rule.get('proxy_url', '')
if (proxy_url == ''):
_invalid('Proxy URL is required', location)
else:
location = 'last proxy rule'
if ('pattern' in proxy_rule):
_invalid('Pattern is not allowed', location)
if ('match' in proxy_rule):
                    _invalid('Match selector is not allowed', location)
pattern_re = None
match = None
proxy_type = None
proxy_url = proxy_rule.get('proxy_url')
if (proxy_url is None):
proxy_type = None
else:
try:
parsed = URL(proxy_url)
proxy_type = parsed.scheme
if (proxy_type not in self.PROXY_SCHEMES):
raise ValueError()
except:
schemes = ', '.join(self.PROXY_SCHEMES)
_invalid(f'Must have a valid URL with one of the following schemes: {schemes}', location)
self._rules.append((pattern_re, match, proxy_type, proxy_url)) | 578,156,337,105,074,200 | Initialize from database documents.
:param docs: Database document.
:type docs: list[dict] | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, docs):
'\n Initialize from database documents.\n\n :param docs: Database document.\n :type docs: list[dict]\n '
self._rules = list()
max_index = (len(docs) - 1)
for (index, proxy_rule) in enumerate(docs):
if (index < max_index):
location = 'proxy rule #{}'.format((index + 1))
            if (proxy_rule.get('pattern', '').strip() == ''):
_invalid('Pattern is required', location)
try:
pattern_re = re.compile(proxy_rule['pattern'])
except:
_invalid('Invalid regular expression', location)
try:
match = (proxy_rule['match'] == 'MATCHES')
except KeyError:
_invalid('Match selector is required', location)
            proxy_url = proxy_rule.get('proxy_url', '')
            if (proxy_url == ''):
_invalid('Proxy URL is required', location)
else:
location = 'last proxy rule'
if ('pattern' in proxy_rule):
_invalid('Pattern is not allowed', location)
if ('match' in proxy_rule):
_invalid('Pattern is not allowed', location)
pattern_re = None
match = None
proxy_type = None
proxy_url = proxy_rule.get('proxy_url')
if (proxy_url is None):
proxy_type = None
else:
try:
parsed = URL(proxy_url)
proxy_type = parsed.scheme
if (proxy_type not in self.PROXY_SCHEMES):
raise ValueError()
except:
schemes = ', '.join(self.PROXY_SCHEMES)
_invalid(f'Must have a valid URL with one of the following schemes: {schemes}', location)
self._rules.append((pattern_re, match, proxy_type, proxy_url)) |
def get_proxy_url(self, target_url):
'\n Return a proxy (type, URL) tuple associated with ``target_url`` or\n (None, None) if no such proxy is defined.\n\n :param str target_url:\n :rtype: tuple[proxy_type,URL]\n '
proxy = (None, None)
for (pattern, needs_match, proxy_type, proxy_url) in self._rules:
if (pattern is not None):
has_match = (pattern.search(target_url) is not None)
if (has_match == needs_match):
proxy = (proxy_type, proxy_url)
break
elif (proxy_url is not None):
proxy = (proxy_type, proxy_url)
break
return proxy | -5,271,982,335,871,064,000 | Return a proxy (type, URL) tuple associated with ``target_url`` or
(None, None) if no such proxy is defined.
:param str target_url:
:rtype: tuple[proxy_type,URL] | starbelly/policy.py | get_proxy_url | HyperionGray/starbelly | python | def get_proxy_url(self, target_url):
'\n Return a proxy (type, URL) tuple associated with ``target_url`` or\n (None, None) if no such proxy is defined.\n\n :param str target_url:\n :rtype: tuple[proxy_type,URL]\n '
proxy = (None, None)
for (pattern, needs_match, proxy_type, proxy_url) in self._rules:
if (pattern is not None):
has_match = (pattern.search(target_url) is not None)
if (has_match == needs_match):
proxy = (proxy_type, proxy_url)
break
elif (proxy_url is not None):
proxy = (proxy_type, proxy_url)
break
return proxy |
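
A sketch of proxy selection, assuming the full PolicyProxyRules class is in scope and that 'socks5' appears in PROXY_SCHEMES (the constant itself is not shown in these records):

rules = PolicyProxyRules([
    {'pattern': r'\.onion', 'match': 'MATCHES',
     'proxy_url': 'socks5://localhost:9050'},
    {},                                 # final rule: direct connection by default
])
print(rules.get_proxy_url('http://example.onion/'))   # ('socks5', 'socks5://localhost:9050')
print(rules.get_proxy_url('https://example.com/'))    # (None, None)
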
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyRobotsTxt\n '
pb.usage = USAGE_ENUM.Value(doc['usage']) | -6,231,441,356,654,925,000 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyRobotsTxt | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyRobotsTxt\n '
pb.usage = USAGE_ENUM.Value(doc['usage']) |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyRobotsTxt\n :returns: Database document.\n :rtype: dict\n '
if pb.HasField('usage'):
doc['usage'] = USAGE_ENUM.Name(pb.usage) | -9,072,093,195,074,601,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyRobotsTxt
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyRobotsTxt\n :returns: Database document.\n :rtype: dict\n '
if pb.HasField('usage'):
doc['usage'] = USAGE_ENUM.Name(pb.usage) |
def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
if ('usage' not in doc):
_invalid('Robots.txt usage is required')
self._usage = doc['usage'] | -467,326,410,203,169,600 | Initialize from a database document.
:param dict doc: A database document. | starbelly/policy.py | __init__ | HyperionGray/starbelly | python | def __init__(self, doc):
'\n Initialize from a database document.\n\n :param dict doc: A database document.\n '
if ('usage' not in doc):
_invalid('Robots.txt usage is required')
self._usage = doc['usage'] |
@property
def usage(self):
' OBEY, IGNORE, or INVERT '
return self._usage | 6,754,642,664,716,121,000 | OBEY, IGNORE, or INVERT | starbelly/policy.py | usage | HyperionGray/starbelly | python | @property
def usage(self):
' '
return self._usage |
@staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyUrlNormalization\n '
if ('enabled' in doc):
pb.enabled = doc['enabled']
if ('strip_parameters' in doc):
pb.strip_parameters.extend(doc['strip_parameters']) | 930,943,612,833,391,500 | Convert from database document to protobuf.
:param dict doc: Database document.
:param pb: An empty protobuf.
:type pb: starbelly.starbelly_pb2.PolicyUrlNormalization | starbelly/policy.py | convert_doc_to_pb | HyperionGray/starbelly | python | @staticmethod
def convert_doc_to_pb(doc, pb):
'\n Convert from database document to protobuf.\n\n :param dict doc: Database document.\n :param pb: An empty protobuf.\n :type pb: starbelly.starbelly_pb2.PolicyUrlNormalization\n '
if ('enabled' in doc):
pb.enabled = doc['enabled']
if ('strip_parameters' in doc):
pb.strip_parameters.extend(doc['strip_parameters']) |
@staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyUrlNormalization\n :returns: Database document.\n :rtype: dict\n '
if pb.HasField('enabled'):
doc['enabled'] = pb.enabled
doc['strip_parameters'] = list(pb.strip_parameters) | -7,722,443,896,571,360,000 | Convert protobuf to database document.
:param pb: A protobuf
:type pb: starbelly.starbelly_pb2.PolicyUrlNormalization
:returns: Database document.
:rtype: dict | starbelly/policy.py | convert_pb_to_doc | HyperionGray/starbelly | python | @staticmethod
def convert_pb_to_doc(pb, doc):
'\n Convert protobuf to database document.\n\n :param pb: A protobuf\n :type pb: starbelly.starbelly_pb2.PolicyUrlNormalization\n :returns: Database document.\n :rtype: dict\n '
if pb.HasField('enabled'):
doc['enabled'] = pb.enabled
doc['strip_parameters'] = list(pb.strip_parameters) |