Dataset schema (each record below lists these fields, in order, one field per line):
- body: string, 26 to 98.2k characters
- body_hash: int64 (values span roughly the full signed 64-bit range)
- docstring: string, 1 to 16.8k characters
- path: string, 5 to 230 characters
- name: string, 1 to 96 characters
- repository_name: string, 7 to 89 characters
- lang: string, 1 distinct value ("python")
- body_without_docstring: string, 20 to 98.2k characters
def merge_subsets(a, b): 'Merges field subset definitions together. The b subset is merged into the a subset. Assumes that subsets have been stripped of non-ecs options.' for key in b: if (key not in a): a[key] = b[key] elif (('fields' in a[key]) and ('fields' in b[key])): if (b[key]['fields'] == '*'): a[key]['fields'] = '*' elif (isinstance(a[key]['fields'], dict) and isinstance(b[key]['fields'], dict)): merge_subsets(a[key]['fields'], b[key]['fields']) elif (('fields' in a[key]) or ('fields' in b[key])): raise ValueError("Subsets unmergeable: 'fields' found in key '{}' in only one subset".format(key)) if (a[key].get('enabled', True) or b[key].get('enabled', True)): a[key].pop('enabled', None) if (a[key].get('index', True) or b[key].get('index', True)): a[key].pop('index', None)
-50,387,666,115,584,190
Merges field subset definitions together. The b subset is merged into the a subset. Assumes that subsets have been stripped of non-ecs options.
scripts/schema/subset_filter.py
merge_subsets
6un9-h0-Dan/ecs
python
def merge_subsets(a, b): for key in b: if (key not in a): a[key] = b[key] elif (('fields' in a[key]) and ('fields' in b[key])): if (b[key]['fields'] == '*'): a[key]['fields'] = '*' elif (isinstance(a[key]['fields'], dict) and isinstance(b[key]['fields'], dict)): merge_subsets(a[key]['fields'], b[key]['fields']) elif (('fields' in a[key]) or ('fields' in b[key])): raise ValueError("Subsets unmergeable: 'fields' found in key '{}' in only one subset".format(key)) if (a[key].get('enabled', True) or b[key].get('enabled', True)): a[key].pop('enabled', None) if (a[key].get('index', True) or b[key].get('index', True)): a[key].pop('index', None)
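A minimal usage sketch for the merge_subsets record above, assuming the function is in scope; the two subset dicts here are made-up examples, not real ECS definitions:
```python
# Hypothetical subset definitions; merge_subsets mutates `a` in place.
a = {"process": {"fields": {"pid": {}}}}
b = {"process": {"fields": "*"}, "user": {"fields": {"name": {}}}}

merge_subsets(a, b)
print(a)
# {'process': {'fields': '*'}, 'user': {'fields': {'name': {}}}}
```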
def extract_matching_fields(fields, subset_definitions): 'Removes fields that are not in the subset definition. Returns a copy without modifying the input fields dict.' retained_fields = {x: fields[x].copy() for x in subset_definitions} for (key, val) in subset_definitions.items(): retained_fields[key]['field_details'] = fields[key]['field_details'].copy() for option in val: if (option != 'fields'): if ('intermediate' in retained_fields[key]['field_details']): retained_fields[key]['field_details']['intermediate'] = False retained_fields[key]['field_details'].setdefault('description', 'Intermediate field included by adding option with subset') retained_fields[key]['field_details']['level'] = 'custom' cleaner.field_cleanup(retained_fields[key]) retained_fields[key]['field_details'][option] = val[option] if ('fields' in fields[key]): if ('fields' not in val): raise ValueError("'fields' key expected, not found in subset for {}".format(key)) elif isinstance(val['fields'], dict): retained_fields[key]['fields'] = extract_matching_fields(fields[key]['fields'], val['fields']) elif (val['fields'] != '*'): raise ValueError("Unexpected value '{}' found in 'fields' key".format(val['fields'])) elif ('fields' in val): raise ValueError("'fields' key not expected, found in subset for {}".format(key)) return retained_fields
-6,010,733,992,487,650,000
Removes fields that are not in the subset definition. Returns a copy without modifying the input fields dict.
scripts/schema/subset_filter.py
extract_matching_fields
6un9-h0-Dan/ecs
python
def extract_matching_fields(fields, subset_definitions): retained_fields = {x: fields[x].copy() for x in subset_definitions} for (key, val) in subset_definitions.items(): retained_fields[key]['field_details'] = fields[key]['field_details'].copy() for option in val: if (option != 'fields'): if ('intermediate' in retained_fields[key]['field_details']): retained_fields[key]['field_details']['intermediate'] = False retained_fields[key]['field_details'].setdefault('description', 'Intermediate field included by adding option with subset') retained_fields[key]['field_details']['level'] = 'custom' cleaner.field_cleanup(retained_fields[key]) retained_fields[key]['field_details'][option] = val[option] if ('fields' in fields[key]): if ('fields' not in val): raise ValueError("'fields' key expected, not found in subset for {}".format(key)) elif isinstance(val['fields'], dict): retained_fields[key]['fields'] = extract_matching_fields(fields[key]['fields'], val['fields']) elif (val['fields'] != '*'): raise ValueError("Unexpected value '{}' found in 'fields' key".format(val['fields'])) elif ('fields' in val): raise ValueError("'fields' key not expected, found in subset for {}".format(key)) return retained_fields
@staticmethod def from_file(filename: str) -> 'WallpaperConfig': ' Creates a WallpaperConfig from a YAML file ' with open(filename, 'r') as input_file: return jsons.load(yaml.load(input_file, Loader=yaml.SafeLoader), WallpaperConfig)
-5,361,020,704,774,841,000
Creates a WallpaperConfig from a YAML file
config_objects.py
from_file
JimTheCactus/RedditWallpaperWatcher
python
@staticmethod def from_file(filename: str) -> 'WallpaperConfig': ' ' with open(filename, 'r') as input_file: return jsons.load(yaml.load(input_file, Loader=yaml.SafeLoader), WallpaperConfig)
@staticmethod def from_file(filename: str) -> 'RedditAuthInfo': ' Creates a RedditAuthInfo from a YAML file ' with open(filename, 'r') as input_file: auth = jsons.load(yaml.load(input_file, Loader=yaml.SafeLoader), RedditAuthInfo) return auth
7,710,573,328,413,913,000
Creates a RedditAuthInfo from a YAML file
config_objects.py
from_file
JimTheCactus/RedditWallpaperWatcher
python
@staticmethod def from_file(filename: str) -> 'RedditAuthInfo': ' ' with open(filename, 'r') as input_file: auth = jsons.load(yaml.load(input_file, Loader=yaml.SafeLoader), RedditAuthInfo) return auth
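Both from_file records above use the same yaml-plus-jsons pattern; a self-contained sketch of that pattern with a made-up AppConfig dataclass standing in for the repository's config classes:
```python
# Sketch of the YAML -> typed-config pattern; AppConfig is an example
# class for illustration only, not part of the repository.
from dataclasses import dataclass

import jsons
import yaml


@dataclass
class AppConfig:
    subreddit: str
    limit: int


raw = yaml.load("subreddit: wallpapers\nlimit: 25\n", Loader=yaml.SafeLoader)
config = jsons.load(raw, AppConfig)
print(config)  # AppConfig(subreddit='wallpapers', limit=25)
```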
def __init__(self, success=None, message=None, error_code=None, data=None): 'Constructor for the GetScheduledMessageResponse class' self.success = success self.message = message self.error_code = error_code self.data = data
-3,612,874,773,586,269,700
Constructor for the GetScheduledMessageResponse class
unifonicnextgen/models/get_scheduled_message_response.py
__init__
masaar/unifonic_python_sdk
python
def __init__(self, success=None, message=None, error_code=None, data=None): self.success = success self.message = message self.error_code = error_code self.data = data
@classmethod def from_dictionary(cls, dictionary): "Creates an instance of this model from a dictionary\n\n Args:\n dictionary (dictionary): A dictionary representation of the object as\n obtained from the deserialization of the server's response. The keys\n MUST match property names in the API description.\n\n Returns:\n object: An instance of this structure class.\n\n " if (dictionary is None): return None success = dictionary.get('success') message = dictionary.get('message') error_code = dictionary.get('errorCode') data = dictionary.get('data') return cls(success, message, error_code, data)
-1,881,683,673,075,648,500
Creates an instance of this model from a dictionary Args: dictionary (dictionary): A dictionary representation of the object as obtained from the deserialization of the server's response. The keys MUST match property names in the API description. Returns: object: An instance of this structure class.
unifonicnextgen/models/get_scheduled_message_response.py
from_dictionary
masaar/unifonic_python_sdk
python
@classmethod def from_dictionary(cls, dictionary): "Creates an instance of this model from a dictionary\n\n Args:\n dictionary (dictionary): A dictionary representation of the object as\n obtained from the deserialization of the server's response. The keys\n MUST match property names in the API description.\n\n Returns:\n object: An instance of this structure class.\n\n " if (dictionary is None): return None success = dictionary.get('success') message = dictionary.get('message') error_code = dictionary.get('errorCode') data = dictionary.get('data') return cls(success, message, error_code, data)
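A usage sketch for the from_dictionary record above; the payload keys follow the docstring's description, and the import path is inferred from the record's path and class name, so treat it as an assumption:
```python
# Assumed import, derived from the record's path and class name above.
from unifonicnextgen.models.get_scheduled_message_response import (
    GetScheduledMessageResponse,
)

# Example payload shaped like a deserialized API response.
payload = {"success": True, "message": "OK", "errorCode": "", "data": {"id": "123"}}
response = GetScheduledMessageResponse.from_dictionary(payload)
print(response.success, response.error_code)  # True ''
```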
def testing_job(progress_steps=None, retval=None, fail=False, skip=False, log_messages=None, step_duration=0): '\n Job used for testing purposes.\n\n :param progress_steps:\n A list of tuples: ``(<group_name>, <steps>)``, where "group_name"\n is a tuple of name "levels", "steps" an integer representing how\n many steps should that level have.\n\n Progress reports will be sent in randomized order.\n\n :param retval:\n The return value for the job.\n\n :param fail:\n Whether this job should fail.\n\n :param skip:\n Whether this job should be skipped.\n\n :param log_messages:\n A list of tuples: ``(level, message)``\n\n :param step_duration:\n The time to sleep between steps, in milliseconds.\n ' from jobcontrol.globals import execution_context logger = logging.getLogger('jobcontrol.utils.testing_job') log_messages = list((log_messages or [])) if (progress_steps is None): progress_steps = [(None, 10)] totals = {} counters = {} progress_report_items = [] for (name, steps) in progress_steps: if isinstance(name, list): name = tuple(name) if (not ((name is None) or isinstance(name, tuple))): raise TypeError('Name must be a tuple or None') for i in xrange(steps): progress_report_items.append(name) totals[name] = steps counters[name] = 0 random.shuffle(progress_report_items) sleep_time = ((step_duration * 1.0) / 1000) def report_progress(name, cur, tot, status=None): app = execution_context.current_app app.report_progress(group_name=name, current=cur, total=tot, status_line=status) def _should_fail(): return (random.randint(0, len(progress_report_items)) == 0) for item in progress_report_items: counters[item] += 1 report_progress(item, counters[item], totals[item], 'Doing action {0} [{1}/{2}]'.format(item, counters[item], totals[item])) if len(log_messages): (lev, msg) = log_messages.pop(0) logger.log(lev, msg) if (fail and _should_fail()): raise RuntimeError('This is a simulated exception in the middle of the loop') if (skip and _should_fail()): raise SkipBuild('This is a simulated skip in the middle of the loop') if sleep_time: time.sleep(sleep_time) if skip: raise SkipBuild('This build should be skipped!') if fail: raise RuntimeError('This is a simulated exception') return retval
4,464,256,424,644,385,000
Job used for testing purposes. :param progress_steps: A list of tuples: ``(<group_name>, <steps>)``, where "group_name" is a tuple of name "levels", "steps" an integer representing how many steps should that level have. Progress reports will be sent in randomized order. :param retval: The return value for the job. :param fail: Whether this job should fail. :param skip: Whether this job should be skipped. :param log_messages: A list of tuples: ``(level, message)`` :param step_duration: The time to sleep between steps, in milliseconds.
jobcontrol/utils/testing.py
testing_job
rshk/jobcontrol
python
def testing_job(progress_steps=None, retval=None, fail=False, skip=False, log_messages=None, step_duration=0): '\n Job used for testing purposes.\n\n :param progress_steps:\n A list of tuples: ``(<group_name>, <steps>)``, where "group_name"\n is a tuple of name "levels", "steps" an integer representing how\n many steps should that level have.\n\n Progress reports will be sent in randomized order.\n\n :param retval:\n The return value for the job.\n\n :param fail:\n Whether this job should fail.\n\n :param skip:\n Whether this job should be skipped.\n\n :param log_messages:\n A list of tuples: ``(level, message)``\n\n :param step_duration:\n The time to sleep between steps, in milliseconds.\n ' from jobcontrol.globals import execution_context logger = logging.getLogger('jobcontrol.utils.testing_job') log_messages = list((log_messages or [])) if (progress_steps is None): progress_steps = [(None, 10)] totals = {} counters = {} progress_report_items = [] for (name, steps) in progress_steps: if isinstance(name, list): name = tuple(name) if (not ((name is None) or isinstance(name, tuple))): raise TypeError('Name must be a tuple or None') for i in xrange(steps): progress_report_items.append(name) totals[name] = steps counters[name] = 0 random.shuffle(progress_report_items) sleep_time = ((step_duration * 1.0) / 1000) def report_progress(name, cur, tot, status=None): app = execution_context.current_app app.report_progress(group_name=name, current=cur, total=tot, status_line=status) def _should_fail(): return (random.randint(0, len(progress_report_items)) == 0) for item in progress_report_items: counters[item] += 1 report_progress(item, counters[item], totals[item], 'Doing action {0} [{1}/{2}]'.format(item, counters[item], totals[item])) if len(log_messages): (lev, msg) = log_messages.pop(0) logger.log(lev, msg) if (fail and _should_fail()): raise RuntimeError('This is a simulated exception in the middle of the loop') if (skip and _should_fail()): raise SkipBuild('This is a simulated skip in the middle of the loop') if sleep_time: time.sleep(sleep_time) if skip: raise SkipBuild('This build should be skipped!') if fail: raise RuntimeError('This is a simulated exception') return retval
def job_failing_once(): '\n This job will fail exactly once; retry will be successful\n ' from jobcontrol.globals import current_job exec_count = len(list(current_job.iter_runs())) if (exec_count <= 1): raise RuntimeError('Simulating failure') return exec_count
665,967,870,671,782,900
This job will fail exactly once; retry will be successful
jobcontrol/utils/testing.py
job_failing_once
rshk/jobcontrol
python
def job_failing_once(): '\n \n ' from jobcontrol.globals import current_job exec_count = len(list(current_job.iter_runs())) if (exec_count <= 1): raise RuntimeError('Simulating failure') return exec_count
def job_echo_config(*args, **kwargs): '\n Simple job, "echoing" back the current configuration.\n ' from jobcontrol.globals import current_job, current_build return {'args': args, 'kwargs': kwargs, 'build_id': current_build.id, 'job_id': current_job.id, 'dependencies': current_build.config['dependencies'], 'config': current_build.config}
3,529,256,724,884,849,700
Simple job, "echoing" back the current configuration.
jobcontrol/utils/testing.py
job_echo_config
rshk/jobcontrol
python
def job_echo_config(*args, **kwargs): '\n \n ' from jobcontrol.globals import current_job, current_build return {'args': args, 'kwargs': kwargs, 'build_id': current_build.id, 'job_id': current_job.id, 'dependencies': current_build.config['dependencies'], 'config': current_build.config}
def CrossValidationFolds_Traversal(estimator, vdataset): "\n Arguments: \n - estimator = classifer of model\n - vdataset = vehicld dataset \n \n This function computes acccuracy score with the\n Cross Validation Score for each KFold with K from 2 to 10 \n \n Output:\n returns matrix conatining value of K with it's corresponding performance score\n " X = vdataset.drop(['Class', 'Class_code'], axis=1) y = vdataset['Class_code'] scores = [] matrix = pd.DataFrame(columns=['KFold', 'Accuracy']) for i in range(2, 11): score = cross_val_score(estimator, X, y, cv=i, scoring='accuracy') scores.append(score.mean()) matrix = matrix.append({'KFold': i, 'Accuracy': (score.mean() * 100)}, ignore_index=True) return matrix
-815,941,616,666,243,500
Arguments: - estimator = classifier of the model - vdataset = vehicle dataset This function computes the accuracy score with the Cross Validation Score for each KFold with K from 2 to 10 Output: returns a matrix containing the value of K with its corresponding performance score
dev/shiza16/Calibration plot/CrossValidationFold_Traversal.py
CrossValidationFolds_Traversal
Bolaji61/PRESC
python
def CrossValidationFolds_Traversal(estimator, vdataset): "\n Arguments: \n - estimator = classifer of model\n - vdataset = vehicld dataset \n \n This function computes acccuracy score with the\n Cross Validation Score for each KFold with K from 2 to 10 \n \n Output:\n returns matrix conatining value of K with it's corresponding performance score\n " X = vdataset.drop(['Class', 'Class_code'], axis=1) y = vdataset['Class_code'] scores = [] matrix = pd.DataFrame(columns=['KFold', 'Accuracy']) for i in range(2, 11): score = cross_val_score(estimator, X, y, cv=i, scoring='accuracy') scores.append(score.mean()) matrix = matrix.append({'KFold': i, 'Accuracy': (score.mean() * 100)}, ignore_index=True) return matrix
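The record above builds its result with DataFrame.append, which has since been removed from pandas; a self-contained sketch of the same K-fold accuracy sweep on a bundled toy dataset, collecting rows in a list instead:
```python
# Same K-fold sweep as CrossValidationFolds_Traversal, but on a bundled
# toy dataset and without the removed DataFrame.append.
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score

X, y = load_iris(return_X_y=True)
estimator = LogisticRegression(max_iter=1000)

rows = []
for k in range(2, 11):
    score = cross_val_score(estimator, X, y, cv=k, scoring="accuracy")
    rows.append({"KFold": k, "Accuracy": score.mean() * 100})

matrix = pd.DataFrame(rows)
print(matrix)
```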
def Visulaize_CrossValidationFolds_Traversal(matrix): "\n Argument:\n - matrix: Dataframe named matrix\n \n Line Plot is drawn for each KFold value with it's respective performance score.\n \n Output:\n - plot the line graph\n " ax = plt.gca() matrix.plot(kind='line', x='KFold', y='Accuracy', color='red', marker='o', markerfacecolor='blue', markersize=12, ax=ax) plt.title("Line plot of No of Kfold with it's corresponding performance score\n") plt.ylabel('Accuracy\n') plt.xlabel('\nNo of KFolds') plt.show()
1,476,091,193,718,027,800
Argument: - matrix: Dataframe named matrix Line Plot is drawn for each KFold value with it's respective performance score. Output: - plot the line graph
dev/shiza16/Calibration plot/CrossValidationFold_Traversal.py
Visulaize_CrossValidationFolds_Traversal
Bolaji61/PRESC
python
def Visulaize_CrossValidationFolds_Traversal(matrix): "\n Argument:\n - matrix: Dataframe named matrix\n \n Line Plot is drawn for each KFold value with it's respective performance score.\n \n Output:\n - plot the line graph\n " ax = plt.gca() matrix.plot(kind='line', x='KFold', y='Accuracy', color='red', marker='o', markerfacecolor='blue', markersize=12, ax=ax) plt.title("Line plot of No of Kfold with it's corresponding performance score\n") plt.ylabel('Accuracy\n') plt.xlabel('\nNo of KFolds') plt.show()
def addPath(rel_path, prepend=False): " Adds a directory to the system python path, either by append (doesn't\n override default or globally installed package names) or by prepend\n (overrides default/global package names).\n " path = (lambda *paths: (os.path.abspath(os.path.join(os.path.dirname(__file__), *paths)) + '/')) if prepend: return sys.path.insert(0, path(rel_path)) return sys.path.append(path(rel_path))
-3,591,457,979,364,174,300
Adds a directory to the system python path, either by append (doesn't override default or globally installed package names) or by prepend (overrides default/global package names).
example/manage.py
addPath
Locu/djoauth2
python
def addPath(rel_path, prepend=False): " Adds a directory to the system python path, either by append (doesn't\n override default or globally installed package names) or by prepend\n (overrides default/global package names).\n " path = (lambda *paths: (os.path.abspath(os.path.join(os.path.dirname(__file__), *paths)) + '/')) if prepend: return sys.path.insert(0, path(rel_path)) return sys.path.append(path(rel_path))
def act(self, state): 'Returns actions for given state(s) as per current policy.' state = np.reshape(state, [(- 1), self.state_size]) action = self.actor_local.model.predict(state)[0] return list((action + self.noise.sample()))
-7,804,609,446,734,043,000
Returns actions for given state(s) as per current policy.
home/agents/agent.py
act
GabrielTourinho/dlnd-teach-a-quadcopter-how-to-fly
python
def act(self, state): state = np.reshape(state, [(- 1), self.state_size]) action = self.actor_local.model.predict(state)[0] return list((action + self.noise.sample()))
def learn(self, experiences): 'Update policy and value parameters using given batch of experience tuples.' states = np.vstack([e.state for e in experiences if (e is not None)]) actions = np.array([e.action for e in experiences if (e is not None)]).astype(np.float32).reshape((- 1), self.action_size) rewards = np.array([e.reward for e in experiences if (e is not None)]).astype(np.float32).reshape((- 1), 1) dones = np.array([e.done for e in experiences if (e is not None)]).astype(np.uint8).reshape((- 1), 1) next_states = np.vstack([e.next_state for e in experiences if (e is not None)]) actions_next = self.actor_target.model.predict_on_batch(next_states) Q_targets_next = self.critic_target.model.predict_on_batch([next_states, actions_next]) Q_targets = (rewards + ((self.gamma * Q_targets_next) * (1 - dones))) self.critic_local.model.train_on_batch(x=[states, actions], y=Q_targets) action_gradients = np.reshape(self.critic_local.get_action_gradients([states, actions, 0]), ((- 1), self.action_size)) self.actor_local.train_fn([states, action_gradients, 1]) self.soft_update(self.critic_local.model, self.critic_target.model) self.soft_update(self.actor_local.model, self.actor_target.model)
9,061,022,206,806,542,000
Update policy and value parameters using given batch of experience tuples.
home/agents/agent.py
learn
GabrielTourinho/dlnd-teach-a-quadcopter-how-to-fly
python
def learn(self, experiences): states = np.vstack([e.state for e in experiences if (e is not None)]) actions = np.array([e.action for e in experiences if (e is not None)]).astype(np.float32).reshape((- 1), self.action_size) rewards = np.array([e.reward for e in experiences if (e is not None)]).astype(np.float32).reshape((- 1), 1) dones = np.array([e.done for e in experiences if (e is not None)]).astype(np.uint8).reshape((- 1), 1) next_states = np.vstack([e.next_state for e in experiences if (e is not None)]) actions_next = self.actor_target.model.predict_on_batch(next_states) Q_targets_next = self.critic_target.model.predict_on_batch([next_states, actions_next]) Q_targets = (rewards + ((self.gamma * Q_targets_next) * (1 - dones))) self.critic_local.model.train_on_batch(x=[states, actions], y=Q_targets) action_gradients = np.reshape(self.critic_local.get_action_gradients([states, actions, 0]), ((- 1), self.action_size)) self.actor_local.train_fn([states, action_gradients, 1]) self.soft_update(self.critic_local.model, self.critic_target.model) self.soft_update(self.actor_local.model, self.actor_target.model)
def soft_update(self, local_model, target_model): 'Soft update model parameters.' local_weights = np.array(local_model.get_weights()) target_weights = np.array(target_model.get_weights()) assert (len(local_weights) == len(target_weights)), 'Local and target model parameters must have the same size' new_weights = ((self.tau * local_weights) + ((1 - self.tau) * target_weights)) target_model.set_weights(new_weights)
-6,402,697,941,261,341,000
Soft update model parameters.
home/agents/agent.py
soft_update
GabrielTourinho/dlnd-teach-a-quadcopter-how-to-fly
python
def soft_update(self, local_model, target_model): local_weights = np.array(local_model.get_weights()) target_weights = np.array(target_model.get_weights()) assert (len(local_weights) == len(target_weights)), 'Local and target model parameters must have the same size' new_weights = ((self.tau * local_weights) + ((1 - self.tau) * target_weights)) target_model.set_weights(new_weights)
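The soft_update record above blends local and target Keras weights Polyak-style; the same arithmetic shown in isolation on plain NumPy arrays:
```python
# Illustrative soft (Polyak) update on plain NumPy weight lists,
# independent of the Keras models used above.
import numpy as np

tau = 0.01
local_weights = [np.ones((2, 2)), np.zeros(3)]
target_weights = [np.zeros((2, 2)), np.ones(3)]

new_weights = [tau * lw + (1.0 - tau) * tw
               for lw, tw in zip(local_weights, target_weights)]
print(new_weights[0])  # each entry moves 1% of the way toward the local weights
```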
def plotfft(s, fmax, doplot=False): ' This functions computes the fft of a signal, returning the frequency\n and their magnitude values.\n\n Parameters\n ----------\n s: array-like\n the input signal.\n fmax: int\n the sampling frequency.\n doplot: boolean\n a variable to indicate whether the plot is done or not.\n\n Returns\n -------\n f: array-like\n the frequency values (xx axis)\n fs: array-like\n the amplitude of the frequency values (yy axis)\n ' fs = abs(np.fft.fft(s)) f = linspace(0, (fmax / 2), (len(s) / 2)) if doplot: pl.plot(f[1:(len(s) / 2)], fs[1:(len(s) / 2)]) return (f[1:(len(s) / 2)].copy(), fs[1:(len(s) / 2)].copy())
-2,794,040,221,416,781,000
This function computes the FFT of a signal, returning the frequency values and their magnitudes. Parameters ---------- s: array-like the input signal. fmax: int the sampling frequency. doplot: boolean a variable to indicate whether the plot is done or not. Returns ------- f: array-like the frequency values (xx axis) fs: array-like the amplitude of the frequency values (yy axis)
novainstrumentation/tools.py
plotfft
novabiosignals/novainstrumentation
python
def plotfft(s, fmax, doplot=False): ' This functions computes the fft of a signal, returning the frequency\n and their magnitude values.\n\n Parameters\n ----------\n s: array-like\n the input signal.\n fmax: int\n the sampling frequency.\n doplot: boolean\n a variable to indicate whether the plot is done or not.\n\n Returns\n -------\n f: array-like\n the frequency values (xx axis)\n fs: array-like\n the amplitude of the frequency values (yy axis)\n ' fs = abs(np.fft.fft(s)) f = linspace(0, (fmax / 2), (len(s) / 2)) if doplot: pl.plot(f[1:(len(s) / 2)], fs[1:(len(s) / 2)]) return (f[1:(len(s) / 2)].copy(), fs[1:(len(s) / 2)].copy())
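Under Python 3 the linspace call in the plotfft record above receives a float length (len(s) / 2 is a float); a self-contained alternative sketch using rfft/rfftfreq that returns the same kind of frequency/magnitude pair:
```python
# A Python 3-friendly take on the same idea: compute the frequency axis
# with rfftfreq instead of linspace with a float length.
import numpy as np

def spectrum(s, fmax):
    fs = np.abs(np.fft.rfft(s))
    f = np.fft.rfftfreq(len(s), d=1.0 / fmax)
    return f, fs

t = np.arange(0, 1, 1 / 100.0)              # 1 s of data sampled at 100 Hz
f, fs = spectrum(np.sin(2 * np.pi * 5 * t), fmax=100)
print(f[np.argmax(fs)])                      # ~5.0, the dominant frequency
```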
def load_with_cache(file_, recache=False, sampling=1, columns=None, temp_dir='.', data_type='int16'): "@brief This function loads a file from the current directory and saves\n the cached file to later executions. It's also possible to make a recache\n or a subsampling of the signal and choose only a few columns of the signal,\n to accelerate the opening process.\n\n @param file String: the name of the file to open.\n @param recache Boolean: indication whether it's done recache or not\n (default = false).\n @param sampling Integer: the sampling step. if 1, the signal isn't\n sampled (default = 1).\n @param columns Array-Like: the columns to read from the file. if None,\n all columns are considered (default = None).\n\n @return data Array-Like: the data from the file.\n TODO: Should save cache in a different directory\n TODO: Create test function and check size of generated files\n TODO: receive a file handle\n " cfile = ('%s.npy' % file_) if ((not path.exists(cfile)) or recache): if (columns == None): data = np.loadtxt(file_)[::sampling, :] else: data = np.loadtxt(file_)[::sampling, columns] np.save(cfile, data.astype(data_type)) else: data = np.load(cfile) return data
4,169,040,887,613,408,000
@brief This function loads a file from the current directory and saves a cached copy for later executions. It's also possible to force a recache or subsample the signal and choose only a few columns of the signal, to accelerate the opening process. @param file String: the name of the file to open. @param recache Boolean: whether the cache should be rebuilt (default = false). @param sampling Integer: the sampling step. If 1, the signal isn't subsampled (default = 1). @param columns Array-Like: the columns to read from the file. If None, all columns are considered (default = None). @return data Array-Like: the data from the file. TODO: Should save cache in a different directory TODO: Create test function and check size of generated files TODO: receive a file handle
novainstrumentation/tools.py
load_with_cache
novabiosignals/novainstrumentation
python
def load_with_cache(file_, recache=False, sampling=1, columns=None, temp_dir='.', data_type='int16'): "@brief This function loads a file from the current directory and saves\n the cached file to later executions. It's also possible to make a recache\n or a subsampling of the signal and choose only a few columns of the signal,\n to accelerate the opening process.\n\n @param file String: the name of the file to open.\n @param recache Boolean: indication whether it's done recache or not\n (default = false).\n @param sampling Integer: the sampling step. if 1, the signal isn't\n sampled (default = 1).\n @param columns Array-Like: the columns to read from the file. if None,\n all columns are considered (default = None).\n\n @return data Array-Like: the data from the file.\n TODO: Should save cache in a different directory\n TODO: Create test function and check size of generated files\n TODO: receive a file handle\n " cfile = ('%s.npy' % file_) if ((not path.exists(cfile)) or recache): if (columns == None): data = np.loadtxt(file_)[::sampling, :] else: data = np.loadtxt(file_)[::sampling, columns] np.save(cfile, data.astype(data_type)) else: data = np.load(cfile) return data
def load_data(filename): '\n :rtype : numpy matrix\n ' data = pandas.read_csv(filename, header=None, delimiter='\t', skiprows=9) return data.as_matrix()
4,191,160,812,623,671,000
:rtype : numpy matrix
novainstrumentation/tools.py
load_data
novabiosignals/novainstrumentation
python
def load_data(filename): '\n \n ' data = pandas.read_csv(filename, header=None, delimiter='\t', skiprows=9) return data.as_matrix()
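DataFrame.as_matrix in the load_data record above has been removed from recent pandas; an equivalent reader using to_numpy, keeping the tab-separated, nine-header-row layout the record assumes:
```python
# Same reader as load_data, written for current pandas where
# DataFrame.as_matrix() no longer exists.
import pandas as pd

def load_data_np(filename):
    data = pd.read_csv(filename, header=None, delimiter="\t", skiprows=9)
    return data.to_numpy()
```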
def test_experiment_variables(jikken_experiment): 'test variables are initialized properly and are not settable' (exp, expected_variables, *_) = jikken_experiment assert (exp.variables == expected_variables) with pytest.raises(AttributeError): exp.variables = expected_variables
4,985,580,147,700,100,000
test variables are initialized properly and are not settable
tests/unit/test_experiment.py
test_experiment_variables
outcastofmusic/jikken
python
def test_experiment_variables(jikken_experiment): (exp, expected_variables, *_) = jikken_experiment assert (exp.variables == expected_variables) with pytest.raises(AttributeError): exp.variables = expected_variables
def test_experiment_tags(jikken_experiment): 'test tags are initialized properly and are not settable' (exp, _, expected_tags, _) = jikken_experiment assert (exp.tags == expected_tags) with pytest.raises(AttributeError): exp.tags = expected_tags
-8,382,541,816,748,046,000
test tags are initialized properly and are not settable
tests/unit/test_experiment.py
test_experiment_tags
outcastofmusic/jikken
python
def test_experiment_tags(jikken_experiment): (exp, _, expected_tags, _) = jikken_experiment assert (exp.tags == expected_tags) with pytest.raises(AttributeError): exp.tags = expected_tags
def test_experiment_schema(jikken_experiment): 'test schema is constructed properly' (exp, expected_variables, _, tmpdir) = jikken_experiment expected_hash = '40a3f5106cf9426bd4b13b168717e7bf' assert (exp.schema_hash == expected_hash) exp_2 = Experiment(name='exp1', variables=expected_variables, code_dir=tmpdir.strpath) assert (exp_2.schema_hash == exp.schema_hash)
-2,649,363,590,991,716,000
test schema is constructed properly
tests/unit/test_experiment.py
test_experiment_schema
outcastofmusic/jikken
python
def test_experiment_schema(jikken_experiment): (exp, expected_variables, _, tmpdir) = jikken_experiment expected_hash = '40a3f5106cf9426bd4b13b168717e7bf' assert (exp.schema_hash == expected_hash) exp_2 = Experiment(name='exp1', variables=expected_variables, code_dir=tmpdir.strpath) assert (exp_2.schema_hash == exp.schema_hash)
def test_experiment_parameters_schema(jikken_experiment): 'test schema with parameters is constructed properly' (exp, expected_variables, _, tmpdir) = jikken_experiment expected_hash = '77c861c501833128e1cfb5b398588a7e' assert (exp.parameters_hash == expected_hash)
5,444,041,656,221,252,000
test schema with parameters is constructed properly
tests/unit/test_experiment.py
test_experiment_parameters_schema
outcastofmusic/jikken
python
def test_experiment_parameters_schema(jikken_experiment): (exp, expected_variables, _, tmpdir) = jikken_experiment expected_hash = '77c861c501833128e1cfb5b398588a7e' assert (exp.parameters_hash == expected_hash)
def test_log_info(self): 'Test that INFO log entry does not go to the audit log.' logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.info(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertNotIn(msg, audit_log_entries)
2,806,284,644,488,858,000
Test that INFO log entry does not go to the audit log.
st2common/tests/unit/test_logger.py
test_log_info
Anshika-Gautam/st2
python
def test_log_info(self): logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.info(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertNotIn(msg, audit_log_entries)
def test_log_critical(self): 'Test that CRITICAL log entry does not go to the audit log.' logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.critical(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertNotIn(msg, audit_log_entries)
1,073,219,046,804,395,500
Test that CRITICAL log entry does not go to the audit log.
st2common/tests/unit/test_logger.py
test_log_critical
Anshika-Gautam/st2
python
def test_log_critical(self): logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.critical(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertNotIn(msg, audit_log_entries)
def test_log_audit(self): 'Test that AUDIT log entry goes to the audit log.' logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.audit(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertIn(msg, audit_log_entries)
1,515,286,437,568,597,800
Test that AUDIT log entry goes to the audit log.
st2common/tests/unit/test_logger.py
test_log_audit
Anshika-Gautam/st2
python
def test_log_audit(self): logging.setup(self.cfg_path) log = logging.getLogger(__name__) msg = uuid.uuid4().hex log.audit(msg) info_log_entries = open(self.info_log_path).read() self.assertIn(msg, info_log_entries) audit_log_entries = open(self.audit_log_path).read() self.assertIn(msg, audit_log_entries)
def stats(fname, times=True): 'Return stats on the file which should have been preserved' with open(fname) as fd: st = os.fstat(fd.fileno()) stats = (st.st_mode, st.st_uid, st.st_gid, st.st_size) if times: return (stats + (st.st_atime, st.st_mtime)) else: return stats
-4,470,607,643,816,927,000
Return stats on the file which should have been preserved
datalad/customremotes/tests/test_archives.py
stats
soichih/datalad
python
def stats(fname, times=True): with open(fname) as fd: st = os.fstat(fd.fileno()) stats = (st.st_mode, st.st_uid, st.st_gid, st.st_size) if times: return (stats + (st.st_atime, st.st_mtime)) else: return stats
def turnAlarm(on): 'write command into file - pass enable/disable command' print(('turn alarm on? %s' % on)) enable = on disable = (not on) alarm = AlarmService() alarm.load() alarm.save(enable, disable) return True
666,391,020,835,512,300
write command into file - pass enable/disable command
web/home.py
turnAlarm
tommykoch/pyhome
python
def turnAlarm(on): print(('turn alarm on? %s' % on)) enable = on disable = (not on) alarm = AlarmService() alarm.load() alarm.save(enable, disable) return True
async def async_setup_entry(hass, config_entry, async_add_entities): 'Set up config entry.' discovery_info = config_entry.data device_ids = set() def supported(event): return (isinstance(event.device, rfxtrxmod.LightingDevice) and event.device.known_to_be_dimmable) entities = [] for (packet_id, entity_info) in discovery_info[CONF_DEVICES].items(): event = get_rfx_object(packet_id) if (event is None): _LOGGER.error('Invalid device: %s', packet_id) continue if (not supported(event)): continue device_id = get_device_id(event.device, data_bits=entity_info.get(CONF_DATA_BITS)) if (device_id in device_ids): continue device_ids.add(device_id) entity = RfxtrxLight(event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS]) entities.append(entity) async_add_entities(entities) @callback def light_update(event, device_id): 'Handle light updates from the RFXtrx gateway.' if (not supported(event)): return if (device_id in device_ids): return device_ids.add(device_id) _LOGGER.info('Added light (Device ID: %s Class: %s Sub: %s, Event: %s)', event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, ''.join((f'{x:02x}' for x in event.data))) entity = RfxtrxLight(event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event) async_add_entities([entity]) if discovery_info[CONF_AUTOMATIC_ADD]: hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, light_update)
2,629,040,797,358,438,400
Set up config entry.
homeassistant/components/rfxtrx/light.py
async_setup_entry
1e1/core-1
python
async def async_setup_entry(hass, config_entry, async_add_entities): discovery_info = config_entry.data device_ids = set() def supported(event): return (isinstance(event.device, rfxtrxmod.LightingDevice) and event.device.known_to_be_dimmable) entities = [] for (packet_id, entity_info) in discovery_info[CONF_DEVICES].items(): event = get_rfx_object(packet_id) if (event is None): _LOGGER.error('Invalid device: %s', packet_id) continue if (not supported(event)): continue device_id = get_device_id(event.device, data_bits=entity_info.get(CONF_DATA_BITS)) if (device_id in device_ids): continue device_ids.add(device_id) entity = RfxtrxLight(event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS]) entities.append(entity) async_add_entities(entities) @callback def light_update(event, device_id): 'Handle light updates from the RFXtrx gateway.' if (not supported(event)): return if (device_id in device_ids): return device_ids.add(device_id) _LOGGER.info('Added light (Device ID: %s Class: %s Sub: %s, Event: %s)', event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, ''.join((f'{x:02x}' for x in event.data))) entity = RfxtrxLight(event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event) async_add_entities([entity]) if discovery_info[CONF_AUTOMATIC_ADD]: hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, light_update)
@callback def light_update(event, device_id): 'Handle light updates from the RFXtrx gateway.' if (not supported(event)): return if (device_id in device_ids): return device_ids.add(device_id) _LOGGER.info('Added light (Device ID: %s Class: %s Sub: %s, Event: %s)', event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, ''.join((f'{x:02x}' for x in event.data))) entity = RfxtrxLight(event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event) async_add_entities([entity])
-5,317,690,462,523,536,000
Handle light updates from the RFXtrx gateway.
homeassistant/components/rfxtrx/light.py
light_update
1e1/core-1
python
@callback def light_update(event, device_id): if (not supported(event)): return if (device_id in device_ids): return device_ids.add(device_id) _LOGGER.info('Added light (Device ID: %s Class: %s Sub: %s, Event: %s)', event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, ''.join((f'{x:02x}' for x in event.data))) entity = RfxtrxLight(event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event) async_add_entities([entity])
async def async_added_to_hass(self): 'Restore RFXtrx device state (ON/OFF).' (await super().async_added_to_hass()) if (self._event is None): old_state = (await self.async_get_last_state()) if (old_state is not None): self._state = (old_state.state == STATE_ON) self._brightness = old_state.attributes.get(ATTR_BRIGHTNESS)
229,973,618,490,358,560
Restore RFXtrx device state (ON/OFF).
homeassistant/components/rfxtrx/light.py
async_added_to_hass
1e1/core-1
python
async def async_added_to_hass(self): (await super().async_added_to_hass()) if (self._event is None): old_state = (await self.async_get_last_state()) if (old_state is not None): self._state = (old_state.state == STATE_ON) self._brightness = old_state.attributes.get(ATTR_BRIGHTNESS)
@property def brightness(self): 'Return the brightness of this light between 0..255.' return self._brightness
-3,846,976,056,796,552,000
Return the brightness of this light between 0..255.
homeassistant/components/rfxtrx/light.py
brightness
1e1/core-1
python
@property def brightness(self): return self._brightness
@property def supported_features(self): 'Flag supported features.' return SUPPORT_RFXTRX
-5,159,653,584,670,436,000
Flag supported features.
homeassistant/components/rfxtrx/light.py
supported_features
1e1/core-1
python
@property def supported_features(self): return SUPPORT_RFXTRX
@property def is_on(self): 'Return true if device is on.' return self._state
-3,559,686,018,939,803,600
Return true if device is on.
homeassistant/components/rfxtrx/light.py
is_on
1e1/core-1
python
@property def is_on(self): return self._state
async def async_turn_on(self, **kwargs): 'Turn the device on.' brightness = kwargs.get(ATTR_BRIGHTNESS) self._state = True if (brightness is None): (await self._async_send(self._device.send_on)) self._brightness = 255 else: (await self._async_send(self._device.send_dim, ((brightness * 100) // 255))) self._brightness = brightness self.async_write_ha_state()
-27,936,814,387,729,360
Turn the device on.
homeassistant/components/rfxtrx/light.py
async_turn_on
1e1/core-1
python
async def async_turn_on(self, **kwargs): brightness = kwargs.get(ATTR_BRIGHTNESS) self._state = True if (brightness is None): (await self._async_send(self._device.send_on)) self._brightness = 255 else: (await self._async_send(self._device.send_dim, ((brightness * 100) // 255))) self._brightness = brightness self.async_write_ha_state()
async def async_turn_off(self, **kwargs): 'Turn the device off.' (await self._async_send(self._device.send_off)) self._state = False self._brightness = 0 self.async_write_ha_state()
-1,614,410,703,092,717,800
Turn the device off.
homeassistant/components/rfxtrx/light.py
async_turn_off
1e1/core-1
python
async def async_turn_off(self, **kwargs): (await self._async_send(self._device.send_off)) self._state = False self._brightness = 0 self.async_write_ha_state()
def _apply_event(self, event): 'Apply command from rfxtrx.' super()._apply_event(event) if (event.values['Command'] in COMMAND_ON_LIST): self._state = True elif (event.values['Command'] in COMMAND_OFF_LIST): self._state = False elif (event.values['Command'] == 'Set level'): self._brightness = ((event.values['Dim level'] * 255) // 100) self._state = (self._brightness > 0)
-4,045,686,276,709,932,500
Apply command from rfxtrx.
homeassistant/components/rfxtrx/light.py
_apply_event
1e1/core-1
python
def _apply_event(self, event): super()._apply_event(event) if (event.values['Command'] in COMMAND_ON_LIST): self._state = True elif (event.values['Command'] in COMMAND_OFF_LIST): self._state = False elif (event.values['Command'] == 'Set level'): self._brightness = ((event.values['Dim level'] * 255) // 100) self._state = (self._brightness > 0)
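async_turn_on and _apply_event above convert between Home Assistant's 0-255 brightness scale and RFXtrx's 0-100 dim level; the two integer scalings shown in isolation:
```python
# The two scalings used above, in isolation.
def ha_to_dim(brightness):      # async_turn_on direction: 0-255 -> 0-100
    return brightness * 100 // 255

def dim_to_ha(dim_level):       # _apply_event direction: 0-100 -> 0-255
    return dim_level * 255 // 100

print(ha_to_dim(255), dim_to_ha(100))  # 100 255
print(ha_to_dim(128), dim_to_ha(50))   # 50 127
```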
@callback def _handle_event(self, event, device_id): 'Check if event applies to me and update.' if (device_id != self._device_id): return self._apply_event(event) self.async_write_ha_state()
8,437,661,162,664,978,000
Check if event applies to me and update.
homeassistant/components/rfxtrx/light.py
_handle_event
1e1/core-1
python
@callback def _handle_event(self, event, device_id): if (device_id != self._device_id): return self._apply_event(event) self.async_write_ha_state()
def test_api_pages_list_success(self): '\n\t\tEnsure get request returns 200.\n\t\t' response = self.client.get(reverse('page_list', kwargs={'language': 'nl'})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1)
1,200,097,847,982,713,000
Ensure get request returns 200.
bluebottle/pages/tests/test_api.py
test_api_pages_list_success
maykinmedia/bluebottle
python
def test_api_pages_list_success(self): '\n\t\t\n\t\t' response = self.client.get(reverse('page_list', kwargs={'language': 'nl'})) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.data['count'], 1)
def test_api_pages_list_content(self): '\n\t\tEnsure get request returns record with correct data.\n\t\t' response = self.client.get(reverse('page_list', kwargs={'language': 'nl'})) page = response.data['results'][0] self.assertEqual(page['title'], self.page1.title) self.assertEqual(page['language'], self.page1.language) self.assertEqual(page['body'], "<!-- no items in placeholder 'blog_contents' -->") self.assertEqual(page['full_page'], self.page1.full_page)
-4,210,556,589,173,268,000
Ensure get request returns record with correct data.
bluebottle/pages/tests/test_api.py
test_api_pages_list_content
maykinmedia/bluebottle
python
def test_api_pages_list_content(self): '\n\t\t\n\t\t' response = self.client.get(reverse('page_list', kwargs={'language': 'nl'})) page = response.data['results'][0] self.assertEqual(page['title'], self.page1.title) self.assertEqual(page['language'], self.page1.language) self.assertEqual(page['body'], "<!-- no items in placeholder 'blog_contents' -->") self.assertEqual(page['full_page'], self.page1.full_page)
def test_api_pages_detail_content(self): '\n\t\tEnsure get request returns record with correct data.\n\t\t' response = self.client.get(reverse('page_detail', kwargs={'language': 'en', 'slug': self.page2.slug})) results = response.data self.assertEqual(results['title'], self.page2.title) self.assertEqual(results['language'], self.page2.language) self.assertEqual(results['body'], "<!-- no items in placeholder 'blog_contents' -->") self.assertEqual(results['full_page'], self.page2.full_page)
1,916,083,978,927,994,400
Ensure get request returns record with correct data.
bluebottle/pages/tests/test_api.py
test_api_pages_detail_content
maykinmedia/bluebottle
python
def test_api_pages_detail_content(self): '\n\t\t\n\t\t' response = self.client.get(reverse('page_detail', kwargs={'language': 'en', 'slug': self.page2.slug})) results = response.data self.assertEqual(results['title'], self.page2.title) self.assertEqual(results['language'], self.page2.language) self.assertEqual(results['body'], "<!-- no items in placeholder 'blog_contents' -->") self.assertEqual(results['full_page'], self.page2.full_page)
def _update_project(self, request, data): 'Update project info' domain_id = identity.get_domain_id_for_operation(request) try: project_id = data['project_id'] EXTRA_INFO = settings.PROJECT_TABLE_EXTRA_INFO kwargs = dict(((key, data.get(key)) for key in EXTRA_INFO)) return api.keystone.tenant_update(request, project_id, name=data['name'], description=data['description'], enabled=data['enabled'], domain=domain_id, **kwargs) except exceptions.Conflict: msg = (_('Project name "%s" is already used.') % data['name']) self.failure_message = msg return except Exception as e: LOG.debug('Project update failed: %s', e) exceptions.handle(request, ignore=True) return
753,130,235,582,491,000
Update project info
openstack_dashboard/dashboards/identity/projects/workflows.py
_update_project
LinkleYping/horizon-vul
python
def _update_project(self, request, data): domain_id = identity.get_domain_id_for_operation(request) try: project_id = data['project_id'] EXTRA_INFO = settings.PROJECT_TABLE_EXTRA_INFO kwargs = dict(((key, data.get(key)) for key in EXTRA_INFO)) return api.keystone.tenant_update(request, project_id, name=data['name'], description=data['description'], enabled=data['enabled'], domain=domain_id, **kwargs) except exceptions.Conflict: msg = (_('Project name "%s" is already used.') % data['name']) self.failure_message = msg return except Exception as e: LOG.debug('Project update failed: %s', e) exceptions.handle(request, ignore=True) return
def alphanum_key(s): ' Turn a string into a list of string and number chunks.\n "z23a" -> ["z", 23, "a"]\n ' return [tryInt(c) for c in re.split('([0-9]+)', s)]
2,718,573,483,697,511,000
Turn a string into a list of string and number chunks. "z23a" -> ["z", 23, "a"]
firstsession/measurement/measure_program.py
alphanum_key
Saqqe/Cream
python
def alphanum_key(s): ' Turn a string into a list of string and number chunks.\n "z23a" -> ["z", 23, "a"]\n ' return [tryInt(c) for c in re.split('([0-9]+)', s)]
def sort_nicely(l): ' Sort the given list in the way that humans expect.\n ' l.sort(key=alphanum_key)
7,898,580,595,700,768,000
Sort the given list in the way that humans expect.
firstsession/measurement/measure_program.py
sort_nicely
Saqqe/Cream
python
def sort_nicely(l): ' \n ' l.sort(key=alphanum_key)
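alphanum_key above relies on a tryInt helper that does not appear in these records; a self-contained natural-sort demo with a plausible stand-in for it:
```python
# Natural-sort demo; tryInt is a plausible stand-in for the helper
# referenced but not shown in the records above.
import re

def tryInt(c):
    return int(c) if c.isdigit() else c

def alphanum_key(s):
    return [tryInt(c) for c in re.split("([0-9]+)", s)]

names = ["z2", "z10", "z1"]
names.sort(key=alphanum_key)
print(names)  # ['z1', 'z2', 'z10']
```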
@app.route('/', methods=['POST', 'GET']) def index(): '\n Default app index.\n ' return 'Please contact your System Administrator.'
1,470,829,655,005,580,500
Default app index.
views.py
index
ayushrusiya47/quiz-extensions
python
@app.route('/', methods=['POST', 'GET']) def index(): '\n \n ' return 'Please contact your System Administrator.'
@app.route('/status', methods=['GET']) def status(): '\n Runs smoke tests and reports status\n ' try: job_queue_length = len(q.jobs) except ConnectionError: job_queue_length = (- 1) status = {'tool': 'Quiz Extensions', 'checks': {'index': False, 'xml': False, 'api_key': False, 'redis': False, 'db': False, 'worker': False}, 'url': url_for('index', _external=True), 'api_url': config.API_URL, 'debug': app.debug, 'xml_url': url_for('xml', _external=True), 'job_queue': job_queue_length} try: response = requests.get(url_for('index', _external=True), verify=False) status['checks']['index'] = (response.text == 'Please contact your System Administrator.') except Exception: logger.exception('Index check failed.') try: response = requests.get(url_for('xml', _external=True), verify=False) status['checks']['xml'] = ('application/xml' in response.headers.get('Content-Type')) except Exception: logger.exception('XML check failed.') try: response = requests.get('{}users/self'.format(config.API_URL), headers={'Authorization': ('Bearer ' + config.API_KEY)}) status['checks']['api_key'] = (response.status_code == 200) except Exception: logger.exception('API Key check failed.') try: response = conn.echo('test') status['checks']['redis'] = (response == b'test') except ConnectionError: logger.exception('Redis connection failed.') try: db.session.query(text('1')).all() status['checks']['db'] = True except Exception: logger.exception('DB connection failed.') status['checks']['worker'] = (call('ps aux | grep "rq worker" | grep "quizext" | grep -v grep', shell=True) == 0) status['healthy'] = all(((v is True) for (k, v) in status['checks'].items())) return Response(json.dumps(status), mimetype='application/json')
8,651,372,796,350,657,000
Runs smoke tests and reports status
views.py
status
ayushrusiya47/quiz-extensions
python
@app.route('/status', methods=['GET']) def status(): '\n \n ' try: job_queue_length = len(q.jobs) except ConnectionError: job_queue_length = (- 1) status = {'tool': 'Quiz Extensions', 'checks': {'index': False, 'xml': False, 'api_key': False, 'redis': False, 'db': False, 'worker': False}, 'url': url_for('index', _external=True), 'api_url': config.API_URL, 'debug': app.debug, 'xml_url': url_for('xml', _external=True), 'job_queue': job_queue_length} try: response = requests.get(url_for('index', _external=True), verify=False) status['checks']['index'] = (response.text == 'Please contact your System Administrator.') except Exception: logger.exception('Index check failed.') try: response = requests.get(url_for('xml', _external=True), verify=False) status['checks']['xml'] = ('application/xml' in response.headers.get('Content-Type')) except Exception: logger.exception('XML check failed.') try: response = requests.get('{}users/self'.format(config.API_URL), headers={'Authorization': ('Bearer ' + config.API_KEY)}) status['checks']['api_key'] = (response.status_code == 200) except Exception: logger.exception('API Key check failed.') try: response = conn.echo('test') status['checks']['redis'] = (response == b'test') except ConnectionError: logger.exception('Redis connection failed.') try: db.session.query(text('1')).all() status['checks']['db'] = True except Exception: logger.exception('DB connection failed.') status['checks']['worker'] = (call('ps aux | grep "rq worker" | grep "quizext" | grep -v grep', shell=True) == 0) status['healthy'] = all(((v is True) for (k, v) in status['checks'].items())) return Response(json.dumps(status), mimetype='application/json')
@app.route('/lti.xml', methods=['GET']) def xml(): '\n Returns the lti.xml file for the app.\n ' from urllib.parse import urlparse domain = urlparse(request.url_root).netloc return Response(render_template('lti.xml', tool_id=config.LTI_TOOL_ID, domain=domain), mimetype='application/xml')
4,551,775,549,191,905,000
Returns the lti.xml file for the app.
views.py
xml
ayushrusiya47/quiz-extensions
python
@app.route('/lti.xml', methods=['GET']) def xml(): '\n \n ' from urllib.parse import urlparse domain = urlparse(request.url_root).netloc return Response(render_template('lti.xml', tool_id=config.LTI_TOOL_ID, domain=domain), mimetype='application/xml')
@app.route('/quiz/<course_id>/', methods=['GET']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def quiz(lti=lti, course_id=None): '\n Main landing page for the app.\n\n Displays a page to the user that allows them to select students\n to moderate quizzes for.\n ' return render_template('userselect.html', course_id=course_id, current_page_number=1)
-1,338,936,958,936,886,800
Main landing page for the app. Displays a page to the user that allows them to select students to moderate quizzes for.
views.py
quiz
ayushrusiya47/quiz-extensions
python
@app.route('/quiz/<course_id>/', methods=['GET']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def quiz(lti=lti, course_id=None): '\n Main landing page for the app.\n\n Displays a page to the user that allows them to select students\n to moderate quizzes for.\n ' return render_template('userselect.html', course_id=course_id, current_page_number=1)
@app.route('/refresh/<course_id>/', methods=['POST']) def refresh(course_id=None): '\n Creates a new `refresh_background` job.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: flask.Response\n :returns: A JSON-formatted response containing a url for the started job.\n ' job = q.enqueue_call(func=refresh_background, args=(course_id,)) return Response(json.dumps({'refresh_job_url': url_for('job_status', job_key=job.get_id())}), mimetype='application/json', status=202)
-9,084,823,866,377,085,000
Creates a new `refresh_background` job. :param course_id: The Canvas ID of the Course. :type course_id: int :rtype: flask.Response :returns: A JSON-formatted response containing a url for the started job.
views.py
refresh
ayushrusiya47/quiz-extensions
python
@app.route('/refresh/<course_id>/', methods=['POST']) def refresh(course_id=None): '\n Creates a new `refresh_background` job.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: flask.Response\n :returns: A JSON-formatted response containing a url for the started job.\n ' job = q.enqueue_call(func=refresh_background, args=(course_id,)) return Response(json.dumps({'refresh_job_url': url_for('job_status', job_key=job.get_id())}), mimetype='application/json', status=202)
@app.route('/update/<course_id>/', methods=['POST']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def update(lti=lti, course_id=None): '\n Creates a new `update_background` job.\n\n :param course_id: The Canvas ID of the Course.\n :type coruse_id: int\n :rtype: flask.Response\n :returns: A JSON-formatted response containing urls for the started jobs.\n ' refresh_job = q.enqueue_call(func=refresh_background, args=(course_id,)) update_job = q.enqueue_call(func=update_background, args=(course_id, request.get_json()), depends_on=refresh_job) return Response(json.dumps({'refresh_job_url': url_for('job_status', job_key=refresh_job.get_id()), 'update_job_url': url_for('job_status', job_key=update_job.get_id())}), mimetype='application/json', status=202)
-8,826,940,118,246,629,000
Creates a new `update_background` job. :param course_id: The Canvas ID of the Course. :type course_id: int :rtype: flask.Response :returns: A JSON-formatted response containing URLs for the started jobs.
views.py
update
ayushrusiya47/quiz-extensions
python
@app.route('/update/<course_id>/', methods=['POST']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def update(lti=lti, course_id=None): '\n Creates a new `update_background` job.\n\n :param course_id: The Canvas ID of the Course.\n :type coruse_id: int\n :rtype: flask.Response\n :returns: A JSON-formatted response containing urls for the started jobs.\n ' refresh_job = q.enqueue_call(func=refresh_background, args=(course_id,)) update_job = q.enqueue_call(func=update_background, args=(course_id, request.get_json()), depends_on=refresh_job) return Response(json.dumps({'refresh_job_url': url_for('job_status', job_key=refresh_job.get_id()), 'update_job_url': url_for('job_status', job_key=update_job.get_id())}), mimetype='application/json', status=202)
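The refresh and update records above chain RQ jobs with depends_on; a minimal sketch of that pattern, where the queue name, Redis connection, and placeholder job functions are assumptions (in practice the job functions must live in an importable module so the worker can resolve them):
```python
from redis import Redis
from rq import Queue

# Placeholder job functions for illustration only.
def refresh_background(course_id):
    return f"refreshed {course_id}"

def update_background(course_id, payload):
    return f"updated {course_id}: {payload}"

q = Queue("quizext", connection=Redis())
refresh_job = q.enqueue_call(func=refresh_background, args=(123,))
update_job = q.enqueue_call(
    func=update_background,
    args=(123, {"percent": "200", "user_ids": ["0123456"]}),
    depends_on=refresh_job,  # runs only after the refresh job finishes
)
print(update_job.get_id())
```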
def update_background(course_id, extension_dict): "\n Update time on selected students' quizzes to a specified percentage.\n\n :param course_id: The Canvas ID of the Course to update in\n :type course_id: int\n :param extension_dict: A dictionary that includes the percent of\n time and a list of canvas user ids.\n\n Example:\n {\n 'percent': '300',\n 'user_ids': [\n '0123456',\n '1234567',\n '9867543',\n '5555555'\n ]\n }\n :type extension_dict: dict\n " job = get_current_job() update_job(job, 0, 'Starting...', 'started') with app.app_context(): if (not extension_dict): update_job(job, 0, 'Invalid Request', 'failed', error=True) logger.warning('Invalid Request: {}'.format(extension_dict)) return job.meta try: course_json = get_course(course_id) except requests.exceptions.HTTPError: update_job(job, 0, 'Course not found.', 'failed', error=True) logger.exception('Unable to find course #{}'.format(course_id)) return job.meta course_name = course_json.get('name', '<UNNAMED COURSE>') user_ids = extension_dict.get('user_ids', []) percent = extension_dict.get('percent', None) if (not percent): update_job(job, 0, '`percent` field required.', 'failed', error=True) logger.warning('Percent field not provided. Request: {}'.format(extension_dict)) return job.meta (course, created) = get_or_create(db.session, Course, canvas_id=course_id) course.course_name = course_name db.session.commit() for user_id in user_ids: try: canvas_user = get_user(course_id, user_id) sortable_name = canvas_user.get('sortable_name', '<MISSING NAME>') sis_id = canvas_user.get('sis_user_id') except requests.exceptions.HTTPError: logger.warning('Unable to find user #{} in course #{}'.format(user_id, course_id)) continue (user, created) = get_or_create(db.session, User, canvas_id=user_id) user.sortable_name = sortable_name user.sis_id = sis_id db.session.commit() (extension, created) = get_or_create(db.session, Extension, course_id=course.id, user_id=user.id) extension.percent = percent db.session.commit() quizzes = get_quizzes(course_id) num_quizzes = len(quizzes) quiz_time_list = [] unchanged_quiz_time_list = [] if (num_quizzes < 1): update_job(job, 0, 'Sorry, there are no quizzes for this course.', 'failed', error=True) logger.warning('No quizzes found for course {}. Unable to update.'.format(course_id)) return job.meta for (index, quiz) in enumerate(quizzes): quiz_id = quiz.get('id', None) quiz_title = quiz.get('title', '[UNTITLED QUIZ]') comp_perc = int(((float(index) / float(num_quizzes)) * 100)) updating_str = 'Updating quiz #{} - {} [{} of {}]' update_job(job, comp_perc, updating_str.format(quiz_id, quiz_title, (index + 1), num_quizzes), 'processing', error=False) extension_response = extend_quiz(course_id, quiz, percent, user_ids) if (extension_response.get('success', False) is True): (quiz_obj, created) = get_or_create(db.session, Quiz, canvas_id=quiz_id, course_id=course.id) quiz_obj.title = quiz_title quiz_obj.time_limit = quiz.get('time_limit') db.session.commit() added_time = extension_response.get('added_time', None) if (added_time is not None): quiz_time_list.append({'title': quiz_title, 'added_time': added_time}) else: unchanged_quiz_time_list.append({'title': quiz_title}) else: update_job(job, comp_perc, extension_response.get('message', 'An unknown error occured.'), 'failed', error=True) logger.error('Extension failed: {}'.format(extension_response)) return job.meta msg_str = 'Success! {} {} been updated for {} student(s) to have {}% time. {} {} no time limit and were left unchanged.' message = msg_str.format(len(quiz_time_list), ('quizzes have' if (len(quiz_time_list) != 1) else 'quiz has'), len(user_ids), percent, len(unchanged_quiz_time_list), ('quizzes have' if (len(unchanged_quiz_time_list) != 1) else 'quiz has')) update_job(job, 100, message, 'complete', error=False) job.meta['quiz_list'] = quiz_time_list job.meta['unchanged_list'] = unchanged_quiz_time_list job.save() return job.meta
-4,985,010,300,237,697,000
Update time on selected students' quizzes to a specified percentage. :param course_id: The Canvas ID of the Course to update in :type course_id: int :param extension_dict: A dictionary that includes the percent of time and a list of canvas user ids. Example: { 'percent': '300', 'user_ids': [ '0123456', '1234567', '9867543', '5555555' ] } :type extension_dict: dict
views.py
update_background
ayushrusiya47/quiz-extensions
python
def update_background(course_id, extension_dict): "\n Update time on selected students' quizzes to a specified percentage.\n\n :param course_id: The Canvas ID of the Course to update in\n :type course_id: int\n :param extension_dict: A dictionary that includes the percent of\n time and a list of canvas user ids.\n\n Example:\n {\n 'percent': '300',\n 'user_ids': [\n '0123456',\n '1234567',\n '9867543',\n '5555555'\n ]\n }\n :type extension_dict: dict\n " job = get_current_job() update_job(job, 0, 'Starting...', 'started') with app.app_context(): if (not extension_dict): update_job(job, 0, 'Invalid Request', 'failed', error=True) logger.warning('Invalid Request: {}'.format(extension_dict)) return job.meta try: course_json = get_course(course_id) except requests.exceptions.HTTPError: update_job(job, 0, 'Course not found.', 'failed', error=True) logger.exception('Unable to find course #{}'.format(course_id)) return job.meta course_name = course_json.get('name', '<UNNAMED COURSE>') user_ids = extension_dict.get('user_ids', []) percent = extension_dict.get('percent', None) if (not percent): update_job(job, 0, '`percent` field required.', 'failed', error=True) logger.warning('Percent field not provided. Request: {}'.format(extension_dict)) return job.meta (course, created) = get_or_create(db.session, Course, canvas_id=course_id) course.course_name = course_name db.session.commit() for user_id in user_ids: try: canvas_user = get_user(course_id, user_id) sortable_name = canvas_user.get('sortable_name', '<MISSING NAME>') sis_id = canvas_user.get('sis_user_id') except requests.exceptions.HTTPError: logger.warning('Unable to find user #{} in course #{}'.format(user_id, course_id)) continue (user, created) = get_or_create(db.session, User, canvas_id=user_id) user.sortable_name = sortable_name user.sis_id = sis_id db.session.commit() (extension, created) = get_or_create(db.session, Extension, course_id=course.id, user_id=user.id) extension.percent = percent db.session.commit() quizzes = get_quizzes(course_id) num_quizzes = len(quizzes) quiz_time_list = [] unchanged_quiz_time_list = [] if (num_quizzes < 1): update_job(job, 0, 'Sorry, there are no quizzes for this course.', 'failed', error=True) logger.warning('No quizzes found for course {}. Unable to update.'.format(course_id)) return job.meta for (index, quiz) in enumerate(quizzes): quiz_id = quiz.get('id', None) quiz_title = quiz.get('title', '[UNTITLED QUIZ]') comp_perc = int(((float(index) / float(num_quizzes)) * 100)) updating_str = 'Updating quiz #{} - {} [{} of {}]' update_job(job, comp_perc, updating_str.format(quiz_id, quiz_title, (index + 1), num_quizzes), 'processing', error=False) extension_response = extend_quiz(course_id, quiz, percent, user_ids) if (extension_response.get('success', False) is True): (quiz_obj, created) = get_or_create(db.session, Quiz, canvas_id=quiz_id, course_id=course.id) quiz_obj.title = quiz_title quiz_obj.time_limit = quiz.get('time_limit') db.session.commit() added_time = extension_response.get('added_time', None) if (added_time is not None): quiz_time_list.append({'title': quiz_title, 'added_time': added_time}) else: unchanged_quiz_time_list.append({'title': quiz_title}) else: update_job(job, comp_perc, extension_response.get('message', 'An unknown error occured.'), 'failed', error=True) logger.error('Extension failed: {}'.format(extension_response)) return job.meta msg_str = 'Success! {} {} been updated for {} student(s) to have {}% time. {} {} no time limit and were left unchanged.' 
message = msg_str.format(len(quiz_time_list), ('quizzes have' if (len(quiz_time_list) != 1) else 'quiz has'), len(user_ids), percent, len(unchanged_quiz_time_list), ('quizzes have' if (len(unchanged_quiz_time_list) != 1) else 'quiz has')) update_job(job, 100, message, 'complete', error=False) job.meta['quiz_list'] = quiz_time_list job.meta['unchanged_list'] = unchanged_quiz_time_list job.save() return job.meta
def refresh_background(course_id): '\n Look up existing extensions and apply them to new quizzes.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: dict\n :returns: A dictionary containing two parts:\n\n - success `bool` False if there was an error, True otherwise.\n - message `str` A long description of success or failure.\n ' job = get_current_job() update_job(job, 0, 'Starting...', 'started') with app.app_context(): (course, created) = get_or_create(db.session, Course, canvas_id=course_id) try: course_name = get_course(course_id).get('name', '<UNNAMED COURSE>') course.course_name = course_name db.session.commit() except requests.exceptions.HTTPError: update_job(job, 0, 'Course not found.', 'failed', error=True) logger.exception('Unable to find course #{}'.format(course_id)) return job.meta quizzes = missing_and_stale_quizzes(course_id) num_quizzes = len(quizzes) if (num_quizzes < 1): update_job(job, 100, 'Complete. No quizzes required updates.', 'complete', error=False) return job.meta percent_user_map = defaultdict(list) inactive_list = [] update_job(job, 0, 'Getting past extensions.', 'processing', False) for extension in course.extensions: if (not extension.active): inactive_list.append(extension.user.sortable_name) logger.debug('Extension #{} is inactive.'.format(extension.id)) continue user_canvas_id = User.query.filter_by(id=extension.user_id).first().canvas_id try: canvas_user = get_user(course_id, user_canvas_id) enrolls = canvas_user.get('enrollments', []) type_list = [e['type'] for e in enrolls if (e['enrollment_state'] == 'active')] if (not any(((t == 'StudentEnrollment') for t in type_list))): logger.info('User #{} was found in course #{}, but is not an active student. Deactivating extension #{}. Roles found: {}'.format(user_canvas_id, course_id, extension.id, (', '.join(type_list) if (len(enrolls) > 0) else None))) extension.active = False db.session.commit() inactive_list.append(extension.user.sortable_name) continue except requests.exceptions.HTTPError: log_str = 'User #{} not in course #{}. Deactivating extension #{}.' logger.info(log_str.format(user_canvas_id, course_id, extension.id)) extension.active = False db.session.commit() inactive_list.append(extension.user.sortable_name) continue percent_user_map[extension.percent].append(user_canvas_id) if (len(percent_user_map) < 1): msg_str = 'No active extensions were found.<br>' if (len(inactive_list) > 0): msg_str += ' Extensions for the following students are inactive:<br>{}' msg_str = msg_str.format('<br>'.join(inactive_list)) update_job(job, 100, msg_str, 'complete', error=False) return job.meta for (index, quiz) in enumerate(quizzes): quiz_id = quiz.get('id', None) quiz_title = quiz.get('title', '[UNTITLED QUIZ]') comp_perc = int(((float(index) / float(num_quizzes)) * 100)) refreshing_str = 'Refreshing quiz #{} - {} [{} of {}]' update_job(job, comp_perc, refreshing_str.format(quiz_id, quiz_title, (index + 1), num_quizzes), 'processing', error=False) for (percent, user_list) in percent_user_map.items(): extension_response = extend_quiz(course_id, quiz, percent, user_list) if (extension_response.get('success', False) is True): (quiz_obj, created) = get_or_create(db.session, Quiz, canvas_id=quiz_id, course_id=course.id) quiz_obj.title = quiz_title quiz_obj.time_limit = quiz.get('time_limit') db.session.commit() else: error_message = "Some quizzes couldn't be updated. 
" error_message += extension_response.get('message', '') update_job(job, comp_perc, error_message, 'failed', error=True) return job.meta msg = '{} quizzes have been updated.'.format(len(quizzes)) update_job(job, 100, msg, 'complete', error=False) return job.meta
-606,887,113,193,659,500
Look up existing extensions and apply them to new quizzes. :param course_id: The Canvas ID of the Course. :type course_id: int :rtype: dict :returns: A dictionary containing two parts: - success `bool` False if there was an error, True otherwise. - message `str` A long description of success or failure.
views.py
refresh_background
ayushrusiya47/quiz-extensions
python
def refresh_background(course_id): '\n Look up existing extensions and apply them to new quizzes.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: dict\n :returns: A dictionary containing two parts:\n\n - success `bool` False if there was an error, True otherwise.\n - message `str` A long description of success or failure.\n ' job = get_current_job() update_job(job, 0, 'Starting...', 'started') with app.app_context(): (course, created) = get_or_create(db.session, Course, canvas_id=course_id) try: course_name = get_course(course_id).get('name', '<UNNAMED COURSE>') course.course_name = course_name db.session.commit() except requests.exceptions.HTTPError: update_job(job, 0, 'Course not found.', 'failed', error=True) logger.exception('Unable to find course #{}'.format(course_id)) return job.meta quizzes = missing_and_stale_quizzes(course_id) num_quizzes = len(quizzes) if (num_quizzes < 1): update_job(job, 100, 'Complete. No quizzes required updates.', 'complete', error=False) return job.meta percent_user_map = defaultdict(list) inactive_list = [] update_job(job, 0, 'Getting past extensions.', 'processing', False) for extension in course.extensions: if (not extension.active): inactive_list.append(extension.user.sortable_name) logger.debug('Extension #{} is inactive.'.format(extension.id)) continue user_canvas_id = User.query.filter_by(id=extension.user_id).first().canvas_id try: canvas_user = get_user(course_id, user_canvas_id) enrolls = canvas_user.get('enrollments', []) type_list = [e['type'] for e in enrolls if (e['enrollment_state'] == 'active')] if (not any(((t == 'StudentEnrollment') for t in type_list))): logger.info('User #{} was found in course #{}, but is not an active student. Deactivating extension #{}. Roles found: {}'.format(user_canvas_id, course_id, extension.id, (', '.join(type_list) if (len(enrolls) > 0) else None))) extension.active = False db.session.commit() inactive_list.append(extension.user.sortable_name) continue except requests.exceptions.HTTPError: log_str = 'User #{} not in course #{}. Deactivating extension #{}.' logger.info(log_str.format(user_canvas_id, course_id, extension.id)) extension.active = False db.session.commit() inactive_list.append(extension.user.sortable_name) continue percent_user_map[extension.percent].append(user_canvas_id) if (len(percent_user_map) < 1): msg_str = 'No active extensions were found.<br>' if (len(inactive_list) > 0): msg_str += ' Extensions for the following students are inactive:<br>{}' msg_str = msg_str.format('<br>'.join(inactive_list)) update_job(job, 100, msg_str, 'complete', error=False) return job.meta for (index, quiz) in enumerate(quizzes): quiz_id = quiz.get('id', None) quiz_title = quiz.get('title', '[UNTITLED QUIZ]') comp_perc = int(((float(index) / float(num_quizzes)) * 100)) refreshing_str = 'Refreshing quiz #{} - {} [{} of {}]' update_job(job, comp_perc, refreshing_str.format(quiz_id, quiz_title, (index + 1), num_quizzes), 'processing', error=False) for (percent, user_list) in percent_user_map.items(): extension_response = extend_quiz(course_id, quiz, percent, user_list) if (extension_response.get('success', False) is True): (quiz_obj, created) = get_or_create(db.session, Quiz, canvas_id=quiz_id, course_id=course.id) quiz_obj.title = quiz_title quiz_obj.time_limit = quiz.get('time_limit') db.session.commit() else: error_message = "Some quizzes couldn't be updated. 
" error_message += extension_response.get('message', ) update_job(job, comp_perc, error_message, 'failed', error=True) return job.meta msg = '{} quizzes have been updated.'.format(len(quizzes)) update_job(job, 100, msg, 'complete', error=False) return job.meta
@app.route('/missing_and_stale_quizzes/<course_id>/', methods=['GET']) def missing_and_stale_quizzes_check(course_id): '\n Check if there are missing quizzes.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: str\n :returns: A JSON-formatted string representation of a boolean.\n "true" if there are missing quizzes, "false" if there are not.\n ' course = Course.query.filter_by(canvas_id=course_id).first() if (course is None): return 'false' num_extensions = Extension.query.filter_by(course_id=course.id).count() if (num_extensions == 0): return 'false' missing = (len(missing_and_stale_quizzes(course_id, True)) > 0) return json.dumps(missing)
-8,747,239,967,523,165,000
Check if there are missing quizzes. :param course_id: The Canvas ID of the Course. :type course_id: int :rtype: str :returns: A JSON-formatted string representation of a boolean. "true" if there are missing quizzes, "false" if there are not.
views.py
missing_and_stale_quizzes_check
ayushrusiya47/quiz-extensions
python
@app.route('/missing_and_stale_quizzes/<course_id>/', methods=['GET']) def missing_and_stale_quizzes_check(course_id): '\n Check if there are missing quizzes.\n\n :param course_id: The Canvas ID of the Course.\n :type course_id: int\n :rtype: str\n :returns: A JSON-formatted string representation of a boolean.\n "true" if there are missing quizzes, "false" if there are not.\n ' course = Course.query.filter_by(canvas_id=course_id).first() if (course is None): return 'false' num_extensions = Extension.query.filter_by(course_id=course.id).count() if (num_extensions == 0): return 'false' missing = (len(missing_and_stale_quizzes(course_id, True)) > 0) return json.dumps(missing)
@app.route('/filter/<course_id>/', methods=['GET']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def filter(lti=lti, course_id=None): '\n Display a filtered and paginated list of students in the course.\n\n :param course_id:\n :type: int\n :rtype: str\n :returns: A list of students in the course using the template\n user_list.html.\n ' query = request.args.get('query', '').lower() page = int(request.args.get('page', 1)) per_page = int(request.args.get('per_page', config.DEFAULT_PER_PAGE)) (user_list, max_pages) = search_students(course_id, per_page=per_page, page=page, search_term=query) if ((not user_list) or (max_pages < 1)): user_list = [] max_pages = 1 return render_template('user_list.html', users=user_list, current_page_number=page, max_pages=max_pages)
-5,975,147,192,149,251,000
Display a filtered and paginated list of students in the course. :param course_id: :type: int :rtype: str :returns: A list of students in the course using the template user_list.html.
views.py
filter
ayushrusiya47/quiz-extensions
python
@app.route('/filter/<course_id>/', methods=['GET']) @check_valid_user @lti(error=error, request='session', role='staff', app=app) def filter(lti=lti, course_id=None): '\n Display a filtered and paginated list of students in the course.\n\n :param course_id:\n :type: int\n :rtype: str\n :returns: A list of students in the course using the template\n user_list.html.\n ' query = request.args.get('query', '').lower() page = int(request.args.get('page', 1)) per_page = int(request.args.get('per_page', config.DEFAULT_PER_PAGE)) (user_list, max_pages) = search_students(course_id, per_page=per_page, page=page, search_term=query) if ((not user_list) or (max_pages < 1)): user_list = [] max_pages = 1 return render_template('user_list.html', users=user_list, current_page_number=page, max_pages=max_pages)
@app.route('/launch', methods=['POST']) @lti(error=error, request='initial', role='staff', app=app) def lti_tool(lti=lti): '\n Bootstrapper for lti.\n ' course_id = request.values.get('custom_canvas_course_id') canvas_user_id = request.values.get('custom_canvas_user_id') canvas_domain = request.values.get('custom_canvas_api_domain') if (canvas_domain not in config.ALLOWED_CANVAS_DOMAINS): msg = '<p>This tool is only available from the following domain(s):<br/>{}</p><p>You attempted to access from this domain:<br/>{}</p>' return render_template('error.html', message=msg.format(', '.join(config.ALLOWED_CANVAS_DOMAINS), canvas_domain)) roles = request.values.get('roles', []) session['is_admin'] = ('Administrator' in roles) session['canvas_user_id'] = canvas_user_id session['lti_logged_in'] = True return redirect(url_for('quiz', course_id=course_id))
1,882,419,556,337,132,300
Bootstrapper for lti.
views.py
lti_tool
ayushrusiya47/quiz-extensions
python
@app.route('/launch', methods=['POST']) @lti(error=error, request='initial', role='staff', app=app) def lti_tool(lti=lti): '\n \n ' course_id = request.values.get('custom_canvas_course_id') canvas_user_id = request.values.get('custom_canvas_user_id') canvas_domain = request.values.get('custom_canvas_api_domain') if (canvas_domain not in config.ALLOWED_CANVAS_DOMAINS): msg = '<p>This tool is only available from the following domain(s):<br/>{}</p><p>You attempted to access from this domain:<br/>{}</p>' return render_template('error.html', message=msg.format(', '.join(config.ALLOWED_CANVAS_DOMAINS), canvas_domain)) roles = request.values.get('roles', []) session['is_admin'] = ('Administrator' in roles) session['canvas_user_id'] = canvas_user_id session['lti_logged_in'] = True return redirect(url_for('quiz', course_id=course_id))
@wraps(f) def decorated_function(*args, **kwargs): '\n Decorator to check if the user is allowed access to the app.\n If user is allowed, return the decorated function.\n Otherwise, return an error page with corresponding message.\n ' canvas_user_id = session.get('canvas_user_id') lti_logged_in = session.get('lti_logged_in', False) if ((not lti_logged_in) or (not canvas_user_id)): return render_template('error.html', message='Not allowed!') if ('course_id' not in kwargs.keys()): return render_template('error.html', message='No course_id provided.') course_id = int(kwargs.get('course_id')) if (not session.get('is_admin', False)): enrollments_url = '{}courses/{}/enrollments'.format(config.API_URL, course_id) payload = {'user_id': canvas_user_id, 'type': ['TeacherEnrollment', 'TaEnrollment', 'DesignerEnrollment']} user_enrollments_response = requests.get(enrollments_url, data=json.dumps(payload), headers=json_headers) user_enrollments = user_enrollments_response.json() if ((not user_enrollments) or ('errors' in user_enrollments)): message = 'You are not enrolled in this course as a Teacher, TA, or Designer.' return render_template('error.html', message=message) return f(*args, **kwargs)
-1,688,540,081,320,270,000
Decorator to check if the user is allowed access to the app. If user is allowed, return the decorated function. Otherwise, return an error page with corresponding message.
views.py
decorated_function
ayushrusiya47/quiz-extensions
python
@wraps(f) def decorated_function(*args, **kwargs): '\n Decorator to check if the user is allowed access to the app.\n If user is allowed, return the decorated function.\n Otherwise, return an error page with corresponding message.\n ' canvas_user_id = session.get('canvas_user_id') lti_logged_in = session.get('lti_logged_in', False) if ((not lti_logged_in) or (not canvas_user_id)): return render_template('error.html', message='Not allowed!') if ('course_id' not in kwargs.keys()): return render_template('error.html', message='No course_id provided.') course_id = int(kwargs.get('course_id')) if (not session.get('is_admin', False)): enrollments_url = '{}courses/{}/enrollments'.format(config.API_URL, course_id) payload = {'user_id': canvas_user_id, 'type': ['TeacherEnrollment', 'TaEnrollment', 'DesignerEnrollment']} user_enrollments_response = requests.get(enrollments_url, data=json.dumps(payload), headers=json_headers) user_enrollments = user_enrollments_response.json() if ((not user_enrollments) or ('errors' in user_enrollments)): message = 'You are not enrolled in this course as a Teacher, TA, or Designer.' return render_template('error.html', message=message) return f(*args, **kwargs)
def deferToThreadPool(reactor, threadpool, f, *args, **kwargs): "\n Call the function C{f} using a thread from the given threadpool and return\n the result as a Deferred.\n\n This function is only used by client code which is maintaining its own\n threadpool. To run a function in the reactor's threadpool, use\n C{deferToThread}.\n\n @param reactor: The reactor in whose main thread the Deferred will be\n invoked.\n\n @param threadpool: An object which supports the C{callInThreadWithCallback}\n method of C{twisted.python.threadpool.ThreadPool}.\n\n @param f: The function to call.\n @param *args: positional arguments to pass to f.\n @param **kwargs: keyword arguments to pass to f.\n\n @return: A Deferred which fires a callback with the result of f, or an\n errback with a L{twisted.python.failure.Failure} if f throws an\n exception.\n " d = defer.Deferred() def onResult(success, result): if success: reactor.callFromThread(d.callback, result) else: reactor.callFromThread(d.errback, result) threadpool.callInThreadWithCallback(onResult, f, *args, **kwargs) return d
4,905,041,117,132,150,000
Call the function C{f} using a thread from the given threadpool and return the result as a Deferred. This function is only used by client code which is maintaining its own threadpool. To run a function in the reactor's threadpool, use C{deferToThread}. @param reactor: The reactor in whose main thread the Deferred will be invoked. @param threadpool: An object which supports the C{callInThreadWithCallback} method of C{twisted.python.threadpool.ThreadPool}. @param f: The function to call. @param *args: positional arguments to pass to f. @param **kwargs: keyword arguments to pass to f. @return: A Deferred which fires a callback with the result of f, or an errback with a L{twisted.python.failure.Failure} if f throws an exception.
src/twisted/internet/threads.py
deferToThreadPool
adamtheturtle/twisted
python
def deferToThreadPool(reactor, threadpool, f, *args, **kwargs): "\n Call the function C{f} using a thread from the given threadpool and return\n the result as a Deferred.\n\n This function is only used by client code which is maintaining its own\n threadpool. To run a function in the reactor's threadpool, use\n C{deferToThread}.\n\n @param reactor: The reactor in whose main thread the Deferred will be\n invoked.\n\n @param threadpool: An object which supports the C{callInThreadWithCallback}\n method of C{twisted.python.threadpool.ThreadPool}.\n\n @param f: The function to call.\n @param *args: positional arguments to pass to f.\n @param **kwargs: keyword arguments to pass to f.\n\n @return: A Deferred which fires a callback with the result of f, or an\n errback with a L{twisted.python.failure.Failure} if f throws an\n exception.\n " d = defer.Deferred() def onResult(success, result): if success: reactor.callFromThread(d.callback, result) else: reactor.callFromThread(d.errback, result) threadpool.callInThreadWithCallback(onResult, f, *args, **kwargs) return d
def deferToThread(f, *args, **kwargs): '\n Run a function in a thread and return the result as a Deferred.\n\n @param f: The function to call.\n @param *args: positional arguments to pass to f.\n @param **kwargs: keyword arguments to pass to f.\n\n @return: A Deferred which fires a callback with the result of f,\n or an errback with a L{twisted.python.failure.Failure} if f throws\n an exception.\n ' from twisted.internet import reactor return deferToThreadPool(reactor, reactor.getThreadPool(), f, *args, **kwargs)
6,221,264,868,728,162,000
Run a function in a thread and return the result as a Deferred. @param f: The function to call. @param *args: positional arguments to pass to f. @param **kwargs: keyword arguments to pass to f. @return: A Deferred which fires a callback with the result of f, or an errback with a L{twisted.python.failure.Failure} if f throws an exception.
src/twisted/internet/threads.py
deferToThread
adamtheturtle/twisted
python
def deferToThread(f, *args, **kwargs): '\n Run a function in a thread and return the result as a Deferred.\n\n @param f: The function to call.\n @param *args: positional arguments to pass to f.\n @param **kwargs: keyword arguments to pass to f.\n\n @return: A Deferred which fires a callback with the result of f,\n or an errback with a L{twisted.python.failure.Failure} if f throws\n an exception.\n ' from twisted.internet import reactor return deferToThreadPool(reactor, reactor.getThreadPool(), f, *args, **kwargs)
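A minimal usage sketch for deferToThread, not taken from the recorded repository: the blocking helper fetch_rows and its return value are made-up placeholders. The blocking call runs in the reactor's thread pool and the callback fires back in the reactor thread.

from twisted.internet import reactor
from twisted.internet.threads import deferToThread

def fetch_rows():
    # Pretend this blocks on slow I/O; it runs in a thread-pool thread.
    return [1, 2, 3]

def on_result(rows):
    # Called in the reactor thread with fetch_rows' return value.
    print('got', rows)
    reactor.stop()

d = deferToThread(fetch_rows)   # returns a Deferred immediately
d.addCallback(on_result)
d.addErrback(lambda f: (f.printTraceback(), reactor.stop()))
reactor.run()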
def _runMultiple(tupleList): '\n Run a list of functions.\n ' for (f, args, kwargs) in tupleList: f(*args, **kwargs)
5,765,581,595,930,412,000
Run a list of functions.
src/twisted/internet/threads.py
_runMultiple
adamtheturtle/twisted
python
def _runMultiple(tupleList): '\n \n ' for (f, args, kwargs) in tupleList: f(*args, **kwargs)
def callMultipleInThread(tupleList): '\n Run a list of functions in the same thread.\n\n tupleList should be a list of (function, argsList, kwargsDict) tuples.\n ' from twisted.internet import reactor reactor.callInThread(_runMultiple, tupleList)
-7,280,785,097,039,103,000
Run a list of functions in the same thread. tupleList should be a list of (function, argsList, kwargsDict) tuples.
src/twisted/internet/threads.py
callMultipleInThread
adamtheturtle/twisted
python
def callMultipleInThread(tupleList): '\n Run a list of functions in the same thread.\n\n tupleList should be a list of (function, argsList, kwargsDict) tuples.\n ' from twisted.internet import reactor reactor.callInThread(_runMultiple, tupleList)
def blockingCallFromThread(reactor, f, *a, **kw): "\n Run a function in the reactor from a thread, and wait for the result\n synchronously. If the function returns a L{Deferred}, wait for its\n result and return that.\n\n @param reactor: The L{IReactorThreads} provider which will be used to\n schedule the function call.\n @param f: the callable to run in the reactor thread\n @type f: any callable.\n @param a: the arguments to pass to C{f}.\n @param kw: the keyword arguments to pass to C{f}.\n\n @return: the result of the L{Deferred} returned by C{f}, or the result\n of C{f} if it returns anything other than a L{Deferred}.\n\n @raise: If C{f} raises a synchronous exception,\n C{blockingCallFromThread} will raise that exception. If C{f}\n returns a L{Deferred} which fires with a L{Failure},\n C{blockingCallFromThread} will raise that failure's exception (see\n L{Failure.raiseException}).\n " queue = Queue.Queue() def _callFromThread(): result = defer.maybeDeferred(f, *a, **kw) result.addBoth(queue.put) reactor.callFromThread(_callFromThread) result = queue.get() if isinstance(result, failure.Failure): result.raiseException() return result
-6,062,473,800,116,598,000
Run a function in the reactor from a thread, and wait for the result synchronously. If the function returns a L{Deferred}, wait for its result and return that. @param reactor: The L{IReactorThreads} provider which will be used to schedule the function call. @param f: the callable to run in the reactor thread @type f: any callable. @param a: the arguments to pass to C{f}. @param kw: the keyword arguments to pass to C{f}. @return: the result of the L{Deferred} returned by C{f}, or the result of C{f} if it returns anything other than a L{Deferred}. @raise: If C{f} raises a synchronous exception, C{blockingCallFromThread} will raise that exception. If C{f} returns a L{Deferred} which fires with a L{Failure}, C{blockingCallFromThread} will raise that failure's exception (see L{Failure.raiseException}).
src/twisted/internet/threads.py
blockingCallFromThread
adamtheturtle/twisted
python
def blockingCallFromThread(reactor, f, *a, **kw): "\n Run a function in the reactor from a thread, and wait for the result\n synchronously. If the function returns a L{Deferred}, wait for its\n result and return that.\n\n @param reactor: The L{IReactorThreads} provider which will be used to\n schedule the function call.\n @param f: the callable to run in the reactor thread\n @type f: any callable.\n @param a: the arguments to pass to C{f}.\n @param kw: the keyword arguments to pass to C{f}.\n\n @return: the result of the L{Deferred} returned by C{f}, or the result\n of C{f} if it returns anything other than a L{Deferred}.\n\n @raise: If C{f} raises a synchronous exception,\n C{blockingCallFromThread} will raise that exception. If C{f}\n returns a L{Deferred} which fires with a L{Failure},\n C{blockingCallFromThread} will raise that failure's exception (see\n L{Failure.raiseException}).\n " queue = Queue.Queue() def _callFromThread(): result = defer.maybeDeferred(f, *a, **kw) result.addBoth(queue.put) reactor.callFromThread(_callFromThread) result = queue.get() if isinstance(result, failure.Failure): result.raiseException() return result
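A minimal sketch of driving blockingCallFromThread from a worker thread, assuming a hypothetical reactor-side helper (reactor_side) and an arbitrary 0.1 s delay; neither comes from the recorded source. The worker blocks until the Deferred returned on the reactor side fires.

from twisted.internet import reactor
from twisted.internet.task import deferLater
from twisted.internet.threads import blockingCallFromThread, deferToThread

def reactor_side():
    # Runs in the reactor thread and returns a Deferred that fires later.
    return deferLater(reactor, 0.1, lambda: 'done')

def worker():
    # Runs in a pool thread; blocks here until the Deferred above fires.
    result = blockingCallFromThread(reactor, reactor_side)
    print('worker saw:', result)
    reactor.callFromThread(reactor.stop)

deferToThread(worker)
reactor.run()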
def read_tle_file(tlefile, **kwargs): '\n \n Read in a TLE file and return the TLE that is closest to the date you want to\n propagate the orbit to.\n ' times = [] line1 = [] line2 = [] from os import path from datetime import datetime try: f = open(tlefile, 'r') except FileNotFoundError: print(('Unable to open: ' + tlefile)) ln = 0 for line in f: if (ln == 0): year = int(line[18:20]) day = int(line[20:23]) times.extend([datetime.strptime('{}:{}'.format(year, day), '%y:%j')]) line1.extend([line.strip()]) ln = 1 else: ln = 0 line2.extend([line.strip()]) f.close() return (times, line1, line2)
8,202,728,023,310,814,000
Read in a TLE file and return the TLE that is closest to the date you want to propagate the orbit to.
nustar_lunar_pointing/tracking.py
read_tle_file
bwgref/nustar_lunar_pointing
python
def read_tle_file(tlefile, **kwargs): '\n \n Read in a TLE file and return the TLE that is closest to the date you want to\n propagate the orbit to.\n ' times = [] line1 = [] line2 = [] from os import path from datetime import datetime try: f = open(tlefile, 'r') except FileNotFoundError: print(('Unable to open: ' + tlefile)) ln = 0 for line in f: if (ln == 0): year = int(line[18:20]) day = int(line[20:23]) times.extend([datetime.strptime('{}:{}'.format(year, day), '%y:%j')]) line1.extend([line.strip()]) ln = 1 else: ln = 0 line2.extend([line.strip()]) f.close() return (times, line1, line2)
def get_epoch_tle(epoch, tlefile): '\n \n Find the TLE that is closest to the epoch you want to search.\n \n epoch is a datetime object, tlefile is the file you want to search through.\n \n ' (times, line1, line2) = read_tle_file(tlefile) from datetime import datetime from astropy.time import Time if (type(epoch) is Time): epoch = epoch.datetime mindt = 100.0 min_ind = 0 for (ind, t) in enumerate(times): dt = abs((epoch - t).days) if (dt < mindt): min_ind = ind mindt = dt good_line1 = line1[min_ind] good_line2 = line2[min_ind] return (mindt, good_line1, good_line2)
-7,966,222,560,800,649,000
Find the TLE that is closest to the epoch you want to search. epoch is a datetime object, tlefile is the file you want to search through.
nustar_lunar_pointing/tracking.py
get_epoch_tle
bwgref/nustar_lunar_pointing
python
def get_epoch_tle(epoch, tlefile): '\n \n Find the TLE that is closest to the epoch you want to search.\n \n epoch is a datetime object, tlefile is the file you want to search through.\n \n ' (times, line1, line2) = read_tle_file(tlefile) from datetime import datetime from astropy.time import Time if (type(epoch) is Time): epoch = epoch.datetime mindt = 100.0 min_ind = 0 for (ind, t) in enumerate(times): dt = abs((epoch - t).days) if (dt < mindt): min_ind = ind mindt = dt good_line1 = line1[min_ind] good_line2 = line2[min_ind] return (mindt, good_line1, good_line2)
def convert_nustar_time(t, leap=5): ' \n \n Converts MET seconds to a datetime object.\n \n Default is to subtract off 5 leap seconds.\n\n ' import astropy.units as u mjdref = (55197 * u.d) met = (((t - leap) * u.s) + mjdref) met_datetime = Time(met.to(u.d), format='mjd').datetime return met_datetime
1,218,465,157,810,114,300
Converts MET seconds to a datetime object. Default is to subtract off 5 leap seconds.
nustar_lunar_pointing/tracking.py
convert_nustar_time
bwgref/nustar_lunar_pointing
python
def convert_nustar_time(t, leap=5): ' \n \n Converts MET seconds to a datetime object.\n \n Default is to subtract off 5 leap seconds.\n\n ' import astropy.units as u mjdref = (55197 * u.d) met = (((t - leap) * u.s) + mjdref) met_datetime = Time(met.to(u.d), format='mjd').datetime return met_datetime
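The MET-to-datetime arithmetic above can be spot-checked by hand. The sketch below mirrors it with an illustrative MET value, the same MJDREF of 55197, and the default 5 leap seconds; the chosen MET value is arbitrary.

import astropy.units as u
from astropy.time import Time

met_seconds = 3.0e8                                      # illustrative MET value only
mjd = ((met_seconds - 5) * u.s).to(u.d) + (55197 * u.d)  # subtract leap seconds, add MJDREF
print(Time(mjd.value, format='mjd').datetime)            # datetime corresponding to the MET value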
def get_nustar_location(checktime, line1, line2): ' \n \n Code to determine the spacecraft location from the TLE.\n \n Inputs are a datetime object and the two lines of the TLE you want to use.\n \n Returns a tuple that has the X, Y, and Z geocentric coordinates (in km).\n \n ' from sgp4.earth_gravity import wgs72 from sgp4.io import twoline2rv from astropy.coordinates import EarthLocation satellite = twoline2rv(line1, line2, wgs72) (position, velocity) = satellite.propagate(checktime.year, checktime.month, checktime.day, checktime.hour, checktime.minute, checktime.second) return position
167,206,073,857,276,930
Code to determine the spacecraft location from the TLE. Inputs are a datetime object and the two lines of the TLE you want to use. Returns a tuple that has the X, Y, and Z geocentric coordinates (in km).
nustar_lunar_pointing/tracking.py
get_nustar_location
bwgref/nustar_lunar_pointing
python
def get_nustar_location(checktime, line1, line2): ' \n \n Code to determine the spacecraft location from the TLE.\n \n Inputs are a datetime object and the two lines of the TLE you want to use.\n \n Returns a tuple that has the X, Y, and Z geocentric coordinates (in km).\n \n ' from sgp4.earth_gravity import wgs72 from sgp4.io import twoline2rv from astropy.coordinates import EarthLocation satellite = twoline2rv(line1, line2, wgs72) (position, velocity) = satellite.propagate(checktime.year, checktime.month, checktime.day, checktime.hour, checktime.minute, checktime.second) return position
def eci2el(x, y, z, dt): '\n Convert Earth-Centered Inertial (ECI) cartesian coordinates to ITRS for astropy EarthLocation object.\n\n Inputs :\n x = ECI X-coordinate \n y = ECI Y-coordinate \n z = ECI Z-coordinate \n dt = UTC time (datetime object)\n ' from astropy.coordinates import GCRS, ITRS, EarthLocation, CartesianRepresentation import astropy.units as u tt = Time(dt, format='datetime') gcrs = GCRS(CartesianRepresentation(x=x, y=y, z=z), obstime=tt) itrs = gcrs.transform_to(ITRS(obstime=tt)) el = EarthLocation.from_geocentric(itrs.x, itrs.y, itrs.z) return el
339,705,923,134,611,260
Convert Earth-Centered Inertial (ECI) cartesian coordinates to ITRS for astropy EarthLocation object. Inputs : x = ECI X-coordinate y = ECI Y-coordinate z = ECI Z-coordinate dt = UTC time (datetime object)
nustar_lunar_pointing/tracking.py
eci2el
bwgref/nustar_lunar_pointing
python
def eci2el(x, y, z, dt): '\n Convert Earth-Centered Inertial (ECI) cartesian coordinates to ITRS for astropy EarthLocation object.\n\n Inputs :\n x = ECI X-coordinate \n y = ECI Y-coordinate \n z = ECI Z-coordinate \n dt = UTC time (datetime object)\n ' from astropy.coordinates import GCRS, ITRS, EarthLocation, CartesianRepresentation import astropy.units as u tt = Time(dt, format='datetime') gcrs = GCRS(CartesianRepresentation(x=x, y=y, z=z), obstime=tt) itrs = gcrs.transform_to(ITRS(obstime=tt)) el = EarthLocation.from_geocentric(itrs.x, itrs.y, itrs.z) return el
def get_moon_j2000(epoch, line1, line2, position=None): '\n \n Code to determine the apparent J2000 position for a given\n time and at a given position for the observatory.\n \n epoch needs to be a datetime or Time object.\n \n position is a list/tuple of X/Y/Z positions\n \n ' from astropy.time import Time from astropy.coordinates import get_moon, EarthLocation import astropy.units as u import sys from datetime import datetime if (type(epoch) is Time): epoch = epoch.datetime if (position is None): position = get_nustar_location(epoch, line1, line2) t = Time(epoch) loc = eci2el(*(position * u.km), t) moon_coords = get_moon(t, loc) (ra_moon, dec_moon) = ((moon_coords.ra.degree * u.deg), (moon_coords.dec.degree * u.deg)) return (ra_moon, dec_moon)
6,202,994,355,039,850,000
Code to determine the apparent J2000 position for a given time and at a given position for the observatory. epoch needs to be a datetime or Time object. position is a list/tuple of X/Y/Z positions
nustar_lunar_pointing/tracking.py
get_moon_j2000
bwgref/nustar_lunar_pointing
python
def get_moon_j2000(epoch, line1, line2, position=None): '\n \n Code to determine the apparent J2000 position for a given\n time and at a given position for the observatory.\n \n epoch needs to be a datetime or Time object.\n \n position is a list/tuple of X/Y/Z positions\n \n ' from astropy.time import Time from astropy.coordinates import get_moon, EarthLocation import astropy.units as u import sys from datetime import datetime if (type(epoch) is Time): epoch = epoch.datetime if (position is None): position = get_nustar_location(epoch, line1, line2) t = Time(epoch) loc = eci2el(*(position * u.km), t) moon_coords = get_moon(t, loc) (ra_moon, dec_moon) = ((moon_coords.ra.degree * u.deg), (moon_coords.dec.degree * u.deg)) return (ra_moon, dec_moon)
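Taken together, the tracking helpers above form a small pipeline. The sketch below strings them together, assuming they are importable from nustar_lunar_pointing.tracking (inferred from the recorded path) and that 'NUSTAR_TLE.txt' is a locally available TLE file; both the filename and the epoch are made-up placeholders.

from datetime import datetime
from nustar_lunar_pointing.tracking import (
    get_epoch_tle, get_nustar_location, get_moon_j2000)

check_time = datetime(2017, 3, 20, 12, 0, 0)                # illustrative epoch
dt_days, line1, line2 = get_epoch_tle(check_time, 'NUSTAR_TLE.txt')
position = get_nustar_location(check_time, line1, line2)    # geocentric X/Y/Z in km
ra_moon, dec_moon = get_moon_j2000(check_time, line1, line2, position=position)
print(dt_days, ra_moon, dec_moon)                           # TLE age in days, apparent RA/Dec of the Moon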
def __init__(self, form, context, *args, **kwargs): '\n Dynamically add each of the form fields for the given form model\n instance and its related field model instances.\n ' self.form = form self.form_fields = form.fields.visible() initial = kwargs.pop('initial', {}) field_entries = {} if kwargs.get('instance'): for field_entry in kwargs['instance'].fields.all(): field_entries[field_entry.field_id] = field_entry.value super(FormForForm, self).__init__(*args, **kwargs) for field in self.form_fields: field_key = ('field_%s' % field.id) field_class = fields.CLASSES[field.field_type] field_widget = fields.WIDGETS.get(field.field_type) field_args = {'label': field.label, 'required': field.required, 'help_text': field.help_text} if (field.required and (not field.help_text)): field_args['help_text'] = _('required') arg_names = field_class.__init__.__code__.co_varnames if ('max_length' in arg_names): field_args['max_length'] = settings.FORMS_FIELD_MAX_LENGTH if ('choices' in arg_names): choices = list(field.get_choices()) if ((field.field_type == fields.SELECT) and (field.default not in [c[0] for c in choices])): choices.insert(0, ('', field.placeholder_text)) field_args['choices'] = choices if (field_widget is not None): field_args['widget'] = field_widget initial_val = None try: initial_val = field_entries[field.id] except KeyError: try: initial_val = initial[field_key] except KeyError: initial_val = Template(field.default).render(context) if initial_val: if field.is_a(*fields.MULTIPLE): initial_val = split_choices(initial_val) elif (field.field_type == fields.CHECKBOX): initial_val = (initial_val != 'False') self.initial[field_key] = initial_val self.fields[field_key] = field_class(**field_args) if (field.field_type == fields.DOB): _now = datetime.now() years = list(range(_now.year, (_now.year - 120), (- 1))) self.fields[field_key].widget.years = years setattr(self.fields[field_key], 'type', field_class.__name__.lower()) if (field.required and settings.FORMS_USE_HTML5 and (field.field_type != fields.CHECKBOX_MULTIPLE)): self.fields[field_key].widget.attrs['required'] = '' if (field.placeholder_text and (not field.default)): text = field.placeholder_text self.fields[field_key].widget.attrs['placeholder'] = text
827,790,042,888,069,000
Dynamically add each of the form fields for the given form model instance and its related field model instances.
zhiliao/forms/forms.py
__init__
gladgod/zhiliao
python
def __init__(self, form, context, *args, **kwargs): '\n Dynamically add each of the form fields for the given form model\n instance and its related field model instances.\n ' self.form = form self.form_fields = form.fields.visible() initial = kwargs.pop('initial', {}) field_entries = {} if kwargs.get('instance'): for field_entry in kwargs['instance'].fields.all(): field_entries[field_entry.field_id] = field_entry.value super(FormForForm, self).__init__(*args, **kwargs) for field in self.form_fields: field_key = ('field_%s' % field.id) field_class = fields.CLASSES[field.field_type] field_widget = fields.WIDGETS.get(field.field_type) field_args = {'label': field.label, 'required': field.required, 'help_text': field.help_text} if (field.required and (not field.help_text)): field_args['help_text'] = _('required') arg_names = field_class.__init__.__code__.co_varnames if ('max_length' in arg_names): field_args['max_length'] = settings.FORMS_FIELD_MAX_LENGTH if ('choices' in arg_names): choices = list(field.get_choices()) if ((field.field_type == fields.SELECT) and (field.default not in [c[0] for c in choices])): choices.insert(0, ('', field.placeholder_text)) field_args['choices'] = choices if (field_widget is not None): field_args['widget'] = field_widget initial_val = None try: initial_val = field_entries[field.id] except KeyError: try: initial_val = initial[field_key] except KeyError: initial_val = Template(field.default).render(context) if initial_val: if field.is_a(*fields.MULTIPLE): initial_val = split_choices(initial_val) elif (field.field_type == fields.CHECKBOX): initial_val = (initial_val != 'False') self.initial[field_key] = initial_val self.fields[field_key] = field_class(**field_args) if (field.field_type == fields.DOB): _now = datetime.now() years = list(range(_now.year, (_now.year - 120), (- 1))) self.fields[field_key].widget.years = years setattr(self.fields[field_key], 'type', field_class.__name__.lower()) if (field.required and settings.FORMS_USE_HTML5 and (field.field_type != fields.CHECKBOX_MULTIPLE)): self.fields[field_key].widget.attrs['required'] = '' if (field.placeholder_text and (not field.default)): text = field.placeholder_text self.fields[field_key].widget.attrs['placeholder'] = text
def save(self, **kwargs): '\n Create a ``FormEntry`` instance and related ``FieldEntry``\n instances for each form field.\n ' entry = super(FormForForm, self).save(commit=False) entry.form = self.form entry.entry_time = now() entry.save() entry_fields = entry.fields.values_list('field_id', flat=True) new_entry_fields = [] for field in self.form_fields: field_key = ('field_%s' % field.id) value = self.cleaned_data[field_key] if (value and self.fields[field_key].widget.needs_multipart_form): value = fs.save(join('forms', str(uuid4()), value.name), value) if isinstance(value, list): value = ', '.join([v.strip() for v in value]) if (field.id in entry_fields): field_entry = entry.fields.get(field_id=field.id) field_entry.value = value field_entry.save() else: new = {'entry': entry, 'field_id': field.id, 'value': value} new_entry_fields.append(FieldEntry(**new)) if new_entry_fields: FieldEntry.objects.bulk_create(new_entry_fields) return entry
-4,948,100,640,383,101,000
Create a ``FormEntry`` instance and related ``FieldEntry`` instances for each form field.
zhiliao/forms/forms.py
save
gladgod/zhiliao
python
def save(self, **kwargs): '\n Create a ``FormEntry`` instance and related ``FieldEntry``\n instances for each form field.\n ' entry = super(FormForForm, self).save(commit=False) entry.form = self.form entry.entry_time = now() entry.save() entry_fields = entry.fields.values_list('field_id', flat=True) new_entry_fields = [] for field in self.form_fields: field_key = ('field_%s' % field.id) value = self.cleaned_data[field_key] if (value and self.fields[field_key].widget.needs_multipart_form): value = fs.save(join('forms', str(uuid4()), value.name), value) if isinstance(value, list): value = ', '.join([v.strip() for v in value]) if (field.id in entry_fields): field_entry = entry.fields.get(field_id=field.id) field_entry.value = value field_entry.save() else: new = {'entry': entry, 'field_id': field.id, 'value': value} new_entry_fields.append(FieldEntry(**new)) if new_entry_fields: FieldEntry.objects.bulk_create(new_entry_fields) return entry
def email_to(self): '\n Return the value entered for the first field of type\n ``forms.fields.EMAIL``.\n ' for field in self.form_fields: if field.is_a(fields.EMAIL): return self.cleaned_data[('field_%s' % field.id)] return None
-6,057,552,900,015,576,000
Return the value entered for the first field of type ``forms.fields.EMAIL``.
zhiliao/forms/forms.py
email_to
gladgod/zhiliao
python
def email_to(self): '\n Return the value entered for the first field of type\n ``forms.fields.EMAIL``.\n ' for field in self.form_fields: if field.is_a(fields.EMAIL): return self.cleaned_data[('field_%s' % field.id)] return None
def __init__(self, form, request, *args, **kwargs): '\n Iterate through the fields of the ``forms.models.Form`` instance and\n create the form fields required to control including the field in\n the export (with a checkbox) or filtering the field which differs\n across field types. User a list of checkboxes when a fixed set of\n choices can be chosen from, a pair of date fields for date ranges,\n and for all other types provide a textbox for text search.\n ' self.form = form self.request = request self.form_fields = form.fields.all() self.entry_time_name = str(FormEntry._meta.get_field('entry_time').verbose_name) super(EntriesForm, self).__init__(*args, **kwargs) for field in self.form_fields: field_key = ('field_%s' % field.id) self.fields[('%s_export' % field_key)] = forms.BooleanField(label=field.label, initial=True, required=False) if field.is_a(*fields.CHOICES): if field.is_a(fields.CHECKBOX): choices = ((True, _('Checked')), (False, _('Not checked'))) else: choices = field.get_choices() contains_field = forms.MultipleChoiceField(label=' ', choices=choices, widget=forms.CheckboxSelectMultiple(), required=False) self.fields[('%s_filter' % field_key)] = choice_filter_field self.fields[('%s_contains' % field_key)] = contains_field elif field.is_a(*fields.MULTIPLE): contains_field = forms.MultipleChoiceField(label=' ', choices=field.get_choices(), widget=forms.CheckboxSelectMultiple(), required=False) self.fields[('%s_filter' % field_key)] = multiple_filter_field self.fields[('%s_contains' % field_key)] = contains_field elif field.is_a(*fields.DATES): self.fields[('%s_filter' % field_key)] = date_filter_field self.fields[('%s_from' % field_key)] = forms.DateField(label=' ', widget=SelectDateWidget(), required=False) self.fields[('%s_to' % field_key)] = forms.DateField(label=_('and'), widget=SelectDateWidget(), required=False) else: contains_field = forms.CharField(label=' ', required=False) self.fields[('%s_filter' % field_key)] = text_filter_field self.fields[('%s_contains' % field_key)] = contains_field field_key = 'field_0' self.fields[('%s_export' % field_key)] = forms.BooleanField(initial=True, label=FormEntry._meta.get_field('entry_time').verbose_name, required=False) self.fields[('%s_filter' % field_key)] = date_filter_field self.fields[('%s_from' % field_key)] = forms.DateField(label=' ', widget=SelectDateWidget(), required=False) self.fields[('%s_to' % field_key)] = forms.DateField(label=_('and'), widget=SelectDateWidget(), required=False)
-7,674,941,775,648,542,000
Iterate through the fields of the ``forms.models.Form`` instance and create the form fields required to control including the field in the export (with a checkbox) or filtering the field which differs across field types. Use a list of checkboxes when a fixed set of choices can be chosen from, a pair of date fields for date ranges, and for all other types provide a textbox for text search.
zhiliao/forms/forms.py
__init__
gladgod/zhiliao
python
def __init__(self, form, request, *args, **kwargs): '\n Iterate through the fields of the ``forms.models.Form`` instance and\n create the form fields required to control including the field in\n the export (with a checkbox) or filtering the field which differs\n across field types. User a list of checkboxes when a fixed set of\n choices can be chosen from, a pair of date fields for date ranges,\n and for all other types provide a textbox for text search.\n ' self.form = form self.request = request self.form_fields = form.fields.all() self.entry_time_name = str(FormEntry._meta.get_field('entry_time').verbose_name) super(EntriesForm, self).__init__(*args, **kwargs) for field in self.form_fields: field_key = ('field_%s' % field.id) self.fields[('%s_export' % field_key)] = forms.BooleanField(label=field.label, initial=True, required=False) if field.is_a(*fields.CHOICES): if field.is_a(fields.CHECKBOX): choices = ((True, _('Checked')), (False, _('Not checked'))) else: choices = field.get_choices() contains_field = forms.MultipleChoiceField(label=' ', choices=choices, widget=forms.CheckboxSelectMultiple(), required=False) self.fields[('%s_filter' % field_key)] = choice_filter_field self.fields[('%s_contains' % field_key)] = contains_field elif field.is_a(*fields.MULTIPLE): contains_field = forms.MultipleChoiceField(label=' ', choices=field.get_choices(), widget=forms.CheckboxSelectMultiple(), required=False) self.fields[('%s_filter' % field_key)] = multiple_filter_field self.fields[('%s_contains' % field_key)] = contains_field elif field.is_a(*fields.DATES): self.fields[('%s_filter' % field_key)] = date_filter_field self.fields[('%s_from' % field_key)] = forms.DateField(label=' ', widget=SelectDateWidget(), required=False) self.fields[('%s_to' % field_key)] = forms.DateField(label=_('and'), widget=SelectDateWidget(), required=False) else: contains_field = forms.CharField(label=' ', required=False) self.fields[('%s_filter' % field_key)] = text_filter_field self.fields[('%s_contains' % field_key)] = contains_field field_key = 'field_0' self.fields[('%s_export' % field_key)] = forms.BooleanField(initial=True, label=FormEntry._meta.get_field('entry_time').verbose_name, required=False) self.fields[('%s_filter' % field_key)] = date_filter_field self.fields[('%s_from' % field_key)] = forms.DateField(label=' ', widget=SelectDateWidget(), required=False) self.fields[('%s_to' % field_key)] = forms.DateField(label=_('and'), widget=SelectDateWidget(), required=False)
def __iter__(self): '\n Yield pairs of include checkbox / filters for each field.\n ' for field_id in ([f.id for f in self.form_fields] + [0]): prefix = ('field_%s_' % field_id) fields = [f for f in super(EntriesForm, self).__iter__() if f.name.startswith(prefix)] (yield (fields[0], fields[1], fields[2:]))
1,984,044,150,900,896,500
Yield pairs of include checkbox / filters for each field.
zhiliao/forms/forms.py
__iter__
gladgod/zhiliao
python
def __iter__(self): '\n \n ' for field_id in ([f.id for f in self.form_fields] + [0]): prefix = ('field_%s_' % field_id) fields = [f for f in super(EntriesForm, self).__iter__() if f.name.startswith(prefix)] (yield (fields[0], fields[1], fields[2:]))
def columns(self): '\n Returns the list of selected column names.\n ' fields = [f.label for f in self.form_fields if self.cleaned_data[('field_%s_export' % f.id)]] if self.cleaned_data['field_0_export']: fields.append(self.entry_time_name) return fields
2,414,668,977,055,500,000
Returns the list of selected column names.
zhiliao/forms/forms.py
columns
gladgod/zhiliao
python
def columns(self): '\n \n ' fields = [f.label for f in self.form_fields if self.cleaned_data[('field_%s_export' % f.id)]] if self.cleaned_data['field_0_export']: fields.append(self.entry_time_name) return fields
def rows(self, csv=False): '\n Returns each row based on the selected criteria.\n ' field_indexes = {} file_field_ids = [] date_field_ids = [] for field in self.form_fields: if self.cleaned_data[('field_%s_export' % field.id)]: field_indexes[field.id] = len(field_indexes) if field.is_a(fields.FILE): file_field_ids.append(field.id) elif field.is_a(*fields.DATES): date_field_ids.append(field.id) num_columns = len(field_indexes) include_entry_time = self.cleaned_data['field_0_export'] if include_entry_time: num_columns += 1 field_entries = FieldEntry.objects.filter(entry__form=self.form).order_by('-entry__id').select_related('entry') if (self.cleaned_data['field_0_filter'] == FILTER_CHOICE_BETWEEN): time_from = self.cleaned_data['field_0_from'] time_to = self.cleaned_data['field_0_to'] if (time_from and time_to): field_entries = field_entries.filter(entry__entry_time__range=(time_from, time_to)) current_entry = None current_row = None valid_row = True for field_entry in field_entries: if (field_entry.entry_id != current_entry): if (valid_row and (current_row is not None)): if (not csv): current_row.insert(0, current_entry) (yield current_row) current_entry = field_entry.entry_id current_row = ([''] * num_columns) valid_row = True if include_entry_time: current_row[(- 1)] = field_entry.entry.entry_time field_value = (field_entry.value or '') field_id = field_entry.field_id filter_type = self.cleaned_data.get(('field_%s_filter' % field_id)) filter_args = None if filter_type: if (filter_type == FILTER_CHOICE_BETWEEN): (f, t) = (('field_%s_from' % field_id), ('field_%s_to' % field_id)) filter_args = [self.cleaned_data[f], self.cleaned_data[t]] else: field_name = ('field_%s_contains' % field_id) filter_args = self.cleaned_data[field_name] if filter_args: filter_args = [filter_args] if filter_args: if (field_id in date_field_ids): (y, m, d) = field_value.split(' ')[0].split('-') dte = date(int(y), int(m), int(d)) filter_args.append(dte) else: filter_args.append(field_value) filter_func = FILTER_FUNCS[filter_type] if (not filter_func(*filter_args)): valid_row = False if (field_entry.value and (field_id in file_field_ids)): url = reverse('admin:form_file', args=(field_entry.id,)) field_value = self.request.build_absolute_uri(url) if (not csv): parts = (field_value, split(field_entry.value)[1]) field_value = mark_safe(('<a href="%s">%s</a>' % parts)) try: current_row[field_indexes[field_id]] = field_value except KeyError: pass if (valid_row and (current_row is not None)): if (not csv): current_row.insert(0, current_entry) (yield current_row)
6,989,815,682,023,497,000
Returns each row based on the selected criteria.
zhiliao/forms/forms.py
rows
gladgod/zhiliao
python
def rows(self, csv=False): '\n \n ' field_indexes = {} file_field_ids = [] date_field_ids = [] for field in self.form_fields: if self.cleaned_data[('field_%s_export' % field.id)]: field_indexes[field.id] = len(field_indexes) if field.is_a(fields.FILE): file_field_ids.append(field.id) elif field.is_a(*fields.DATES): date_field_ids.append(field.id) num_columns = len(field_indexes) include_entry_time = self.cleaned_data['field_0_export'] if include_entry_time: num_columns += 1 field_entries = FieldEntry.objects.filter(entry__form=self.form).order_by('-entry__id').select_related('entry') if (self.cleaned_data['field_0_filter'] == FILTER_CHOICE_BETWEEN): time_from = self.cleaned_data['field_0_from'] time_to = self.cleaned_data['field_0_to'] if (time_from and time_to): field_entries = field_entries.filter(entry__entry_time__range=(time_from, time_to)) current_entry = None current_row = None valid_row = True for field_entry in field_entries: if (field_entry.entry_id != current_entry): if (valid_row and (current_row is not None)): if (not csv): current_row.insert(0, current_entry) (yield current_row) current_entry = field_entry.entry_id current_row = ([''] * num_columns) valid_row = True if include_entry_time: current_row[(- 1)] = field_entry.entry.entry_time field_value = (field_entry.value or '') field_id = field_entry.field_id filter_type = self.cleaned_data.get(('field_%s_filter' % field_id)) filter_args = None if filter_type: if (filter_type == FILTER_CHOICE_BETWEEN): (f, t) = (('field_%s_from' % field_id), ('field_%s_to' % field_id)) filter_args = [self.cleaned_data[f], self.cleaned_data[t]] else: field_name = ('field_%s_contains' % field_id) filter_args = self.cleaned_data[field_name] if filter_args: filter_args = [filter_args] if filter_args: if (field_id in date_field_ids): (y, m, d) = field_value.split(' ')[0].split('-') dte = date(int(y), int(m), int(d)) filter_args.append(dte) else: filter_args.append(field_value) filter_func = FILTER_FUNCS[filter_type] if (not filter_func(*filter_args)): valid_row = False if (field_entry.value and (field_id in file_field_ids)): url = reverse('admin:form_file', args=(field_entry.id,)) field_value = self.request.build_absolute_uri(url) if (not csv): parts = (field_value, split(field_entry.value)[1]) field_value = mark_safe(('<a href="%s">%s</a>' % parts)) try: current_row[field_indexes[field_id]] = field_value except KeyError: pass if (valid_row and (current_row is not None)): if (not csv): current_row.insert(0, current_entry) (yield current_row)
def check_line_match(self, index, stripped, data_rows): 'Find lines matching stripped from lineMatchKeys and set value to immediately following row' for (key, value) in self.lineMatches.items(): if (stripped == key): next_row = data_rows[(index + 1)] if next_row.find_all('b'): print('Next row was bold element: {0}. Skipping...'.format(next_row)) return False next_row_stripped = next_row.text.strip() if (next_row_stripped in self.lineMatches): print('Next row was {0} and is in lineMatchKeys, skipping...'.format(next_row_stripped)) return False self.attribute_dictionary[value] = json.dumps(next_row_stripped) print('Set {0} to {1}'.format(value, self.attribute_dictionary[value])) del self.lineMatches[key] return True return False
7,381,854,148,757,062,000
Find lines matching stripped from lineMatchKeys and set value to immediately following row
DataObjects.py
check_line_match
ataboo/CalloutScrape
python
def check_line_match(self, index, stripped, data_rows): for (key, value) in self.lineMatches.items(): if (stripped == key): next_row = data_rows[(index + 1)] if next_row.find_all('b'): print('Next row was bold element: {0}. Skipping...'.format(next_row)) return False next_row_stripped = next_row.text.strip() if (next_row_stripped in self.lineMatches): print('Next row was {0} and is in lineMatchKeys, skipping...'.format(next_row_stripped)) return False self.attribute_dictionary[value] = json.dumps(next_row_stripped) print('Set {0} to {1}'.format(value, self.attribute_dictionary[value])) del self.lineMatches[key] return True return False
def __init__(self, app_id=None, state=None, x_request_id=None): 'StopAppResponse - a model defined in huaweicloud sdk' super(StopAppResponse, self).__init__() self._app_id = None self._state = None self._x_request_id = None self.discriminator = None if (app_id is not None): self.app_id = app_id if (state is not None): self.state = state if (x_request_id is not None): self.x_request_id = x_request_id
7,441,684,750,325,065,000
StopAppResponse - a model defined in huaweicloud sdk
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
__init__
huaweicloud/huaweicloud-sdk-python-v3
python
def __init__(self, app_id=None, state=None, x_request_id=None): super(StopAppResponse, self).__init__() self._app_id = None self._state = None self._x_request_id = None self.discriminator = None if (app_id is not None): self.app_id = app_id if (state is not None): self.state = state if (x_request_id is not None): self.x_request_id = x_request_id
@property def app_id(self): 'Gets the app_id of this StopAppResponse.\n\n 应用id\n\n :return: The app_id of this StopAppResponse.\n :rtype: str\n ' return self._app_id
8,293,203,077,026,026,000
Gets the app_id of this StopAppResponse. 应用id :return: The app_id of this StopAppResponse. :rtype: str
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
app_id
huaweicloud/huaweicloud-sdk-python-v3
python
@property def app_id(self): 'Gets the app_id of this StopAppResponse.\n\n 应用id\n\n :return: The app_id of this StopAppResponse.\n :rtype: str\n ' return self._app_id
@app_id.setter def app_id(self, app_id): 'Sets the app_id of this StopAppResponse.\n\n 应用id\n\n :param app_id: The app_id of this StopAppResponse.\n :type: str\n ' self._app_id = app_id
-5,006,926,596,443,253,000
Sets the app_id of this StopAppResponse. 应用id :param app_id: The app_id of this StopAppResponse. :type: str
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
app_id
huaweicloud/huaweicloud-sdk-python-v3
python
@app_id.setter def app_id(self, app_id): 'Sets the app_id of this StopAppResponse.\n\n 应用id\n\n :param app_id: The app_id of this StopAppResponse.\n :type: str\n ' self._app_id = app_id
@property def state(self): 'Gets the state of this StopAppResponse.\n\n\n :return: The state of this StopAppResponse.\n :rtype: AppState\n ' return self._state
8,748,824,888,664,394,000
Gets the state of this StopAppResponse. :return: The state of this StopAppResponse. :rtype: AppState
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
state
huaweicloud/huaweicloud-sdk-python-v3
python
@property def state(self): 'Gets the state of this StopAppResponse.\n\n\n :return: The state of this StopAppResponse.\n :rtype: AppState\n ' return self._state
@state.setter def state(self, state): 'Sets the state of this StopAppResponse.\n\n\n :param state: The state of this StopAppResponse.\n :type: AppState\n ' self._state = state
4,817,263,328,804,627,000
Sets the state of this StopAppResponse. :param state: The state of this StopAppResponse. :type: AppState
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
state
huaweicloud/huaweicloud-sdk-python-v3
python
@state.setter def state(self, state): 'Sets the state of this StopAppResponse.\n\n\n :param state: The state of this StopAppResponse.\n :type: AppState\n ' self._state = state
@property def x_request_id(self): 'Gets the x_request_id of this StopAppResponse.\n\n\n :return: The x_request_id of this StopAppResponse.\n :rtype: str\n ' return self._x_request_id
-72,288,185,045,706,720
Gets the x_request_id of this StopAppResponse. :return: The x_request_id of this StopAppResponse. :rtype: str
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
x_request_id
huaweicloud/huaweicloud-sdk-python-v3
python
@property def x_request_id(self): 'Gets the x_request_id of this StopAppResponse.\n\n\n :return: The x_request_id of this StopAppResponse.\n :rtype: str\n ' return self._x_request_id
@x_request_id.setter def x_request_id(self, x_request_id): 'Sets the x_request_id of this StopAppResponse.\n\n\n :param x_request_id: The x_request_id of this StopAppResponse.\n :type: str\n ' self._x_request_id = x_request_id
-4,375,000,343,484,576,000
Sets the x_request_id of this StopAppResponse. :param x_request_id: The x_request_id of this StopAppResponse. :type: str
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
x_request_id
huaweicloud/huaweicloud-sdk-python-v3
python
@x_request_id.setter def x_request_id(self, x_request_id): 'Sets the x_request_id of this StopAppResponse.\n\n\n :param x_request_id: The x_request_id of this StopAppResponse.\n :type: str\n ' self._x_request_id = x_request_id
def to_dict(self): 'Returns the model properties as a dict' result = {} for (attr, _) in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) elif (attr in self.sensitive_list): result[attr] = '****' else: result[attr] = value return result
2,594,216,033,120,720,000
Returns the model properties as a dict
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
to_dict
huaweicloud/huaweicloud-sdk-python-v3
python
def to_dict(self): result = {} for (attr, _) in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value)) elif hasattr(value, 'to_dict'): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items())) elif (attr in self.sensitive_list): result[attr] = '****' else: result[attr] = value return result
def to_str(self): 'Returns the string representation of the model' import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
-6,095,553,759,700,562,000
Returns the string representation of the model
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
to_str
huaweicloud/huaweicloud-sdk-python-v3
python
def to_str(self): import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding('utf-8') return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self): 'For `print`' return self.to_str()
-1,581,176,371,750,213,000
For `print`
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
__repr__
huaweicloud/huaweicloud-sdk-python-v3
python
def __repr__(self): return self.to_str()
def __eq__(self, other): 'Returns true if both objects are equal' if (not isinstance(other, StopAppResponse)): return False return (self.__dict__ == other.__dict__)
2,557,606,281,473,039,000
Returns true if both objects are equal
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
__eq__
huaweicloud/huaweicloud-sdk-python-v3
python
def __eq__(self, other): if (not isinstance(other, StopAppResponse)): return False return (self.__dict__ == other.__dict__)
def __ne__(self, other): 'Returns true if both objects are not equal' return (not (self == other))
7,764,124,047,908,058,000
Returns true if both objects are not equal
huaweicloud-sdk-cloudrtc/huaweicloudsdkcloudrtc/v2/model/stop_app_response.py
__ne__
huaweicloud/huaweicloud-sdk-python-v3
python
def __ne__(self, other): return (not (self == other))
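Taken together, the StopAppResponse records above describe a generated SDK model: private attributes wrapped in property getters and setters, plus to_dict, to_str, __repr__ and equality helpers. A minimal usage sketch, assuming the huaweicloudsdkcloudrtc package is installed and that the import path mirrors the file path recorded above:

# Minimal usage sketch; assumes huaweicloudsdkcloudrtc is installed and that
# the module path matches the recorded file path.
from huaweicloudsdkcloudrtc.v2.model.stop_app_response import StopAppResponse

resp = StopAppResponse(app_id="my-app-id", x_request_id="req-123")
print(resp.app_id)     # property getter -> "my-app-id"
print(resp.to_dict())  # dict built from the class's declared openapi_types
print(resp)            # __repr__ delegates to to_str(), a JSON string
assert resp == StopAppResponse(app_id="my-app-id", x_request_id="req-123")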
def _expand_name(name): 'Replaces common shorthands with respective full names.\n\n "tf.xxx" --> "tensorflow.xxx"\n "tx.xxx" --> "texar.tf.xxx"\n ' return name
8,329,032,541,764,644,000
Replaces common shorthands with respective full names. "tf.xxx" --> "tensorflow.xxx" "tx.xxx" --> "texar.tf.xxx"
texar/tf/utils/utils.py
_expand_name
awesomemachinelearning/texar
python
def _expand_name(name): 'Replaces common shorthands with respective full names.\n\n "tf.xxx" --> "tensorflow.xxx"\n "tx.xxx" --> "texar.tf.xxx"\n ' return name
def _inspect_getargspec(fn): 'Returns `inspect.getargspec(fn)` for Py2 and `inspect.getfullargspec(fn)`\n for Py3\n ' try: return inspect.getfullargspec(fn) except AttributeError: try: return inspect.getargspec(fn) except TypeError: return inspect.getargspec(fn.__call__)
-8,735,037,290,325,360,000
Returns `inspect.getargspec(fn)` for Py2 and `inspect.getfullargspec(fn)` for Py3
texar/tf/utils/utils.py
_inspect_getargspec
awesomemachinelearning/texar
python
def _inspect_getargspec(fn): 'Returns `inspect.getargspec(fn)` for Py2 and `inspect.getfullargspec(fn)`\n for Py3\n ' try: return inspect.getfullargspec(fn) except AttributeError: try: return inspect.getargspec(fn) except TypeError: return inspect.getargspec(fn.__call__)
def get_args(fn): 'Gets the arguments of a function.\n\n Args:\n fn (callable): The function to inspect.\n\n Returns:\n list: A list of argument names (str) of the function.\n ' argspec = _inspect_getargspec(fn) args = argspec.args if (len(args) == 0): args = funcsigs.signature(fn).parameters.keys() args = list(args) return args
-8,241,205,280,697,680,000
Gets the arguments of a function. Args: fn (callable): The function to inspect. Returns: list: A list of argument names (str) of the function.
texar/tf/utils/utils.py
get_args
awesomemachinelearning/texar
python
def get_args(fn): 'Gets the arguments of a function.\n\n Args:\n fn (callable): The function to inspect.\n\n Returns:\n list: A list of argument names (str) of the function.\n ' argspec = _inspect_getargspec(fn) args = argspec.args if (len(args) == 0): args = funcsigs.signature(fn).parameters.keys() args = list(args) return args
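get_args reads the positional argument names from a callable, falling back to funcsigs for objects that only expose __call__. A small usage sketch, assuming the texar.tf package is importable; demo_fn is made up for illustration:

# Usage sketch; assumes texar.tf is importable. demo_fn is hypothetical.
from texar.tf.utils.utils import get_args

def demo_fn(x, y, scale=2.0):
    return (x + y) * scale

print(get_args(demo_fn))  # ['x', 'y', 'scale']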
def get_default_arg_values(fn): 'Gets the arguments and respective default values of a function.\n\n Only arguments with default values are included in the output dictionary.\n\n Args:\n fn (callable): The function to inspect.\n\n Returns:\n dict: A dictionary that maps argument names (str) to their default\n values. The dictionary is empty if no arguments have default values.\n ' argspec = _inspect_getargspec(fn) if (argspec.defaults is None): return {} num_defaults = len(argspec.defaults) return dict(zip(argspec.args[(- num_defaults):], argspec.defaults))
3,098,645,701,942,710,000
Gets the arguments and respective default values of a function. Only arguments with default values are included in the output dictionary. Args: fn (callable): The function to inspect. Returns: dict: A dictionary that maps argument names (str) to their default values. The dictionary is empty if no arguments have default values.
texar/tf/utils/utils.py
get_default_arg_values
awesomemachinelearning/texar
python
def get_default_arg_values(fn): 'Gets the arguments and respective default values of a function.\n\n Only arguments with default values are included in the output dictionary.\n\n Args:\n fn (callable): The function to inspect.\n\n Returns:\n dict: A dictionary that maps argument names (str) to their default\n values. The dictionary is empty if no arguments have default values.\n ' argspec = _inspect_getargspec(fn) if (argspec.defaults is None): return {} num_defaults = len(argspec.defaults) return dict(zip(argspec.args[(- num_defaults):], argspec.defaults))
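get_default_arg_values pairs the trailing arguments with their defaults, so only parameters that actually have defaults appear in the result. A usage sketch under the same texar.tf import assumption, with a made-up demo function:

# Usage sketch; assumes texar.tf is importable. demo_fn is hypothetical.
from texar.tf.utils.utils import get_default_arg_values

def demo_fn(x, y=1, z="a"):
    return x, y, z

print(get_default_arg_values(demo_fn))  # {'y': 1, 'z': 'a'}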
def check_or_get_class(class_or_name, module_path=None, superclass=None): 'Returns the class and checks if the class inherits :attr:`superclass`.\n\n Args:\n class_or_name: Name or full path to the class, or the class itself.\n module_paths (list, optional): Paths to candidate modules to search\n for the class. This is used if :attr:`class_or_name` is a string and\n the class cannot be located solely based on :attr:`class_or_name`.\n The first module in the list that contains the class\n is used.\n superclass (optional): A (list of) classes that the target class\n must inherit.\n\n Returns:\n The target class.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_or_name` and\n :attr:`module_paths`.\n TypeError: If class does not inherits :attr:`superclass`.\n ' class_ = class_or_name if is_str(class_): class_ = get_class(class_, module_path) if (superclass is not None): if (not issubclass(class_, superclass)): raise TypeError('A subclass of {} is expected. Got: {}'.format(superclass, class_)) return class_
7,885,878,717,375,071,000
Returns the class and checks if the class inherits :attr:`superclass`. Args: class_or_name: Name or full path to the class, or the class itself. module_paths (list, optional): Paths to candidate modules to search for the class. This is used if :attr:`class_or_name` is a string and the class cannot be located solely based on :attr:`class_or_name`. The first module in the list that contains the class is used. superclass (optional): A (list of) classes that the target class must inherit. Returns: The target class. Raises: ValueError: If class is not found based on :attr:`class_or_name` and :attr:`module_paths`. TypeError: If class does not inherits :attr:`superclass`.
texar/tf/utils/utils.py
check_or_get_class
awesomemachinelearning/texar
python
def check_or_get_class(class_or_name, module_path=None, superclass=None): 'Returns the class and checks if the class inherits :attr:`superclass`.\n\n Args:\n class_or_name: Name or full path to the class, or the class itself.\n module_paths (list, optional): Paths to candidate modules to search\n for the class. This is used if :attr:`class_or_name` is a string and\n the class cannot be located solely based on :attr:`class_or_name`.\n The first module in the list that contains the class\n is used.\n superclass (optional): A (list of) classes that the target class\n must inherit.\n\n Returns:\n The target class.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_or_name` and\n :attr:`module_paths`.\n TypeError: If class does not inherits :attr:`superclass`.\n ' class_ = class_or_name if is_str(class_): class_ = get_class(class_, module_path) if (superclass is not None): if (not issubclass(class_, superclass)): raise TypeError('A subclass of {} is expected. Got: {}'.format(superclass, class_)) return class_
def get_class(class_name, module_paths=None): 'Returns the class based on class name.\n\n Args:\n class_name (str): Name or full path to the class.\n module_paths (list): Paths to candidate modules to search for the\n class. This is used if the class cannot be located solely based on\n `class_name`. The first module in the list that contains the class\n is used.\n\n Returns:\n The target class.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ' class_ = locate(class_name) if ((class_ is None) and (module_paths is not None)): for module_path in module_paths: class_ = locate('.'.join([module_path, class_name])) if (class_ is not None): break if (class_ is None): raise ValueError('Class not found in {}: {}'.format(module_paths, class_name)) return class_
2,376,394,911,442,920,000
Returns the class based on class name. Args: class_name (str): Name or full path to the class. module_paths (list): Paths to candidate modules to search for the class. This is used if the class cannot be located solely based on `class_name`. The first module in the list that contains the class is used. Returns: The target class. Raises: ValueError: If class is not found based on :attr:`class_name` and :attr:`module_paths`.
texar/tf/utils/utils.py
get_class
awesomemachinelearning/texar
python
def get_class(class_name, module_paths=None): 'Returns the class based on class name.\n\n Args:\n class_name (str): Name or full path to the class.\n module_paths (list): Paths to candidate modules to search for the\n class. This is used if the class cannot be located solely based on\n `class_name`. The first module in the list that contains the class\n is used.\n\n Returns:\n The target class.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ' class_ = locate(class_name) if ((class_ is None) and (module_paths is not None)): for module_path in module_paths: class_ = locate('.'.join([module_path, class_name])) if (class_ is not None): break if (class_ is None): raise ValueError('Class not found in {}: {}'.format(module_paths, class_name)) return class_
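get_class resolves a name either as a full dotted path or by trying each candidate module path in turn, and check_or_get_class adds a superclass check on top. A usage sketch with standard-library classes, assuming texar.tf is importable:

# Usage sketch; assumes texar.tf is importable.
from texar.tf.utils.utils import get_class, check_or_get_class

# Resolve by short name plus candidate module paths...
cls = get_class("OrderedDict", module_paths=["collections"])
print(cls)  # <class 'collections.OrderedDict'>

# ...or by full dotted path, optionally enforcing a superclass.
cls = check_or_get_class("collections.OrderedDict", superclass=dict)
print(cls is get_class("collections.OrderedDict"))  # True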
def check_or_get_instance(ins_or_class_or_name, kwargs, module_paths=None, classtype=None): 'Returns a class instance and checks types.\n\n Args:\n ins_or_class_or_name: Can be of 3 types:\n\n - A class to instantiate.\n - A string of the name or full path to a class to instantiate.\n - The class instance to check types.\n\n kwargs (dict): Keyword arguments for the class constructor. Ignored\n if `ins_or_class_or_name` is a class instance.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n classtype (optional): A (list of) class of which the instance must\n be an instantiation.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n TypeError: If the instance is not an instantiation of\n :attr:`classtype`.\n ' ret = ins_or_class_or_name if (is_str(ret) or isinstance(ret, type)): ret = get_instance(ret, kwargs, module_paths) if (classtype is not None): if (not isinstance(ret, classtype)): raise TypeError('An instance of {} is expected. Got: {}'.format(classtype, ret)) return ret
3,205,836,329,023,062,500
Returns a class instance and checks types. Args: ins_or_class_or_name: Can be of 3 types: - A class to instantiate. - A string of the name or full path to a class to instantiate. - The class instance to check types. kwargs (dict): Keyword arguments for the class constructor. Ignored if `ins_or_class_or_name` is a class instance. module_paths (list, optional): Paths to candidate modules to search for the class. This is used if the class cannot be located solely based on :attr:`class_name`. The first module in the list that contains the class is used. classtype (optional): A (list of) class of which the instance must be an instantiation. Raises: ValueError: If class is not found based on :attr:`class_name` and :attr:`module_paths`. ValueError: If :attr:`kwargs` contains arguments that are invalid for the class construction. TypeError: If the instance is not an instantiation of :attr:`classtype`.
texar/tf/utils/utils.py
check_or_get_instance
awesomemachinelearning/texar
python
def check_or_get_instance(ins_or_class_or_name, kwargs, module_paths=None, classtype=None): 'Returns a class instance and checks types.\n\n Args:\n ins_or_class_or_name: Can be of 3 types:\n\n - A class to instantiate.\n - A string of the name or full path to a class to instantiate.\n - The class instance to check types.\n\n kwargs (dict): Keyword arguments for the class constructor. Ignored\n if `ins_or_class_or_name` is a class instance.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n classtype (optional): A (list of) class of which the instance must\n be an instantiation.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n TypeError: If the instance is not an instantiation of\n :attr:`classtype`.\n ' ret = ins_or_class_or_name if (is_str(ret) or isinstance(ret, type)): ret = get_instance(ret, kwargs, module_paths) if (classtype is not None): if (not isinstance(ret, classtype)): raise TypeError('An instance of {} is expected. Got: {}'.format(classtype, ret)) return ret
def get_instance(class_or_name, kwargs, module_paths=None): 'Creates a class instance.\n\n Args:\n class_or_name: A class, or its name or full path to a class to\n instantiate.\n kwargs (dict): Keyword arguments for the class constructor.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n\n Returns:\n A class instance.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_or_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n ' class_ = class_or_name if is_str(class_): class_ = get_class(class_, module_paths) class_args = set(get_args(class_.__init__)) if (kwargs is None): kwargs = {} for key in kwargs.keys(): if (key not in class_args): raise ValueError(('Invalid argument for class %s.%s: %s, valid args: %s' % (class_.__module__, class_.__name__, key, list(class_args)))) return class_(**kwargs)
-7,312,332,245,295,531,000
Creates a class instance. Args: class_or_name: A class, or its name or full path to a class to instantiate. kwargs (dict): Keyword arguments for the class constructor. module_paths (list, optional): Paths to candidate modules to search for the class. This is used if the class cannot be located solely based on :attr:`class_name`. The first module in the list that contains the class is used. Returns: A class instance. Raises: ValueError: If class is not found based on :attr:`class_or_name` and :attr:`module_paths`. ValueError: If :attr:`kwargs` contains arguments that are invalid for the class construction.
texar/tf/utils/utils.py
get_instance
awesomemachinelearning/texar
python
def get_instance(class_or_name, kwargs, module_paths=None): 'Creates a class instance.\n\n Args:\n class_or_name: A class, or its name or full path to a class to\n instantiate.\n kwargs (dict): Keyword arguments for the class constructor.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n\n Returns:\n A class instance.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_or_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n ' class_ = class_or_name if is_str(class_): class_ = get_class(class_, module_paths) class_args = set(get_args(class_.__init__)) if (kwargs is None): kwargs = {} for key in kwargs.keys(): if (key not in class_args): raise ValueError(('Invalid argument for class %s.%s: %s, valid args: %s' % (class_.__module__, class_.__name__, key, list(class_args)))) return class_(**kwargs)
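get_instance validates the supplied keyword arguments against the constructor's argument list before instantiating, and check_or_get_instance additionally verifies the resulting type. A usage sketch assuming texar.tf is importable; DemoCell is a made-up class:

# Usage sketch; assumes texar.tf is importable. DemoCell is hypothetical.
from texar.tf.utils.utils import get_instance, check_or_get_instance

class DemoCell(object):
    def __init__(self, units, activation="relu"):
        self.units = units
        self.activation = activation

cell = get_instance(DemoCell, {"units": 64})
print(cell.units, cell.activation)  # 64 relu

# Passing an unknown key such as {"units": 64, "dropout": 0.1} would raise
# ValueError, since "dropout" is not a constructor argument of DemoCell.
cell = check_or_get_instance(DemoCell, {"units": 32}, classtype=DemoCell)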
def check_or_get_instance_with_redundant_kwargs(ins_or_class_or_name, kwargs, module_paths=None, classtype=None): 'Returns a class instance and checks types.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n class construction method are used.\n\n Args:\n ins_or_class_or_name: Can be of 3 types:\n\n - A class to instantiate.\n - A string of the name or module path to a class to instantiate.\n - The class instance to check types.\n\n kwargs (dict): Keyword arguments for the class constructor.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n classtype (optional): A (list of) classes of which the instance must\n be an instantiation.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n TypeError: If the instance is not an instantiation of\n :attr:`classtype`.\n ' ret = ins_or_class_or_name if (is_str(ret) or isinstance(ret, type)): ret = get_instance_with_redundant_kwargs(ret, kwargs, module_paths) if (classtype is not None): if (not isinstance(ret, classtype)): raise TypeError('An instance of {} is expected. Got: {}'.format(classtype, ret)) return ret
7,174,078,021,465,491,000
Returns a class instance and checks types. Only those keyword arguments in :attr:`kwargs` that are included in the class construction method are used. Args: ins_or_class_or_name: Can be of 3 types: - A class to instantiate. - A string of the name or module path to a class to instantiate. - The class instance to check types. kwargs (dict): Keyword arguments for the class constructor. module_paths (list, optional): Paths to candidate modules to search for the class. This is used if the class cannot be located solely based on :attr:`class_name`. The first module in the list that contains the class is used. classtype (optional): A (list of) classes of which the instance must be an instantiation. Raises: ValueError: If class is not found based on :attr:`class_name` and :attr:`module_paths`. ValueError: If :attr:`kwargs` contains arguments that are invalid for the class construction. TypeError: If the instance is not an instantiation of :attr:`classtype`.
texar/tf/utils/utils.py
check_or_get_instance_with_redundant_kwargs
awesomemachinelearning/texar
python
def check_or_get_instance_with_redundant_kwargs(ins_or_class_or_name, kwargs, module_paths=None, classtype=None): 'Returns a class instance and checks types.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n class construction method are used.\n\n Args:\n ins_or_class_or_name: Can be of 3 types:\n\n - A class to instantiate.\n - A string of the name or module path to a class to instantiate.\n - The class instance to check types.\n\n kwargs (dict): Keyword arguments for the class constructor.\n module_paths (list, optional): Paths to candidate modules to\n search for the class. This is used if the class cannot be\n located solely based on :attr:`class_name`. The first module\n in the list that contains the class is used.\n classtype (optional): A (list of) classes of which the instance must\n be an instantiation.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ValueError: If :attr:`kwargs` contains arguments that are invalid\n for the class construction.\n TypeError: If the instance is not an instantiation of\n :attr:`classtype`.\n ' ret = ins_or_class_or_name if (is_str(ret) or isinstance(ret, type)): ret = get_instance_with_redundant_kwargs(ret, kwargs, module_paths) if (classtype is not None): if (not isinstance(ret, classtype)): raise TypeError('An instance of {} is expected. Got: {}'.format(classtype, ret)) return ret
def get_instance_with_redundant_kwargs(class_name, kwargs, module_paths=None): 'Creates a class instance.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n class construction method are used.\n\n Args:\n class_name (str): A class or its name or module path.\n kwargs (dict): A dictionary of arguments for the class constructor. It\n may include invalid arguments which will be ignored.\n module_paths (list of str): A list of paths to candidate modules to\n search for the class. This is used if the class cannot be located\n solely based on :attr:`class_name`. The first module in the list\n that contains the class is used.\n\n Returns:\n A class instance.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ' class_ = get_class(class_name, module_paths) selected_kwargs = {} class_args = set(get_args(class_.__init__)) if (kwargs is None): kwargs = {} for (key, value) in kwargs.items(): if (key in class_args): selected_kwargs[key] = value return class_(**selected_kwargs)
-2,217,281,050,100,717,800
Creates a class instance. Only those keyword arguments in :attr:`kwargs` that are included in the class construction method are used. Args: class_name (str): A class or its name or module path. kwargs (dict): A dictionary of arguments for the class constructor. It may include invalid arguments which will be ignored. module_paths (list of str): A list of paths to candidate modules to search for the class. This is used if the class cannot be located solely based on :attr:`class_name`. The first module in the list that contains the class is used. Returns: A class instance. Raises: ValueError: If class is not found based on :attr:`class_name` and :attr:`module_paths`.
texar/tf/utils/utils.py
get_instance_with_redundant_kwargs
awesomemachinelearning/texar
python
def get_instance_with_redundant_kwargs(class_name, kwargs, module_paths=None): 'Creates a class instance.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n class construction method are used.\n\n Args:\n class_name (str): A class or its name or module path.\n kwargs (dict): A dictionary of arguments for the class constructor. It\n may include invalid arguments which will be ignored.\n module_paths (list of str): A list of paths to candidate modules to\n search for the class. This is used if the class cannot be located\n solely based on :attr:`class_name`. The first module in the list\n that contains the class is used.\n\n Returns:\n A class instance.\n\n Raises:\n ValueError: If class is not found based on :attr:`class_name` and\n :attr:`module_paths`.\n ' class_ = get_class(class_name, module_paths) selected_kwargs = {} class_args = set(get_args(class_.__init__)) if (kwargs is None): kwargs = {} for (key, value) in kwargs.items(): if (key in class_args): selected_kwargs[key] = value return class_(**selected_kwargs)
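The *_with_redundant_kwargs variants differ from get_instance in that unknown keys are silently dropped instead of raising, which is convenient when passing a whole hyperparameter dict to a constructor. A usage sketch with a standard-library class, assuming texar.tf is importable:

# Usage sketch; assumes texar.tf is importable.
from texar.tf.utils.utils import get_instance_with_redundant_kwargs

# "bogus" is not a logging.Formatter argument, so it is ignored rather
# than raising as get_instance would.
fmt = get_instance_with_redundant_kwargs(
    "logging.Formatter", {"fmt": "%(levelname)s: %(message)s", "bogus": 1})
print(type(fmt))  # <class 'logging.Formatter'>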
def get_function(fn_or_name, module_paths=None): 'Returns the function of specified name and module.\n\n Args:\n fn_or_name (str or callable): Name or full path to a function, or the\n function itself.\n module_paths (list, optional): A list of paths to candidate modules to\n search for the function. This is used only when the function\n cannot be located solely based on :attr:`fn_or_name`. The first\n module in the list that contains the function is used.\n\n Returns:\n A function.\n ' if is_callable(fn_or_name): return fn_or_name fn = locate(fn_or_name) if ((fn is None) and (module_paths is not None)): for module_path in module_paths: fn = locate('.'.join([module_path, fn_or_name])) if (fn is not None): break if (fn is None): raise ValueError('Method not found in {}: {}'.format(module_paths, fn_or_name)) return fn
6,214,399,954,028,135,000
Returns the function of specified name and module. Args: fn_or_name (str or callable): Name or full path to a function, or the function itself. module_paths (list, optional): A list of paths to candidate modules to search for the function. This is used only when the function cannot be located solely based on :attr:`fn_or_name`. The first module in the list that contains the function is used. Returns: A function.
texar/tf/utils/utils.py
get_function
awesomemachinelearning/texar
python
def get_function(fn_or_name, module_paths=None): 'Returns the function of specified name and module.\n\n Args:\n fn_or_name (str or callable): Name or full path to a function, or the\n function itself.\n module_paths (list, optional): A list of paths to candidate modules to\n search for the function. This is used only when the function\n cannot be located solely based on :attr:`fn_or_name`. The first\n module in the list that contains the function is used.\n\n Returns:\n A function.\n ' if is_callable(fn_or_name): return fn_or_name fn = locate(fn_or_name) if ((fn is None) and (module_paths is not None)): for module_path in module_paths: fn = locate('.'.join([module_path, fn_or_name])) if (fn is not None): break if (fn is None): raise ValueError('Method not found in {}: {}'.format(module_paths, fn_or_name)) return fn
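get_function mirrors get_class for callables: a callable passes through unchanged, while a name is resolved against candidate module paths. A usage sketch, assuming texar.tf is importable:

# Usage sketch; assumes texar.tf is importable.
from texar.tf.utils.utils import get_function

fn = get_function("sqrt", module_paths=["math"])
print(fn(9.0))                   # 3.0, resolved from the math module
print(get_function(len) is len)  # callables are returned as-is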
def call_function_with_redundant_kwargs(fn, kwargs): "Calls a function and returns the results.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n function's argument list are used to call the function.\n\n Args:\n fn (function): A callable. If :attr:`fn` is not a python function,\n :attr:`fn.__call__` is called.\n kwargs (dict): A `dict` of arguments for the callable. It\n may include invalid arguments which will be ignored.\n\n Returns:\n The returned results by calling :attr:`fn`.\n " try: fn_args = set(get_args(fn)) except TypeError: fn_args = set(get_args(fn.__cal__)) if (kwargs is None): kwargs = {} selected_kwargs = {} for (key, value) in kwargs.items(): if (key in fn_args): selected_kwargs[key] = value return fn(**selected_kwargs)
-4,426,932,169,605,291,500
Calls a function and returns the results. Only those keyword arguments in :attr:`kwargs` that are included in the function's argument list are used to call the function. Args: fn (function): A callable. If :attr:`fn` is not a python function, :attr:`fn.__call__` is called. kwargs (dict): A `dict` of arguments for the callable. It may include invalid arguments which will be ignored. Returns: The returned results by calling :attr:`fn`.
texar/tf/utils/utils.py
call_function_with_redundant_kwargs
awesomemachinelearning/texar
python
def call_function_with_redundant_kwargs(fn, kwargs): "Calls a function and returns the results.\n\n Only those keyword arguments in :attr:`kwargs` that are included in the\n function's argument list are used to call the function.\n\n Args:\n fn (function): A callable. If :attr:`fn` is not a python function,\n :attr:`fn.__call__` is called.\n kwargs (dict): A `dict` of arguments for the callable. It\n may include invalid arguments which will be ignored.\n\n Returns:\n The returned results by calling :attr:`fn`.\n " try: fn_args = set(get_args(fn)) except TypeError: fn_args = set(get_args(fn.__cal__)) if (kwargs is None): kwargs = {} selected_kwargs = {} for (key, value) in kwargs.items(): if (key in fn_args): selected_kwargs[key] = value return fn(**selected_kwargs)
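call_function_with_redundant_kwargs applies the same filtering at call time: only keys that match the callable's argument names are forwarded. A usage sketch assuming texar.tf is importable; demo_fn is made up:

# Usage sketch; assumes texar.tf is importable. demo_fn is hypothetical.
from texar.tf.utils.utils import call_function_with_redundant_kwargs

def demo_fn(x, scale=1.0):
    return x * scale

hparams = {"x": 3, "scale": 2.0, "unused_option": True}
print(call_function_with_redundant_kwargs(demo_fn, hparams))  # 6.0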
def get_instance_kwargs(kwargs, hparams): "Makes a dict of keyword arguments with the following structure:\n\n `kwargs_ = {'hparams': dict(hparams), **kwargs}`.\n\n This is typically used for constructing a module which takes a set of\n arguments as well as a argument named `hparams`.\n\n Args:\n kwargs (dict): A dict of keyword arguments. Can be `None`.\n hparams: A dict or an instance of :class:`~texar.tf.HParams` Can be `None`.\n\n Returns:\n A `dict` that contains the keyword arguments in :attr:`kwargs`, and\n an additional keyword argument named `hparams`.\n " if ((hparams is None) or isinstance(hparams, dict)): kwargs_ = {'hparams': hparams} elif isinstance(hparams, HParams): kwargs_ = {'hparams': hparams.todict()} else: raise ValueError('`hparams` must be a dict, an instance of HParams, or a `None`.') kwargs_.update((kwargs or {})) return kwargs_
-3,281,419,015,722,989,000
Makes a dict of keyword arguments with the following structure: `kwargs_ = {'hparams': dict(hparams), **kwargs}`. This is typically used for constructing a module which takes a set of arguments as well as a argument named `hparams`. Args: kwargs (dict): A dict of keyword arguments. Can be `None`. hparams: A dict or an instance of :class:`~texar.tf.HParams` Can be `None`. Returns: A `dict` that contains the keyword arguments in :attr:`kwargs`, and an additional keyword argument named `hparams`.
texar/tf/utils/utils.py
get_instance_kwargs
awesomemachinelearning/texar
python
def get_instance_kwargs(kwargs, hparams): "Makes a dict of keyword arguments with the following structure:\n\n `kwargs_ = {'hparams': dict(hparams), **kwargs}`.\n\n This is typically used for constructing a module which takes a set of\n arguments as well as a argument named `hparams`.\n\n Args:\n kwargs (dict): A dict of keyword arguments. Can be `None`.\n hparams: A dict or an instance of :class:`~texar.tf.HParams` Can be `None`.\n\n Returns:\n A `dict` that contains the keyword arguments in :attr:`kwargs`, and\n an additional keyword argument named `hparams`.\n " if ((hparams is None) or isinstance(hparams, dict)): kwargs_ = {'hparams': hparams} elif isinstance(hparams, HParams): kwargs_ = {'hparams': hparams.todict()} else: raise ValueError('`hparams` must be a dict, an instance of HParams, or a `None`.') kwargs_.update((kwargs or {})) return kwargs_
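get_instance_kwargs simply folds an hparams object (dict, HParams, or None) into the keyword-argument dict under the 'hparams' key. A usage sketch, assuming texar.tf is importable:

# Usage sketch; assumes texar.tf is importable.
from texar.tf.utils.utils import get_instance_kwargs

kwargs = get_instance_kwargs({"mode": "train"}, {"num_units": 256})
print(kwargs)  # {'hparams': {'num_units': 256}, 'mode': 'train'}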
def dict_patch(tgt_dict, src_dict): 'Recursively patch :attr:`tgt_dict` by adding items from :attr:`src_dict`\n that do not exist in :attr:`tgt_dict`.\n\n If respective items in :attr:`src_dict` and :attr:`tgt_dict` are both\n `dict`, the :attr:`tgt_dict` item is patched recursively.\n\n Args:\n tgt_dict (dict): Target dictionary to patch.\n src_dict (dict): Source dictionary.\n\n Return:\n dict: The new :attr:`tgt_dict` that is patched.\n ' if (src_dict is None): return tgt_dict for (key, value) in src_dict.items(): if (key not in tgt_dict): tgt_dict[key] = copy.deepcopy(value) elif (isinstance(value, dict) and isinstance(tgt_dict[key], dict)): tgt_dict[key] = dict_patch(tgt_dict[key], value) return tgt_dict
-3,271,779,255,991,127,600
Recursively patch :attr:`tgt_dict` by adding items from :attr:`src_dict` that do not exist in :attr:`tgt_dict`. If respective items in :attr:`src_dict` and :attr:`tgt_dict` are both `dict`, the :attr:`tgt_dict` item is patched recursively. Args: tgt_dict (dict): Target dictionary to patch. src_dict (dict): Source dictionary. Return: dict: The new :attr:`tgt_dict` that is patched.
texar/tf/utils/utils.py
dict_patch
awesomemachinelearning/texar
python
def dict_patch(tgt_dict, src_dict): 'Recursively patch :attr:`tgt_dict` by adding items from :attr:`src_dict`\n that do not exist in :attr:`tgt_dict`.\n\n If respective items in :attr:`src_dict` and :attr:`tgt_dict` are both\n `dict`, the :attr:`tgt_dict` item is patched recursively.\n\n Args:\n tgt_dict (dict): Target dictionary to patch.\n src_dict (dict): Source dictionary.\n\n Return:\n dict: The new :attr:`tgt_dict` that is patched.\n ' if (src_dict is None): return tgt_dict for (key, value) in src_dict.items(): if (key not in tgt_dict): tgt_dict[key] = copy.deepcopy(value) elif (isinstance(value, dict) and isinstance(tgt_dict[key], dict)): tgt_dict[key] = dict_patch(tgt_dict[key], value) return tgt_dict
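dict_patch fills in missing keys recursively, so values already present in the target always win over the source defaults. A usage sketch, assuming texar.tf is importable:

# Usage sketch; assumes texar.tf is importable.
from texar.tf.utils.utils import dict_patch

defaults = {"optimizer": {"type": "Adam", "lr": 1e-3}, "batch_size": 32}
user = {"optimizer": {"lr": 1e-4}}
print(dict_patch(user, defaults))
# {'optimizer': {'lr': 0.0001, 'type': 'Adam'}, 'batch_size': 32}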