Dataset columns:
    language: string (6 distinct values)
    original_string: string (25 to 887k characters)
    text: string (25 to 887k characters)
Python
def error_response(msg: str = ""):
    """Define an error JSON response to send to a client.

    :param msg: A message indicating that the request has errors.
    """
    _data_response = {
        u'valid': False,
        u'msg': msg
    }
    return _data_response
Python
def success_response(data: any = None, msg: str = ""):
    """Defines the structure of a response to a client request in JSON format.

    :param data: Information to send to the client.
    :param msg: A message indicating that the request completed successfully.
    """
    _data_response = {
        u'valid': True,
        u'msg': msg,
        u'data': data
    }
    return _data_response
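A minimal usage sketch for the two response helpers above; the view function and its placeholder lookup are assumptions for illustration, not part of the original snippet.

def get_user(user_id: int):
    user = {"id": user_id, "name": "demo"}  # hypothetical lookup result
    if user is None:
        return error_response(msg="User not found.")
    return success_response(data=user, msg="User fetched successfully.")

print(get_user(1))  # {'valid': True, 'msg': 'User fetched successfully.', 'data': {'id': 1, 'name': 'demo'}}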
Python
def param(self, key: str, default_value: any = None, callback=None):
    """Returns the value of the parameter with the specified name.

    req.param(...) searches the URL path, body, and query string (in that
    order) for the specified parameter. If no parameter value exists anywhere
    in the request with the given name, it returns None, or the optional
    default value if specified.

    :param key: Name of the parameter to find
    :param default_value: Value to return if the parameter doesn't exist
    :param callback: Function that is executed after getting the value
    """
    if key in self.params:
        _value = self.params.get(key)
    elif key in self.body.value:
        _value = self.body.value.get(key)
    else:
        _value = self.args.get(key, default_value)
    if not _value and default_value:
        _value = default_value
    if not callback:
        return _value
    return callback(_value)
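A runnable sketch of the lookup order, using a hypothetical stub request object (FakeRequest and its attribute layout are assumptions that mirror the names the method uses):

class FakeBody:
    value = {"name": "retic"}        # parsed request body

class FakeRequest:
    params = {"id": "42"}            # URL path parameters
    body = FakeBody()
    args = {"limit": "10"}           # query-string arguments
    param = param                    # reuse the function above as a method

req = FakeRequest()
print(req.param("id"))                     # '42' from the URL path
print(req.param("name"))                   # 'retic' from the body
print(req.param("limit", callback=int))    # 10, coerced by the callback
print(req.param("page", default_value=1))  # 1, the fallback value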
Python
def all_params(self):
    """Returns the values of all the parameters sent in the request, combined
    into a single dictionary.

    It includes parameters parsed from the URL path, the request body, the
    query string, and the retic dict, in that order.
    """
    return {**self.params, **self.body.value, **self.args, **self.retic}
Python
def allocated(resource, rule, lookup, default="__default__"):
    """Pulls resource information for a given rule. If a rule does not have any
    information for a given resource type, then it will pull from the default.
    Information is pulled from definitions in the cluster.json (which is used
    at job submission). This ensures that any resources used at runtime mirror
    the resources that were allocated.

    :param resource <str>: resource type to look up in cluster.json (i.e. threads, mem, time, gres)
    :param rule <str>: rule whose information to look up
    :param lookup <dict>: lookup containing allocation information (i.e. cluster.json)
    :param default <str>: default information to use if rule information cannot be found
    :return allocation <str>: allocation information for a given resource type for a given rule
    """
    try:
        # Try to get allocation information for a given rule
        allocation = lookup[rule][resource]
    except KeyError:
        # Use default allocation information
        allocation = lookup[default][resource]
    return allocation
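A quick usage sketch with a hypothetical cluster.json-style lookup (the dictionary contents and rule names are invented for illustration):

lookup = {
    "__default__": {"threads": 2, "mem": "8G"},
    "bwa_mem":     {"threads": 16, "mem": "32G"},
}

print(allocated("threads", "bwa_mem", lookup))  # 16 (rule-specific value)
print(allocated("mem", "fastqc", lookup))       # '8G' (falls back to __default__)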
Python
def randBright(image, br=0.25):
    """Function to randomly change the brightness of an image.

    Args:
        image (numpy array): RGB array of input image
        br (float): V-channel will be scaled by a random value between br and 1+br

    Returns:
        numpy array of brightness-adjusted RGB image of same size as input
    """
    rand_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
    rand_bright = br + np.random.uniform()
    rand_image[:, :, 2] = rand_image[:, :, 2] * rand_bright
    rand_image = cv2.cvtColor(rand_image, cv2.COLOR_HSV2RGB)
    return rand_image
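A minimal sketch of calling randBright on a dummy image; the imports and random input are assumptions supplied for illustration, since the snippet does not show them.

import cv2   # assumed import for the snippet above
import numpy as np   # assumed import for the snippet above

# A float image keeps cv2.cvtColor happy and avoids uint8 overflow when
# the V channel is scaled past 255.
image = np.random.rand(66, 200, 3).astype(np.float32)
brightened = randBright(image, br=0.25)
print(brightened.shape)  # (66, 200, 3)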
Python
def readProcess(image_file):
    """Function to read an image file and crop and resize it for the input layer.

    Args:
        image_file (str): Image filename (expected in 'data/' subdirectory)

    Returns:
        numpy array of size 66x200x3, for the image that was read from disk
    """
    # Read file from disk
    image = mpimg.imread('data/' + image_file.strip())
    # Remove the top 20 and bottom 20 pixels of 160x320x3 images
    image = image[20:140, :, :]
    # Resize the image to match the input layer of the model
    resize = (200, 66)
    image = cv2.resize(image, resize, interpolation=cv2.INTER_AREA)
    return image
Python
def preprocess(image):
    """Combine all preprocess functions into one."""
    image1 = crop(image)
    image1 = resize(image1)
    image1 = rgb2yuv(image1)
    return image1
Python
def augment_single(center, steering_angle, range_x=100, range_y=10):
    """Generate an augmented image and adjust the steering angle.

    (The steering angle is associated with the center image.)
    """
    image, steering_angle = random_flip(center, steering_angle)
    image, steering_angle = random_translate(image, steering_angle, range_x, range_y)
    image = random_shadow(image)
    image = random_brightness(image)
    return image, steering_angle
Python
def load_data(args):
    """Load training data and split it into training and validation sets."""
    tracks = ["track1", "track2", "track3"]
    drive = ["normal", "reverse", "recovery", "sport_normal", "sport_reverse"]

    x = None
    y = None
    path = None
    x_train = None
    y_train = None
    x_valid = None
    y_valid = None

    for track in tracks:
        for drive_style in drive:
            try:
                path = os.path.join('datasets', args.data_dir, track, drive_style, 'driving_log.csv')
                data_df = pd.read_csv(path)
                if x is None:
                    x = data_df[['center', 'left', 'right']].values
                    y = data_df['steering'].values
                else:
                    x = np.concatenate((x, data_df[['center', 'left', 'right']].values), axis=0)
                    y = np.concatenate((y, data_df['steering'].values), axis=0)
            except FileNotFoundError:
                print("Unable to read file %s" % path)
                continue

    if x is None or y is None:
        print("No driving data were provided for training. Provide correct paths to the driving_log.csv files")
        exit()

    try:
        x_train, x_valid, y_train, y_valid = train_test_split(x, y, test_size=args.test_size, random_state=0)
    except TypeError:
        print("Missing header to csv files")
        exit()

    print("Train dataset: " + str(len(x_train)) + " elements")
    print("Test dataset: " + str(len(x_valid)) + " elements")
    return x_train, x_valid, y_train, y_valid
Python
def main():
    """Load train/validation data set and train the model."""
    parser = argparse.ArgumentParser(description='Behavioral Cloning Training Program')
    parser.add_argument('-d', help='data directory', dest='data_dir', type=str, default='dataset5')
    parser.add_argument('-t', help='test size fraction', dest='test_size', type=float, default=0.2)
    parser.add_argument('-k', help='drop out probability', dest='keep_prob', type=float, default=0.5)
    parser.add_argument('-n', help='number of epochs', dest='nb_epoch', type=int, default=50)
    parser.add_argument('-s', help='samples per epoch', dest='samples_per_epoch', type=int, default=100)
    parser.add_argument('-b', help='batch size', dest='batch_size', type=int, default=64)
    parser.add_argument('-o', help='save best models only', dest='save_best_only', type=s2b, default='true')
    parser.add_argument('-l', help='learning rate', dest='learning_rate', type=float, default=1.0e-4)
    parser.add_argument('-tn', help='training num', dest='train_num', type=int, default=103)
    args = parser.parse_args()

    print('-' * 30)
    print('Parameters')
    print('-' * 30)
    for key, value in vars(args).items():
        print('{:<20} := {}'.format(key, value))
    print('-' * 30)

    data = load_data(args)
    model = build_model(args)

    import time
    start = time.process_time()
    train_model(model, args, *data)
    end = time.process_time()
    print("training finished in %.2f seconds" % (end - start))
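The -o option above converts its string argument with s2b, which is not defined in this snippet; a plausible string-to-bool helper (an assumption, not necessarily the original author's code) would be:

def s2b(s):
    """Hypothetical string-to-bool converter for argparse (assumed helper)."""
    return s.lower() in ('true', 'yes', 'y', '1')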
Python
def runtests(self, line=None, cell=None, filename='<input>'):
    """The %runtests magic searches your IPython namespace for functions with
    names that begin with 'test'. It will attempt to run these functions
    (calling them with no arguments), and report whether they pass, fail
    (raise an AssertionError), or error (raise any other kind of error). For
    tests that fail or error, %runtests will show the exception raised but not
    the traceback, so write informative messages!
    """
    import time

    ip = get_ipython()
    tests = {}

    # Collect tests: only callables whose names start with 'test' and that
    # have not already been run (unless a line argument forces a re-run).
    # Note: the original used isinstance(v, collections.Callable), which was
    # removed in Python 3.10; the built-in callable() is equivalent here.
    for k, v in ip.user_ns.items():
        if k.startswith('test') and callable(v) and ((not line) or (k not in tests_run)):
            tests[k] = v
            tests_run[k] = True

    # Initialize table object
    tbl = table(CLASS='data')
    tbl.addColGroup(h.col(), h.col())
    tbl.addCaption('Collected {} tests.\n'.format(len(tests)))
    tbl.addHeadRow(h.tr(h.th('Test function name'), h.th('Status')))

    # Run tests
    ok = 0
    fail = {}
    error = {}
    t1 = time.time()
    for name, func in tests.items():
        try:
            func()
        except AssertionError as e:
            msg = 'failed'
            fail[name] = e
        except Exception as e:
            msg = 'error'
            error[name] = e
        else:
            msg = 'successful'
            ok += 1
        tbl.addBodyRow(h.tr(h.td(name), h.td(msg), Class=msg))
    t2 = time.time()

    # Collect info on any failures
    if fail:
        tbl.addBodyRows(h.tr(h.th("Failed", span=2)))
        trs = []
        for name, e in fail.items():
            trs.append(h.tr(h.td(name), h.td(repr(e))))
        tbl.addBodyRows(*trs, CLASS='failures')

    # Collect info on any errors
    if error:
        tbl.addBodyRows(h.tr(h.th("Errors", span=2)))
        trs = []
        for name, e in error.items():
            trs.append(h.tr(h.td(name), h.td(repr(e))))
        tbl.addBodyRows(*trs, CLASS='errors')

    # Summary and timing of the tests
    tbl.addFootRow(h.tr(h.td('Successful', Class="right"), h.td('{}'.format(ok))))
    tbl.addFootRow(h.tr(h.td('Failed', Class="right"), h.td('{}'.format(len(fail)))))
    tbl.addFootRow(h.tr(h.td('Errors', Class="right"), h.td('{}'.format(len(error)))))
    tbl.addFootRow(h.tr(h.td("Execution", Class="right"), h.td('{:.4g} seconds'.format(t2 - t1))))

    # Return HTML table string
    return HTML(str(tbl))
Python
async def home():
    """Render the homepage of guweb's admin panel."""
    if 'authenticated' not in session:
        return await flash('error', 'Please login first.', 'login')

    author = await glob.db.fetch("SELECT priv FROM users WHERE id=%s", session['user_data']['id'])
    session['user_data']['priv'] = author['priv']
    author = Privileges(int(author['priv']))

    if Privileges.Admin not in author:
        return await flash('error', 'You have insufficient privileges. If you have privileges, try entering your profile to reload them.', 'home')

    # Fetch data from database
    dash_data = await glob.db.fetch(
        'SELECT COUNT(id) registered, '
        '(SELECT name FROM users ORDER BY id DESC LIMIT 1) latest_user, '
        '(SELECT COUNT(id) FROM users WHERE NOT priv & 1) banned, '
        '(SELECT COUNT(id) FROM users WHERE priv & 16 OR priv & 32) supporter '
        'FROM users'
    )

    most_played = await glob.db.fetch(
        "SELECT id, set_id, CONCAT(artist, ' - ', title, ' [', version, ']') "
        "AS `map_info`, passes FROM maps ORDER BY passes DESC LIMIT 1"
    )

    recent_logs = await glob.db.fetchall(
        "SELECT `logs`.`from`, `logs`.`to`, `logs`.`msg`, `logs`.`time`, "
        "`atbl`.`name` AS `author_name`, `rtbl`.`name` AS `receiver_name` FROM "
        "`logs` LEFT JOIN users AS `atbl` ON `logs`.`from` = atbl.id LEFT JOIN "
        "users AS `rtbl` ON `logs`.`to` = rtbl.id ORDER BY time DESC LIMIT 8"
    )

    for el in recent_logs:
        if '> restricted' in el['msg']:
            el['msg'] = el['msg'].split('for "', 1)
            el['msg'] = f"Reason: {el['msg'][1][:-2]}"
            el['color'] = "red"
            el['type'] = "restricted"
            el['icon'] = "fas fa-user-slash"
        elif 'note' in el['msg']:
            el['msg'] = el['msg'].split('added note:', 1)
            el['msg'] = f"Note Content: {el['msg'][1]}"
            el['color'] = "blue"
            el['type'] = "added note to"
            el['icon'] = "fas fa-sticky-note"
        elif '> unrestricted' in el['msg']:
            el['msg'] = el['msg'].split('for "', 1)
            el['msg'] = f"Reason: {el['msg'][1][:-2]}"
            el['color'] = "green"
            el['type'] = "unrestricted"
            el['icon'] = "fas fa-user-check"
        elif '> unsilenced' in el['msg']:
            el['msg'] = ""
            el['color'] = "lime"
            el['type'] = "unsilenced"
            el['icon'] = "fas fa-comment"
        elif '> silenced' in el['msg']:
            el['msg'] = el['msg'].split("silenced (", 1)
            el['msg'] = el['msg'][1].split(') for "', 1)
            el['msg'][0] = datetime.timedelta(seconds=int(el['msg'][0][:-1]))
            el['msg'] = f"Reason: {el['msg'][1][:-2]}. | Silenced for {el['msg'][0]} hours"
            el['color'] = "orange"
            el['type'] = "silenced"
            el['icon'] = "fas fa-comment-slash"
        else:
            el['color'] = "cyan"
            el['type'] = "Other"
            el['icon'] = "fas fa-question"
        # Every branch renders the same relative timestamp, so it is set once here.
        el['time'] = time_ago(datetime.datetime.utcnow(),
                              pandas.to_datetime(el['time'], format="%Y-%m-%d %H:%M:%S"),
                              time_limit=1) + " ago"

    return await render_template(
        'admin/home.html',
        dash_data=dash_data,
        datetime=datetime,
        timeago=timeago,
        most_played=most_played,
        recent_logs=recent_logs
    )
Python
def check_null_values(data_table: pd.DataFrame) -> bool:
    """Checks if the entire DataFrame contains any null values.

    :param data_table: Raw data DataFrame
    :return: bool
    """
    return data_table.isnull().any().any()
Python
def check_question_mark_as_null(data_table: pd.DataFrame) -> bool:
    """Checks that at least one column has a '?' value.

    :param data_table: Raw data DataFrame
    :return: bool
    """
    return any(Values.RAW_DATA_NULL.value in set(data_table[column_value])
               for column_value in data_table)
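A small demonstration of both checks on a toy DataFrame; the Values enum is not defined in the snippet, so a hypothetical stand-in is supplied here.

import enum
import pandas as pd

class Values(enum.Enum):
    RAW_DATA_NULL = "?"  # hypothetical stand-in for the project's enum

df = pd.DataFrame({"age": [25, None], "workclass": ["Private", "?"]})
print(check_null_values(df))            # True: the None in 'age'
print(check_question_mark_as_null(df))  # True: the '?' in 'workclass'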
Python
def view_column_hist(data_table: pd.DataFrame, column: str) -> None:
    """Plots and saves the column's histogram."""
    data_table.hist(column=column)
    file_name = '{}.svg'.format(column)
    file_path = join_dir(get_parent_dir(get_parent_dir(get_abs_dir(__file__))),
                         FolderNames.FIGURES.value, file_name)
    plt.savefig(file_path)
    plt.show()
    plt.close()
Python
def detachVM(self, return_vm=False, replace_vm=False):
    """detachVM - Detach the VM from this worker.

    The options are to return it to the pool's free list (return_vm), destroy
    it (not return_vm), and if destroying it, whether to replace it or not in
    the pool (replace_vm). The worker must always call this function before
    returning.
    """
    # Job-owned instance: simply destroy after the job is completed
    if self.job.accessKeyId:
        self.vmms.safeDestroyVM(self.job.vm)
    elif return_vm:
        self.preallocator.freeVM(self.job.vm)
    else:
        self.vmms.safeDestroyVM(self.job.vm)
        if replace_vm:
            self.preallocator.createVM(self.job.vm)
        # Important: don't remove the VM from the pool until its replacement
        # has been created. Otherwise there is a potential race where the job
        # manager thinks that the pool is empty and creates a spurious VM.
        self.preallocator.removeVM(self.job.vm)
Python
def rescheduleJob(self, hdrfile, ret, err):
    """rescheduleJob - Reschedule a job that has failed because of a system
    error, such as a VM timing out or a connection failure.
    """
    self.log.error("Job %s:%d failed: %s" % (self.job.name, self.job.id, err))
    self.job.appendTrace(
        "%s|Job %s:%d failed: %s"
        % (datetime.now().ctime(), self.job.name, self.job.id, err)
    )

    # Try a few times before giving up
    if self.job.retries < Config.JOB_RETRIES:
        try:
            os.remove(hdrfile)
        except OSError:
            pass
        self.detachVM(return_vm=False, replace_vm=True)
        self.jobQueue.unassignJob(self.job.id)
    # Here is where we give up
    else:
        self.jobQueue.makeDead(self.job.id, err)
        self.appendMsg(
            hdrfile,
            "Internal error: Unable to complete job after %d tries. Please resubmit"
            % (Config.JOB_RETRIES),
        )
        self.appendMsg(
            hdrfile,
            "Job status: waitVM=%s copyIn=%s runJob=%s copyOut=%s"
            % (ret["waitvm"], ret["copyin"], ret["runjob"], ret["copyout"]),
        )
        self.catFiles(hdrfile, self.job.outputFile)
        self.detachVM(return_vm=False, replace_vm=True)
        self.notifyServer(self.job)
Python
def appendMsg(self, filename, msg):
    """appendMsg - Append a timestamped Tango message to a file."""
    with open(filename, "a") as f:
        f.write("Autograder [%s]: %s\n" % (datetime.now().ctime(), msg))
Python
def catFiles(self, f1, f2):
    """catFiles - cat f1 f2 > f2, where f1 is the Tango header and f2 is the
    output from the Autodriver.
    """
    self.appendMsg(f1, "Here is the output from the autograder:\n---")
    (wfd, tmpname) = tempfile.mkstemp(dir=os.path.dirname(f2))
    wf = os.fdopen(wfd, "ab")
    with open(f1, "rb") as f1fd:
        shutil.copyfileobj(f1fd, wf)
    # f2 may not exist if the autograder failed
    try:
        with open(f2, "rb") as f2fd:
            shutil.copyfileobj(f2fd, wf)
    except OSError:
        pass
    wf.close()
    os.rename(tmpname, f2)
    os.remove(f1)
Python
def acquire(self):
    """Check that neither the source nor the destination endpoint is being
    used by another active transfer. If both endpoint locks are free, lock
    them for this transfer.
    """
    with GlobusTransfer.lock:
        if self.source.available() and self.destination.available():
            self.source.acquire()
            if self.source != self.destination:
                self.destination.acquire()
            return True
        return False
Python
def main(set, config):
    engine = create_engine(settings.database.get("url"))
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)

    # Lists of Endpoint and Transfer objects with locks are shared between
    # threads. The classes and objects store all locks and the information
    # necessary to synchronize transfers.
    global endpoints
    global transfers
    endpoints = [Endpoint(e) for e in config.get("endpoints")]
    datasets = list(config.get("datasets").keys())
    params = config.get("params")
    if params:
        GlobusTransfer.deadline = params.get("deadline", 3600)
    transfers = [GlobusTransfer(set, s, d, t)
                 for s in endpoints
                 for d in endpoints
                 for t in datasets]

    executor = Executor(max_workers=4)
    while GlobusTransfer.transfers2do > 0:
        for t in transfers:
            if t.done:
                continue
            if t.acquire():
                executor.submit(t.run)
        sleep(10)
Python
def run(self, **kwargs):
    """Creates the XML files, runs OpenMC, and returns the path to the last
    statepoint file generated.

    .. versionchanged:: 0.12
        Instead of returning the final k-effective value, this function now
        returns the path to the final statepoint written.

    Parameters
    ----------
    **kwargs
        Keyword arguments passed to :func:`openmc.run`

    Returns
    -------
    Path
        Path to the last statepoint written by this run
        (None if no statepoint was written)
    """
    self.export_to_xml()

    # Setting tstart here ensures we don't pick up any pre-existing
    # statepoint files in the output directory
    tstart = time.time()
    last_statepoint = None

    openmc.run(**kwargs)

    # Get the output directory and return the last statepoint written by this run
    if self.settings.output and 'path' in self.settings.output:
        output_dir = Path(self.settings.output['path'])
    else:
        output_dir = Path.cwd()
    for sp in output_dir.glob('statepoint.*.h5'):
        mtime = sp.stat().st_mtime
        if mtime >= tstart:  # >= allows for poor clock resolution
            tstart = mtime
            last_statepoint = sp
    return last_statepoint
Python
def __strip_source_code(self, code: Optional[str]) -> str:
    """Strips the source code from a Discord message.

    It strips:
        code wrapped in backticks ` (one-line code)
        code wrapped in triple backticks ``` (multiline code)
        code wrapped in triple backticks and a language keyword ```python
        (syntax highlighting)
    """
    code = code.strip("`")
    if re.match(r"\w*\n", code):
        code = "\n".join(code.split("\n")[1:])
    return code
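A minimal standalone check of the same stripping logic on the three message styles the docstring lists (rewrapped as a plain function purely for demonstration; the inputs are invented):

import re

def strip_source_code(code: str) -> str:
    # Same body as the method above, minus `self`.
    code = code.strip("`")
    if re.match(r"\w*\n", code):
        code = "\n".join(code.split("\n")[1:])
    return code

for msg in ("`print(1)`", "```\nprint(1)\n```", "```python\nprint(1)\n```"):
    print(repr(strip_source_code(msg)))  # all reduce to 'print(1)' (plus any trailing newline)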
Python
async def __create_output_embed(
    self,
    token: str,
    source_code: Optional[str],
    stdout: str,
    stderr: str,
    compile_output: str,
    time: float,
    memory: int,
    language: str,
    language_id: int,
    language_icon: str,
    description: str,
    author_name: str,
    author_icon: str,
):
    """Creates a Discord embed for the submission execution.

    Includes:
        Author of the submission.
        Green or red color of the embed depending on the description.
        Output (stdout, stderr, compile output).
        Link to the full output (if any).
        Time and memory usage.
        Language name, icon and version.
        Datetime of the execution.
    """
    ide_link = "https://ide.judge0.com/?"
    color = Color.green() if description == "Accepted" else Color.red()
    embed = Embed(colour=color, timestamp=datetime.utcnow())
    embed.set_author(name=f"{author_name}'s code execution", icon_url=author_icon)

    output = str()
    if stdout:
        output += base64.b64decode(stdout.encode()).decode()
    if stderr:
        output += base64.b64decode(stderr.encode()).decode()
    if compile_output and not output:
        output += base64.b64decode(compile_output.encode()).decode()
    if not output:
        output = "No output"

    print(len(output))           # debug output
    print(output.count("\n"))    # debug output

    if len(output) > 300 or output.count("\n") > 10:
        embed.description = f"Output too large - [Full output]({ide_link}{token})"
        if output.count("\n") > 10:
            output = "\n".join(output.split("\n")[:10]) + "\n(...)"
        else:
            output = output[:300] + "\n(...)"
    else:
        embed.description = f"Edit this code in an online IDE - [here]({ide_link}{token})"

    embed.add_field(name="Output", value=f"```yaml\n{output}```", inline=False)
    if time:
        embed.add_field(name="Time", value=f"{time} s")
    if memory:
        embed.add_field(name="Memory", value=f"{round(memory / 1000, 2)} MB")
    embed.set_footer(text=f"{language} | {description}", icon_url=language_icon)

    return embed
Python
def __create_how_to_pass_embed(self, ctx, lang):
    """Creates a Discord embed guide for passing code.

    Includes the 3 methods of passing source code.
    """
    embed = Embed(title=f"How to pass {lang.version.split('(')[0]}source code?")
    embed.set_thumbnail(url=lang.icon)
    embed.add_field(
        name="Method 1 (Plain)",
        value=(f"{ctx.prefix}{lang.command}\n" "code"),
        inline=False,
    )
    embed.add_field(
        name="Method 2 (Code block)",
        value=(f"{ctx.prefix}{lang.command}\n" "\\`\\`\\`code\\`\\`\\`"),
        inline=False,
    )
    embed.add_field(
        name="Method 3 (Syntax Highlighting)",
        value=(f"{self.bot.command_prefix}{lang.command}\n"
               f"\\`\\`\\`{lang.command}\n"
               "code\\`\\`\\`"),
        inline=False,
    )
    return embed
Python
async def __get_submission(
    self, source_code: Optional[str], language_id: int
) -> dict:
    """Sends the submission to the judge0 API and waits for the output."""
    base_url = "https://api.judge0.com/submissions/"
    base64_code = base64.b64encode(source_code.encode()).decode()
    payload = {"source_code": base64_code, "language_id": language_id}

    async with aiohttp.ClientSession() as cs:
        async with cs.post(f"{base_url}?base64_encoded=true", data=payload) as r:
            if r.status not in [200, 201]:
                return f"{r.status} {responses[r.status]}"
            res = await r.json()
        token = res["token"]
        print(token)  # debug output

        # Poll until the submission leaves the "In Queue" / "Processing" states.
        while True:
            submission = await cs.get(f"{base_url}{token}?base64_encoded=true")
            if submission.status not in [200, 201]:
                return f"{submission.status} {responses[submission.status]}"
            adict = await submission.json()
            if adict["status"]["id"] not in [1, 2]:
                break

        adict["token"] = token
        adict.update(payload)
        return adict
Python
async def __execute_code(self, ctx, lang, code: Optional[str]):
    """The main method for executing source code from a message.

    If the version check is passed as the arg, it sends the language version
    only. The steps for executing code:
        strips the source code
        creates and waits for the submission output
        if there is an error, it sends the error
        otherwise it creates an embed for the output and sends it in the same chat
    """
    if code is None:
        await ctx.send(embed=self.__create_how_to_pass_embed(ctx, lang))
        return

    if code.startswith("-v") or code.startswith("-version"):
        await ctx.send(f"> {lang.version}")
        return

    code = self.__strip_source_code(code)
    submission = await self.__get_submission(code, lang.id)

    if isinstance(submission, str):  # it is an error code
        await ctx.send(submission)
        await ctx.message.remove_reaction(
            "<a:typing:597589448607399949>", self.bot.user
        )
        return

    await ctx.send(
        embed=await self.__create_output_embed(
            token=submission["token"],
            source_code=submission["source_code"],
            stdout=submission["stdout"],
            stderr=submission["stderr"],
            compile_output=submission["compile_output"],
            time=submission["time"],
            memory=submission["memory"],
            language=lang.version,
            language_id=submission["language_id"],
            language_icon=lang.icon,
            description=submission["status"]["description"],
            author_name=str(ctx.message.author),
            author_icon=ctx.message.author.avatar_url,
        )
    )
Python
def read_baidu_examples(input_file, is_training):
    """Read a baidu json file into a list of BaiduExample."""

    def is_whitespace(c):
        if c == " " or c == "\t" or c == "\r" or c == "\n" or ord(c) == 0x202F:
            return True
        return False

    with open(input_file, "r", encoding='utf-8') as reader:
        examples = []
        for line in reader:
            example = json.loads(line)
            # qas_id = example['id']
            # question_text = example['question']
            # context_tokens = example['seg_para']  # seg_para is the segmented (tokenized) text
            qas_id = example['question_id']
            question_text = example['question']
            context_tokens = example['doc_tokens']

            start_position = None
            end_position = None
            orig_answer_text = None

            # When not training, the data should contain only the question and
            # the text, so the three fields above stay None. When training:
            if is_training:
                orig_answer_text = example['fake_answer'][0]
                start_position = int(example['answer_span'][0])
                end_position = int(example['answer_span'][1])

                # Check whether the given fake answer can actually be found in
                # the text; skip the example if it cannot.
                actual_text = "".join(context_tokens[start_position:(end_position + 1)])
                cleaned_answer_text = orig_answer_text
                if actual_text.find(cleaned_answer_text) == -1:
                    logger.warning("Could not find answer: '%s' vs. '%s'",
                                   actual_text, cleaned_answer_text)
                    continue

            per_example = BaiduExample(
                qas_id=qas_id,
                question_text=question_text,
                doc_tokens=context_tokens,
                orig_answer_text=orig_answer_text,
                start_position=start_position,
                end_position=end_position,
            )
            examples.append(per_example)
    return examples
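The BaiduExample container is referenced but not defined in this snippet; a plausible minimal stand-in (an assumption for illustration) is a namedtuple with the six fields used above:

import collections

# Hypothetical container matching the fields used in read_baidu_examples.
BaiduExample = collections.namedtuple(
    "BaiduExample",
    ["qas_id", "question_text", "doc_tokens",
     "orig_answer_text", "start_position", "end_position"],
)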
Python
def write_predictions(all_examples, all_features, all_results, n_best_size,
                      max_answer_length, do_lower_case, output_prediction_file,
                      output_nbest_file, verbose_logging):
    """Write final predictions to the json file."""
    logger.info("Writing predictions to: %s" % (output_prediction_file))
    logger.info("Writing nbest to: %s" % (output_nbest_file))

    example_index_to_features = collections.defaultdict(list)
    for feature in all_features:
        example_index_to_features[feature.example_index].append(feature)

    unique_id_to_result = {}
    for result in all_results:
        unique_id_to_result[result.unique_id] = result

    _PrelimPrediction = collections.namedtuple(  # pylint: disable=invalid-name
        "PrelimPrediction",
        ["feature_index", "start_index", "end_index", "start_logit", "end_logit"])

    all_predictions = collections.OrderedDict()
    all_nbest_json = collections.OrderedDict()
    for (example_index, example) in enumerate(all_examples):
        features = example_index_to_features[example_index]

        prelim_predictions = []
        for (feature_index, feature) in enumerate(features):
            result = unique_id_to_result[feature.unique_id]

            start_indexes = _get_best_indexes(result.start_logits, n_best_size)
            end_indexes = _get_best_indexes(result.end_logits, n_best_size)
            for start_index in start_indexes:
                for end_index in end_indexes:
                    # We could hypothetically create invalid predictions, e.g., predict
                    # that the start of the span is in the question. We throw out all
                    # invalid predictions.
                    if start_index >= len(feature.tokens):
                        continue
                    if end_index >= len(feature.tokens):
                        continue
                    if start_index not in feature.token_to_orig_map:
                        continue
                    if end_index not in feature.token_to_orig_map:
                        continue
                    if not feature.token_is_max_context.get(start_index, False):
                        continue
                    if end_index < start_index:
                        continue
                    length = end_index - start_index + 1
                    if length > max_answer_length:
                        continue
                    prelim_predictions.append(
                        _PrelimPrediction(
                            feature_index=feature_index,
                            start_index=start_index,
                            end_index=end_index,
                            start_logit=result.start_logits[start_index],
                            end_logit=result.end_logits[end_index]))

        prelim_predictions = sorted(
            prelim_predictions,
            key=lambda x: (x.start_logit + x.end_logit),
            reverse=True)

        _NbestPrediction = collections.namedtuple(  # pylint: disable=invalid-name
            "NbestPrediction", ["text", "start_logit", "end_logit"])

        seen_predictions = {}
        nbest = []
        for pred in prelim_predictions:
            if len(nbest) >= n_best_size:
                break
            feature = features[pred.feature_index]

            tok_tokens = feature.tokens[pred.start_index:(pred.end_index + 1)]
            orig_doc_start = feature.token_to_orig_map[pred.start_index]
            orig_doc_end = feature.token_to_orig_map[pred.end_index]
            orig_tokens = example.doc_tokens[orig_doc_start:(orig_doc_end + 1)]
            tok_text = " ".join(tok_tokens)

            # De-tokenize WordPieces that have been split off.
            tok_text = tok_text.replace(" ##", "")
            tok_text = tok_text.replace("##", "")

            # Clean whitespace
            tok_text = tok_text.strip()
            tok_text = " ".join(tok_text.split())
            orig_text = " ".join(orig_tokens)

            final_text = get_final_text(tok_text, orig_text, do_lower_case, verbose_logging)
            if final_text in seen_predictions:
                continue

            seen_predictions[final_text] = True
            nbest.append(
                _NbestPrediction(
                    text=final_text,
                    start_logit=pred.start_logit,
                    end_logit=pred.end_logit))

        # In very rare edge cases we could have no valid predictions. So we
        # just create a nonce prediction in this case to avoid failure.
        if not nbest:
            nbest.append(
                _NbestPrediction(text="empty", start_logit=0.0, end_logit=0.0))

        assert len(nbest) >= 1

        total_scores = []
        for entry in nbest:
            total_scores.append(entry.start_logit + entry.end_logit)

        probs = _compute_softmax(total_scores)

        nbest_json = []
        for (i, entry) in enumerate(nbest):
            output = collections.OrderedDict()
            output["text"] = entry.text
            output["probability"] = probs[i]
            output["start_logit"] = entry.start_logit
            output["end_logit"] = entry.end_logit
            nbest_json.append(output)

        assert len(nbest_json) >= 1

        i = 0
        # A local modification: skip predictions that consist only of the
        # Chinese full stop "。". (Original author's note: but why doesn't it
        # take effect?!) The min() guards against nbest_json being shorter
        # than n_best_size, which would otherwise raise an IndexError.
        for i in range(min(n_best_size, len(nbest_json))):
            if nbest_json[i]['text'] == "。":
                continue
            else:
                all_predictions[example.qas_id] = nbest_json[i]["text"]
                break
        # while i < n_best_size:
        #     if nbest_json[i]['text'] == "。":
        #         i += 1
        #     else:
        #         all_predictions[example.qas_id] = nbest_json[i]["text"]
        #         break
        # all_predictions[example.qas_id] = nbest_json[0]["text"]
        all_nbest_json[example.qas_id] = nbest_json

    with open(output_prediction_file, "w") as writer:
        writer.write(json.dumps(all_predictions, indent=4, ensure_ascii=False) + "\n")

    with open(output_nbest_file, "w") as writer:
        writer.write(json.dumps(all_nbest_json, indent=4, ensure_ascii=False) + "\n")
Python
def add_task(self, task, priority=0): 'Add a new task or update the priority of an existing task' if task in self.entry_finder: self.remove_task(task) count = next(self.counter) entry = [-priority, count, task] self.entry_finder[task] = entry heappush(self.pq, entry)
Python
def pop_task(self): 'Remove and return the lowest priority task. Raise KeyError if empty.' while self.pq: priority, count, task = heappop(self.pq) if task is not self.REMOVED: del self.entry_finder[task] return task raise KeyError('pop from an empty priority queue')
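Both `add_task` and `pop_task` above reference class state (`pq`, `entry_finder`, `counter`, `REMOVED`, `remove_task`) that these snippets do not define. A minimal enclosing-class sketch, following the standard heapq documentation recipe these methods appear to come from; everything outside the two methods shown above is an assumption:

import itertools
from heapq import heappush, heappop

class PriorityQueue:
    'Priority queue with lazy deletion, per the heapq docs recipe.'
    REMOVED = '<removed-task>'            # placeholder for a removed task

    def __init__(self):
        self.pq = []                      # list of entries arranged in a heap
        self.entry_finder = {}            # mapping of tasks to entries
        self.counter = itertools.count()  # unique sequence count (tie-breaker)

    def remove_task(self, task):
        'Mark an existing task as REMOVED. Raise KeyError if not found.'
        entry = self.entry_finder.pop(task)
        entry[-1] = self.REMOVED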
Python
def _nv_loss(self, cu, co): """Create a newsvendor loss function with the given under- and overage costs""" def customized_loss(y_true, y_pred): y_true = cast(y_true, y_pred.dtype) loss = switch(less(y_pred, y_true), cu * (y_true - y_pred), co * (y_pred - y_true)) return ksum(loss) return customized_loss
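The factory above relies on Keras backend functions imported under short names (`cast`, `switch`, `less`, and `sum` as `ksum`). A minimal usage sketch under that assumption; the model architecture and the cost values cu/co are illustrative only, not from the original:

from tensorflow.keras import layers, models
from tensorflow.keras.backend import cast, switch, less, sum as ksum

# Asymmetric newsvendor costs: underage (cu) penalized more than overage (co)
cu, co = 9.0, 1.0

def nv_loss(y_true, y_pred):
    # Same body the factory closes over, written standalone
    y_true = cast(y_true, y_pred.dtype)
    loss = switch(less(y_pred, y_true),
                  cu * (y_true - y_pred),
                  co * (y_pred - y_true))
    return ksum(loss)

model = models.Sequential([layers.Dense(16, activation="relu", input_shape=(4,)),
                           layers.Dense(1)])
model.compile(optimizer="adam", loss=nv_loss)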
Python
def doWriteBack(source, destination): """ Do a write back to Stash using SciTokens :param str source: The location of the local file :param str destination: The location of the remote file, in stash:// format """ start1 = int(time.time()*1000) scitoken_contents = getToken() if scitoken_contents is None: logging.error("Unable to find scitokens.use file") return 1 # Check if the source file is zero-length statinfo = os.stat(source) if statinfo.st_size == 0: command = "curl -v --connect-timeout 30 --speed-time 5 --speed-limit 1024 -X PUT --fail --upload-file %s -H \"Authorization: Bearer %s\" %s%s" % (source, scitoken_contents, writeback_host, destination) else: command = "curl -v --connect-timeout 30 --speed-limit 1024 -X PUT --fail --upload-file %s -H \"Authorization: Bearer %s\" %s%s" % (source, scitoken_contents, writeback_host, destination) if 'http_proxy' in os.environ: del os.environ['http_proxy'] logging.debug("curl command: %s" % command) curl=subprocess.Popen([command ],shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) (stdout, stderr) = curl.communicate() curl_exit=curl.returncode if statinfo.st_size == 0 and curl_exit == 28: logging.debug("Got curl exit code 28, but that's ok for zero-length files. This doesn't capture connection timeouts") curl_exit = 0 elif curl_exit != 0: logging.error(stdout) logging.error(stderr) sitename = os.environ.setdefault("OSG_SITE_NAME", "siteNotFound") end1=int(time.time()*1000) # Send the payload payload = { 'filename': source, 'sitename': sitename, 'timestamp': end1, 'host': writeback_host, 'upload_size': os.stat(source).st_size, 'status': 'Success', 'tries': 1, 'start1': start1, 'end1': end1, 'cache': 'None', 'writeback': 'True' } payload.update(parse_job_ad()) if curl_exit != 0: payload['status'] = "Failure" es_send(payload) return curl_exit
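Note that `doWriteBack` interpolates the bearer token into a shell string and runs it with `shell=True`, which exposes the token in process listings and breaks on paths with shell metacharacters. A hedged alternative sketch that passes an argument vector instead; the function name is hypothetical, the flag set mirrors the zero-length-file branch above, and the variables carry the same meaning as in the original:

import subprocess

def do_write_back_curl(source, scitoken_contents, writeback_host, destination):
    # Argument vector instead of a shell string; no quoting pitfalls
    curl_args = [
        "curl", "-v", "--connect-timeout", "30",
        "--speed-time", "5", "--speed-limit", "1024",
        "-X", "PUT", "--fail",
        "--upload-file", source,
        "-H", "Authorization: Bearer %s" % scitoken_contents,
        "%s%s" % (writeback_host, destination),
    ]
    curl = subprocess.Popen(curl_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = curl.communicate()
    return curl.returncode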
Python
def doStashCpSingle(sourceFile, destination, methods, debug=False): """ Perform a single copy from StashCache federation """ global nearest_cache # Parse the source and destination with urlparse source_url = urlparse(sourceFile) dest_url = urlparse(destination) understoodSchemes = ["stash", "file", ""] if source_url.scheme not in understoodSchemes: logging.error("Do not understand scheme: %s", source_url.scheme) return 1 if dest_url.scheme not in understoodSchemes: logging.error("Do not understand scheme: %s", dest_url.scheme) return 1 if dest_url.scheme == "stash": return doWriteBack(source_url.path, dest_url.path) if dest_url.scheme == "file": destination = dest_url.path if source_url.scheme == "stash": sourceFile = source_url.path if not sourceFile.startswith("/"): sourceFile = "/" + sourceFile sitename = os.environ.setdefault("OSG_SITE_NAME", "siteNotFound") # Fill out the payload as much as possible filename = destination + '/' + sourceFile.split('/')[-1] payload = {} payload['filename'] = sourceFile payload['sitename'] = sitename payload.update(parse_job_ad()) # Calculate the starting time start1 = int(time.time()*1000) # Go through the download methods cur_method = methods[0] success = False for method in methods: cur_method = method if method == "cvmfs": logging.info("Trying CVMFS...") if download_cvmfs(sourceFile, destination, debug, payload): success = True break elif method == "xrootd": logging.info("Trying XrootD...") if download_xrootd(sourceFile, destination, debug, payload): success = True break elif method == "http": logging.info("Trying HTTP...") if download_http(sourceFile, destination, debug, payload): success = True break else: logging.error("Unknown transfer method: %s", method) end1 = int(time.time()*1000) payload['start1']=start1 payload['end1']=end1 payload['timestamp']=end1 payload['download_time']=end1-start1 if success: payload['status'] = 'Success' # Get the final size of the downloaded file if os.path.isdir(destination): destination += "/" dest_dir, dest_filename = os.path.split(destination) if dest_filename: final_destination = destination else: final_destination = os.path.join(dest_dir, os.path.basename(sourceFile)) payload['filesize'] = os.stat(final_destination).st_size payload['download_size'] = payload['filesize'] else: logging.error("All methods failed! Unable to download file.") payload['status'] = 'Fail' es_send(payload) return 0 if success else 1
Python
def download_xrootd(sourceFile, destination, debug, payload):
    """
    Download from the nearest cache; if that fails, fall back to the stash origin.
    """
    global nearest_cache
    global nearest_cache_list

    # Check for xrootd, return quickly if it's not available
    if not check_for_xrootd():
        return False

    # If the cache is not specified on the command line, then look for the closest
    if not nearest_cache:
        nearest_cache = get_best_stashcache()

    cache = nearest_cache
    logging.debug("Using cache %s", nearest_cache)

    xrd_exit = timed_transfer(filename=sourceFile, debug=debug, cache=cache, destination=destination)

    payload['xrdexit1'] = xrd_exit

    if xrd_exit == '0':  # worked first try
        logging.debug("Transfer success using %s", nearest_cache)
        payload['tries'] = 1
        payload['cache'] = cache

    else:  # pull from origin
        logging.info("XrdCP from cache failed on %s, pulling from main redirector", nearest_cache)
        cache = main_redirector
        xrd_exit = timed_transfer(filename=sourceFile, cache=cache, debug=debug, destination=destination)

        if xrd_exit == '0':
            logging.info("Trunk success")
            status = 'Trunk Success'
        else:
            logging.info("stashcp failed after 2 xrootd attempts")
            status = 'Timeout'

        payload['status'] = status
        payload['xrdexit2'] = xrd_exit
        payload['tries'] = 2
        payload['cache'] = cache

    return xrd_exit == '0'
Python
def check_for_xrootd():
    """
    Check if xrootd is installed by checking if the xrdcp command returns a reasonable output
    """
    # xrdcp outputs the version on stderr, what?!?
    check_command = "xrdcp -V 2>&1"
    logging.debug("Running the command to check for xrdcp existence: %s", check_command)
    command_object = subprocess.Popen([check_command], stdout=subprocess.PIPE, shell=True)
    xrdcp_version = command_object.communicate()[0]
    if command_object.returncode == 0:
        logging.debug("xrdcp version: %s", xrdcp_version)
        return xrdcp_version
    else:
        logging.debug("xrdcp command returned exit code: %i", command_object.returncode)
        return False
Python
def download_http(source, destination, debug, payload): """ Download from the nearest cache with HTTP """ global nearest_cache global nearest_cache_list logging.debug("Downloading with HTTP") #scitoken_contents = getToken() scitoken_contents = None if not nearest_cache: nearest_cache = get_best_stashcache() # Ok, now run the curl command: if debug: output_mode = "-v" else: output_mode = "-s" # The command will cd into destination directory and then run curl if os.path.isdir(destination): destination += "/" dest_dir, dest_filename = os.path.split(destination) if not dest_dir: dest_dir = "." if dest_filename: download_output = "-o %s" % dest_filename final_destination = destination else: download_output = "-O" final_destination = os.path.join(dest_dir, os.path.basename(source)) # Try 2 nearest caches success = False start = end = 0 tried_cache = "" for cache in nearest_cache_list[:2]: tried_cache = cache # Parse the nearest_cache url, make sure it uses http # Should really use urlparse, but python3 and python2 urlparse imports are # very different if cache.startswith('root://'): cache = cache.replace('root://', 'http://') # Append port 8000, which is just a convention for now, not set in stone # Check if the cache already has a port attached to it parsed_url = urlparse(cache) if not parsed_url.port: cache += ":8000" # Quote the source URL, which may have weird, dangerous characters quoted_source = urllib2.quote(source) if scitoken_contents: curl_command = "curl %s -L --connect-timeout 30 --speed-limit 1024 %s --fail -H \"Authorization: Bearer %s\" %s%s" % (output_mode, download_output, scitoken_contents, cache, quoted_source) else: curl_command = "curl %s -L --connect-timeout 30 --speed-limit 1024 %s --fail %s%s" % (output_mode, download_output, cache, quoted_source) logging.debug("About to run curl command: %s", curl_command) start = int(time.time()*1000) command_object = subprocess.Popen([curl_command], shell=True, cwd=dest_dir) command_object.wait() end = int(time.time()*1000) if command_object.returncode == 0: success = True break if success: dlSz=os.stat(final_destination).st_size filesize = dlSz status = 'Success' payload['download_size']=dlSz payload['filesize'] = filesize payload['host']=tried_cache payload['tries']=1 payload['cache']=tried_cache if success: return True else: return False
Python
def parse_job_ad():
    """
    Parse the .job.ad file for the Owner (username) and ProjectName of the callee.
    """
    temp_list = {}
    try:
        if '_CONDOR_JOB_AD' in os.environ:
            filename = os.environ['_CONDOR_JOB_AD']
        elif os.path.exists(".job.ad"):
            filename = ".job.ad"
        else:
            return {}
        with open(filename) as job_file:
            for line in job_file:
                match = re.search(r'^\s*(Owner|ProjectName)\s=\s"(.*)"', line, re.IGNORECASE)
                if match:
                    temp_list[match.group(1)] = match.group(2)
    except IOError:
        logging.error("Unable to open the .job.ad file")

    return temp_list
Python
def timed_transfer(filename, destination, cache, debug=False):
    """
    Transfer the filename from the cache to the destination using xrdcp
    """
    # All these values can be found on the xrdcp man page
    os.environ.setdefault("XRD_REQUESTTIMEOUT", "30")    # How long to wait for a read request (s)
    os.environ.setdefault("XRD_CPCHUNKSIZE", "8388608")  # Size of each read request (8MB)
    os.environ.setdefault("XRD_TIMEOUTRESOLUTION", "5")  # How often to check the timeouts
    os.environ.setdefault("XRD_CONNECTIONWINDOW", "30")  # How long to wait for the initial TCP connection
    os.environ.setdefault("XRD_CONNECTIONRETRY", "2")    # How many times we should retry the TCP connection
    os.environ.setdefault("XRD_STREAMTIMEOUT", "30")     # How long to wait for TCP activity

    if not filename.startswith("/"):
        filepath = cache + ":1094//" + filename
    else:
        filepath = cache + ":1094/" + filename

    if debug:
        command = "xrdcp -d 2 --nopbar -f " + filepath + " " + destination
    else:
        command = "xrdcp -s -f " + filepath + " " + destination

    filename = "./" + filename.split("/")[-1]

    if os.path.isfile(filename):
        os.remove(filename)

    if debug:
        logging.debug("xrdcp command: %s", command)
        xrdcp = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE)
    else:
        xrdcp = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

    xrdcp.communicate()
    xrd_exit = xrdcp.returncode

    return str(xrd_exit)
Python
def compute_summary_statistics(self, hist_sat, hist_cen, hist_mem_sat, hist_mem_cen):
    """
    Calculate mean and scatter of the red sequence, and the red fraction.
    """
    tot_sat = np.sum(hist_sat, axis=(1,2))
    tot_cen = np.sum(hist_cen, axis=(1,2))

    hist_mem = hist_mem_sat + hist_mem_cen
    norm = np.sum(hist_mem * self.dc.reshape(1,-1,1), axis=1)

    rs_mean = np.sum(self.c_mean.reshape(1,-1,1) * hist_mem * self.dc.reshape(1,-1,1),
                     axis=1) / norm
    rs_scat = np.sqrt(np.sum((self.c_mean.reshape(1,-1,1) -
                              rs_mean.reshape(-1,1,self.n_bands-1))**2 *
                             hist_mem * self.dc.reshape(1,-1,1), axis=1) / norm)

    red_frac_sat = 1 - np.sum((hist_sat - hist_mem_sat)/tot_sat.reshape(-1,1,1), axis=(1,2))
    red_frac_cen = 1 - np.sum((hist_cen - hist_mem_cen)/tot_cen.reshape(-1,1,1), axis=(1,2))

    return rs_mean, rs_scat, red_frac_sat, red_frac_cen
Python
def enter(self):
    """Called when the player enters the room."""
    print('You enter the {}.'.format(self.name))
    print("You've been here {} time(s) before.".format(self.visits))
    self.visits += 1
    self.describe()
Python
def is_every_letter_guessed(word, guesses): """Returns if every letter in the target word is accounted for in the user's guesses. """ # It's easier to check the converse; are we missing anything? Check # if any one of the letters in the target word *isn't* guessed. for letter in word: if letter not in guesses: # Found a letter in the word that hasn't been guessed yet! return False # If we've reached this point, the whole word has been gone over. # Not one of its letters was missing from the list of guesses. return True
Python
def partial_word(word, guesses):
    """Returns the target word with letters not yet guessed shown as
    periods.
    """
    # gradually build the correctly-guessed/hidden parts of the word
    word_so_far = ''
    for letter in word:
        # write either the current letter of the target word, or a
        # period, depending on if the user has guessed the letter
        # yet or not
        if letter in guesses:
            word_so_far += letter
        else:
            word_so_far += '.'
    return word_so_far
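For illustration, a minimal game loop tying `is_every_letter_guessed` and `partial_word` together; the prompt handling is an assumption, not part of the original snippets:

def play(word):
    guesses = []
    while not is_every_letter_guessed(word, guesses):
        print(partial_word(word, guesses))
        guess = input('Guess a letter: ').strip().lower()
        if guess and guess not in guesses:
            guesses.append(guess)
    print('You got it: {}'.format(word))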
Python
def compute_score(self, graphs : list, termination : torch.Tensor, validity : torch.Tensor, uniqueness : torch.Tensor) -> \ torch.Tensor: """ Computes the overall score for the input molecular graphs. Args: ---- graphs (list) : Contains molecular graphs to evaluate. termination (torch.Tensor) : Termination status of input molecular graphs. validity (torch.Tensor) : Validity of input molecular graphs. uniqueness (torch.Tensor) : Uniqueness of input molecular graphs. Returns: ------- final_score (torch.Tensor) : The final scores for each input graph. """ self.n_graphs = len(graphs) contributions_to_score = self.get_contributions_to_score(graphs=graphs) if len(self.score_components) == 1: final_score = contributions_to_score[0] elif self.score_type == "continuous": final_score = contributions_to_score[0] for component in contributions_to_score[1:]: final_score *= component elif self.score_type == "binary": component_masks = [] for idx, score_component in enumerate(contributions_to_score): component_mask = torch.where( score_component > self.score_thresholds[idx], torch.ones(self.n_graphs, device=self.device, dtype=torch.uint8), torch.zeros(self.n_graphs, device=self.device, dtype=torch.uint8) ) component_masks.append(component_mask) final_score = component_masks[0] for mask in component_masks[1:]: final_score *= mask final_score = final_score.float() else: raise NotImplementedError # remove contribution of duplicate molecules to the score final_score *= uniqueness # remove contribution of invalid molecules to the score final_score *= validity # remove contribution of improperly-terminated molecules to the score final_score *= termination return final_score
Python
def compute_activity(self, mols : list,
                     activity_model : sklearn.svm.classes.SVC) -> torch.Tensor:
    """
    Note: this function may have to be tuned/replicated depending on how
    the activity model is saved.

    Args:
    ----
        mols (list) : Contains `rdkit.Mol` objects corresponding to
          molecular graphs sampled.
        activity_model (sklearn.svm.classes.SVC) : Pre-trained QSAR model.

    Returns:
    -------
        activity (torch.Tensor) : Contains predicted activities for input
          molecules.
    """
    n_mols = len(mols)
    activity = torch.zeros(n_mols, device=self.device)

    for idx, mol in enumerate(mols):
        try:
            fingerprint = AllChem.GetMorganFingerprintAsBitVect(mol, 2, nBits=2048)
            ecfp4 = np.zeros((2048,))
            DataStructs.ConvertToNumpyArray(fingerprint, ecfp4)
            activity[idx] = activity_model.predict_proba([ecfp4])[0][1]
        except Exception:
            pass  # activity[idx] will remain 0.0

    return activity
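For context, the fingerprint-then-predict pipeline inside the loop can be exercised standalone. A sketch assuming a pickled scikit-learn SVC trained with probability=True on 2048-bit ECFP4 fingerprints; the model path is hypothetical:

import pickle
import numpy as np
from rdkit import Chem, DataStructs
from rdkit.Chem import AllChem

with open("qsar_svc.pkl", "rb") as f:   # hypothetical model path
    activity_model = pickle.load(f)

mol = Chem.MolFromSmiles("CCO")
fingerprint = AllChem.GetMorganFingerprintAsBitVect(mol, 2, nBits=2048)
ecfp4 = np.zeros((2048,))
DataStructs.ConvertToNumpyArray(fingerprint, ecfp4)

# Probability of the "active" class (column 1)
print(activity_model.predict_proba([ecfp4])[0][1])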
Python
from xml.sax.saxutils import escape as xml_escape

def escape(value):
    '''Escape XML value

    :param string value Input value
    :return string Escaped input
    '''
    # Equivalent of PHP's htmlspecialchars(value, ENT_XML1, 'UTF-8')
    return xml_escape(value, {'"': '&quot;', "'": '&apos;'})
Python
import secrets

def randomId():
    '''
    Generate random ID

    This method is used for generating random IDs required when signing the
    document.

    :return int Random number
    '''
    # Python always has a CSPRNG available, so no fallback to a weak
    # generator (as in the original PHP's rand()) is needed
    return 0x10000000 + secrets.randbelow(0x7FFFFFFF - 0x10000000 + 1)
Python
def injectNamespaces(xml, newNs):
    '''
    Inject namespaces

    :param string xml Input XML
    :param string|string[] newNs Namespaces
    :return string Canonicalized XML with new namespaces
    '''
    if not isinstance(newNs, list):
        newNs = [newNs]
    head, rest = xml.split(">", 1)
    oldNs = head.split(" ")
    elementName = oldNs.pop(0)

    # Combine and sort namespaces
    xmlns = []
    attributes = []
    for name in oldNs + newNs:
        if name.startswith('xmlns:'):
            xmlns.append(name)
        else:
            attributes.append(name)
    xmlns.sort()
    attributes.sort()
    ns = xmlns + attributes

    # Generate new XML element
    return elementName + " " + " ".join(ns) + ">" + rest
Python
import base64

def toBase64(bytes, pretty=False):
    '''
    To Base64

    :param string bytes Input
    :param boolean pretty Pretty Base64 response
    :return string Base64 response
    '''
    res = base64.b64encode(bytes).decode('ascii')
    if pretty:
        # `prettify` is the sibling helper referenced in the original code
        return prettify(res)
    return res
Python
def maybe_set_cache(nick, verified):
    """
    If running in uWSGI, it sets a key to a value depending on if it
    verified successfully or if the nick no longer exists.

    key is the sha1 hash of the nick, and the value is 1 for successful
    verification and 0 for nonexistent.
    """
    if IS_UWSGI:
        key = hashlib.sha1(nick.encode()).digest()
        if verified is True:
            uwsgi.cache_set(key, b'1', cfg['CACHE_EXPIRE'], 'verified')
        elif verified is None:
            uwsgi.cache_set(key, b'0', cfg['CACHE_EXPIRE'], 'verified')
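The counterpart lookup would read the same cache before re-verifying. A sketch under the same assumptions (IS_UWSGI, hashlib, uwsgi, and the 'verified' cache name come from the surrounding module; the function name is hypothetical); uwsgi.cache_get returns None on a miss:

def check_cache(nick):
    """Return the cached verification result, or None on a cache miss."""
    if not IS_UWSGI:
        return None
    key = hashlib.sha1(nick.encode()).digest()
    value = uwsgi.cache_get(key, 'verified')
    if value is None:
        return None          # cache miss: caller should verify again
    return value == b'1'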
Python
def realm_entity(table, row): """ Assign a Realm Entity to records """ db = current.db s3db = current.s3db tablename = original_tablename(table) realm_entity = 0 if tablename == "pr_person": pass # using default elif tablename in ("pr_address", "pr_contact", "pr_contact_emergency", "pr_image", ): # Inherit from person via PE table = s3db.table(tablename) ptable = s3db.pr_person query = (table._id == row.id) & \ (ptable.pe_id == table.pe_id) person = db(query).select(ptable.realm_entity, limitby = (0, 1), ).first() if person: realm_entity = person.realm_entity elif tablename in ("pr_group_membership", "pr_person_details", "pr_person_tag", ): # Inherit from person via person_id table = s3db.table(tablename) ptable = s3db.pr_person query = (table._id == row.id) & \ (ptable.id == table.person_id) person = db(query).select(ptable.realm_entity, limitby = (0, 1), ).first() if person: realm_entity = person.realm_entity elif tablename == "cr_shelter_population": # Inherit from shelter table = s3db.table(tablename) stable = s3db.cr_shelter query = (table._id == row.id) & \ (stable.id == table.shelter_id) shelter = db(query).select(stable.realm_entity, limitby = (0, 1), ).first() if shelter: realm_entity = shelter.realm_entity return realm_entity
Python
def update_commune_group_shelter_reader(user_id): """ Automatically assign/remove the SHELTER_READER role for commune groups depending on which districts the user has the role for Args: user_id: the user ID """ db = current.db s3db = current.s3db auth = current.auth # Get the group ID of the SHELTER_READER role rtable = auth.settings.table_group role = db(rtable.uuid == "SHELTER_READER").select(rtable.id, limitby = (0, 1), ).first() if not role: return role_id = role.id # Get all current SHELTER_READER assignments atable = auth.settings.table_membership query = (atable.user_id == user_id) & \ (atable.group_id == role_id) & \ (atable.deleted == False) assigned = db(query).select(atable.pe_id, atable.system).as_dict(key="pe_id") if not assigned: return elif 0 in assigned: # Global role => remove all system-assigned (as they are redundant) remove = [k for k, v in assigned.items() if v["system"]] assign = None else: # Look up all DISTRICTS and COMMUNES groups from ..config import DISTRICTS, COMMUNES gtable = s3db.org_group query = ((gtable.name == DISTRICTS) | (gtable.name.like("%s%%" % COMMUNES))) & \ (gtable.name != COMMUNES) & \ (gtable.deleted == False) groups = db(query).select(gtable.id, gtable.pe_id, gtable.name, ).as_dict(key="name") districts = groups[DISTRICTS] if districts["pe_id"] in assigned: # User has the role for the DISTRICTS org group # => auto-assign for all COMMUNES groups remove = None assign = [] for name, group in groups.items(): pe_id = group["pe_id"] if name.startswith(COMMUNES) and pe_id not in assigned: assign.append(pe_id) else: # Get the pe_ids and district IDs of all districts mtable = s3db.org_group_membership otable = s3db.org_organisation ttable = s3db.org_organisation_tag join = [mtable.on((mtable.organisation_id == otable.id) & \ (mtable.group_id == districts["id"]) & \ (mtable.deleted == False)), ttable.on((ttable.organisation_id == otable.id) & \ (ttable.tag == "DistrictID") & \ (ttable.deleted == False)), ] query = (otable.deleted == False) rows = db(query).select(otable.pe_id, ttable.value, join = join, ) # Determine which district groups the user should have and which not add, rmv = [], [] for row in rows: district = row.org_organisation district_id = row.org_organisation_tag.value if not district_id: continue district_group_name = "%s (%s)" % (COMMUNES, district_id) if district.pe_id in assigned: add.append(district_group_name) else: rmv.append(district_group_name) # Also remove those district groups for which there is no district for name, group in groups.items(): if name.startswith(COMMUNES) and name not in add: rmv.append(name) # Determine which assignments need to be added/removed assign, remove = [], [] for name, group in groups.items(): pe_id = group["pe_id"] if name in add and pe_id not in assigned: assign.append(pe_id) elif name in rmv and pe_id in assigned and assigned[pe_id]["system"]: remove.append(pe_id) # Remove/add assignments as needed if remove: for pe_id in remove: auth.s3_remove_role(user_id, role_id, for_pe=pe_id) if assign: for pe_id in assign: auth.s3_assign_role(user_id, role_id, for_pe=pe_id, system=True)
Python
def assign_role(user_id, role_id, for_pe=None): """ Extend standard role assignment with auto-assignment of SHELTER_READER """ current.auth.s3_assign_role(user_id, role_id, for_pe=for_pe) update_commune_group_shelter_reader(user_id)
Python
def remove_role(user_id, role_id, for_pe=None):
    """
    Extend standard role removal with auto-update of SHELTER_READER
    """
    current.auth.s3_remove_role(user_id, role_id, for_pe=for_pe)

    update_commune_group_shelter_reader(user_id)
Python
def defaults(): """ Safe defaults for names in case the module is disabled """ return {"fin_voucher_invoice_status": {}, "fin_voucher_claim_status_opts": {}, "fin_voucher_billing_status_opts": {}, }
Python
def billing_onvalidation(form): """ Validation of billing form: - must not change date or status once process has started - can only change status to ABORT before start - date must be after any active or completed billing - date must be on a different day than any scheduled billing """ T = current.T db = current.db s3db = current.s3db form_vars = form.vars if "id" in form_vars: record_id = form_vars.id elif hasattr(form, "record_id"): record_id = form.record_id else: record_id = None table = s3db.fin_voucher_billing # Get the existing record if record_id: query = (table.id == record_id) record = db(query).select(table.id, table.program_id, table.date, table.status, limitby=(0, 1), ).first() else: record = None # Get the program if "program_id" in form_vars: program_id = form_vars["program_id"] elif record: program_id = record.program_id else: program_id = table.program_id.default date_error = False if record: # Update if "date" in form_vars: date = form_vars["date"] if record.status != "SCHEDULED" and record.date != date: form.errors.date = T("Date can only be changed while process has not yet started") date_error = True if "status" in form_vars: status = form_vars["status"] if status != record.status: if record.status != "SCHEDULED": form.errors.status = T("Status cannot be changed once process has started") elif status != "ABORTED": form.errors.status = T("Invalid status") if not date_error and "date" in form_vars: date = form_vars["date"] if date: p = fin_VoucherProgram(program_id) earliest = p.earliest_billing_date(billing_id = record_id) if earliest and date < earliest: form.errors.date = T("Date must be %(min)s or later!") % {"min": earliest} else: query = (table.program_id == program_id) if record_id: query &= (table.id != record_id) query &= (table.status == "SCHEDULED") & \ (table.date == date) & \ (table.deleted == False) row = db(query).select(table.id, limitby = (0, 1)).first() if row: form.errors.date = T("Billing already scheduled for that date")
Python
def billing_onaccept(form): """ Onaccept-routine for billing - schedule task to start the process """ # Get record ID form_vars = form.vars if "id" in form_vars: record_id = form_vars.id elif hasattr(form, "record_id"): record_id = form.record_id else: return db = current.db s3db = current.s3db # Get the record table = s3db.fin_voucher_billing query = (table.id == record_id) billing = db(query).select(table.id, table.date, table.status, table.task_id, limitby = (0, 1), ).first() if not billing: return # Get the scheduler task (if any) ttable = s3db.scheduler_task task_id = billing.task_id if task_id: query = (ttable.id == task_id) task = db(query).select(ttable.id, ttable.status, ttable.start_time, ttable.next_run_time, limitby = (0, 1), ).first() else: task = None if billing.status == "SCHEDULED" and billing.date: now = datetime.datetime.utcnow() start = datetime.datetime.combine(billing.date, datetime.time(0,0,0)) if start < now: # Earliest start time 30 seconds in the future # => to leave a grace period to manually abort the process start = now + datetime.timedelta(seconds=30) if task: # Make sure task starts at the right time if task.status not in ("ASSIGNED", "RUNNING"): task.update_record(start_time = start, next_run_time = start, stop_time = None, status = "QUEUED", enabled = True, ) else: # Schedule task scheduler = current.s3task.scheduler application = "%s/default" % current.request.application task = scheduler.queue_task("s3db_task", pargs = ["fin_voucher_start_billing"], pvars = {"billing_id": billing.id}, application_name = application, start_time = start, stop_time = None, timeout = 1800, repeats = 1, ) if task: task_id = task.id elif task and task.status == "QUEUED": # Remove the task task.delete_record() task_id = None # Store the task_id billing.update_record(task_id = task_id, modified_by = table.modified_by, modified_on = table.modified_on, )
Python
def claim_onvalidation(form): """ Onvalidation of claims: - if claim has already been confirmed, or even invoiced, immutable fields can no longer be changed - if claim is to be any other status than new, bank account details are required """ form_vars = form.vars if "id" in form_vars: record_id = form_vars.id elif hasattr(form, "record_id"): record_id = form.record_id else: record_id = None db = current.db s3db = current.s3db table = s3db.fin_voucher_claim if record_id: # Get the record query = (table.id == record_id) record = db(query).select(table.id, table.status, table.invoice_id, table.account_holder, table.account_number, limitby = (0, 1), ).first() else: record = None T = current.T has_status = "status" in form_vars status = form_vars.get("status") if record: change_status = has_status and status != record.status check = {"account_holder": T("Account holder is required"), "account_number": T("Account number is required"), } if record.invoice_id or record.status != "NEW": # This claim has already been invoiced and cannot be changed immutable = ("program_id", "billing_id", "pe_id", "date", "vouchers_total", "quantity_total", "price_per_unit", "amount_receivable", "account_holder", "account_number", "status", "invoice_id", ) for fn in immutable: if fn in form_vars: form.errors[fn] = T("Value can no longer be changed") elif record.status == "NEW" and \ has_status and not change_status and \ all(fn in form_vars and form_vars[fn] or record[fn] for fn in check): # Warn if the user has entered bank account details, but # not confirmed the claim current.response.warning = T('You must change the status to "confirmed" before an invoice can be issued') elif change_status and status != "NEW": # Changing status of a NEW claim requires account details for fn, msg in check.items(): value = form_vars[fn] if fn in form_vars else record[fn] if value is None or not value.strip(): if fn in form_vars: form.errors[fn] = msg else: form.errors["status"] = msg break elif change_status and status != "NEW": # A new claim can only have status "NEW" form.errors["status"] = T("Invalid status")
def invoice_onvalidation(form): """ Onvalidation of invoices: - new invoices can only have status NEW - status PAID requires payment order number - status REJECTED requires a reason - changing status to anything other than NEW or REJECTED requires the verification hashes to be intact and the claim to have INVOICED status - once marked as PAID, the status can no longer be changed """ form_vars = form.vars if "id" in form_vars: record_id = form_vars.id elif hasattr(form, "record_id"): record_id = form.record_id else: record_id = None db = current.db s3db = current.s3db table = s3db.fin_voucher_invoice if record_id: # Get the record query = (table.id == record_id) record = db(query).select(table.id, table.status, limitby = (0, 1), ).first() else: record = None T = current.T status = form_vars.get("status") change_status = "status" in form_vars if record: change_status = change_status and status != record.status status_error = False if record.status == "PAID": # Status cannot be changed if change_status: form.errors.status = T("Status cannot be changed") status_error = True elif change_status and status not in ("NEW", "REJECTED"): # Verify the hashes if not fin_VoucherBilling.check_invoice(record.id): form.errors.status = T("Invoice integrity compromised") status_error = True else: # Verify that the claim status is INVOICED ctable = s3db.fin_voucher_claim query = (ctable.invoice_id == record.id) & \ (ctable.deleted == False) claim = db(query).select(ctable.id, ctable.status, limitby = (0, 1), ).first() if not claim or claim.status != "INVOICED": form.errors.status = T("Claim has incorrect status") status_error = True if not status_error: # Check required fields if not change_status: status = record.status if status == "REJECTED": # Rejection requires a reason ff = fn = "reason" if fn in form_vars: value = form_vars[fn] else: value = record[fn] ff = "status" if value is None or not value.strip(): form.errors[ff] = T("Reason must be specified") elif change_status and status != "NEW": # A new invoice can only have status "NEW" form.errors["status"] = T("Invalid status")
def invoice_onaccept(form):
    """
        Onaccept procedure for invoices:
            - if status is PAID and the claim is still INVOICED, start
              the settle_invoice process (async, if possible)
    """

    # Get record ID
    form_vars = form.vars
    if "id" in form_vars:
        record_id = form_vars.id
    elif hasattr(form, "record_id"):
        record_id = form.record_id
    else:
        return

    db = current.db
    s3db = current.s3db

    # Get the record
    table = s3db.fin_voucher_invoice
    query = (table.id == record_id)
    record = db(query).select(table.id,
                              table.status,
                              table.ptoken,
                              limitby = (0, 1),
                              ).first()
    if not record:
        return

    # Get the underlying claim
    ctable = s3db.fin_voucher_claim
    query = (ctable.invoice_id == record.id) & \
            (ctable.deleted == False)
    claim = db(query).select(ctable.id,
                             ctable.status,
                             limitby = (0, 1),
                             orderby = ctable.id,
                             ).first()

    if record.status == "PAID" and not record.ptoken and \
       claim and claim.status == "INVOICED":
        # Generate authorization token
        import uuid
        ptoken = str(int(uuid.uuid4()))

        record.update_record(ptoken = ptoken,
                             modified_by = table.modified_by,
                             modified_on = table.modified_on,
                             )

        current.s3task.run_async("s3db_task",
                                 args = ["fin_voucher_settle_invoice"],
                                 vars = {"invoice_id": record.id,
                                         "ptoken": ptoken,
                                         },
                                 timeout = 600,
                                 )
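# Sketch of the one-time authorization token generated above: a random
# UUID rendered as a decimal string. The token is stored on the invoice
# and passed to the async task, which must present it in order to be
# allowed to settle the invoice.
import uuid

def make_ptoken():
    return str(int(uuid.uuid4()))

token = make_ptoken()
assert token.isdigit()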
def voucher_create_onvalidation(form): """ Form validation of new vouchers: - check that program is active and hasn't ended - if using eligibility types, verify that the chosen type matches the program and is permissible for the issuer """ db = current.db s3db = current.s3db T = current.T settings = current.deployment_settings table = s3db.fin_voucher ptable = s3db.fin_voucher_program form_vars = form.vars if "program_id" in form_vars: program_id = form_vars.program_id else: program_id = table.program_id.default query = (ptable.id == program_id) & \ (ptable.deleted == False) program = db(query).select(ptable.status, ptable.end_date, limitby = (0, 1), ).first() if program: if program.status != "ACTIVE": form.errors["program_id"] = T("Program inactive") end_date = program.end_date if end_date and end_date < current.request.utcnow.date(): form.errors["program_id"] = T("Program has ended") if settings.get_fin_voucher_eligibility_types() and \ "eligibility_type_id" in form_vars: # Verify that eligibility type matches program eligibility_type_id = form_vars["eligibility_type_id"] ttable = s3db.fin_voucher_eligibility_type if eligibility_type_id: query = (ttable.id == eligibility_type_id) & \ (ttable.program_id == program_id) & \ (ttable.deleted == False) etype = db(query).select(ttable.issuer_types, limitby = (0, 1), ).first() if not etype: form.errors["eligibility_type_id"] = T("Invalid eligibility type") else: query = (ttable.program_id == program_id) & \ (ttable.deleted == False) anytype = db(query).select(ttable.id, limitby=(0, 1)).first() if anytype: form.errors["eligibility_type_id"] = T("Eligibility type required") etype = None # Verify that eligibility type is permissible for issuer if etype and etype.issuer_types: if "pe_id" in form_vars: issuer = form_vars["pe_id"] else: issuer = table.pe_id.default permitted_types = etype.issuer_types otable = s3db.org_organisation ltable = s3db.org_organisation_organisation_type join = ltable.on((ltable.organisation_id == otable.id) & \ (ltable.organisation_type_id.belongs(permitted_types)) & \ (ltable.deleted == False)) query = (otable.pe_id == issuer) row = db(query).select(otable.id, join = join, limitby = (0, 1), ).first() if not row: form.errors["eligibility_type_id"] = T("Eligibility type not permissible for issuer")
def voucher_create_onaccept(form): """ Onaccept of new voucher: - transfer initial credit to the voucher (issue) - generate voucher signature - set expiration date Args: form: the FORM """ # Get record ID form_vars = form.vars if "id" in form_vars: record_id = form_vars.id elif hasattr(form, "record_id"): record_id = form.record_id else: return table = current.s3db.fin_voucher query = (table.id == record_id) voucher = current.db(query).select(table.id, table.uuid, table.program_id, table.initial_credit, limitby = (0, 1), ).first() if not voucher: return update = {} program = fin_VoucherProgram(voucher.program_id) # Set end-date if program prescribes it pdata = program.program if pdata: validity_period = pdata.validity_period if validity_period: now = current.request.utcnow update["valid_until"] = (now + datetime.timedelta(days=validity_period)).date() # Set default initial credit from program if voucher.initial_credit is None: default_credit = pdata.default_credit if default_credit: update["initial_credit"] = default_credit # Generate voucher signature import uuid vn = "0%s0%s" % (voucher.program_id, voucher.id) update["signature"] = "%s%s" % (str(uuid.uuid4().int)[:(16-len(vn))], vn) voucher.update_record(**update) program.issue(voucher.id)
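# Sketch of the signature scheme above: a 16-digit code whose trailing
# digits encode the program and voucher IDs (each prefixed with a "0"),
# front-padded with random digits taken from a UUID. Illustration only;
# for very large IDs the random prefix shrinks accordingly.
import uuid

def make_signature(program_id, voucher_id):
    vn = "0%s0%s" % (program_id, voucher_id)
    return "%s%s" % (str(uuid.uuid4().int)[:(16 - len(vn))], vn)

signature = make_signature(4, 123)
assert signature.endswith("040123") and len(signature) == 16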
def voucher_status(row): """ Virtual field indicating the status of the voucher """ T = current.T if hasattr(row, "fin_voucher"): row = row.fin_voucher if hasattr(row, "balance") and row.balance is not None: if row.balance > 0: return T("Issued##fin") else: return T("Redeemed##fin") else: return current.messages["NONE"]
def debit_onvalidation(form): """ Validate debit: - identify the voucher to debit - verify bearer identity features, program and voucher status """ T = current.T s3db = current.s3db form_vars = form.vars if "signature" not in form_vars: form.errors["signature"] = T("Missing voucher signature") return if "program_id" in form_vars: program_id = form_vars.program_id else: table = s3db.fin_voucher_debit program_id = table.program_id.default signature = form_vars["signature"] error = None settings = current.deployment_settings # Find the voucher vtable = s3db.fin_voucher ptable = s3db.fin_voucher_program join = ptable.on(ptable.id == vtable.program_id) query = (vtable.signature == signature) & \ (vtable.deleted == False) row = current.db(query).select(vtable.id, vtable.bearer_dob, vtable.bearer_pin, vtable.balance, vtable.valid_until, vtable.single_debit, ptable.id, ptable.status, ptable.end_date, join = join, limitby = (0, 1), ).first() field = "signature" if not row: error = T("Invalid voucher") else: program = row.fin_voucher_program voucher = row.fin_voucher today = current.request.utcnow.date() valid_until = voucher.valid_until balance = voucher.balance personalize = settings.get_fin_voucher_personalize() # Voucher must match the selected program if program_id and str(program_id) != str(program.id): error = T("Voucher is for a different program") if not error: # Verify bearer identity feature (if required) if personalize == "dob" and voucher.bearer_dob: bearer_dob = form_vars.get("bearer_dob") if bearer_dob != voucher.bearer_dob: field = "bearer_dob" error = T("Incorrect Date of Birth") elif personalize == "pin" and voucher.bearer_pin: bearer_pin = form_vars.get("bearer_pin") if bearer_pin != voucher.bearer_pin: field = "bearer_pin" error = T("Incorrect PIN") if not error: # Verify program status if program.status != "ACTIVE": error = T("Voucher program suspended") elif program.end_date and program.end_date < today: error = T("Voucher program has ended") # Verify voucher status elif valid_until and valid_until < today: error = T("Voucher expired") elif balance <= 0: error = T("Voucher credit exhausted") if not error: # Verify quantity/balance if "quantity" in form_vars: quantity = form_vars["quantity"] if quantity > balance: field = "quantity" error = T("Max %(number)s allowed") % {"number": balance} elif balance > 1 and voucher.single_debit: error = T("Group voucher! - use dedicated form") if error: form.errors[field] = error
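# The bearer verification above, reduced to a standalone predicate
# (plain dicts stand in for the voucher record and the form values):
def verify_bearer(voucher, form_vars, personalize):
    if personalize == "dob" and voucher.get("bearer_dob"):
        return form_vars.get("bearer_dob") == voucher["bearer_dob"]
    if personalize == "pin" and voucher.get("bearer_pin"):
        return form_vars.get("bearer_pin") == voucher["bearer_pin"]
    return True

assert verify_bearer({"bearer_pin": "1234"}, {"bearer_pin": "1234"}, "pin")
assert not verify_bearer({"bearer_pin": "1234"}, {"bearer_pin": "9999"}, "pin")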
def debit_status(row): """ Virtual field indicating the status of the debit """ T = current.T if hasattr(row, "fin_voucher_debit"): row = row.fin_voucher_debit if hasattr(row, "balance") and row.balance is not None: if row.balance > 0: return T("Compensation pending##fin") elif row.cancelled: return T("Cancelled##fin") else: return T("Compensated##fin") else: return current.messages["NONE"]
def program(self): """ The program record (lazy property) Returns: the program record (Row) """ program = self._program if program is None: # Look up program table = current.s3db.fin_voucher_program query = (table.id == self.program_id) & \ (table.status == "ACTIVE") & \ (table.deleted == False) program = current.db(query).select(table.id, table.uuid, table.status, table.default_credit, table.validity_period, table.end_date, table.credit, table.compensation, table.unit, table.price_per_unit, table.currency, limitby = (0, 1), ).first() self._program = program return program
def issue(self, voucher_id, credit=None): """ Transfer credit from the program to the voucher Args: voucher_id: the new voucher credit: the initial credit to transfer to the voucher Returns: the number of credit transferred to the voucher, or None on failure """ program = self.program if not program: return None s3db = current.s3db table = s3db.fin_voucher query = (table.id == voucher_id) & \ (table.program_id == program.id) & \ (table.deleted == False) voucher = current.db(query).select(table.id, table.initial_credit, table.balance, limitby = (0, 1), ).first() if not voucher: return None if not isinstance(credit, int): credit = voucher.initial_credit if credit is None: credit = program.default_credit if credit is None: return 0 balance = voucher.balance if balance: credit -= balance if credit: # Transfer initial credit to the voucher transaction = {"type": "ISS", "credit": -credit, "voucher": credit, "voucher_id": voucher_id, } if self.__transaction(transaction): voucher.update_record( balance = voucher.balance + transaction["voucher"], ) ptable = s3db.fin_voucher_program program.update_record( credit = ptable.credit + transaction["credit"], modified_on = ptable.modified_on, modified_by = ptable.modified_by, ) else: return None return credit
def void(self, voucher_id): """ Charge back the remaining balance of a voucher to the program, thereby voiding the voucher Args: voucher_id: the voucher ID Returns: the number of credits charged back to the program, or None on failure """ program = self.program if not program: return None s3db = current.s3db table = s3db.fin_voucher query = (table.id == voucher_id) & \ (table.program_id == program.id) & \ (table.deleted == False) voucher = current.db(query).select(table.id, table.balance, limitby = (0, 1), ).first() if not voucher: return None balance = voucher.balance if balance and balance > 0: # Transfer remaining balance back to the program transaction = {"type": "VOI", "credit": balance, "voucher": -balance, "voucher_id": voucher_id, } if self.__transaction(transaction): voucher.update_record( balance = voucher.balance + transaction["voucher"], ) ptable = s3db.fin_voucher_program program.update_record( credit = ptable.credit + transaction["credit"], modified_on = ptable.modified_on, modified_by = ptable.modified_by, ) else: return None else: return 0 return balance
def debit(self, voucher_id, debit_id, credit=None): """ Transfer credit to the provider when redeeming a voucher, i.e. debit a voucher. Args: voucher_id: the voucher ID debit_id: the debit ID credit: the credit to transfer (default 1) Returns: the credit deducted from the voucher Notes: Actually a double transaction: 1) transfer credit from the program's compensation account to the debit 2) return credit from the voucher to the program's credit account """ program = self.program if not program: return None program_id = program.id db = current.db s3db = current.s3db vtable = s3db.fin_voucher query = (vtable.id == voucher_id) & \ (vtable.program_id == program_id) & \ (vtable.deleted == False) voucher = db(query).select(vtable.id, vtable.balance, vtable.single_debit, vtable.credit_spent, limitby = (0, 1), ).first() if not voucher: return None dtable = s3db.fin_voucher_debit query = (dtable.id == debit_id) & \ (dtable.program_id == program_id) & \ (dtable.deleted == False) debit = db(query).select(dtable.id, dtable.quantity, dtable.balance, limitby = (0, 1), ).first() if not debit: return None if not isinstance(credit, int): credit = debit.quantity if credit is None: credit = 1 if credit > voucher.balance: return None if credit > 0: transaction = {"type": "DBT", "credit": credit, "voucher": -credit, "debit": credit, "compensation": -credit, "voucher_id": voucher_id, "debit_id": debit_id, } if self.__transaction(transaction): credit_spent = voucher.credit_spent if credit_spent: credit_spent += transaction["debit"] else: credit_spent = transaction["debit"] voucher.update_record( credit_spent = credit_spent, balance = voucher.balance + transaction["voucher"], ) debit.update_record( balance = debit.balance + transaction["debit"], ) ptable = s3db.fin_voucher_program program.update_record( credit = ptable.credit + transaction["credit"], compensation = ptable.compensation + transaction["compensation"], modified_on = ptable.modified_on, modified_by = ptable.modified_by, ) if voucher.balance > 0 and voucher.single_debit: # Voucher can only be debited once self.void(voucher.id) else: return None return credit
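# The DBT transaction above, in isolation: a single balanced movement
# that charges the voucher, returns credit to the program's credit
# account, funds the debit, and draws down the compensation account.
dbt = {"type": "DBT",
       "credit": 1,
       "voucher": -1,
       "debit": 1,
       "compensation": -1,
       }
assert sum(dbt[k] for k in ("credit", "voucher", "debit", "compensation")) == 0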
def cancel(self, debit_id, reason): """ Cancel a debit - transfers the credit back to the voucher, and adjusts the program's credit/compensation balances accordingly, also reverses any voiding of single-debit vouchers Args: debit_id: the debit ID reason: the reason for cancellation (required) Returns: tuple (credits, error) - the number of credits returned, or None on failure - the failure reason Notes: - Cancelling a debit is only possible while the debit is not part of any other transactions, and has not yet been included in a billing or compensated - Implementations should ensure that debits can only be cancelled by the organisation that originally created them (i.e. the provider who has accepted the voucher), so as to not breach trust """ program = self.program if not program: return None, "Program not found" db = current.db s3db = current.s3db # Reason must not be empty if not isinstance(reason, str) or not reason: return None, "No reason specified" # Get the debit and verify that it can be cancelled debit, error = self.cancellable(debit_id) if error: return None, error total_credit = debit.balance # Look up all DBT-transactions for this debit (normally only one) ttable = s3db.fin_voucher_transaction query = (ttable.debit_id == debit.id) & \ (ttable.type == "DBT") & \ (ttable.deleted == False) rows = db(query).select(ttable.id, ttable.type, ttable.credit, ttable.voucher, ttable.debit, ttable.compensation, ttable.voucher_id, ttable.debit_id, ) if rows: # Totals must match (don't fix) if sum(t.debit for t in rows) != total_credit: return None, "Debit balance does not match transactions" vtable = s3db.fin_voucher for row in rows: # Get the voucher query = (vtable.id == row.voucher_id) voucher = db(query).select(vtable.id, vtable.balance, vtable.credit_spent, vtable.single_debit, vtable.initial_credit, limitby = (0, 1), ).first() if not voucher: continue # Reverse the DBT transaction credit = row.debit transaction = {"type": "CNC", "credit": -credit, "voucher": credit, "debit": -credit, "compensation": credit, "voucher_id": row.voucher_id, "debit_id": debit.id, } if self.__transaction(transaction): # Update the voucher balance voucher.update_record( balance = voucher.balance + transaction["voucher"], credit_spent = voucher.credit_spent + transaction["debit"], ) if voucher.single_debit: # Restore the initial credit initial_credit = voucher.initial_credit if initial_credit and initial_credit > voucher.balance: self.issue(voucher.id, initial_credit) # Update the debit debit.update_record( balance = debit.balance + transaction["debit"], quantity = 0, voucher_id = None, signature = None, bearer_dob = None, cancelled = True, cancel_reason = reason, ) # Update the program balance ptable = s3db.fin_voucher_program program.update_record( credit = ptable.credit + transaction["credit"], compensation = ptable.compensation + transaction["compensation"], modified_on = ptable.modified_on, modified_by = ptable.modified_by, ) return total_credit, None
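# The cancellation above writes the exact mirror (type CNC) of the
# original DBT, so the two transactions cancel out on every account:
def reverse_dbt(dbt):
    cnc = {k: -dbt[k] for k in ("credit", "voucher", "debit", "compensation")}
    cnc["type"] = "CNC"
    return cnc

dbt = {"type": "DBT", "credit": 1, "voucher": -1, "debit": 1, "compensation": -1}
cnc = reverse_dbt(dbt)
assert all(dbt[k] + cnc[k] == 0 for k in ("credit", "voucher", "debit", "compensation"))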
def cancellable(self, debit_id): """ Verify if a debit can still be cancelled Args: debit_id: the debit ID Returns: tuple (debit, error) - the debit record if cancellable - otherwise None, and the reason why not """ program = self.program if not program: return None, "Program not found" program_id = program.id db = current.db s3db = current.s3db # Get the debit and verify its status error = None dtable = s3db.fin_voucher_debit query = (dtable.id == debit_id) & \ (dtable.program_id == program_id) & \ (dtable.deleted == False) debit = db(query).select(dtable.id, dtable.balance, dtable.billing_id, dtable.cancelled, limitby = (0, 1), ).first() if not debit: error = "Debit not found" elif debit.cancelled: error = "Debit already cancelled" elif debit.billing_id: error = "Debit already included in billing" if error: return None, error # Check if there is any transaction other than DBT for the debit ttable = s3db.fin_voucher_transaction query = (ttable.debit_id == debit.id) & \ (ttable.type != "DBT") & \ (ttable.deleted == False) rows = db(query).select(ttable.id, limitby=(0, 1)) if rows: return None, "Debit already part of other transactions" return debit, None
def compensate(self, debit_id, credit=None): """ Compensate a debit (transfer credit back to the program), usually when the provider is compensated for the service rendered Args: debit_id: the debit ID credit: the number of credits compensated Returns: the number of credits transferred, None on failure """ program = self.program if not program: return None s3db = current.s3db table = s3db.fin_voucher_debit query = (table.id == debit_id) & \ (table.program_id == program.id) & \ (table.deleted == False) debit = current.db(query).select(table.id, table.balance, limitby = (0, 1), ).first() if not debit: return None if not isinstance(credit, int): credit = debit.balance if credit is None or credit > debit.balance: return None if credit: # Transfer remaining balance to the compensation account transaction = {"type": "CMP", "compensation": credit, "debit": -credit, "debit_id": debit.id, } if self.__transaction(transaction): debit.update_record( balance = debit.balance + transaction["debit"], ) ptable = s3db.fin_voucher_program program.update_record( compensation = ptable.compensation + transaction["compensation"], modified_on = ptable.modified_on, modified_by = ptable.modified_by, ) else: return None else: return 0 return credit
def verify(self, transaction_id): """ Verify integrity of a transaction (=check the vhash) Args: transaction_id: the transaction record ID Returns: True|False whether the transaction is intact """ db = current.db table = current.s3db.fin_voucher_transaction # Get the transaction record query = (table.id == transaction_id) transaction = db(query).select(table.ALL, limitby = (0, 1), ).first() if not transaction: return False # Get preceding transaction's hash ouuid = transaction.ouuid if ouuid: query = (table.uuid == ouuid) & \ (table.program_id == transaction.program_id) p_transaction = db(query).select(table.vhash, limitby = (0, 1), ).first() if p_transaction is None: return False ohash = p_transaction.vhash else: ohash = None # Verify the hash data = {"ouuid": ouuid, "date": transaction.date, "type": transaction.type, "credit": transaction.credit, "voucher": transaction.voucher, "debit": transaction.debit, "compensation": transaction.compensation, "voucher_id": transaction.voucher_id, "debit_id": transaction.debit_id, } vhash = self._hash(data, ohash) return vhash == transaction.vhash
def audit(self, correct=False): """ Run a full audit of the entire program: - verify all transactions - verify all balances, vouchers and debits Args: correct: correct any incorrect balances Returns: audit report TODO implement """ return True
def earliest_billing_date(self, billing_id=None, configure=None): """ Get the earliest possible billing date for the program - must be after any active or completed billing processes Args: billing_id: the billing ID configure: a Field to configure accordingly (typically fin_voucher_billing.date itself) Returns: the earliest possible billing date """ program = self.program if not program: return None db = current.db s3db = current.s3db btable = s3db.fin_voucher_billing query = (btable.program_id == program.id) & \ (btable.status.belongs(("IN PROGRESS", "COMPLETED"))) if billing_id: query &= (btable.id != billing_id) query &= (btable.date != None) & (btable.deleted == False) row = db(query).select(btable.date, limitby = (0, 1), orderby = ~btable.date, ).first() earliest = row.date + datetime.timedelta(days=1) if row else None if earliest and configure: default = configure.default configure.default = max(earliest, default) if default else earliest configure.requires = IS_EMPTY_OR(IS_UTC_DATE(minimum=earliest)) configure.widget = S3CalendarWidget(minimum=earliest) return earliest
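# The rule above in isolation: billing may start no earlier than the
# day after the latest active or completed billing of the program.
import datetime

last_billing = datetime.date(2021, 6, 30)
earliest = last_billing + datetime.timedelta(days=1)
assert earliest == datetime.date(2021, 7, 1)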
def _hash(self, transaction, ohash): """ Generate a verification hash (vhash) for the transaction Args: transaction: the transaction data ohash: the hash of the preceding transaction Returns: the hash as string """ # Generate signature from transaction data signature = {} signature.update(transaction) signature["date"] = s3_format_datetime(transaction["date"]) # Hash it, together with program UUID and ohash data = {"puuid": self.program.uuid, "ohash": ohash, "signature": signature, } inp = json.dumps(data, separators=JSONSEPARATORS) crypt = CRYPT(key = current.deployment_settings.hmac_key, digest_alg = "sha512", salt = False, ) return str(crypt(inp)[0])
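# Toy illustration of the vhash chaining implemented above, with
# hashlib standing in for web2py's CRYPT (same principle, different
# primitive): each hash covers its predecessor, so altering any
# transaction invalidates every later hash in the chain.
import hashlib
import json

def chain_hash(record, ohash):
    payload = json.dumps({"ohash": ohash, "record": record},
                         separators=(",", ":"), sort_keys=True)
    return hashlib.sha512(payload.encode("utf-8")).hexdigest()

h0 = chain_hash({"type": "ISS", "credit": -5}, None)
h1 = chain_hash({"type": "DBT", "credit": 1}, h0)
# Tampering with the first record breaks the chain:
assert chain_hash({"type": "ISS", "credit": -4}, None) != h0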
def __transaction(self, data): """ Record a transaction under this program Args: data: the transaction details Returns: True|False for success or failure """ program = self.program if not program: return False # Prevent recording of unbalanced transactions total = data.get("credit", 0) + \ data.get("voucher", 0) + \ data.get("debit", 0) + \ data.get("compensation", 0) if total != 0: # Invalid - total change must always be 0 return False # Get last preceding transaction in this program table = current.s3db.fin_voucher_transaction query = (table.program_id == program.id) row = current.db(query).select(table.uuid, table.vhash, limitby = (0, 1), orderby = ~(table.created_on) ).first() if row: ouuid = row.uuid ohash = row.vhash else: # This is the first transaction ouuid = ohash = None # Build the transaction record transaction = {"ouuid": ouuid, "date": current.request.utcnow, "type": None, "credit": 0, "voucher": 0, "debit": 0, "compensation": 0, "voucher_id": None, "debit_id": None } transaction.update(data) transaction["ouuid"] = ouuid transaction["vhash"] = self._hash(transaction, ohash) transaction["program_id"] = program.id s3db = current.s3db # Write the transaction table = s3db.fin_voucher_transaction transaction["id"] = table.insert(**transaction) # Post-process it current.auth.s3_set_record_owner(table, transaction) s3db.onaccept(table, transaction, method="create") return True
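# The zero-sum guard at the top of __transaction as a standalone
# predicate: credit may only move between accounts, never appear
# from or vanish into nothing.
def is_balanced(tx):
    return sum(tx.get(k, 0) for k in
               ("credit", "voucher", "debit", "compensation")) == 0

assert is_balanced({"type": "ISS", "credit": -5, "voucher": 5})
assert not is_balanced({"type": "ISS", "credit": -5})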
def billing(self): """ Get the billing record (lazy property) Returns: Row Raises: ValueError: if the billing reference is invalid """ billing = self._record if not billing: btable = current.s3db.fin_voucher_billing query = (btable.id == self.billing_id) & \ (btable.deleted == False) billing = current.db(query).select(btable.id, btable.program_id, btable.date, btable.status, limitby = (0, 1), ).first() if not billing: raise ValueError("Billing record not found") self._record = billing return billing
def program(self): """ Get the voucher program for this billing process (lazy property) Returns: fin_VoucherProgram Raises: ValueError: if the program reference is invalid """ program = self._program if not program: program = fin_VoucherProgram(self.billing.program_id) if not program.program: raise ValueError("Invalid program reference") self._program = program return program
def verify(self): """ Verify all relevant debits, fix any incorrect balances Returns: number of invalid transactions """ db = current.db s3db = current.s3db billing = self.billing program = self.program dtable = s3db.fin_voucher_debit ttable = s3db.fin_voucher_transaction query = (dtable.billing_id == billing.id) & \ (dtable.claim_id == None) & \ (dtable.deleted == False) left = ttable.on((ttable.debit_id == dtable.id) & \ (ttable.deleted == False)) rows = db(query).select(dtable.id, dtable.balance, ttable.id, ttable.debit, left = left, orderby = dtable.date, ) log = current.log invalid_debit = "Voucher program billing - invalid debit: #%s" invalid_transaction = "Voucher program billing - corrupted transaction: #%s" invalid = 0 totals = {} debits = {} for row in rows: debit = row.fin_voucher_debit transaction = row.fin_voucher_transaction if transaction.id is None: # Invalid debit, drop it from billing log.warning(invalid_debit % debit.id) debit.update_record(billing_id = None, modified_on = dtable.modified_on, modified_by = dtable.modified_by, ) elif program.verify(transaction.id): # Valid transaction debit_id = debit.id if debit_id in totals: totals[debit_id] += transaction.debit else: totals[debit_id] = transaction.debit debits[debit_id] = debit else: # Invalid transaction log.error(invalid_transaction % transaction.id) invalid += 1 if not invalid: # Fix any incorrect debit balances for debit_id, total in totals.items(): debit = debits[debit_id] if debit.balance != total: debit.update_record(balance = total, modified_on = dtable.modified_on, modified_by = dtable.modified_by, ) return invalid
def generate_claims(self):
    """
        Generate claims for compensation for any unprocessed
        debits under this billing process

        Returns:
            number of claims generated

        Raises:
            ValueError: if the program reference is invalid, the
                        program has no valid unit price, or invalid
                        transactions are found
    """

    # Activate the billing process
    billing = self.__activate()

    # Get the program
    try:
        program = self.program
    except ValueError:
        self.__abort("Program not found")
        raise

    # Abort if there is no unit price (never raise zero-charge claims)
    pdata = program.program
    ppu = pdata.price_per_unit
    if not ppu or ppu <= 0:
        error = "Program has no valid unit price!"
        self.__abort(error)
        raise ValueError(error)

    # Verify all relevant transactions
    invalid = self.verify()
    if invalid:
        error = "%s invalid transactions found!" % invalid
        self.__abort(error)
        raise ValueError(error)

    db = current.db
    s3db = current.s3db

    dtable = s3db.fin_voucher_debit
    ctable = s3db.fin_voucher_claim

    # Customise claim resource
    from core import CRUDRequest
    r = CRUDRequest("fin", "voucher_claim", args=[], get_vars={})
    r.customise_resource("fin_voucher_claim")

    # Base query
    query = (dtable.billing_id == billing.id) & \
            (dtable.claim_id == None) & \
            (dtable.deleted == False)

    # Get totals per provider
    provider_id = dtable.pe_id
    balance_total = dtable.balance.sum()
    num_vouchers = dtable.voucher_id.count()
    rows = db(query).select(provider_id,
                            balance_total,
                            num_vouchers,
                            groupby = provider_id,
                            having = balance_total > 0,
                            )

    # Generate claims
    billing_id = billing.id
    total_claims = 0
    s3db_onaccept = s3db.onaccept
    set_record_owner = current.auth.s3_set_record_owner
    for row in rows:

        # Check provider
        provider = row[provider_id]
        if not provider:
            continue

        # Compute amount receivable (unit price was validated above)
        vouchers = row[num_vouchers]
        quantity = row[balance_total]
        amount = quantity * ppu

        # Claim details
        data = {"program_id": pdata.id,
                "billing_id": billing_id,
                "pe_id": provider,
                "date": datetime.datetime.utcnow(),
                "status": "NEW",
                "vouchers_total": vouchers,
                "quantity_total": quantity,
                "price_per_unit": ppu,
                "amount_receivable": amount,
                "currency": pdata.currency,
                }
        data["id"] = claim_id = ctable.insert(**data)
        if claim_id:
            # Post-process the claim
            set_record_owner(ctable, data)
            s3db_onaccept(ctable, data, method="create")
            # Update all debits with claim_id
            q = query & (dtable.pe_id == provider)
            db(q).update(claim_id = claim_id,
                         modified_by = dtable.modified_by,
                         modified_on = dtable.modified_on,
                         )
            total_claims += 1

    # Update totals in billing
    query = (ctable.billing_id == billing_id) & \
            (ctable.deleted == False)
    vouchers_total = ctable.vouchers_total.sum()
    quantity_total = ctable.quantity_total.sum()
    row = db(query).select(vouchers_total,
                           quantity_total,
                           ).first()
    btable = s3db.fin_voucher_billing
    billing.update_record(vouchers_total = row[vouchers_total],
                          quantity_total = row[quantity_total],
                          modified_by = btable.modified_by,
                          modified_on = btable.modified_on,
                          )

    # If no claims have been generated, conclude the billing
    # right away (as there will be no later trigger)
    if total_claims == 0:
        self.check_complete(claims_complete=True)

    return total_claims
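The SQL aggregation at the core of generate_claims, the sum of debit balances and voucher count per provider with only positive totals kept, can be sketched standalone; the IDs and unit price below are made up:

from collections import defaultdict

def totals_per_provider(debits):
    # debits: iterable of (provider_pe_id, balance) pairs
    totals = defaultdict(int)
    for provider, balance in debits:
        totals[provider] += balance
    # Mirror of the HAVING clause: only positive totals raise a claim
    return {p: t for p, t in totals.items() if t > 0}

ppu = 10    # made-up unit price
claims = [{"pe_id": p, "quantity_total": t, "amount_receivable": t * ppu}
          for p, t in totals_per_provider([(1, 4), (1, 2), (2, 0)]).items()]
assert claims == [{"pe_id": 1, "quantity_total": 6, "amount_receivable": 60}]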
def generate_invoice(cls, claim_id): """ Generate an invoice for a claim Args: claim_id: the claim record ID Returns: tuple (invoice_id, error) """ db = current.db s3db = current.s3db ctable = s3db.fin_voucher_claim dtable = s3db.fin_voucher_debit itable = s3db.fin_voucher_invoice invoice_fields = cls.invoice_fields # Get the claim fields = [ctable.id, ctable.uuid, ctable.date, ctable.status, ctable.invoice_id] + \ [ctable[fn] for fn in invoice_fields] + \ [ctable.bank_name, ctable.bank_address] query = (ctable.id == claim_id) & \ (ctable.deleted == False) claim = db(query).select(limitby=(0, 1), *fields).first() # Verify claim status if not claim: return None, "Claim not found" if claim.status != "CONFIRMED": return None, "Claim must be confirmed" if claim.invoice_id: return None, "Claim already invoiced" # Verify claim details query = (dtable.claim_id == claim.id) & \ (dtable.deleted == False) total = dtable.balance.sum() row = db(query).select(total).first() quantity_total = claim.quantity_total if quantity_total != row[total]: return None, "Invalid claim: incorrect quantity" ppu = claim.price_per_unit if not ppu or ppu <= 0: return None, "Invalid claim: no valid unit price" amount_receivable = claim.amount_receivable if not amount_receivable or amount_receivable != quantity_total * ppu: return None, "Invalid claim: incorrect total amount receivable" # The claim details data = {fn: claim[fn] for fn in invoice_fields} # Generate an invoice number btable = s3db.fin_voucher_billing query = (btable.id == claim.billing_id) billing = db(query).select(btable.uuid, limitby=(0, 1)).first() try: bprefix = (billing.uuid.rsplit(":", 1)[-1][:4]).upper() except (TypeError, AttributeError): bprefix = "" invoice_no = "B%s%02dC%04d" % (bprefix, claim.billing_id, claim.id) # Customise invoice resource from core import CRUDRequest r = CRUDRequest("fin", "voucher_invoice", args=[], get_vars={}) r.customise_resource("fin_voucher_invoice") # Generate invoice idata = {"date": datetime.datetime.utcnow().date(), "invoice_no": invoice_no, "status": "NEW", "vhash": cls._hash(claim.uuid, claim.date, data), "bank_name": claim.bank_name, "bank_address": claim.bank_address, } idata.update(data) invoice_id = idata["id"] = itable.insert(**idata) # Postprocess invoice current.auth.s3_set_record_owner(itable, idata) s3db.onaccept(itable, idata, method="create") # Update claim with invoice_id and verification hash query = (itable.id == invoice_id) invoice = db(query).select(itable.uuid, itable.billing_id, itable.date, itable.quantity_total, limitby = (0, 1), ).first() claim.update_record(invoice_id = invoice_id, status = "INVOICED", vhash = cls._hash(invoice.uuid, invoice.date, data), modified_on = ctable.modified_on, modified_by = ctable.modified_by, ) # Update totals in billing query = (itable.billing_id == invoice.billing_id) & \ (itable.deleted == False) quantity_total = itable.quantity_total.sum() row = db(query).select(quantity_total).first() btable = s3db.fin_voucher_billing query = (btable.id == invoice.billing_id) db(query).update(quantity_invoiced = row[quantity_total], modified_by = btable.modified_by, modified_on = btable.modified_on, ) return invoice_id, None
def check_invoice(cls, invoice_id): """ Check the integrity of an invoice/claim pair (=check the hashes) Args: invoice_id: the invoice ID Returns: True|False """ db = current.db s3db = current.s3db invoice_fields = cls.invoice_fields # Get the invoice itable = s3db.fin_voucher_invoice fields = [itable.id, itable.uuid, itable.date, itable.vhash] fields += [itable[fn] for fn in invoice_fields] query = (itable.id == invoice_id) & \ (itable.deleted == False) invoice = db(query).select(limitby=(0, 1), *fields).first() if not invoice: return False # Get the claim ctable = s3db.fin_voucher_claim fields = [ctable.id, ctable.uuid, ctable.date, ctable.vhash] fields += [ctable[fn] for fn in invoice_fields] query = (ctable.invoice_id == invoice.id) & \ (ctable.deleted == False) claim = db(query).select(limitby = (0, 1), orderby = ctable.id, *fields).first() if not claim: return False # Verify the hashes data = {fn: claim[fn] for fn in invoice_fields} vhash = cls._hash(claim.uuid, claim.date, data) if invoice.vhash != vhash: return False data = {fn: invoice[fn] for fn in invoice_fields} vhash = cls._hash(invoice.uuid, invoice.date, data) if claim.vhash != vhash: return False return True
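generate_invoice and check_invoice together implement mutual verification: the invoice carries a hash over the claim's uuid, date and details, while the claim carries a hash over the invoice's, so tampering with either record breaks at least one hash. A minimal sketch of the idea, using hashlib in place of web2py's CRYPT:

import datetime
import hashlib
import json

def vhash(uuid, date, data):
    payload = json.dumps({"data": data, "date": date.isoformat(), "uuid": uuid},
                         separators = (",", ":"))
    return hashlib.sha512(payload.encode("utf-8")).hexdigest()

details = {"quantity_total": 5, "price_per_unit": 10}
claim = {"uuid": "c1", "date": datetime.date(2021, 6, 1)}
invoice = {"uuid": "i1", "date": datetime.date(2021, 6, 2)}

# Cross-hashes: each record certifies the other's details
invoice["vhash"] = vhash(claim["uuid"], claim["date"], details)
claim["vhash"] = vhash(invoice["uuid"], invoice["date"], details)

# check_invoice-style verification succeeds only while both sides agree
assert invoice["vhash"] == vhash(claim["uuid"], claim["date"], details)
assert claim["vhash"] == vhash(invoice["uuid"], invoice["date"], details)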
def check_complete(self, claims_complete=False): """ Check whether this billing process is complete (+update status if so) Args: claims_complete: confirm that claim generation is complete Returns: True|False """ db = current.db s3db = current.s3db billing = self.billing ctable = s3db.fin_voucher_claim query = (ctable.billing_id == billing.id) & \ (ctable.deleted == False) existing = db(query).select(ctable.id, limitby=(0, 1)).first() if existing: # Check if there are any unpaid claims query &= (ctable.status != "PAID") pending = db(query).select(ctable.id, limitby=(0, 1)).first() else: # No claims generated yet? pending = not claims_complete if pending: return False btable = s3db.fin_voucher_billing billing.update_record(status = "COMPLETE", modified_on = btable.modified_on, modified_by = btable.modified_by, ) return True
def has_claims_or_invoices(self): """ Check if this billing process has generated any claims or invoices Returns: True|False """ db = current.db s3db = current.s3db billing = self.billing # Check for existing claims ctable = s3db.fin_voucher_claim query = (ctable.billing_id == billing.id) & \ (ctable.deleted == False) if db(query).select(ctable.id, limitby=(0, 1)).first(): return True # Check for existing invoices itable = s3db.fin_voucher_invoice query = (itable.billing_id == billing.id) & \ (itable.deleted == False) if db(query).select(itable.id, limitby=(0, 1)).first(): return True return False
def _hash(uuid, date, data): """ Generate a verification hash (vhash) Args: uuid: the uuid of the reference record date: the date of the reference record data: the data to hash Returns: the hash as string """ data = {"data": data, "date": date.isoformat(), "uuid": uuid, } inp = json.dumps(data, separators=JSONSEPARATORS) crypt = CRYPT(key = current.deployment_settings.hmac_key, digest_alg = "sha512", salt = False, ) return str(crypt(inp)[0])
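The compact JSONSEPARATORS (presumably (",", ":")) keep the serialized hash input free of incidental whitespace, so the same data always serializes, and therefore hashes, the same way:

import json

data = {"a": 1, "b": 2}
assert json.dumps(data, separators = (",", ":")) == '{"a":1,"b":2}'
# Default separators insert spaces, which would change the hash input
assert json.dumps(data) == '{"a": 1, "b": 2}'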
def __activate(self): """ Activate the billing process - allocate all relevant debits of the program to the billing - set the process status to "in progress" Returns: the billing record (Row) Raises: ValueError: if the billing reference is invalid, or when the billing process is already closed """ billing = self.billing # Check status if billing.status not in ("SCHEDULED", "IN PROGRESS"): raise ValueError("Invalid process status") db = current.db s3db = current.s3db # Allocate relevant debits to this billing # - all debits under the program effected before the billing date, # which have not yet been allocated to the billing and are not # yet part of any existing claim dtable = s3db.fin_voucher_debit query = (dtable.program_id == billing.program_id) & \ (dtable.date <= billing.date) & \ (dtable.billing_id == None) & \ (dtable.claim_id == None) & \ (dtable.deleted == False) db(query).update(billing_id = billing.id, modified_on = dtable.modified_on, modified_by = dtable.modified_by, ) # Update billing process status btable = s3db.fin_voucher_billing billing.update_record(status = "IN PROGRESS", modified_on = btable.modified_on, modified_by = btable.modified_by, ) db.commit() return billing
def __abort(self, reason): """ Abort this billing process - release all debits allocated to this process - set the process status to "aborted" and record reason Args: reason: the reason to abort the process Raises: ValueError: if the billing reference is invalid, or when the billing process is already closed """ db = current.db s3db = current.s3db billing = self.billing # Release all allocated debits that have not been processed yet dtable = s3db.fin_voucher_debit query = (dtable.billing_id == billing.id) & \ (dtable.claim_id == None) db(query).update(billing_id = None, modified_on = dtable.modified_on, modified_by = dtable.modified_by, ) # Update billing process status if billing.status in ("SCHEDULED", "IN PROGRESS") and \ not self.has_claims_or_invoices(): btable = s3db.fin_voucher_billing billing.update_record(status = "ABORTED", verification = reason, modified_on = btable.modified_on, modified_by = btable.modified_by, ) db.commit()
def apply_method(self, r, **attr): """ Applies the method (controller entry point). Args: r: the CRUDRequest instance attr: controller attributes """ resource = r.resource if resource.tablename != "fin_voucher_debit" or not r.record: r.error(400, current.ERROR.BAD_RESOURCE) output = {} if r.http in ("GET", "POST"): if r.interactive: output = self.cancel(r, **attr) else: r.error(415, current.ERROR.BAD_FORMAT) else: r.error(405, current.ERROR.BAD_METHOD) current.response.view = self._view(r, "delete.html") return output
def fin_voucher_eligibility_types(program_ids, organisation_ids=None):
    """
        Look up permissible eligibility types for programs

        Args:
            program_ids: voucher program IDs
            organisation_ids: issuer organisation IDs

        Returns:
            dict {program_id: [eligibility_type_ids]}, where a None
            value means the program imposes no eligibility-type
            restriction
    """

    db = current.db
    s3db = current.s3db

    issuer_types = None
    if organisation_ids:
        # Look up issuer organisation types
        ltable = s3db.org_organisation_organisation_type
        query = (ltable.organisation_id.belongs(organisation_ids)) & \
                (ltable.deleted == False)
        rows = db(query).select(ltable.organisation_type_id,
                                groupby = ltable.organisation_type_id,
                                )
        issuer_types = [row.organisation_type_id for row in rows]

    ttable = s3db.fin_voucher_eligibility_type
    query = (ttable.program_id.belongs(program_ids)) & \
            (ttable.deleted == False)
    rows = db(query).select(ttable.program_id)

    # Include programs that do not require eligibility types
    unlimited = set(program_ids) - {row.program_id for row in rows}
    eligibility_types = {p: None for p in unlimited}

    if issuer_types:
        # Limit to issuer organisation types
        query &= (ttable.issuer_types.contains(issuer_types, all=False)) | \
                 (ttable.issuer_types == None) | \
                 (ttable.issuer_types == [])

    rows = db(query).select(ttable.id, ttable.program_id)
    for row in rows:
        program_id = row.program_id
        if program_id in eligibility_types:
            eligibility_types[program_id].append(row.id)
        else:
            eligibility_types[program_id] = [row.id]

    return eligibility_types
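The result distinguishes three cases per program: absent from the dict (none of the program's eligibility types match the issuer), mapped to None (no restriction), or mapped to a list of permissible type IDs. A hypothetical consumer:

def allowed_types(eligibility_types, program_id):
    # Hypothetical helper interpreting the lookup result
    if program_id not in eligibility_types:
        return []    # no eligibility type permitted for this issuer
    return eligibility_types[program_id]    # None means unrestricted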
def fin_voucher_permitted_programs(mode = "issuer",
                                   partners_only = False,
                                   c = None,
                                   f = None,
                                   ):
    """
        Get a list of programs and organisations the current user
        is permitted to issue/accept vouchers for

        Args:
            mode: the permission to look for ('issuer'|'provider')
            partners_only: organisations must also be project partners
                           for the project under which a voucher program
                           runs, in order to issue/accept vouchers under
                           that program
            c: override request.controller to look up for a different
               controller context
            f: override request.function to look up for a different
               controller context

        Returns:
            tuple of lists (program_ids, org_ids, pe_ids)
    """

    s3db = current.s3db

    otable = s3db.org_organisation
    ptable = s3db.fin_voucher_program
    mtable = s3db.org_group_membership

    permitted_realms = current.auth.permission.permitted_realms
    if mode == "issuer":
        fn = "issuers_id"
        realms = permitted_realms("fin_voucher",
                                  method = "create",
                                  c = c,
                                  f = f,
                                  )
    else:
        fn = "providers_id"
        realms = permitted_realms("fin_voucher_debit",
                                  method = "create",
                                  c = c,
                                  f = f,
                                  )

    if realms is not None and not realms:
        # No access to any programs for any orgs
        return None, None, None

    today = current.request.utcnow.date()
    join = [mtable.on((mtable.organisation_id == otable.id) & \
                      (mtable.deleted == False)),
            ptable.on((ptable[fn] == mtable.group_id) & \
                      (ptable.deleted == False) & \
                      (ptable.status == "ACTIVE") & \
                      ((ptable.end_date == None) | (ptable.end_date >= today))),
            ]
    if partners_only:
        ltable = s3db.project_organisation
        join.append(ltable.on((ltable.project_id == ptable.project_id) & \
                              (ltable.organisation_id == otable.id) & \
                              (ltable.deleted == False)))

    query = (otable.deleted == False)
    if realms:
        query = (otable.pe_id.belongs(realms)) & query

    rows = current.db(query).select(otable.id,
                                    otable.pe_id,
                                    ptable.id,
                                    join = join,
                                    )

    program_ids = set()
    org_ids = set()
    pe_ids = set()
    for row in rows:
        program_ids.add(row.fin_voucher_program.id)
        organisation = row.org_organisation
        org_ids.add(organisation.id)
        pe_ids.add(organisation.pe_id)

    return list(program_ids), list(org_ids), list(pe_ids)
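A typical (hypothetical) caller unpacks the triple and treats an empty or all-None result as access denial:

program_ids, org_ids, pe_ids = fin_voucher_permitted_programs(mode = "issuer",
                                                              partners_only = True,
                                                              )
if not program_ids:
    # No realm access at all, or no active programs for the user's orgs
    current.auth.permission.fail()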
def fin_voucher_start_billing(billing_id=None): """ Scheduler task to start a billing process, to be scheduled via s3db_task Args: billing_id: the billing ID Returns: success message """ if not billing_id: raise TypeError("Argument missing: billing ID") billing = fin_VoucherBilling(billing_id) claims = billing.generate_claims() return "Billing process started (%s claims generated)" % claims
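Starting the process asynchronously would go through the scheduler; a sketch, assuming the generic "s3db_task" worker task is registered with the scheduler (as in the standard tasks.py) and using S3Task.run_async:

current.s3task.run_async("s3db_task",
                         args = ["fin_voucher_start_billing"],
                         vars = {"billing_id": billing_id},
                         timeout = 1800,
                         )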
def fin_voucher_settle_invoice(invoice_id=None, ptoken=None, user_id=None):
    """
        Scheduler task to settle an invoice, to be scheduled
        via s3db_task

        Args:
            invoice_id: the invoice ID
            ptoken: the processing authorization token
            user_id: the user ID under whose authorization the task runs

        Returns:
            success message
    """

    auth = current.auth
    if user_id and not auth.s3_logged_in():
        auth.s3_impersonate(user_id)

    if not ptoken:
        raise TypeError("Argument missing: authorization token")
    if not invoice_id:
        raise TypeError("Argument missing: invoice ID")

    if not auth.s3_has_permission("update", "fin_voucher_invoice",
                                  record_id = invoice_id,
                                  c = "fin",
                                  f = "voucher_invoice",
                                  ):
        raise auth.permission.error("Operation not permitted")

    quantity = fin_VoucherBilling.settle_invoice(invoice_id, ptoken)
    return "Invoice settled (%s units compensated)" % quantity
def node_json(self, r, **attr): """ Return a single node as JSON (id, parent and label) Args: r: the CRUDRequest attr: controller attributes """ resource = self.resource tablename = resource.tablename h = S3Hierarchy(tablename = tablename) if not h.config: r.error(405, "No hierarchy configured for %s" % tablename) data = {} node_id = r.get_vars["node"] if node_id: try: node_id = int(node_id) except ValueError: pass else: data["node"] = node_id label = h.label(node_id) data["label"] = label if label else None data["parent"] = h.parent(node_id) children = h.children(node_id) if children: nodes = [] h._represent(node_ids=children) for child_id in children: label = h.label(child_id) # @todo: include CRUD permissions? nodes.append({"node": child_id, "label": label if label else None, }) data["children"] = nodes current.response.headers["Content-Type"] = "application/json" return json.dumps(data, separators = JSONSEPARATORS)
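For a node with a parent and two children, the response body would look roughly like this (IDs and labels below are made up, and depend on the configured representation):

{"node": 4,
 "label": "Northern Region",
 "parent": 1,
 "children": [{"node": 7, "label": "District A"},
              {"node": 8, "label": "District B"},
              ],
 }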
def include_scripts(widget_id, widget_opts): """ Include JS & CSS needed for hierarchical CRUD """ s3 = current.response.s3 scripts = s3.scripts theme = current.deployment_settings.get_ui_hierarchy_theme() # Include static scripts & stylesheets script_dir = "/%s/static/scripts" % current.request.application if s3.debug: script = "%s/jstree.js" % script_dir if script not in scripts: scripts.append(script) script = "%s/S3/s3.ui.hierarchicalcrud.js" % script_dir if script not in scripts: scripts.append(script) style = "%s/jstree.css" % theme.get("css", "plugins") if style not in s3.stylesheets: s3.stylesheets.append(style) else: script = "%s/S3/s3.jstree.min.js" % script_dir if script not in scripts: scripts.append(script) style = "%s/jstree.min.css" % theme.get("css", "plugins") if style not in s3.stylesheets: s3.stylesheets.append(style) # Apply the widget JS script = '''$('#%(widget_id)s').hierarchicalcrud(%(widget_opts)s)''' % \ {"widget_id": widget_id, "widget_opts": json.dumps(widget_opts, separators=JSONSEPARATORS), } s3.jquery_ready.append(script) return
def is_org_group(organisation_id, group, cacheable=True):
    """
        Check whether an organisation is member of an organisation group

        Args:
            organisation_id: the organisation ID
            group: the organisation group name
            cacheable: whether the lookup result can be cached

        Returns:
            boolean
    """

    s3db = current.s3db

    gtable = s3db.org_group
    mtable = s3db.org_group_membership
    join = [gtable.on((gtable.id == mtable.group_id) & \
                      (gtable.name == group)
                      )]
    query = (mtable.organisation_id == organisation_id) & \
            (mtable.deleted == False)
    row = current.db(query).select(mtable.id,
                                   cache = s3db.cache if cacheable else None,
                                   join = join,
                                   limitby = (0, 1),
                                   ).first()
    return bool(row)
def is_org_type_tag(organisation_id, tag, value=None): """ Check if a type of an organisation has a certain tag Args: organisation_id: the organisation ID tag: the tag name value: the tag value (optional) Returns: boolean """ db = current.db s3db = current.s3db ltable = s3db.org_organisation_organisation_type ttable = s3db.org_organisation_type_tag joinq = (ttable.organisation_type_id == ltable.organisation_type_id) & \ (ttable.tag == tag) if value is not None: joinq &= (ttable.value == value) join = ttable.on(joinq & (ttable.deleted == False)) query = (ltable.organisation_id == organisation_id) & \ (ltable.deleted == False) row = db(query).select(ttable.id, join=join, limitby=(0, 1)).first() return bool(row)
def restrict_data_formats(r): """ Restrict data exports (prevent S3XML/S3JSON of records) Args: r: the CRUDRequest """ settings = current.deployment_settings allowed = ("html", "iframe", "popup", "aadata", "plain", "geojson", "pdf", "xlsx") if r.record: allowed += ("card",) if r.method in ("report", "timeplot", "filter", "lookup", "info"): allowed += ("json",) elif r.method == "options": allowed += ("s3json",) settings.ui.export_formats = ("pdf", "xlsx") if r.representation not in allowed: r.error(403, current.ERROR.NOT_PERMITTED)
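This would typically run early in a custom prep of the affected controllers; a sketch, assuming the usual prep-chaining pattern and a hypothetical customise function:

def customise_fin_voucher_controller(**attr):

    s3 = current.response.s3

    standard_prep = s3.prep
    def prep(r):
        # Restrict formats before any method-specific handling
        restrict_data_formats(r)
        return standard_prep(r) if callable(standard_prep) else True
    s3.prep = prep

    return attr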
def assign_pending_invoices(billing_id, organisation_id=None, invoice_id=None): """ Auto-assign pending invoices in a billing to accountants, taking into account their current workload Args: billing_id: the billing ID organisation_id: the ID of the accountant organisation invoice_id: assign only this invoice """ db = current.db s3db = current.s3db if not organisation_id: # Look up the accounting organisation for the billing btable = s3db.fin_voucher_billing query = (btable.id == billing_id) billing = db(query).select(btable.organisation_id, limitby = (0, 1), ).first() if not billing: return organisation_id = billing.organisation_id if organisation_id: # Look up the active accountants of the accountant org accountants = get_role_hrs("PROGRAM_ACCOUNTANT", organisation_id = organisation_id, ) else: accountants = None # Query for any pending invoices of this billing cycle itable = s3db.fin_voucher_invoice if invoice_id: query = (itable.id == invoice_id) else: query = (itable.billing_id == billing_id) query &= (itable.status != "PAID") & (itable.deleted == False) if accountants: # Limit to invoices that have not yet been assigned to any # of the accountants in charge: query &= ((itable.human_resource_id == None) | \ (~(itable.human_resource_id.belongs(accountants)))) # Get the invoices invoices = db(query).select(itable.id, itable.human_resource_id, ) if not invoices: return # Look up the number of pending invoices assigned to each # accountant, to get a measure for their current workload workload = {hr_id: 0 for hr_id in accountants} query = (itable.status != "PAID") & \ (itable.human_resource_id.belongs(accountants)) & \ (itable.deleted == False) num_assigned = itable.id.count() rows = db(query).select(itable.human_resource_id, num_assigned, groupby = itable.human_resource_id, ) for row in rows: workload[row[itable.human_resource_id]] = row[num_assigned] # Re-assign invoices # - try to distribute workload evenly among the accountants for invoice in invoices: hr_id, num = min(workload.items(), key=lambda item: item[1]) invoice.update_record(human_resource_id = hr_id) workload[hr_id] = num + 1 elif not invoice_id: # Unassign all pending invoices db(query).update(human_resource_id = None)
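The assignment strategy is greedy load balancing: each invoice goes to whichever accountant currently has the fewest pending invoices. Standalone:

def distribute(invoice_ids, workload):
    # workload: {hr_id: number of pending invoices}, must be non-empty
    assignment = {}
    for invoice_id in invoice_ids:
        hr_id, num = min(workload.items(), key = lambda item: item[1])
        assignment[invoice_id] = hr_id
        workload[hr_id] = num + 1
    return assignment

# Three invoices over two idle accountants give a 2/1 split
assert sorted(distribute(["a", "b", "c"], {1: 0, 2: 0}).values()) == [1, 1, 2]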