language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
Python
def restore_references(self): """ Restore the job's reference structure after loading items from the item table """ db = current.db UID = current.xml.UID for item in self.items.values(): for citem_id in item.load_components: if citem_id in self.items: item.components.append(self.items[citem_id]) item.load_components = [] for ritem in item.load_references: field = ritem["field"] if "item_id" in ritem: item_id = ritem["item_id"] if item_id in self.items: _item = self.items[item_id] entry = Storage(tablename=_item.tablename, element=_item.element, uid=_item.uid, id=_item.id, item_id=item_id) item.references.append(Storage(field=field, entry=entry)) else: _id = None uid = ritem.get("uid", None) tablename = ritem.get("tablename", None) if tablename and uid: try: table = current.s3db[tablename] except AttributeError: continue if UID not in table.fields: continue query = table[UID] == uid row = db(query).select(table._id, limitby=(0, 1)).first() if row: _id = row[table._id.name] else: continue entry = Storage(tablename = ritem["tablename"], element=None, uid = ritem["uid"], id = _id, item_id = None) item.references.append(Storage(field=field, entry=entry)) item.load_references = [] if item.load_parent is not None: parent = self.items[item.load_parent] if parent is None: # Parent has been removed item.skip = True else: item.parent = parent item.load_parent = None
def restore_references(self): """ Restore the job's reference structure after loading items from the item table """ db = current.db UID = current.xml.UID for item in self.items.values(): for citem_id in item.load_components: if citem_id in self.items: item.components.append(self.items[citem_id]) item.load_components = [] for ritem in item.load_references: field = ritem["field"] if "item_id" in ritem: item_id = ritem["item_id"] if item_id in self.items: _item = self.items[item_id] entry = Storage(tablename=_item.tablename, element=_item.element, uid=_item.uid, id=_item.id, item_id=item_id) item.references.append(Storage(field=field, entry=entry)) else: _id = None uid = ritem.get("uid", None) tablename = ritem.get("tablename", None) if tablename and uid: try: table = current.s3db[tablename] except AttributeError: continue if UID not in table.fields: continue query = table[UID] == uid row = db(query).select(table._id, limitby=(0, 1)).first() if row: _id = row[table._id.name] else: continue entry = Storage(tablename = ritem["tablename"], element=None, uid = ritem["uid"], id = _id, item_id = None) item.references.append(Storage(field=field, entry=entry)) item.load_references = [] if item.load_parent is not None: parent = self.items[item.load_parent] if parent is None: # Parent has been removed item.skip = True else: item.parent = parent item.load_parent = None
Python
def authorize(self): """ Authorize the import of this item, sets self.permitted """ if not self.table: return False auth = current.auth tablename = self.tablename # Check whether self.table is protected if not auth.override and tablename.split("_", 1)[0] in auth.PROTECTED: return False # Determine the method METHOD = self.METHOD if self.data.deleted is True: if self.data.deleted_rb: self.method = METHOD["MERGE"] else: self.method = METHOD["DELETE"] self.accepted = True if self.id else False elif self.id: if not self.original: from ..resource import CRUDResource fields = CRUDResource.import_fields(self.table, self.data, mandatory = self._mandatory_fields(), ) query = (self.table.id == self.id) self.original = current.db(query).select(limitby=(0, 1), *fields).first() if self.original: self.method = METHOD["UPDATE"] else: self.method = METHOD["CREATE"] else: self.method = METHOD["CREATE"] # Set self.id if self.method == METHOD["CREATE"]: self.id = 0 # Authorization authorize = current.auth.s3_has_permission if authorize: self.permitted = authorize(self.method, tablename, record_id=self.id) else: self.permitted = True return self.permitted
def authorize(self): """ Authorize the import of this item, sets self.permitted """ if not self.table: return False auth = current.auth tablename = self.tablename # Check whether self.table is protected if not auth.override and tablename.split("_", 1)[0] in auth.PROTECTED: return False # Determine the method METHOD = self.METHOD if self.data.deleted is True: if self.data.deleted_rb: self.method = METHOD["MERGE"] else: self.method = METHOD["DELETE"] self.accepted = True if self.id else False elif self.id: if not self.original: from ..resource import CRUDResource fields = CRUDResource.import_fields(self.table, self.data, mandatory = self._mandatory_fields(), ) query = (self.table.id == self.id) self.original = current.db(query).select(limitby=(0, 1), *fields).first() if self.original: self.method = METHOD["UPDATE"] else: self.method = METHOD["CREATE"] else: self.method = METHOD["CREATE"] # Set self.id if self.method == METHOD["CREATE"]: self.id = 0 # Authorization authorize = current.auth.s3_has_permission if authorize: self.permitted = authorize(self.method, tablename, record_id=self.id) else: self.permitted = True return self.permitted
Python
def validate(self): """ Validate this item (=record onvalidation), sets self.accepted """ data = self.data if self.accepted is not None: return self.accepted if data is None or not self.table: self.accepted = False return False xml = current.xml ERROR = xml.ATTRIBUTE["error"] METHOD = self.METHOD DELETE = METHOD.DELETE MERGE = METHOD.MERGE # Detect update if not self.id: self.deduplicate() if self.accepted is False: # Item rejected by deduplicator (e.g. due to ambiguity) return False # Don't need to validate skipped or deleted records if self.skip or self.method in (DELETE, MERGE): self.accepted = True if self.id else False return True # Set dynamic defaults for new records if not self.id: self._dynamic_defaults(data) # Check for mandatory fields required_fields = self._mandatory_fields() all_fields = list(data.keys()) failed_references = [] items = self.job.items for reference in self.references: resolvable = resolved = True entry = reference.entry if entry and not entry.id: if entry.item_id: item = items[entry.item_id] if item.error: relement = reference.element if relement is not None: # Repeat the errors from the referenced record # in the <reference> element (better reasoning) msg = "; ".join(xml.collect_errors(entry.element)) relement.set(ERROR, msg) else: resolvable = False resolved = False else: resolvable = resolved = False field = reference.field if isinstance(field, (tuple, list)): field = field[1] if resolved: all_fields.append(field) elif resolvable: # Both reference and referenced record are in the XML, # => treat foreign key as mandatory, and mark as failed if field not in required_fields: required_fields.append(field) if field not in failed_references: failed_references.append(field) missing = [fname for fname in required_fields if fname not in all_fields] original = self.original if missing: if original: missing = [fname for fname in missing if fname not in original] if missing: fields = [f for f in missing if f not in failed_references] if fields: 
errors = ["%s: value(s) required" % ", ".join(fields)] else: errors = [] if failed_references: fields = ", ".join(failed_references) errors.append("%s: reference import(s) failed" % ", ".join(failed_references)) self.error = "; ".join(errors) self.element.set(ERROR, self.error) self.accepted = False return False # Run onvalidation form = Storage(method = self.method, vars = data, request_vars = data, # Useless since always incomplete: #record = original, ) if self.id: form.vars.id = self.id form.errors = Storage() tablename = self.tablename key = "%s_onvalidation" % self.method get_config = current.s3db.get_config onvalidation = get_config(tablename, key, get_config(tablename, "onvalidation")) if onvalidation: try: callback(onvalidation, form, tablename=tablename) except: from traceback import format_exc current.log.error("S3Import %s onvalidation exception:" % tablename) current.log.debug(format_exc(10)) accepted = True if form.errors: element = self.element for k in form.errors: e = element.findall("data[@field='%s']" % k) if not e: e = element.findall("reference[@field='%s']" % k) if not e: e = element form.errors[k] = "[%s] %s" % (k, form.errors[k]) else: e = e[0] e.set(ERROR, s3_str(form.errors[k])) self.error = current.ERROR.VALIDATION_ERROR accepted = False self.accepted = accepted return accepted
def validate(self): """ Validate this item (=record onvalidation), sets self.accepted """ data = self.data if self.accepted is not None: return self.accepted if data is None or not self.table: self.accepted = False return False xml = current.xml ERROR = xml.ATTRIBUTE["error"] METHOD = self.METHOD DELETE = METHOD.DELETE MERGE = METHOD.MERGE # Detect update if not self.id: self.deduplicate() if self.accepted is False: # Item rejected by deduplicator (e.g. due to ambiguity) return False # Don't need to validate skipped or deleted records if self.skip or self.method in (DELETE, MERGE): self.accepted = True if self.id else False return True # Set dynamic defaults for new records if not self.id: self._dynamic_defaults(data) # Check for mandatory fields required_fields = self._mandatory_fields() all_fields = list(data.keys()) failed_references = [] items = self.job.items for reference in self.references: resolvable = resolved = True entry = reference.entry if entry and not entry.id: if entry.item_id: item = items[entry.item_id] if item.error: relement = reference.element if relement is not None: # Repeat the errors from the referenced record # in the <reference> element (better reasoning) msg = "; ".join(xml.collect_errors(entry.element)) relement.set(ERROR, msg) else: resolvable = False resolved = False else: resolvable = resolved = False field = reference.field if isinstance(field, (tuple, list)): field = field[1] if resolved: all_fields.append(field) elif resolvable: # Both reference and referenced record are in the XML, # => treat foreign key as mandatory, and mark as failed if field not in required_fields: required_fields.append(field) if field not in failed_references: failed_references.append(field) missing = [fname for fname in required_fields if fname not in all_fields] original = self.original if missing: if original: missing = [fname for fname in missing if fname not in original] if missing: fields = [f for f in missing if f not in failed_references] if fields: 
errors = ["%s: value(s) required" % ", ".join(fields)] else: errors = [] if failed_references: fields = ", ".join(failed_references) errors.append("%s: reference import(s) failed" % ", ".join(failed_references)) self.error = "; ".join(errors) self.element.set(ERROR, self.error) self.accepted = False return False # Run onvalidation form = Storage(method = self.method, vars = data, request_vars = data, # Useless since always incomplete: #record = original, ) if self.id: form.vars.id = self.id form.errors = Storage() tablename = self.tablename key = "%s_onvalidation" % self.method get_config = current.s3db.get_config onvalidation = get_config(tablename, key, get_config(tablename, "onvalidation")) if onvalidation: try: callback(onvalidation, form, tablename=tablename) except: from traceback import format_exc current.log.error("S3Import %s onvalidation exception:" % tablename) current.log.debug(format_exc(10)) accepted = True if form.errors: element = self.element for k in form.errors: e = element.findall("data[@field='%s']" % k) if not e: e = element.findall("reference[@field='%s']" % k) if not e: e = element form.errors[k] = "[%s] %s" % (k, form.errors[k]) else: e = e[0] e.set(ERROR, s3_str(form.errors[k])) self.error = current.ERROR.VALIDATION_ERROR accepted = False self.accepted = accepted return accepted
Python
def _dynamic_defaults(self, data): """ Applies dynamic defaults from any keys in data that start with an underscore, used only for new records and only if the respective field is not populated yet. Args: data: the data dict """ for k, v in list(data.items()): if k[0] == "_": fn = k[1:] if fn in self.table.fields and fn not in data: data[fn] = v
def _dynamic_defaults(self, data): """ Applies dynamic defaults from any keys in data that start with an underscore, used only for new records and only if the respective field is not populated yet. Args: data: the data dict """ for k, v in list(data.items()): if k[0] == "_": fn = k[1:] if fn in self.table.fields and fn not in data: data[fn] = v
Python
def _resolve_references(self): """ Resolve the references of this item (=look up all foreign keys from other items of the same job). If a foreign key is not yet available, it will be scheduled for later update. """ table = self.table if not table: return db = current.db items = self.job.items for reference in self.references: entry = reference.entry if not entry: continue field = reference.field # Resolve key tuples if isinstance(field, (list, tuple)): pkey, fkey = field else: pkey, fkey = ("id", field) f = table[fkey] if f.type == "json": is_json = True objref = reference.objref if not objref: objref = ObjectReferences(self.data.get(fkey)) refkey = reference.refkey if not refkey: continue else: is_json = False refkey = objref = None ktablename, _, multiple = s3_get_foreign_key(f) if not ktablename: continue # Get the lookup table if entry.tablename: ktablename = entry.tablename try: ktable = current.s3db[ktablename] except AttributeError: continue # Resolve the foreign key (value) item = None fk = entry.id if entry.item_id: item = items[entry.item_id] if item: if item.original and \ item.original.get("deleted") and \ not item.committed: # Original is deleted and has not been updated fk = None else: fk = item.id if fk and pkey != "id": row = db(ktable._id == fk).select(ktable[pkey], limitby=(0, 1)).first() if not row: fk = None continue else: fk = row[pkey] # Update record data if fk: if is_json: objref.resolve(refkey[0], refkey[1], refkey[2], fk) elif multiple: val = self.data.get(fkey, []) if fk not in val: val.append(fk) self.data[fkey] = val else: self.data[fkey] = fk else: if fkey in self.data and not multiple and not is_json: del self.data[fkey] if item: update = {"item": self, "field": fkey} if is_json: update["refkey"] = refkey item.update.append(update)
def _resolve_references(self): """ Resolve the references of this item (=look up all foreign keys from other items of the same job). If a foreign key is not yet available, it will be scheduled for later update. """ table = self.table if not table: return db = current.db items = self.job.items for reference in self.references: entry = reference.entry if not entry: continue field = reference.field # Resolve key tuples if isinstance(field, (list, tuple)): pkey, fkey = field else: pkey, fkey = ("id", field) f = table[fkey] if f.type == "json": is_json = True objref = reference.objref if not objref: objref = ObjectReferences(self.data.get(fkey)) refkey = reference.refkey if not refkey: continue else: is_json = False refkey = objref = None ktablename, _, multiple = s3_get_foreign_key(f) if not ktablename: continue # Get the lookup table if entry.tablename: ktablename = entry.tablename try: ktable = current.s3db[ktablename] except AttributeError: continue # Resolve the foreign key (value) item = None fk = entry.id if entry.item_id: item = items[entry.item_id] if item: if item.original and \ item.original.get("deleted") and \ not item.committed: # Original is deleted and has not been updated fk = None else: fk = item.id if fk and pkey != "id": row = db(ktable._id == fk).select(ktable[pkey], limitby=(0, 1)).first() if not row: fk = None continue else: fk = row[pkey] # Update record data if fk: if is_json: objref.resolve(refkey[0], refkey[1], refkey[2], fk) elif multiple: val = self.data.get(fkey, []) if fk not in val: val.append(fk) self.data[fkey] = val else: self.data[fkey] = fk else: if fkey in self.data and not multiple and not is_json: del self.data[fkey] if item: update = {"item": self, "field": fkey} if is_json: update["refkey"] = refkey item.update.append(update)
Python
def _update_reference(self, field, value): """ Helper method to update a foreign key in an already written record. Will be called by the referenced item after (and only if) it has been committed. This is only needed if the reference could not be resolved before commit due to circular references. Args: field: the field name of the foreign key value: the value of the foreign key """ table = self.table record_id = self.id if not value or not table or not record_id or not self.permitted: return db = current.db update = None fieldtype = str(table[field].type) if fieldtype.startswith("list:reference"): query = (table._id == record_id) record = db(query).select(table[field], limitby = (0, 1), ).first() if record: values = record[field] if value not in values: values.append(value) update = {field: values} else: update = {field: value} if update: if "modified_on" in table.fields: update["modified_on"] = table.modified_on if "modified_by" in table.fields: update["modified_by"] = table.modified_by db(table._id == record_id).update(**update)
def _update_reference(self, field, value): """ Helper method to update a foreign key in an already written record. Will be called by the referenced item after (and only if) it has been committed. This is only needed if the reference could not be resolved before commit due to circular references. Args: field: the field name of the foreign key value: the value of the foreign key """ table = self.table record_id = self.id if not value or not table or not record_id or not self.permitted: return db = current.db update = None fieldtype = str(table[field].type) if fieldtype.startswith("list:reference"): query = (table._id == record_id) record = db(query).select(table[field], limitby = (0, 1), ).first() if record: values = record[field] if value not in values: values.append(value) update = {field: values} else: update = {field: value} if update: if "modified_on" in table.fields: update["modified_on"] = table.modified_on if "modified_by" in table.fields: update["modified_by"] = table.modified_by db(table._id == record_id).update(**update)
Python
def _update_objref(self, field, refkey, value): """ Update object references in a JSON field Args: fieldname: the name of the JSON field refkey: the reference key, a tuple (tablename, uidtype, uid) value: the foreign key value """ table = self.table record_id = self.id if not value or not table or not record_id or not self.permitted: return db = current.db query = (table._id == record_id) record = db(query).select(table._id, table[field], limitby = (0, 1), ).first() if record: obj = record[field] tn, uidtype, uid = refkey ObjectReferences(obj).resolve(tn, uidtype, uid, value) update = {field: obj} if "modified_on" in table.fields: update["modified_on"] = table.modified_on if "modified_by" in table.fields: update["modified_by"] = table.modified_by record.update_record(**update)
def _update_objref(self, field, refkey, value): """ Update object references in a JSON field Args: fieldname: the name of the JSON field refkey: the reference key, a tuple (tablename, uidtype, uid) value: the foreign key value """ table = self.table record_id = self.id if not value or not table or not record_id or not self.permitted: return db = current.db query = (table._id == record_id) record = db(query).select(table._id, table[field], limitby = (0, 1), ).first() if record: obj = record[field] tn, uidtype, uid = refkey ObjectReferences(obj).resolve(tn, uidtype, uid, value) update = {field: obj} if "modified_on" in table.fields: update["modified_on"] = table.modified_on if "modified_by" in table.fields: update["modified_by"] = table.modified_by record.update_record(**update)
Python
def store(self, item_table=None): """ Store this item in the DB """ if item_table is None: return None item_id = self.item_id db = current.db row = db(item_table.item_id == item_id).select(item_table.id, limitby=(0, 1) ).first() if row: record_id = row.id else: record_id = None record = Storage(job_id = self.job.job_id, item_id = item_id, tablename = self.tablename, record_uid = self.uid, skip = self.skip, error = self.error or "", ) if self.element is not None: element_str = current.xml.tostring(self.element, xml_declaration=False) record.update(element=element_str) self_data = self.data if self_data is not None: table = self.table fields = table.fields data = Storage() for f in self_data.keys(): if f not in fields: continue field = table[f] field_type = str(field.type) if field_type == "id" or s3_has_foreign_key(field): continue data_ = self_data[f] if isinstance(data_, Field): # Not picklable # This is likely to be a modified_on to avoid updating this field, which skipping does just fine too continue data.update({f: data_}) record["data"] = pickle.dumps(data) ritems = [] for reference in self.references: field = reference.field entry = reference.entry store_entry = None if entry: if entry.item_id is not None: store_entry = {"field": field, "item_id": str(entry.item_id), } elif entry.uid is not None: store_entry = {"field": field, "tablename": entry.tablename, "uid": str(entry.uid), } if store_entry is not None: ritems.append(json.dumps(store_entry)) if ritems: record.update(ritems=ritems) citems = [c.item_id for c in self.components] if citems: record.update(citems=citems) if self.parent: record.update(parent=self.parent.item_id) if record_id: db(item_table.id == record_id).update(**record) else: record_id = item_table.insert(**record) return record_id
def store(self, item_table=None): """ Store this item in the DB """ if item_table is None: return None item_id = self.item_id db = current.db row = db(item_table.item_id == item_id).select(item_table.id, limitby=(0, 1) ).first() if row: record_id = row.id else: record_id = None record = Storage(job_id = self.job.job_id, item_id = item_id, tablename = self.tablename, record_uid = self.uid, skip = self.skip, error = self.error or "", ) if self.element is not None: element_str = current.xml.tostring(self.element, xml_declaration=False) record.update(element=element_str) self_data = self.data if self_data is not None: table = self.table fields = table.fields data = Storage() for f in self_data.keys(): if f not in fields: continue field = table[f] field_type = str(field.type) if field_type == "id" or s3_has_foreign_key(field): continue data_ = self_data[f] if isinstance(data_, Field): # Not picklable # This is likely to be a modified_on to avoid updating this field, which skipping does just fine too continue data.update({f: data_}) record["data"] = pickle.dumps(data) ritems = [] for reference in self.references: field = reference.field entry = reference.entry store_entry = None if entry: if entry.item_id is not None: store_entry = {"field": field, "item_id": str(entry.item_id), } elif entry.uid is not None: store_entry = {"field": field, "tablename": entry.tablename, "uid": str(entry.uid), } if store_entry is not None: ritems.append(json.dumps(store_entry)) if ritems: record.update(ritems=ritems) citems = [c.item_id for c in self.components] if citems: record.update(citems=citems) if self.parent: record.update(parent=self.parent.item_id) if record_id: db(item_table.id == record_id).update(**record) else: record_id = item_table.insert(**record) return record_id
Python
def restore(self, row): """ Restore an item from a item table row. This does not restore the references (since this can not be done before all items are restored), must call job.restore_references() to do that Args: row: the item table row """ xml = current.xml self.item_id = row.item_id self.accepted = None self.permitted = False self.committed = False tablename = row.tablename self.id = None self.uid = row.record_uid self.skip = row.skip if row.data is not None: self.data = pickle.loads(row.data) else: self.data = Storage() data = self.data if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] UID = xml.UID if UID in data: self.uid = data[UID] self.element = etree.fromstring(row.element) if row.citems: self.load_components = row.citems if row.ritems: self.load_references = [json.loads(ritem) for ritem in row.ritems] self.load_parent = row.parent s3db = current.s3db try: table = s3db[tablename] except AttributeError: self.error = current.ERROR.BAD_RESOURCE return False else: self.table = table self.tablename = tablename from ..resource import CRUDResource original = CRUDResource.original(table, self.data, mandatory = self._mandatory_fields(), ) if original is not None: self.original = original self.id = original[table._id.name] if not current.response.s3.synchronise_uuids and UID in original: self.uid = self.data[UID] = original[UID] self.error = row.error postprocess = s3db.get_config(self.tablename, "xml_post_parse") if postprocess: postprocess(self.element, self.data) if self.error and not self.data: # Validation error return False return True
def restore(self, row): """ Restore an item from a item table row. This does not restore the references (since this can not be done before all items are restored), must call job.restore_references() to do that Args: row: the item table row """ xml = current.xml self.item_id = row.item_id self.accepted = None self.permitted = False self.committed = False tablename = row.tablename self.id = None self.uid = row.record_uid self.skip = row.skip if row.data is not None: self.data = pickle.loads(row.data) else: self.data = Storage() data = self.data if xml.MTIME in data: self.mtime = data[xml.MTIME] if xml.MCI in data: self.mci = data[xml.MCI] UID = xml.UID if UID in data: self.uid = data[UID] self.element = etree.fromstring(row.element) if row.citems: self.load_components = row.citems if row.ritems: self.load_references = [json.loads(ritem) for ritem in row.ritems] self.load_parent = row.parent s3db = current.s3db try: table = s3db[tablename] except AttributeError: self.error = current.ERROR.BAD_RESOURCE return False else: self.table = table self.tablename = tablename from ..resource import CRUDResource original = CRUDResource.original(table, self.data, mandatory = self._mandatory_fields(), ) if original is not None: self.original = original self.id = original[table._id.name] if not current.response.s3.synchronise_uuids and UID in original: self.uid = self.data[UID] = original[UID] self.error = row.error postprocess = s3db.get_config(self.tablename, "xml_post_parse") if postprocess: postprocess(self.element, self.data) if self.error and not self.data: # Validation error return False return True
Python
def refs(self): """ List of references discovered in the object (lazy property) Returns: a list of tuples (tablename, uidtype, uid) """ if self._refs is None: self._refs = [] self._objs = {} self._traverse(self.obj) return self._refs
def refs(self): """ List of references discovered in the object (lazy property) Returns: a list of tuples (tablename, uidtype, uid) """ if self._refs is None: self._refs = [] self._objs = {} self._traverse(self.obj) return self._refs
Python
def objs(self): """ A dict with pointers to the references inside the object Returns: a dict {(tablename, uidtype, uid): (obj, key)} """ if self._objs is None: self._refs = [] self._objs = {} self._traverse(self.obj) return self._objs
def objs(self): """ A dict with pointers to the references inside the object Returns: a dict {(tablename, uidtype, uid): (obj, key)} """ if self._objs is None: self._refs = [] self._objs = {} self._traverse(self.obj) return self._objs
Python
def _traverse(self, obj): """ Traverse a (possibly nested) object and find all references, populates self.refs and self.objs Args: obj: the object to inspect """ refs = self._refs objs = self._objs if type(obj) is list: for item in obj: self._traverse(item) elif type(obj) is dict: for key, value in obj.items(): if key[:3] == "$k_" and type(value) is dict: tablename = uid = uid_type = None for k in self.TABLENAME_KEYS: tablename = value.get(k) if tablename: break if tablename: for k in self.UUID_KEYS: uid = value.get(k) if uid: uid_type = "uuid" break if tablename and not uid: for k in self.TUID_KEYS: uid = value.get(k) if uid: uid_type = "tuid" break if not tablename or not uid: self._traverse(value) else: ref = (tablename, uid_type, uid) if ref not in objs: refs.append(ref) objs[ref] = [(obj, key)] else: objs[ref].append((obj, key)) else: self._traverse(value)
def _traverse(self, obj): """ Traverse a (possibly nested) object and find all references, populates self.refs and self.objs Args: obj: the object to inspect """ refs = self._refs objs = self._objs if type(obj) is list: for item in obj: self._traverse(item) elif type(obj) is dict: for key, value in obj.items(): if key[:3] == "$k_" and type(value) is dict: tablename = uid = uid_type = None for k in self.TABLENAME_KEYS: tablename = value.get(k) if tablename: break if tablename: for k in self.UUID_KEYS: uid = value.get(k) if uid: uid_type = "uuid" break if tablename and not uid: for k in self.TUID_KEYS: uid = value.get(k) if uid: uid_type = "tuid" break if not tablename or not uid: self._traverse(value) else: ref = (tablename, uid_type, uid) if ref not in objs: refs.append(ref) objs[ref] = [(obj, key)] else: objs[ref].append((obj, key)) else: self._traverse(value)
Python
def resolve(self, tablename, uidtype, uid, value): """ Resolve a reference in self.obj with the given value; will resolve all occurences of the reference Args: tablename: the referenced table uidtype: the type of uid (uuid or tuid) uid: the uuid or tuid value: the value to resolve the reference """ items = self.objs.get((tablename, uidtype, uid)) if items: for obj, key in items: if len(key) > 3: obj[key[3:]] = value obj.pop(key, None)
def resolve(self, tablename, uidtype, uid, value): """ Resolve a reference in self.obj with the given value; will resolve all occurences of the reference Args: tablename: the referenced table uidtype: the type of uid (uuid or tuid) uid: the uuid or tuid value: the value to resolve the reference """ items = self.objs.get((tablename, uidtype, uid)) if items: for obj, key in items: if len(key) > 3: obj[key[3:]] = value obj.pop(key, None)
Python
def match(self, field, value): """ Helper function to generate a match-query Args: field: the Field value: the value Returns: a Query """ ftype = str(field.type) ignore_case = self.ignore_case if ignore_case and \ hasattr(value, "lower") and ftype in ("string", "text"): # NB Must convert to unicode before lower() in order to correctly # convert certain unicode-characters (e.g. İ=>i, or Ẽ=>ẽ) # => PostgreSQL LOWER() on Windows may not convert correctly, (same for SQLite) # which seems to be a locale issue: # http://stackoverflow.com/questions/18507589/the-lower-function-on-international-characters-in-postgresql # => works fine on Debian servers if the locale is a .UTF-8 before # the Postgres cluster is created query = (field.lower() == s3_str(value).lower()) else: query = (field == value) return query
def match(self, field, value): """ Helper function to generate a match-query Args: field: the Field value: the value Returns: a Query """ ftype = str(field.type) ignore_case = self.ignore_case if ignore_case and \ hasattr(value, "lower") and ftype in ("string", "text"): # NB Must convert to unicode before lower() in order to correctly # convert certain unicode-characters (e.g. İ=>i, or Ẽ=>ẽ) # => PostgreSQL LOWER() on Windows may not convert correctly, (same for SQLite) # which seems to be a locale issue: # http://stackoverflow.com/questions/18507589/the-lower-function-on-international-characters-in-postgresql # => works fine on Debian servers if the locale is a .UTF-8 before # the Postgres cluster is created query = (field.lower() == s3_str(value).lower()) else: query = (field == value) return query
Python
def orderby(self): """ Converts the DAL orderby expression into a datatable ordering expression Returns: list of tuples [(col_idx, direction), ...] """ dt_ordering = self.dt_ordering if not dt_ordering: orderby = self._orderby if orderby: # Resolve orderby expression into column names orderby_dirs = {} orderby_cols = [] adapter = S3DAL() INVERT = adapter.INVERT append = orderby_cols.append for f in s3_orderby_fields(None, orderby, expr=True): if type(f) is Expression: colname = str(f.first) direction = "desc" if f.op == INVERT else "asc" else: colname = str(f) direction = "asc" orderby_dirs[colname] = direction append(colname) # Helper function to resolve a reference's "sortby" into # a list of column names ftuples = {} def resolve_sortby(rfield): colname = rfield.colname if colname in ftuples: return ftuples[colname] ftype = rfield.ftype sortby = None if ftype[:9] == "reference": field = rfield.field if hasattr(field, "sortby") and field.sortby: sortby = field.sortby if not isinstance(sortby, (tuple, list)): sortby = [sortby] p = "%s.%%s" % ftype[10:].split(".")[0] sortby = [p % fname for fname in sortby] ftuples[colname] = sortby return sortby dt_ordering = [] # order expression for datatable append = dt_ordering.append # Match orderby-fields against table columns (=rfields) pos = 0 seen = set() skip = seen.add for i, colname in enumerate(orderby_cols): if i < pos: # Already consumed by sortby-tuple continue direction = orderby_dirs[colname] for col_idx, rfield in enumerate(self.rfields): if col_idx in seen: # Column already in dt_ordering continue sortby = None if rfield.colname == colname: # Match a single orderby-field sortby = (colname,) else: # Match between sortby and the orderby-field tuple # (must appear in same order and sorting direction) sortby = resolve_sortby(rfield) if not sortby or \ sortby != orderby_cols[i:i + len(sortby)] or \ any(orderby_dirs[c] != direction for c in sortby): sortby = None if sortby: append([col_idx, direction]) pos += 
len(sortby) skip(col_idx) break else: dt_ordering = [[1, "asc"]] self.dt_ordering = dt_ordering return dt_ordering
def orderby(self): """ Converts the DAL orderby expression into a datatable ordering expression Returns: list of tuples [(col_idx, direction), ...] """ dt_ordering = self.dt_ordering if not dt_ordering: orderby = self._orderby if orderby: # Resolve orderby expression into column names orderby_dirs = {} orderby_cols = [] adapter = S3DAL() INVERT = adapter.INVERT append = orderby_cols.append for f in s3_orderby_fields(None, orderby, expr=True): if type(f) is Expression: colname = str(f.first) direction = "desc" if f.op == INVERT else "asc" else: colname = str(f) direction = "asc" orderby_dirs[colname] = direction append(colname) # Helper function to resolve a reference's "sortby" into # a list of column names ftuples = {} def resolve_sortby(rfield): colname = rfield.colname if colname in ftuples: return ftuples[colname] ftype = rfield.ftype sortby = None if ftype[:9] == "reference": field = rfield.field if hasattr(field, "sortby") and field.sortby: sortby = field.sortby if not isinstance(sortby, (tuple, list)): sortby = [sortby] p = "%s.%%s" % ftype[10:].split(".")[0] sortby = [p % fname for fname in sortby] ftuples[colname] = sortby return sortby dt_ordering = [] # order expression for datatable append = dt_ordering.append # Match orderby-fields against table columns (=rfields) pos = 0 seen = set() skip = seen.add for i, colname in enumerate(orderby_cols): if i < pos: # Already consumed by sortby-tuple continue direction = orderby_dirs[colname] for col_idx, rfield in enumerate(self.rfields): if col_idx in seen: # Column already in dt_ordering continue sortby = None if rfield.colname == colname: # Match a single orderby-field sortby = (colname,) else: # Match between sortby and the orderby-field tuple # (must appear in same order and sorting direction) sortby = resolve_sortby(rfield) if not sortby or \ sortby != orderby_cols[i:i + len(sortby)] or \ any(orderby_dirs[c] != direction for c in sortby): sortby = None if sortby: append([col_idx, direction]) pos += 
len(sortby) skip(col_idx) break else: dt_ordering = [[1, "asc"]] self.dt_ordering = dt_ordering return dt_ordering
Python
def html(self, totalrows, filteredrows, **attr): """ Builds the datatable HTML (=a FORM with the embedded TABLE) Args: totalrows: number of rows available filteredrows: number of rows matching filters Keyword Args: see config() Returns: a FORM instance """ colnames, action_col = self.columns(self.colnames, attr) table_id = self.table_id pagination = attr.get("dt_pagination", True) table = self.table(table_id, colnames, action_col, # Pagination passes data via cacheLastJson, # rendering only one row here to produce the # <table> structure limit = 1 if pagination else None, ) if pagination: numrows = len(self.data) cache_data = self.json(totalrows, filteredrows, 1, # draw colnames = colnames, action_col = action_col, stringify = False, ) cache = {"cacheLower": 0, "cacheUpper": numrows if filteredrows > numrows else filteredrows, "cacheLastJson": cache_data, } else: cache = None config = self.config(self.orderby, **attr) config["id"] = table_id return self.form(table, table_id, config, #self.orderby, self.rfields, cache, **attr)
def html(self, totalrows, filteredrows, **attr): """ Builds the datatable HTML (=a FORM with the embedded TABLE) Args: totalrows: number of rows available filteredrows: number of rows matching filters Keyword Args: see config() Returns: a FORM instance """ colnames, action_col = self.columns(self.colnames, attr) table_id = self.table_id pagination = attr.get("dt_pagination", True) table = self.table(table_id, colnames, action_col, # Pagination passes data via cacheLastJson, # rendering only one row here to produce the # <table> structure limit = 1 if pagination else None, ) if pagination: numrows = len(self.data) cache_data = self.json(totalrows, filteredrows, 1, # draw colnames = colnames, action_col = action_col, stringify = False, ) cache = {"cacheLower": 0, "cacheUpper": numrows if filteredrows > numrows else filteredrows, "cacheLastJson": cache_data, } else: cache = None config = self.config(self.orderby, **attr) config["id"] = table_id return self.form(table, table_id, config, #self.orderby, self.rfields, cache, **attr)
Python
def config(orderby, **attr): """ Generates the datatable config JSON (value for hidden input) Args: orderby: the datatable ordering expression Keyword Args: ** Basic configuration dt_ajax_url: The URL to be used for the Ajax call dt_base_url: base URL to construct export format URLs, resource default URL without any URL method or query part dt_dom : The Datatable DOM initialisation variable, describing the order in which elements are displayed. See http://datatables.net/ref for more details. dt_formkey: a form key (XSRF protection for Ajax-actions) ** Pagination dt_pagination : Is pagination enabled, dafault True dt_pageLength : The default number of records that will be shown dt_lengthMenu: The menu options for the number of records to be shown dt_pagingType : How the pagination buttons are displayed ** Searching dt_searching: Enable or disable filtering of data. ** Row Actions dt_row_actions: list of actions (each a dict), overrides current.response.s3.actions dt_action_col: The column where the action buttons will be placed ** Bulk Actions dt_bulk_actions: list of labels for the bulk actions. dt_bulk_col: The column in which the checkboxes will appear, by default it will be the column immediately before the first data item dt_bulk_single: only allow a single row to be selected dt_bulk_selected: A list of selected items ** Grouping dt_group: The column(s) that is(are) used to group the data dt_group_totals: The number of record in each group. This will be displayed in parenthesis after the group title. dt_group_titles: The titles to be used for each group. These are a list of lists with the inner list consisting of two values, the repr from the db and the label to display. This can be more than the actual number of groups (giving an empty group). 
dt_group_space: Insert a space between the group heading and the next group dt_shrink_groups: If set then the rows within a group will be hidden two types are supported, 'individual' and 'accordion' dt_group_types: The type of indicator for groups that can be 'shrunk' Permitted valies are: 'icon' (the default) 'text' and 'none' ** Contents Rendering dt_text_maximum_len: The maximum length of text before it is condensed dt_text_condense_len: The length displayed text is condensed down to ** Styles dt_styles: dictionary of styles to be applied to a list of ids for example: {"warning" : [1,3,6,7,9], "alert" : [2,10,13]} dt_col_widths: dictionary of columns to apply a width to for example: {1 : 15, 2 : 20} ** Other Features dt_double_scroll: Render double scroll bars (top+bottom), only available with settings.ui.datatables_responsive=False """ request = current.request settings = current.deployment_settings s3 = current.response.s3 attr_get = attr.get # Default Ajax URL ajax_url = attr_get("dt_ajax_url") if not ajax_url: ajax_url = URL(c = request.controller, f = request.function, args = request.args, vars = request.get_vars, ) ajax_url = s3_set_extension(ajax_url, "aadata") # Default length menu if settings.get_base_bigtable(): default_length_menu = [[25, 50, 100], # not All [25, 50, 100], ] else: default_length_menu = [[25, 50, -1], [25, 50, s3_str(current.T("All"))], ] # Configuration (passed to client-side script via hidden input) config = { "utf8": False, # Basic Configuration "ajaxUrl": ajax_url, "dom": attr_get("dt_dom", settings.get_ui_datatables_dom()), # Pagination "pagination": attr_get("dt_pagination", True), "pageLength": attr_get("dt_pageLength", s3.ROWSPERPAGE), "lengthMenu": attr_get("dt_lengthMenu", default_length_menu), "pagingType": attr_get("dt_pagingType", settings.get_ui_datatables_pagingType()), # Searching "searching": attr_get("dt_searching", True), # Contents Rendering "textMaxLength": attr_get("dt_text_maximum_len", 80), "textShrinkLength": 
attr_get("dt_text_condense_len", 75), } action_col = attr_get("dt_action_col", 0) # Bulk Actions bulk_col = attr_get("dt_bulk_col", 0) bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: if not isinstance(bulk_actions, list): bulk_actions = [bulk_actions] config.update(bulkActions = bulk_actions, bulkCol = bulk_col, bulkSingle = bool(attr_get("dt_bulk_single")), ) if bulk_col <= action_col: action_col += 1 # Row actions row_actions = attr_get("dt_row_actions", s3.actions) if row_actions is None: row_actions = [] config.update(actionCol = action_col, rowActions = row_actions, ) # Grouping groups = attr_get("dt_group") if groups: if not isinstance(groups, list): groups = [groups] dt_group = [] for group in groups: if bulk_actions and bulk_col <= group: group += 1 if action_col >= group: group -= 1 dt_group.append([group, "asc"]) config.update(group = dt_group, groupTotals = attr_get("dt_group_totals", []), groupTitles = attr_get("dt_group_titles", []), groupSpacing = attr_get("dt_group_space"), groupIcon = attr_get("dt_group_types", []), shrinkGroupedRows = attr_get("dt_shrink_groups"), ) # Orderby for order in orderby: if bulk_actions: if bulk_col <= order[0]: order[0] += 1 if action_col > 0 and action_col >= order[0]: order[0] -= 1 config["order"] = orderby # Fixed column widths col_widths = attr_get("dt_col_widths") if col_widths is not None: # NB This requires "table-layout:fixed" in your CSS # You will likely need to specify all column widths if you do this # & won't have responsiveness config["colWidths"] = col_widths row_styles = attr_get("dt_styles") if not row_styles: row_styles = s3.dataTableStyle if row_styles: config["rowStyles"] = row_styles return config
def config(orderby, **attr): """ Generates the datatable config JSON (value for hidden input) Args: orderby: the datatable ordering expression Keyword Args: ** Basic configuration dt_ajax_url: The URL to be used for the Ajax call dt_base_url: base URL to construct export format URLs, resource default URL without any URL method or query part dt_dom : The Datatable DOM initialisation variable, describing the order in which elements are displayed. See http://datatables.net/ref for more details. dt_formkey: a form key (XSRF protection for Ajax-actions) ** Pagination dt_pagination : Is pagination enabled, dafault True dt_pageLength : The default number of records that will be shown dt_lengthMenu: The menu options for the number of records to be shown dt_pagingType : How the pagination buttons are displayed ** Searching dt_searching: Enable or disable filtering of data. ** Row Actions dt_row_actions: list of actions (each a dict), overrides current.response.s3.actions dt_action_col: The column where the action buttons will be placed ** Bulk Actions dt_bulk_actions: list of labels for the bulk actions. dt_bulk_col: The column in which the checkboxes will appear, by default it will be the column immediately before the first data item dt_bulk_single: only allow a single row to be selected dt_bulk_selected: A list of selected items ** Grouping dt_group: The column(s) that is(are) used to group the data dt_group_totals: The number of record in each group. This will be displayed in parenthesis after the group title. dt_group_titles: The titles to be used for each group. These are a list of lists with the inner list consisting of two values, the repr from the db and the label to display. This can be more than the actual number of groups (giving an empty group). 
dt_group_space: Insert a space between the group heading and the next group dt_shrink_groups: If set then the rows within a group will be hidden two types are supported, 'individual' and 'accordion' dt_group_types: The type of indicator for groups that can be 'shrunk' Permitted valies are: 'icon' (the default) 'text' and 'none' ** Contents Rendering dt_text_maximum_len: The maximum length of text before it is condensed dt_text_condense_len: The length displayed text is condensed down to ** Styles dt_styles: dictionary of styles to be applied to a list of ids for example: {"warning" : [1,3,6,7,9], "alert" : [2,10,13]} dt_col_widths: dictionary of columns to apply a width to for example: {1 : 15, 2 : 20} ** Other Features dt_double_scroll: Render double scroll bars (top+bottom), only available with settings.ui.datatables_responsive=False """ request = current.request settings = current.deployment_settings s3 = current.response.s3 attr_get = attr.get # Default Ajax URL ajax_url = attr_get("dt_ajax_url") if not ajax_url: ajax_url = URL(c = request.controller, f = request.function, args = request.args, vars = request.get_vars, ) ajax_url = s3_set_extension(ajax_url, "aadata") # Default length menu if settings.get_base_bigtable(): default_length_menu = [[25, 50, 100], # not All [25, 50, 100], ] else: default_length_menu = [[25, 50, -1], [25, 50, s3_str(current.T("All"))], ] # Configuration (passed to client-side script via hidden input) config = { "utf8": False, # Basic Configuration "ajaxUrl": ajax_url, "dom": attr_get("dt_dom", settings.get_ui_datatables_dom()), # Pagination "pagination": attr_get("dt_pagination", True), "pageLength": attr_get("dt_pageLength", s3.ROWSPERPAGE), "lengthMenu": attr_get("dt_lengthMenu", default_length_menu), "pagingType": attr_get("dt_pagingType", settings.get_ui_datatables_pagingType()), # Searching "searching": attr_get("dt_searching", True), # Contents Rendering "textMaxLength": attr_get("dt_text_maximum_len", 80), "textShrinkLength": 
attr_get("dt_text_condense_len", 75), } action_col = attr_get("dt_action_col", 0) # Bulk Actions bulk_col = attr_get("dt_bulk_col", 0) bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: if not isinstance(bulk_actions, list): bulk_actions = [bulk_actions] config.update(bulkActions = bulk_actions, bulkCol = bulk_col, bulkSingle = bool(attr_get("dt_bulk_single")), ) if bulk_col <= action_col: action_col += 1 # Row actions row_actions = attr_get("dt_row_actions", s3.actions) if row_actions is None: row_actions = [] config.update(actionCol = action_col, rowActions = row_actions, ) # Grouping groups = attr_get("dt_group") if groups: if not isinstance(groups, list): groups = [groups] dt_group = [] for group in groups: if bulk_actions and bulk_col <= group: group += 1 if action_col >= group: group -= 1 dt_group.append([group, "asc"]) config.update(group = dt_group, groupTotals = attr_get("dt_group_totals", []), groupTitles = attr_get("dt_group_titles", []), groupSpacing = attr_get("dt_group_space"), groupIcon = attr_get("dt_group_types", []), shrinkGroupedRows = attr_get("dt_shrink_groups"), ) # Orderby for order in orderby: if bulk_actions: if bulk_col <= order[0]: order[0] += 1 if action_col > 0 and action_col >= order[0]: order[0] -= 1 config["order"] = orderby # Fixed column widths col_widths = attr_get("dt_col_widths") if col_widths is not None: # NB This requires "table-layout:fixed" in your CSS # You will likely need to specify all column widths if you do this # & won't have responsiveness config["colWidths"] = col_widths row_styles = attr_get("dt_styles") if not row_styles: row_styles = s3.dataTableStyle if row_styles: config["rowStyles"] = row_styles return config
Python
def form(cls, table, table_id, config, rfields=None, cache=None, **attr): """ Assembles the wrapper FORM for the data table, including - export icons - hidden inputs with configuration and cache parameters Args: table: The HTML table table_id: The DOM ID of the table orderby: the datatable ordering expression - see http://datatables.net/reference/option/order rfields: the table columns (list of S3ResourceField) cache: parameters/data for the client-side cache Keyword Args: see config() Returns: a FORM instance """ request = current.request s3 = current.response.s3 settings = current.deployment_settings # Append table ID to response.s3.dataTableID table_ids = s3.dataTableID if not table_ids or not isinstance(table_ids, list): s3.dataTableID = [table_id] elif table_id not in table_ids: table_ids.append(table_id) attr_get = attr.get # Double Scroll if not settings.get_ui_datatables_responsive(): double_scroll = attr_get("dt_double_scroll") if double_scroll is None: double_scroll = settings.get_ui_datatables_double_scroll() if double_scroll: if s3.debug: script = "/%s/static/scripts/jquery.doubleScroll.js" % request.application else: script = "/%s/static/scripts/jquery.doubleScroll.min.js" % request.application if script not in s3.scripts: s3.scripts.append(script) table.add_class("doublescroll") # Build the form form = FORM(_class="dt-wrapper") # Form key (XSRF protection for Ajax actions) formkey = attr_get("dt_formkey") if formkey: form["hidden"] = {"_formkey": formkey} # Export formats if not s3.no_formats: form.append(cls.export_formats(base_url = attr_get("dt_base_url"), permalink = attr_get("dt_permalink"), rfields = rfields, )) # The HTML table form.append(table) # Hidden inputs for configuration and data def add_hidden(name, suffix, value): form.append(INPUT(_type = "hidden", _id = "%s_%s" % (table_id, suffix), _name = name, _value = value, )) add_hidden("config", "configurations", jsons(config)) if cache: add_hidden("cache", "dataTable_cache", jsons(cache)) 
bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: bulk_selected = attr_get("dt_bulk_selected", "") if isinstance(bulk_selected, list): bulk_selected = ",".join(bulk_selected) add_hidden("mode", "dataTable_bulkMode", "Inclusive") add_hidden("selected", "dataTable_bulkSelection", "[%s]" % bulk_selected) add_hidden("filterURL", "dataTable_filterURL", config["ajaxUrl"]) # InitComplete callback (processed in views/dataTables.html) callback = settings.get_ui_datatables_initComplete() if callback: s3.dataTable_initComplete = callback return form
def form(cls, table, table_id, config, rfields=None, cache=None, **attr): """ Assembles the wrapper FORM for the data table, including - export icons - hidden inputs with configuration and cache parameters Args: table: The HTML table table_id: The DOM ID of the table orderby: the datatable ordering expression - see http://datatables.net/reference/option/order rfields: the table columns (list of S3ResourceField) cache: parameters/data for the client-side cache Keyword Args: see config() Returns: a FORM instance """ request = current.request s3 = current.response.s3 settings = current.deployment_settings # Append table ID to response.s3.dataTableID table_ids = s3.dataTableID if not table_ids or not isinstance(table_ids, list): s3.dataTableID = [table_id] elif table_id not in table_ids: table_ids.append(table_id) attr_get = attr.get # Double Scroll if not settings.get_ui_datatables_responsive(): double_scroll = attr_get("dt_double_scroll") if double_scroll is None: double_scroll = settings.get_ui_datatables_double_scroll() if double_scroll: if s3.debug: script = "/%s/static/scripts/jquery.doubleScroll.js" % request.application else: script = "/%s/static/scripts/jquery.doubleScroll.min.js" % request.application if script not in s3.scripts: s3.scripts.append(script) table.add_class("doublescroll") # Build the form form = FORM(_class="dt-wrapper") # Form key (XSRF protection for Ajax actions) formkey = attr_get("dt_formkey") if formkey: form["hidden"] = {"_formkey": formkey} # Export formats if not s3.no_formats: form.append(cls.export_formats(base_url = attr_get("dt_base_url"), permalink = attr_get("dt_permalink"), rfields = rfields, )) # The HTML table form.append(table) # Hidden inputs for configuration and data def add_hidden(name, suffix, value): form.append(INPUT(_type = "hidden", _id = "%s_%s" % (table_id, suffix), _name = name, _value = value, )) add_hidden("config", "configurations", jsons(config)) if cache: add_hidden("cache", "dataTable_cache", jsons(cache)) 
bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: bulk_selected = attr_get("dt_bulk_selected", "") if isinstance(bulk_selected, list): bulk_selected = ",".join(bulk_selected) add_hidden("mode", "dataTable_bulkMode", "Inclusive") add_hidden("selected", "dataTable_bulkSelection", "[%s]" % bulk_selected) add_hidden("filterURL", "dataTable_filterURL", config["ajaxUrl"]) # InitComplete callback (processed in views/dataTables.html) callback = settings.get_ui_datatables_initComplete() if callback: s3.dataTable_initComplete = callback return form
Python
def json(self, totalrows, filteredrows, draw, colnames = None, action_col = None, stringify = True, **attr): """ Builds a JSON object to update the data table Args: totalrows: number of rows available filteredrows: number of rows matching filters draw: unaltered copy of "draw" parameter sent from the client stringify: serialize the JSON object as string Keyword Args: dt_action_col: see config() dt_bulk_actions: see config() dt_bulk_col: see config() """ if not colnames: colnames, action_col = self.columns(self.colnames, attr) dbid = colnames[action_col] bulk_checkbox = self.bulk_checkbox data_array = [] addrow = data_array.append for row in self.data: details = [] append = details.append for colname in colnames: if colname == "BULK": append(str(bulk_checkbox(row[dbid]))) else: append(s3_str(row[colname])) addrow(details) output = {"recordsTotal": totalrows, "recordsFiltered": filteredrows, "data": data_array, "draw": draw, } if stringify: output = jsons(output) return output
def json(self, totalrows, filteredrows, draw, colnames = None, action_col = None, stringify = True, **attr): """ Builds a JSON object to update the data table Args: totalrows: number of rows available filteredrows: number of rows matching filters draw: unaltered copy of "draw" parameter sent from the client stringify: serialize the JSON object as string Keyword Args: dt_action_col: see config() dt_bulk_actions: see config() dt_bulk_col: see config() """ if not colnames: colnames, action_col = self.columns(self.colnames, attr) dbid = colnames[action_col] bulk_checkbox = self.bulk_checkbox data_array = [] addrow = data_array.append for row in self.data: details = [] append = details.append for colname in colnames: if colname == "BULK": append(str(bulk_checkbox(row[dbid]))) else: append(s3_str(row[colname])) addrow(details) output = {"recordsTotal": totalrows, "recordsFiltered": filteredrows, "data": data_array, "draw": draw, } if stringify: output = jsons(output) return output
Python
def columns(colnames, attr): """ Adds the action columns into the columns list Args: colnames: the list of column keys Keyword Args: dt_action_col: see config() dt_bulk_actions: see config() dt_bulk_col: see config() Returns: tuple (colnames, action_col), with the revised list of column keys and the index of the action column """ attr_get = attr.get # Move the action column (first column) to the right place action_col = attr_get("dt_action_col", 0) % len(colnames) if action_col != 0: colnames = colnames[1:action_col+1] + [colnames[0]] + colnames[action_col+1:] # Insert the bulk action column, if necessary bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: bulk_col = attr_get("dt_bulk_col", 0) % len(colnames) colnames.insert(bulk_col, "BULK") if bulk_col <= action_col: action_col += 1 return colnames, action_col
def columns(colnames, attr): """ Adds the action columns into the columns list Args: colnames: the list of column keys Keyword Args: dt_action_col: see config() dt_bulk_actions: see config() dt_bulk_col: see config() Returns: tuple (colnames, action_col), with the revised list of column keys and the index of the action column """ attr_get = attr.get # Move the action column (first column) to the right place action_col = attr_get("dt_action_col", 0) % len(colnames) if action_col != 0: colnames = colnames[1:action_col+1] + [colnames[0]] + colnames[action_col+1:] # Insert the bulk action column, if necessary bulk_actions = attr_get("dt_bulk_actions") if bulk_actions: bulk_col = attr_get("dt_bulk_col", 0) % len(colnames) colnames.insert(bulk_col, "BULK") if bulk_col <= action_col: action_col += 1 return colnames, action_col
Python
def bulk_checkbox(dbid): """ Constructs a checkbox to select a row for bulk action Args: dbid: the row ID (=value of the action column) Returns: the checkbox (INPUT instance) """ return INPUT(_class = "bulkcheckbox", _type = "checkbox", _value = False, data = {"dbid": dbid}, )
def bulk_checkbox(dbid): """ Constructs a checkbox to select a row for bulk action Args: dbid: the row ID (=value of the action column) Returns: the checkbox (INPUT instance) """ return INPUT(_class = "bulkcheckbox", _type = "checkbox", _value = False, data = {"dbid": dbid}, )
Python
def i18n(): """ Generates a JavaScript fragment to inject translated strings (i18n.*) used by s3.ui.datatable.js, added to view in views/dataTables.html returns: the JavaScript fragment as str """ T = current.T strings = {"sortAscending": T("activate to sort column ascending"), "sortDescending": T("activate to sort column descending"), "first": T("First"), "last": T("Last"), "next": T("Next"), "previous": T("Previous"), "emptyTable": T("No records found"), "info": T("Showing _START_ to _END_ of _TOTAL_ entries"), "infoEmpty": T("Showing 0 to 0 of 0 entries"), "infoFiltered": T("(filtered from _MAX_ total entries)"), "infoThousands": current.deployment_settings.get_L10n_thousands_separator(), "lengthMenu": T("Show %(number)s entries") % {"number": "_MENU_"}, "loadingRecords": T("Loading"), "processing": T("Processing"), "search": T("Search"), "zeroRecords": T("No matching records found"), "selectAll": T("Select All"), } return "\n".join('''i18n.%s="%s"'''% (k, v) for k, v in strings.items())
def i18n(): """ Generates a JavaScript fragment to inject translated strings (i18n.*) used by s3.ui.datatable.js, added to view in views/dataTables.html returns: the JavaScript fragment as str """ T = current.T strings = {"sortAscending": T("activate to sort column ascending"), "sortDescending": T("activate to sort column descending"), "first": T("First"), "last": T("Last"), "next": T("Next"), "previous": T("Previous"), "emptyTable": T("No records found"), "info": T("Showing _START_ to _END_ of _TOTAL_ entries"), "infoEmpty": T("Showing 0 to 0 of 0 entries"), "infoFiltered": T("(filtered from _MAX_ total entries)"), "infoThousands": current.deployment_settings.get_L10n_thousands_separator(), "lengthMenu": T("Show %(number)s entries") % {"number": "_MENU_"}, "loadingRecords": T("Loading"), "processing": T("Processing"), "search": T("Search"), "zeroRecords": T("No matching records found"), "selectAll": T("Select All"), } return "\n".join('''i18n.%s="%s"'''% (k, v) for k, v in strings.items())
Python
def widget(self, field, value, **attributes): """ Produce a form widget to request consent, for embedding of consent questions in other forms Args: field: the Field (to hold the response) value: the current or default value attributes: HTML attributes for the widget """ T = current.T fieldname = field.name # Consent options to ask opts = self.extract() # Current consent status (from form) selected = self.parse(value) value = {} # Widget ID widget_id = attributes.get("_id") if not widget_id: widget_id = "%s-consent" % fieldname # The widget widget = DIV(_id = widget_id, _class = "consent-widget", ) # Construct the consent options has_mandatory_opts = False if self.processing_types: # Preserve order items = ((k, opts[k]) for k in self.processing_types if k in opts) else: items = opts.items() for code, spec in items: # Title title = spec.get("name") if not title: continue # Current selected-status of this option status = selected.get(code) v = status[1] if status is not None else spec.get("default", False) # The question for this option question = LABEL(INPUT(_type="checkbox", _class = "consent-checkbox", value = v, data = {"code": code}, ), SPAN(title, _class = "consent-title", ), _class = "consent-question", ) if spec.get("mandatory"): has_mandatory_opts = True question.append(SPAN("*", _class="req")) # The option option = DIV(question, _class="consent-option") # Optional explanation description = spec.get("description") if description: option.append(P(XML(description), _class="consent-explanation")) # Append to widget widget.append(option) # Add selected-status to hidden input # JSON format: {"code": [id, consenting]} value[code] = [spec.get("id"), v] # Mandatory options advice if has_mandatory_opts: widget.append(P("* %s" % T("Consent required"), _class="req_key")) # The hidden input hidden_input = INPUT(_type = "hidden", _name = attributes.get("_name", fieldname), _id = "%s-response" % widget_id, _value = json.dumps(value), requires = self.validate, ) 
widget.append(hidden_input) # Inject client-side script and instantiate UI widget self.inject_script(widget_id, {}) return widget
def widget(self, field, value, **attributes): """ Produce a form widget to request consent, for embedding of consent questions in other forms Args: field: the Field (to hold the response) value: the current or default value attributes: HTML attributes for the widget """ T = current.T fieldname = field.name # Consent options to ask opts = self.extract() # Current consent status (from form) selected = self.parse(value) value = {} # Widget ID widget_id = attributes.get("_id") if not widget_id: widget_id = "%s-consent" % fieldname # The widget widget = DIV(_id = widget_id, _class = "consent-widget", ) # Construct the consent options has_mandatory_opts = False if self.processing_types: # Preserve order items = ((k, opts[k]) for k in self.processing_types if k in opts) else: items = opts.items() for code, spec in items: # Title title = spec.get("name") if not title: continue # Current selected-status of this option status = selected.get(code) v = status[1] if status is not None else spec.get("default", False) # The question for this option question = LABEL(INPUT(_type="checkbox", _class = "consent-checkbox", value = v, data = {"code": code}, ), SPAN(title, _class = "consent-title", ), _class = "consent-question", ) if spec.get("mandatory"): has_mandatory_opts = True question.append(SPAN("*", _class="req")) # The option option = DIV(question, _class="consent-option") # Optional explanation description = spec.get("description") if description: option.append(P(XML(description), _class="consent-explanation")) # Append to widget widget.append(option) # Add selected-status to hidden input # JSON format: {"code": [id, consenting]} value[code] = [spec.get("id"), v] # Mandatory options advice if has_mandatory_opts: widget.append(P("* %s" % T("Consent required"), _class="req_key")) # The hidden input hidden_input = INPUT(_type = "hidden", _name = attributes.get("_name", fieldname), _id = "%s-response" % widget_id, _value = json.dumps(value), requires = self.validate, ) 
widget.append(hidden_input) # Inject client-side script and instantiate UI widget self.inject_script(widget_id, {}) return widget
Python
def parse(cls, value): """ Parse the JSON string returned by the widget Args: value: the JSON string Returns: dict with consent question responses, format {code: [id, consenting], ...} """ parsed = {} if value is not None: try: parsed = json.loads(value) except JSONERRORS: pass return parsed
def parse(cls, value): """ Parse the JSON string returned by the widget Args: value: the JSON string Returns: dict with consent question responses, format {code: [id, consenting], ...} """ parsed = {} if value is not None: try: parsed = json.loads(value) except JSONERRORS: pass return parsed
Python
def validate(cls, value, record_id=None): """ Validate a consent response (for use with Field.requires) Args: value: the value returned from the widget """ T = current.T invalid = T("Invalid value") error = None parsed = cls.parse(value) if not parsed or not isinstance(parsed, dict): error = invalid else: try: option_ids = {v[0] for v in parsed.values()} except (TypeError, IndexError): error = invalid else: # Retrieve the relevant consent options s3db = current.s3db ttable = s3db.auth_processing_type otable = s3db.auth_consent_option join = ttable.on(ttable.id == otable.type_id) query = otable.id.belongs(option_ids) rows = current.db(query).select(otable.id, otable.obsolete, otable.mandatory, ttable.code, join = join, ) options = {} for row in rows: processing = row.auth_processing_type option = row.auth_consent_option options[option.id] = (processing.code, option.obsolete, option.mandatory) # Validate each response for code, spec in parsed.items(): option_id, consenting = spec option = options.get(option_id) if not option or option[0] != code: # Option does not exist or does not match the code error = invalid break if option[1]: # Option is obsolete error = T("Obsolete option: %(code)s") % {"code": code} break if option[2] and not consenting: # Required consent has not been given error = T("Required consent not given") break return (None, error) if error else (value, None)
def validate(cls, value, record_id=None): """ Validate a consent response (for use with Field.requires) Args: value: the value returned from the widget """ T = current.T invalid = T("Invalid value") error = None parsed = cls.parse(value) if not parsed or not isinstance(parsed, dict): error = invalid else: try: option_ids = {v[0] for v in parsed.values()} except (TypeError, IndexError): error = invalid else: # Retrieve the relevant consent options s3db = current.s3db ttable = s3db.auth_processing_type otable = s3db.auth_consent_option join = ttable.on(ttable.id == otable.type_id) query = otable.id.belongs(option_ids) rows = current.db(query).select(otable.id, otable.obsolete, otable.mandatory, ttable.code, join = join, ) options = {} for row in rows: processing = row.auth_processing_type option = row.auth_consent_option options[option.id] = (processing.code, option.obsolete, option.mandatory) # Validate each response for code, spec in parsed.items(): option_id, consenting = spec option = options.get(option_id) if not option or option[0] != code: # Option does not exist or does not match the code error = invalid break if option[1]: # Option is obsolete error = T("Obsolete option: %(code)s") % {"code": code} break if option[2] and not consenting: # Required consent has not been given error = T("Required consent not given") break return (None, error) if error else (value, None)
Python
def inject_script(widget_id, options): """ Inject static JS and instantiate client-side UI widget Args: widget_id: the widget ID options: JSON-serializable dict with UI widget options """ request = current.request s3 = current.response.s3 # Static script if s3.debug: script = "/%s/static/scripts/S3/s3.ui.consent.js" % \ request.application else: script = "/%s/static/scripts/S3/s3.ui.consent.min.js" % \ request.application scripts = s3.scripts if script not in scripts: scripts.append(script) # Widget options opts = {} if options: opts.update(options) # Widget instantiation script = '''$('#%(widget_id)s').consentQuestion(%(options)s)''' % \ {"widget_id": widget_id, "options": json.dumps(opts), } jquery_ready = s3.jquery_ready if script not in jquery_ready: jquery_ready.append(script)
def inject_script(widget_id, options): """ Inject static JS and instantiate client-side UI widget Args: widget_id: the widget ID options: JSON-serializable dict with UI widget options """ request = current.request s3 = current.response.s3 # Static script if s3.debug: script = "/%s/static/scripts/S3/s3.ui.consent.js" % \ request.application else: script = "/%s/static/scripts/S3/s3.ui.consent.min.js" % \ request.application scripts = s3.scripts if script not in scripts: scripts.append(script) # Widget options opts = {} if options: opts.update(options) # Widget instantiation script = '''$('#%(widget_id)s').consentQuestion(%(options)s)''' % \ {"widget_id": widget_id, "options": json.dumps(opts), } jquery_ready = s3.jquery_ready if script not in jquery_ready: jquery_ready.append(script)
Python
def register_consent(cls, user_id): """ Track consent responses given during user self-registration Args: user_id: the auth_user ID """ db = current.db s3db = current.s3db ltable = s3db.pr_person_user ptable = s3db.pr_person # Look up the person ID join = ptable.on(ptable.pe_id == ltable.pe_id) person = db(ltable.user_id == user_id).select(ptable.id, join = join, limitby = (0, 1), ).first() if person: person_id = person.id # Look up the consent response from temp user record ttable = s3db.auth_user_temp row = db(ttable.user_id == user_id).select(ttable.id, ttable.consent, ttable.created_on, limitby = (0, 1), ).first() if row and row.consent: # Track consent cls.track(person_id, row.consent, timestmp = row.created_on, ) # Reset consent response in temp user record row.update_record(consent=None)
def register_consent(cls, user_id): """ Track consent responses given during user self-registration Args: user_id: the auth_user ID """ db = current.db s3db = current.s3db ltable = s3db.pr_person_user ptable = s3db.pr_person # Look up the person ID join = ptable.on(ptable.pe_id == ltable.pe_id) person = db(ltable.user_id == user_id).select(ptable.id, join = join, limitby = (0, 1), ).first() if person: person_id = person.id # Look up the consent response from temp user record ttable = s3db.auth_user_temp row = db(ttable.user_id == user_id).select(ttable.id, ttable.consent, ttable.created_on, limitby = (0, 1), ).first() if row and row.consent: # Track consent cls.track(person_id, row.consent, timestmp = row.created_on, ) # Reset consent response in temp user record row.update_record(consent=None)
Python
def assert_consent(cls, context, code, value, person_id = None, timestmp = None, allow_obsolete = False, ): """ Assert consent of a non-local entity Args: context: string specifying the transaction to which consent was to be obtained code: the processing type code value: the value returned from the consent widget person_id: the person asserting consent (defaults to the current user) timestmp: datetime when consent was obtained (defaults to current time) allow_obsolete: allow recording assertions for obsolete consent options Returns: the consent assertion record ID Raises: TypeError: for invalid parameter types ValueError: for invalid input data """ if not context: raise ValueError("Context is required") context = str(context) now = current.request.utcnow if not timestmp: timestmp = now elif not isinstance(timestmp, datetime.datetime): raise TypeError("Invalid timestmp type, expected datetime but got %s" % type(timestmp)) elif timestmp > now: raise ValueError("Future timestmp not permitted") timestmp = timestmp.replace(microsecond=0) if not person_id: person_id = current.auth.s3_logged_in_person() if not person_id: raise ValueError("Must be logged in or specify a person_id") # Parse the value and extract the option_id parsed = cls.parse(value) consent = parsed.get(code) if not consent: raise ValueError("Invalid JSON, or no response for processing type found") option_id, response = consent # Get all current+valid options matching the codes db = current.db s3db = current.s3db ttable = s3db.auth_processing_type otable = s3db.auth_consent_option hash_fields = s3db.auth_consent_option_hash_fields option_fields = {"id"} | set(hash_fields) fields = [otable[fn] for fn in option_fields] join = ttable.on((ttable.id == otable.type_id) & \ (ttable.code == code)) query = (otable.id == option_id) & \ (otable.deleted == False) if not allow_obsolete: query &= (otable.obsolete == False) option = db(query).select(*fields, join = join, limitby = (0, 1), ).first() if not option: raise 
ValueError("Invalid consent option for processing type") ohash = cls.get_hash([(fn, option[fn]) for fn in hash_fields]) consent = (("person_id", person_id), ("context", context), ("date", timestmp.isoformat()), ("option_id", option.id), ("consented", bool(response)), ("ohash", ohash), ) # Generate verification hash vhash = cls.get_hash(consent) consent = dict(consent[:5]) consent["vhash"] = vhash consent["date"] = timestmp atable = s3db.auth_consent_assertion record_id = atable.insert(**consent) if record_id: consent["id"] = record_id s3db.onaccept(atable, consent) return record_id
def assert_consent(cls, context, code, value, person_id = None, timestmp = None, allow_obsolete = False, ): """ Assert consent of a non-local entity Args: context: string specifying the transaction to which consent was to be obtained code: the processing type code value: the value returned from the consent widget person_id: the person asserting consent (defaults to the current user) timestmp: datetime when consent was obtained (defaults to current time) allow_obsolete: allow recording assertions for obsolete consent options Returns: the consent assertion record ID Raises: TypeError: for invalid parameter types ValueError: for invalid input data """ if not context: raise ValueError("Context is required") context = str(context) now = current.request.utcnow if not timestmp: timestmp = now elif not isinstance(timestmp, datetime.datetime): raise TypeError("Invalid timestmp type, expected datetime but got %s" % type(timestmp)) elif timestmp > now: raise ValueError("Future timestmp not permitted") timestmp = timestmp.replace(microsecond=0) if not person_id: person_id = current.auth.s3_logged_in_person() if not person_id: raise ValueError("Must be logged in or specify a person_id") # Parse the value and extract the option_id parsed = cls.parse(value) consent = parsed.get(code) if not consent: raise ValueError("Invalid JSON, or no response for processing type found") option_id, response = consent # Get all current+valid options matching the codes db = current.db s3db = current.s3db ttable = s3db.auth_processing_type otable = s3db.auth_consent_option hash_fields = s3db.auth_consent_option_hash_fields option_fields = {"id"} | set(hash_fields) fields = [otable[fn] for fn in option_fields] join = ttable.on((ttable.id == otable.type_id) & \ (ttable.code == code)) query = (otable.id == option_id) & \ (otable.deleted == False) if not allow_obsolete: query &= (otable.obsolete == False) option = db(query).select(*fields, join = join, limitby = (0, 1), ).first() if not option: raise 
ValueError("Invalid consent option for processing type") ohash = cls.get_hash([(fn, option[fn]) for fn in hash_fields]) consent = (("person_id", person_id), ("context", context), ("date", timestmp.isoformat()), ("option_id", option.id), ("consented", bool(response)), ("ohash", ohash), ) # Generate verification hash vhash = cls.get_hash(consent) consent = dict(consent[:5]) consent["vhash"] = vhash consent["date"] = timestmp atable = s3db.auth_consent_assertion record_id = atable.insert(**consent) if record_id: consent["id"] = record_id s3db.onaccept(atable, consent) return record_id
Python
def verify(cls, record_id): """ Verify a consent record (checks the hash, not expiry) Args: record_id: the consent record ID """ db = current.db s3db = current.s3db # Consent option hash fields hash_fields = s3db.auth_consent_option_hash_fields # Load consent record and referenced option otable = s3db.auth_consent_option ctable = s3db.auth_consent join = otable.on(otable.id == ctable.option_id) query = (ctable.id == record_id) & (ctable.deleted == False) fields = [otable.id, ctable.date, ctable.person_id, ctable.option_id, ctable.vsign, ctable.vhash, ctable.consenting, ] + [otable[fn] for fn in hash_fields] row = db(query).select(join=join, limitby=(0, 1), *fields).first() if not row: return False option = row.auth_consent_option context = [(fn, option[fn]) for fn in hash_fields] consent = row.auth_consent verify = (("date", consent.date.isoformat()), ("option_id", consent.option_id), ("person_id", consent.person_id), ("vsign", consent.vsign), ("consenting", consent.consenting), ("ohash", cls.get_hash(context)), ) return consent.vhash == cls.get_hash(verify)
def verify(cls, record_id): """ Verify a consent record (checks the hash, not expiry) Args: record_id: the consent record ID """ db = current.db s3db = current.s3db # Consent option hash fields hash_fields = s3db.auth_consent_option_hash_fields # Load consent record and referenced option otable = s3db.auth_consent_option ctable = s3db.auth_consent join = otable.on(otable.id == ctable.option_id) query = (ctable.id == record_id) & (ctable.deleted == False) fields = [otable.id, ctable.date, ctable.person_id, ctable.option_id, ctable.vsign, ctable.vhash, ctable.consenting, ] + [otable[fn] for fn in hash_fields] row = db(query).select(join=join, limitby=(0, 1), *fields).first() if not row: return False option = row.auth_consent_option context = [(fn, option[fn]) for fn in hash_fields] consent = row.auth_consent verify = (("date", consent.date.isoformat()), ("option_id", consent.option_id), ("person_id", consent.person_id), ("vsign", consent.vsign), ("consenting", consent.consenting), ("ohash", cls.get_hash(context)), ) return consent.vhash == cls.get_hash(verify)
Python
def has_consented(cls, person_id, code): """ Check valid+current consent for a particular processing type Args: person_id: the person to check consent for code: the data processing type code Returns: True|False whether or not the person has consented to this type of data processing and consent has not expired Example: consent = ConsentTracking() if consent.has_consented(auth.s3_logged_in_person(), "PIDSHARE"): # perform PIDSHARE... """ # Get all current consent options for the code option_ids = cls.get_consent_options(code) if not option_ids: return False # Check if there is a positive consent record for this person # for any of these consent options that has not expired today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == person_id) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.expires_on == None) | (ctable.expires_on > today)) & \ (ctable.consenting == True) & \ (ctable.deleted == False) row = current.db(query).select(ctable.id, limitby = (0, 1)).first() return row is not None
def has_consented(cls, person_id, code): """ Check valid+current consent for a particular processing type Args: person_id: the person to check consent for code: the data processing type code Returns: True|False whether or not the person has consented to this type of data processing and consent has not expired Example: consent = ConsentTracking() if consent.has_consented(auth.s3_logged_in_person(), "PIDSHARE"): # perform PIDSHARE... """ # Get all current consent options for the code option_ids = cls.get_consent_options(code) if not option_ids: return False # Check if there is a positive consent record for this person # for any of these consent options that has not expired today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == person_id) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.expires_on == None) | (ctable.expires_on > today)) & \ (ctable.consenting == True) & \ (ctable.deleted == False) row = current.db(query).select(ctable.id, limitby = (0, 1)).first() return row is not None
Python
def pending_responses(self, person_id): """ Identify all processing types for which a person has not responded to the updated consent questions, or where their previously given consent has expired Args: person_id: the person ID Returns: list of processing type codes """ # Get all current consent options for the given processing types options = self.extract() option_ids = {spec["id"] for spec in options.values()} # Find all responses of this person to these options today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == person_id) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.consenting == False) | \ (ctable.expires_on == None) | \ (ctable.expires_on > today)) & \ (ctable.deleted == False) rows = current.db(query).select(ctable.option_id) # Identify any pending responses responded = {row.option_id for row in rows} pending = [] for code, spec in options.items(): if spec["id"] not in responded: pending.append(code) return pending
def pending_responses(self, person_id): """ Identify all processing types for which a person has not responded to the updated consent questions, or where their previously given consent has expired Args: person_id: the person ID Returns: list of processing type codes """ # Get all current consent options for the given processing types options = self.extract() option_ids = {spec["id"] for spec in options.values()} # Find all responses of this person to these options today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == person_id) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.consenting == False) | \ (ctable.expires_on == None) | \ (ctable.expires_on > today)) & \ (ctable.deleted == False) rows = current.db(query).select(ctable.option_id) # Identify any pending responses responded = {row.option_id for row in rows} pending = [] for code, spec in options.items(): if spec["id"] not in responded: pending.append(code) return pending
Python
def consent_query(cls, table, code, field=None): """ Get a query for table for records where the person identified by field has consented to a certain type of data processing. - useful to limit background processing that requires consent Args: table: the table to query code: the processing type code to check field: the field in the table referencing pr_person.id Returns: Query Example: consent = ConsentTracking() query = consent.consent_query(table, "PIDSHARE") & (table.deleted == False) # Perform PIDSHARE with query result... rows = db(query).select(*fields) """ if field is None: if original_tablename(table) == "pr_person": field = table.id else: field = table.person_id elif isinstance(field, str): field = table[field] option_ids = cls.get_consent_options(code) today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == field) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.expires_on == None) | (ctable.expires_on > today)) & \ (ctable.consenting == True) & \ (ctable.deleted == False) return query
def consent_query(cls, table, code, field=None): """ Get a query for table for records where the person identified by field has consented to a certain type of data processing. - useful to limit background processing that requires consent Args: table: the table to query code: the processing type code to check field: the field in the table referencing pr_person.id Returns: Query Example: consent = ConsentTracking() query = consent.consent_query(table, "PIDSHARE") & (table.deleted == False) # Perform PIDSHARE with query result... rows = db(query).select(*fields) """ if field is None: if original_tablename(table) == "pr_person": field = table.id else: field = table.person_id elif isinstance(field, str): field = table[field] option_ids = cls.get_consent_options(code) today = current.request.utcnow.date() ctable = current.s3db.auth_consent query = (ctable.person_id == field) & \ (ctable.option_id.belongs(option_ids)) & \ ((ctable.expires_on == None) | (ctable.expires_on > today)) & \ (ctable.consenting == True) & \ (ctable.deleted == False) return query
Python
def consent_filter(cls, code, selector=None): """ Filter resource for records where the person identified by selector has consented to a certain type of data processing. - useful to limit REST methods that require consent Args: code: the processing type code to check selector: a field selector (string) that references pr_person.id; if not specified pr_person is assumed to be the master resource Returns: S3ResourceQuery Example: consent = ConsentTracking resource.add_filter(consent.consent_filter("PIDSHARE", "~.person_id")) Note: only one consent filter can be used for the same resource; if multiple consent options must be checked and/or multiple person_id references apply independently, then either aliased auth_consent components can be used to construct a filter, or the query must be split (the latter typically performs better). Ideally, however, the consent decision for a single operation should not be complex or second-guessing. """ option_ids = cls.get_consent_options(code) today = current.request.utcnow.date() # Construct sub-selectors if selector and selector not in ("id", "~.id"): consent = "%s$person_id:auth_consent" % selector else: # Assume pr_person is master consent = "person_id:auth_consent" option_id = FS("%s.option_id" % consent) expires_on = FS("%s.expires_on" % consent) consenting = FS("%s.consenting" % consent) query = (option_id.belongs(option_ids)) & \ ((expires_on == None) | (expires_on > today)) & \ (consenting == True) return query
def consent_filter(cls, code, selector=None): """ Filter resource for records where the person identified by selector has consented to a certain type of data processing. - useful to limit REST methods that require consent Args: code: the processing type code to check selector: a field selector (string) that references pr_person.id; if not specified pr_person is assumed to be the master resource Returns: S3ResourceQuery Example: consent = ConsentTracking resource.add_filter(consent.consent_filter("PIDSHARE", "~.person_id")) Note: only one consent filter can be used for the same resource; if multiple consent options must be checked and/or multiple person_id references apply independently, then either aliased auth_consent components can be used to construct a filter, or the query must be split (the latter typically performs better). Ideally, however, the consent decision for a single operation should not be complex or second-guessing. """ option_ids = cls.get_consent_options(code) today = current.request.utcnow.date() # Construct sub-selectors if selector and selector not in ("id", "~.id"): consent = "%s$person_id:auth_consent" % selector else: # Assume pr_person is master consent = "person_id:auth_consent" option_id = FS("%s.option_id" % consent) expires_on = FS("%s.expires_on" % consent) consenting = FS("%s.consenting" % consent) query = (option_id.belongs(option_ids)) & \ ((expires_on == None) | (expires_on > today)) & \ (consenting == True) return query
Python
def inv_inv_item_onvalidate(form): """ When a inv_inv_item record is created with a source number, then the source number needs to be unique within the organisation. """ item_source_no = form.vars.item_source_no if not item_source_no: return if hasattr(form, "record"): record = form.record if record and \ record.item_source_no and \ record.item_source_no == item_source_no: # The tracking number hasn't changed so no validation needed return itable = current.s3db.inv_inv_item # Was: "track_org_id" - but inv_inv_item has no "track_org_id"! org_field = "owner_org_id" query = (itable[org_field] == form.vars[org_field]) & \ (itable.item_source_no == item_source_no) record = current.db(query).select(itable[org_field], limitby = (0, 1) ).first() if record: org = current.response.s3 \ .org_organisation_represent(record[org_field]) form.errors.item_source_no = current.T("The Tracking Number %s " "is already used by %s.") % \ (item_source_no, org)
def inv_inv_item_onvalidate(form): """ When a inv_inv_item record is created with a source number, then the source number needs to be unique within the organisation. """ item_source_no = form.vars.item_source_no if not item_source_no: return if hasattr(form, "record"): record = form.record if record and \ record.item_source_no and \ record.item_source_no == item_source_no: # The tracking number hasn't changed so no validation needed return itable = current.s3db.inv_inv_item # Was: "track_org_id" - but inv_inv_item has no "track_org_id"! org_field = "owner_org_id" query = (itable[org_field] == form.vars[org_field]) & \ (itable.item_source_no == item_source_no) record = current.db(query).select(itable[org_field], limitby = (0, 1) ).first() if record: org = current.response.s3 \ .org_organisation_represent(record[org_field]) form.errors.item_source_no = current.T("The Tracking Number %s " "is already used by %s.") % \ (item_source_no, org)
Python
def inv_remove(inv_rec, required_total, required_pack_value = 1, current_track_total = 0, update = True, ): """ Check that the required_total can be removed from the inv_record if there is insufficient stock then set up the total to being what is in stock otherwise set it to be the required total. If the update flag is true then remove it from stock. The current total is what has already been removed for this transaction. """ db = current.db inv_item_table = db.inv_inv_item siptable = db.supply_item_pack inv_p_qnty = db(siptable.id == inv_rec.item_pack_id).select(siptable.quantity, limitby = (0, 1) ).first().quantity inv_qnty = inv_rec.quantity * inv_p_qnty cur_qnty = current_track_total * inv_p_qnty req_qnty = required_total * required_pack_value # It already matches so no change required if cur_qnty == req_qnty: return required_total if inv_qnty + cur_qnty > req_qnty: send_item_quantity = req_qnty new_qnty = (inv_qnty + cur_qnty - req_qnty) / inv_p_qnty else: send_item_quantity = inv_qnty + cur_qnty new_qnty = 0 send_item_quantity = send_item_quantity / inv_p_qnty if update: # Update the levels in stock if new_qnty: db(inv_item_table.id == inv_rec.id).update(quantity = new_qnty) else: db(inv_item_table.id == inv_rec.id).update(deleted = True) return send_item_quantity
def inv_remove(inv_rec, required_total, required_pack_value = 1, current_track_total = 0, update = True, ): """ Check that the required_total can be removed from the inv_record if there is insufficient stock then set up the total to being what is in stock otherwise set it to be the required total. If the update flag is true then remove it from stock. The current total is what has already been removed for this transaction. """ db = current.db inv_item_table = db.inv_inv_item siptable = db.supply_item_pack inv_p_qnty = db(siptable.id == inv_rec.item_pack_id).select(siptable.quantity, limitby = (0, 1) ).first().quantity inv_qnty = inv_rec.quantity * inv_p_qnty cur_qnty = current_track_total * inv_p_qnty req_qnty = required_total * required_pack_value # It already matches so no change required if cur_qnty == req_qnty: return required_total if inv_qnty + cur_qnty > req_qnty: send_item_quantity = req_qnty new_qnty = (inv_qnty + cur_qnty - req_qnty) / inv_p_qnty else: send_item_quantity = inv_qnty + cur_qnty new_qnty = 0 send_item_quantity = send_item_quantity / inv_p_qnty if update: # Update the levels in stock if new_qnty: db(inv_item_table.id == inv_rec.id).update(quantity = new_qnty) else: db(inv_item_table.id == inv_rec.id).update(deleted = True) return send_item_quantity
Python
def inv_prep(r): """ Used in site REST controllers to Filter out items which are already in this inventory """ if r.component: if r.component.name == "inv_item": db = current.db table = db.inv_inv_item # Filter out items which are already in this inventory query = (table.site_id == r.record.site_id) & \ (table.deleted == False) inv_item_rows = db(query).select(table.item_id) item_ids = [row.item_id for row in inv_item_rows] # Ensure that the current item CAN be selected if r.method == "update": item = db(table.id == r.args[2]).select(table.item_id, limitby = (0, 1) ).first() item_ids.remove(item.item_id) table.item_id.requires.set_filter(not_filterby = "id", not_filter_opts = item_ids) elif r.component.name == "send": # Default to the Search tab in the location selector widget1 current.response.s3.gis.tab = "search" #if current.request.get_vars.get("select", "sent") == "incoming": # # Display only incoming shipments which haven't been received yet # filter = (current.s3db.inv_send.status == SHIP_STATUS_SENT) # r.resource.add_component_filter("send", filter)
def inv_prep(r): """ Used in site REST controllers to Filter out items which are already in this inventory """ if r.component: if r.component.name == "inv_item": db = current.db table = db.inv_inv_item # Filter out items which are already in this inventory query = (table.site_id == r.record.site_id) & \ (table.deleted == False) inv_item_rows = db(query).select(table.item_id) item_ids = [row.item_id for row in inv_item_rows] # Ensure that the current item CAN be selected if r.method == "update": item = db(table.id == r.args[2]).select(table.item_id, limitby = (0, 1) ).first() item_ids.remove(item.item_id) table.item_id.requires.set_filter(not_filterby = "id", not_filter_opts = item_ids) elif r.component.name == "send": # Default to the Search tab in the location selector widget1 current.response.s3.gis.tab = "search" #if current.request.get_vars.get("select", "sent") == "incoming": # # Display only incoming shipments which haven't been received yet # filter = (current.s3db.inv_send.status == SHIP_STATUS_SENT) # r.resource.add_component_filter("send", filter)
Python
def defaults(self):
    """ Return safe defaults for model-global names in case the module is disabled """
    dummy = S3ReusableField.dummy
    return {"inv_recv_id": dummy("recv_id"),
            }
Python
def inv_send_onaccept(form):
    """
        When an inv_send record is created then create the send_ref;
        for typed shipments, also pre-populate the shipment with all
        matching stock items.
    """
    db = current.db
    form_vars = form.vars
    record_id = form_vars.id

    shipment_type = form_vars.type
    if shipment_type:
        # Add all inv_items with status matching the send shipment type
        # eg. Items for Dump, Sale, Reject, Surplus
        track_item_onaccept = current.s3db.inv_track_item_onaccept
        itable = db.inv_inv_item
        tracktable = db.inv_track_item
        stock = db((itable.site_id == form_vars.site_id) & \
                   (itable.status == int(shipment_type))).select()
        for inv_item in stock:
            if inv_item.quantity == 0:
                continue
            # Copy the inv_item into a new inv_track_item
            track_id = tracktable.insert(send_id = record_id,
                                         send_inv_item_id = inv_item.id,
                                         item_id = inv_item.item_id,
                                         quantity = inv_item.quantity,
                                         currency = inv_item.currency,
                                         pack_value = inv_item.pack_value,
                                         expiry_date = inv_item.expiry_date,
                                         owner_org_id = inv_item.owner_org_id,
                                         supply_org_id = inv_item.supply_org_id,
                                         item_source_no = inv_item.item_source_no,
                                         item_pack_id = inv_item.item_pack_id,
                                         inv_item_status = inv_item.status,
                                         #status = TRACK_STATUS_PREPARING,
                                         )
            # Construct form.vars for inv_track_item_onaccept, which
            # removes the inv_item from stock
            onaccept_vars = Storage(id = track_id,
                                    quantity = inv_item.quantity,
                                    item_pack_id = inv_item.item_pack_id,
                                    send_inv_item_id = inv_item.id,
                                    )
            track_item_onaccept(Storage(vars = onaccept_vars))

    # If the send_ref is None then set it up
    stable = db.inv_send
    record = stable[record_id]
    if not record.send_ref:
        code = current.s3db.supply_get_shipping_code(
                    current.deployment_settings.get_inv_send_shortname(),
                    record.site_id,
                    stable.send_ref,
                    )
        db(stable.id == record_id).update(send_ref = code)
Python
def inv_recv_onaccept(form):
    """
        When an inv_recv record is created then create the recv_ref
        (AR Number).
    """
    db = current.db
    rtable = db.inv_recv

    record_id = form.vars.id
    record = rtable[record_id]
    if record.recv_ref:
        # Already set
        return

    code = current.s3db.supply_get_shipping_code(
                current.deployment_settings.get_inv_recv_shortname(),
                record.site_id,
                rtable.recv_ref,
                )
    db(rtable.id == record_id).update(recv_ref = code)
Python
def inv_recv_donation_cert(r, **attr):
    """ Generate a PDF of a Donation certificate """

    T = current.T
    db = current.db

    table = db.inv_recv
    table.date.readable = True
    table.type.readable = False

    site_field = table.site_id
    site_field.readable = True
    site_field.label = T("By %(site)s") % \
        {"site": T(current.deployment_settings.get_inv_facility_label())}
    site_field.represent = current.s3db.org_site_represent

    # Site name goes into the PDF filename
    site = site_field.represent(table[r.id].site_id, False)

    from core import DataExporter
    return DataExporter.pdf(r.resource,
                            request = r,
                            method = "list",
                            pdf_title = "Donation Certificate",
                            pdf_filename = "DC-%s" % site,
                            pdf_hide_comments = True,
                            pdf_componentname = "track_item",
                            **attr
                            )
Python
def inv_send_ref_represent(value, show_link=True):
    """
        Represent the Tally Out number; if show_link is True then
        render it as a link to the shipment's printable form (pdf).
    """
    if not value:
        return current.messages["NONE"]
    if not show_link:
        return value

    db = current.db
    table = db.inv_send
    row = db(table.send_ref == value).select(table.id,
                                             limitby = (0, 1),
                                             ).first()
    if not row:
        # No matching shipment: plain representation
        return value

    return A(value,
             _href = URL(c = "inv",
                         f = "send",
                         args = [row.id, "form"],
                         extension = "",
                         ),
             )
Python
def inv_recv_ref_represent(value, show_link=True):
    """
        Represent the Goods Received Note number; if show_link is True
        then render it as a link to the printable GRN form (pdf).

        @param value: the recv_ref value
        @param show_link: render as a hyperlink rather than bold text

        Fix: guard against no matching inv_recv record — previously
        recv_row.id raised AttributeError when the lookup returned None
        (the sibling inv_send_ref_represent already handles this case).
    """
    if not value:
        return current.messages["NONE"]

    if show_link:
        db = current.db
        table = db.inv_recv
        recv_row = db(table.recv_ref == value).select(table.id,
                                                      limitby = (0, 1),
                                                      ).first()
        if recv_row:
            return A(value,
                     _href = URL(c = "inv",
                                 f = "recv",
                                 args = [recv_row.id, "form"],
                                 extension = "",
                                 ),
                     )
        # No matching record (e.g. deleted shipment): cannot link
        return B(value)
    else:
        return B(value)
Python
def inv_track_item_onvalidate(form):
    """
        If the inv. item is coming out of a warehouse then copy the
        inv. item details across (org, expiry etc). Default the received
        quantity to the quantity sent, and resolve the selected
        receiving bin.
    """
    form_vars = form.vars

    send_inv_item_id = form_vars.send_inv_item_id
    if send_inv_item_id:
        # Copy the data from the sent inv_item
        db = current.db
        itable = db.inv_inv_item
        query = (itable.id == send_inv_item_id)
        inv_item = db(query).select(limitby = (0, 1)).first()
        for fname in ("item_id",
                      "item_source_no",
                      "expiry_date",
                      "bin",
                      "owner_org_id",
                      "supply_org_id",
                      "pack_value",
                      "currency",
                      ):
            form_vars[fname] = inv_item[fname]
        form_vars.inv_item_status = inv_item.status

        # Save the organisation from where this tracking originates
        stable = current.s3db.org_site
        site = db(query & (itable.site_id == stable.id)).select(
                                                stable.organisation_id,
                                                limitby = (0, 1),
                                                ).first()
        form_vars.track_org_id = site.organisation_id

    if not form_vars.recv_quantity and "quantity" in form_vars:
        # If we have no send_id and no recv_quantity then copy the
        # quantity sent directly into the received field - this is for
        # when there is no related send record.
        # The Quantity received ALWAYS defaults to the quantity sent
        # (Please do not change this unless there is a specific user requirement)
        form_vars.recv_quantity = form_vars.quantity

    recv_bin = form_vars.recv_bin
    if recv_bin and isinstance(recv_bin, list):
        # If there is a receiving bin then select the right one
        # NOTE(review): this only rebinds the local name and never
        # writes back to form_vars - presumably intentional; confirm
        recv_bin = recv_bin[1] if recv_bin[1] != "" else recv_bin[0]
Python
def inv_kitting_onvalidate(form):
    """
        Check that we have sufficient inv_item in stock to build the kits
    """
    form_vars = form.vars
    item_id = form_vars.item_id
    item_pack_id = form_vars.item_pack_id
    quantity = form_vars.quantity
    site_id = form_vars.site_id

    db = current.db
    s3db = current.s3db
    ktable = s3db.supply_kit_item
    ptable = db.supply_item_pack
    iitable = s3db.inv_inv_item

    def pack_quantity(pack_id):
        # Number of base units in the given pack
        return db(ptable.id == pack_id).select(ptable.quantity,
                                               limitby = (0, 1),
                                               ).first().quantity

    # Get contents of this kit
    components = db(ktable.parent_item_id == item_id).select(ktable.item_id,
                                                             ktable.quantity,
                                                             ktable.item_pack_id,
                                                             )

    # How many kits are we building?
    quantity = quantity * pack_quantity(item_pack_id)

    # Base Query: The Facility at which we're building these kits.
    # Filter out Stock which is in Bad condition or Expired.
    ii_expiry_field = iitable.expiry_date
    squery = (iitable.site_id == site_id) & \
             (iitable.deleted == False) & \
             ((ii_expiry_field >= current.request.now) | (ii_expiry_field == None)) & \
             (iitable.status == 0)

    max_kits = None
    # Loop through each supply_item in the kit
    for component in components:
        # How much of this supply_item is required per kit?
        one_kit = component.quantity * pack_quantity(component.item_pack_id)

        # How much of this supply_item do we have in stock?
        stock_amount = 0
        rows = db(squery & (iitable.item_id == component.item_id)).select(
                                                iitable.quantity,
                                                iitable.item_pack_id,
                                                )
        for inv_item in rows:
            stock_amount += inv_item.quantity * pack_quantity(inv_item.item_pack_id)

        # How many Kits can we create based on this item?
        kits = stock_amount / one_kit
        # The component in shortest supply limits the total
        if max_kits is None or kits < max_kits:
            max_kits = kits

    # @ToDo: Save the results for the onaccept?

    if max_kits is None:
        form.errors.item_id = current.T("This kit hasn't got any Kit Items defined")
    elif max_kits < quantity:
        form.errors.quantity = current.T("You can only make %d kit(s) with the available stock") % \
                               int(max_kits)
Python
def inv_kitting_onaccept(form):
    """
        Adjust the Inventory stocks: reduce the components & increase
        the kits
        - picks items which have an earlier expiry_date where they have
          them, earlier purchase_date otherwise

        Provides a pick list to ensure that the right stock items are
        used to build the kits: inv_kitting_item
    """
    form_vars = form.vars
    kitting_id = form_vars.id
    item_id = form_vars.item_id
    item_pack_id = form_vars.item_pack_id
    quantity = form_vars.quantity
    site_id = form_vars.site_id

    db = current.db
    s3db = current.s3db
    ktable = s3db.supply_kit_item
    ptable = db.supply_item_pack
    iitable = s3db.inv_inv_item
    insert = s3db.inv_kitting_item.insert
    inv_remove = s3db.inv_remove

    def pack_quantity(pack_id):
        # Number of base units in the given pack
        return db(ptable.id == pack_id).select(ptable.quantity,
                                               limitby = (0, 1),
                                               ).first().quantity

    # Get contents of this kit
    components = db(ktable.parent_item_id == item_id).select(ktable.item_id,
                                                             ktable.quantity,
                                                             ktable.item_pack_id,
                                                             )

    # How many kits are we building?
    quantity = quantity * pack_quantity(item_pack_id)

    ii_expiry_field = iitable.expiry_date

    # Match Stock based on oldest expiry date or purchase date
    orderby = ii_expiry_field | iitable.purchase_date

    # We set expiry date of the kit to the oldest expiry date of the components
    expiry_date = None

    # Base Query: The Facility at which we're building these kits.
    # Filter out Stock which is in Bad condition or Expired.
    squery = (iitable.site_id == site_id) & \
             (iitable.deleted == False) & \
             ((ii_expiry_field >= current.request.now) | (ii_expiry_field == None)) & \
             (iitable.status == 0)

    # Loop through each supply_item in the kit
    for component in components:
        # How much of this supply_item is required per kit?
        one_kit = component.quantity * pack_quantity(component.item_pack_id)

        # How much is required for all Kits?
        required = one_kit * quantity

        # List of what we have available in stock
        ritem_id = component.item_id
        wh_items = db(squery & (iitable.item_id == ritem_id)).select(
                            iitable.id,
                            iitable.quantity,
                            iitable.expiry_date,
                            iitable.purchase_date, # Included just for orderby on Postgres
                            iitable.item_pack_id,
                            iitable.bin,
                            iitable.item_source_no,
                            orderby = orderby,
                            )
        for wh_item in wh_items:
            # How many of this item can we use for these kits?
            amount = wh_item.quantity * pack_quantity(wh_item.item_pack_id)
            # How many of this item will we use for the kits?
            if amount > required:
                # Use only what is required
                amount = required

            if wh_item.expiry_date:
                # Shorten the kit expiry date to the earliest component
                if expiry_date is None or wh_item.expiry_date < expiry_date:
                    expiry_date = wh_item.expiry_date

            # @ToDo: Record which components are to be used for the kits
            #        Store results in a table?

            # Remove from stock
            inv_remove(wh_item, amount)

            # Add to Pick List
            insert(site_id = site_id,
                   kitting_id = kitting_id,
                   item_id = ritem_id,
                   item_pack_id = wh_item.item_pack_id,
                   bin = wh_item.bin,
                   item_source_no = wh_item.item_source_no,
                   quantity = amount,
                   inv_item_id = wh_item.id,
                   )

            # Update how much is still required
            required -= amount
            if not required:
                # No more required: move on to the next component
                break

    # Add Kits to Stock
    # @ToDo: Keep track of Donor? Owner?
    # @ToDo: Update Pack Value
    new_id = iitable.insert(site_id = site_id,
                            item_id = item_id,
                            item_pack_id = item_pack_id,
                            quantity = quantity,
                            expiry_date = expiry_date,
                            )
    s3db.update_super(iitable, {"id": new_id})
Python
def inv_track_item_deleting(record_id):
    """
        A track item can only be deleted if the status is Preparing.
        When a track item record is deleted and it is linked to an
        inv_item then the stock is returned to the warehouse.

        @param record_id: the inv_track_item record ID
        @return: True if deletion is allowed, else False
    """
    db = current.db
    s3db = current.s3db
    tracktable = db.inv_track_item
    inv_item_table = db.inv_inv_item
    ritable = s3db.req_req_item
    siptable = db.supply_item_pack

    record = tracktable[record_id]
    if record.status != 1:
        # Not in "Preparing" status => cannot delete
        return False

    # If this is linked to a request then remove these items
    # from the quantity in transit
    if record.req_item_id:
        req_id = record.req_item_id
        req_item = ritable[req_id]
        req_pack_quantity = siptable[req_item.item_pack_id].quantity
        track_pack_quantity = siptable[record.item_pack_id].quantity
        quantity_transit = s3db.supply_item_add(req_item.quantity_transit,
                                                req_pack_quantity,
                                                - record.quantity,
                                                track_pack_quantity,
                                                )
        db(ritable.id == req_id).update(quantity_transit = quantity_transit)
        s3db.req_update_status(req_id)

    # Check that we have a link to a warehouse
    if record.send_inv_item_id:
        track_total = record.quantity
        # Remove the total from this record and place it back in the warehouse
        db(inv_item_table.id == record.send_inv_item_id).update(
                quantity = inv_item_table.quantity + track_total)
        # Fix: interpolate the record's own comments, not the Field
        # object inv_item_table.comments (which rendered as the literal
        # string "inv_inv_item.comments")
        comments = record.comments or ""
        db(tracktable.id == record_id).update(quantity = 0,
                                              comments = "%sQuantity was: %s" % \
                                                         (comments, track_total),
                                              )
    return True
Python
def inv_tabs(r):
    """
        Add an expandable set of Tabs for a Site's Inventory Tasks

        @ToDo: Make these Expand/Contract without a server-side call
    """
    settings = current.deployment_settings
    if not settings.get_org_site_inv_req_tabs():
        return []
    if not settings.has_module("inv") or \
       not current.auth.s3_has_permission("read", "inv_inv_item", c="inv"):
        return []

    T = current.T
    s3 = current.session.s3

    collapse_tabs = settings.get_inv_collapse_tabs()
    tablename = s3_rheader_resource(r)[0]
    if collapse_tabs and tablename != "inv_warehouse":
        # Test if the tabs are collapsed
        show_collapse = True
        show_inv = r.get_vars.show_inv
        if show_inv == "True":
            show_inv = True
        elif show_inv == "False":
            show_inv = False
        else:
            show_inv = None
        if show_inv is True or show_inv is False:
            # Remember the expand/collapse state in the session
            if not s3.show_inv:
                s3.show_inv = Storage()
            s3.show_inv["%s_%s" % (r.name, r.id)] = show_inv
        elif s3.show_inv:
            show_inv = s3.show_inv.get("%s_%s" % (r.name, r.id))
        else:
            show_inv = False
    else:
        show_inv = True
        show_collapse = False

    if show_inv:
        tabs = [(T("Stock"), "inv_item"),
                #(T("Incoming"), "incoming/"),
                (T(settings.get_inv_recv_tab_label()), "recv"),
                (T(settings.get_inv_send_tab_label()), "send"),
                ]
        if settings.has_module("proc"):
            tabs.append((T("Planned Procurements"), "plan"))
        if show_collapse:
            tabs.append(("- %s" % T("Warehouse"), None, {"show_inv": "False"}))
    else:
        tabs = [("+ %s" % T("Warehouse"), "inv_item", {"show_inv": "True"}),
                ]
    return tabs
Python
def inv_rheader(r):
    """ Resource Header for Warehouses and Inventory Items """

    if r.representation != "html" or r.method == "import":
        # RHeaders only used in interactive views
        return None

    # Need to use this format as otherwise req_match?viewing=org_office.x
    # doesn't have an rheader
    tablename, record = s3_rheader_resource(r)
    if not record:
        # List or Create form: rheader makes no sense here
        return None

    T = current.T
    s3db = current.s3db
    table = s3db.table(tablename)

    rheader = None

    if tablename == "inv_warehouse":

        # Tabs
        tabs = [(T("Basic Details"), None),
                #(T("Contact Data"), "contact"),
                ]
        permit = current.auth.s3_has_permission
        settings = current.deployment_settings
        if settings.has_module("hrm"):
            STAFF = settings.get_hrm_staff_label()
            tabs.append((STAFF, "human_resource"))
            if permit("create", "hrm_human_resource_site") and \
               permit("update", tablename, r.id):
                tabs.append((T("Assign %(staff)s") % {"staff": STAFF}, "assign"))
        if settings.has_module("asset") and permit("read", "asset_asset"):
            tabs.insert(6, (T("Assets"), "asset"))
        tabs = tabs + inv_tabs(r)
        if settings.has_module("req"):
            tabs = tabs + s3db.req_tabs(r)
        tabs.append((T("Attachments"), "document"))

        # Fields
        rheader_fields = [["name", "organisation_id", "email"],
                          ["location_id", "phone1"],
                          ]

        rheader = S3ResourceHeader(rheader_fields, tabs)
        rheader_fields, rheader_tabs = rheader(r, table=table, record=record)

        # Inject logo
        logo = s3db.org_organisation_logo(record.organisation_id)
        if logo:
            rheader = DIV(TABLE(TR(TD(logo),
                                   TD(rheader_fields),
                                   )))
        else:
            rheader = DIV(rheader_fields)
        rheader.append(rheader_tabs)

    elif tablename == "inv_inv_item":

        # Tabs
        tabs = [(T("Details"), None),
                (T("Track Shipment"), "track_movement/"),
                ]
        rheader_tabs = DIV(s3_rheader_tabs(r, tabs))

        # Header
        rheader = DIV(TABLE(TR(TH("%s: " % table.item_id.label),
                               table.item_id.represent(record.item_id),
                               TH("%s: " % table.item_pack_id.label),
                               table.item_pack_id.represent(record.item_pack_id),
                               ),
                            TR(TH("%s: " % table.site_id.label),
                               TD(table.site_id.represent(record.site_id),
                                  _colspan = 3,
                                  ),
                               ),
                            ),
                      rheader_tabs,
                      )

    elif tablename == "inv_kitting":

        # Tabs
        tabs = [(T("Details"), None),
                (T("Pick List"), "item"),
                ]
        rheader_tabs = DIV(s3_rheader_tabs(r, tabs))

        # Header
        rheader = DIV(TABLE(TR(TH("%s: " % table.req_ref.label),
                               TD(table.req_ref.represent(record.req_ref),
                                  _colspan = 3,
                                  ),
                               ),
                            TR(TH("%s: " % table.item_id.label),
                               table.item_id.represent(record.item_id),
                               TH("%s: " % table.item_pack_id.label),
                               table.item_pack_id.represent(record.item_pack_id),
                               TH("%s: " % table.quantity.label),
                               table.quantity.represent(record.quantity),
                               ),
                            TR(TH("%s: " % table.site_id.label),
                               TD(table.site_id.represent(record.site_id),
                                  _colspan = 3,
                                  ),
                               ),
                            TR(TH("%s: " % table.repacked_id.label),
                               TD(table.repacked_id.represent(record.repacked_id),
                                  _colspan = 3,
                                  ),
                               ),
                            TR(TH("%s: " % table.date.label),
                               TD(table.date.represent(record.date),
                                  _colspan = 3,
                                  ),
                               ),
                            ),
                      rheader_tabs,
                      )

    elif tablename == "inv_track_item":

        # Tabs
        tabs = [(T("Details"), None),
                (T("Track Shipment"), "inv_item/"),
                ]
        rheader_tabs = DIV(s3_rheader_tabs(r, tabs))

        # Get site data from the sending inv_item
        table = s3db.inv_inv_item
        irecord = current.db(table.id == record.send_inv_item_id).select(
                                                table.site_id,
                                                limitby = (0, 1),
                                                ).first()

        # Header
        if irecord:
            rheader = DIV(TABLE(TR(TH("%s: " % table.item_id.label),
                                   table.item_id.represent(record.item_id),
                                   TH("%s: " % table.item_pack_id.label),
                                   table.item_pack_id.represent(record.item_pack_id),
                                   ),
                                TR(TH("%s: " % table.site_id.label),
                                   TD(table.site_id.represent(irecord.site_id),
                                      _colspan = 3,
                                      ),
                                   ),
                                ),
                          rheader_tabs,
                          )
        else:
            rheader = DIV(TABLE(TR(TH("%s: " % table.item_id.label),
                                   table.item_id.represent(record.item_id),
                                   TH("%s: " % table.item_pack_id.label),
                                   table.item_pack_id.represent(record.item_pack_id),
                                   ),
                                ),
                          rheader_tabs,
                          )

    # Build footer
    inv_rfooter(r, record)

    return rheader
Python
def inv_rfooter(r, record):
    """
        Resource Footer for Warehouses and Inventory Items
        - writes action buttons into response.s3.rfooter (side effect),
          returns nothing

        Args:
            r: the CRUDRequest
            record: the master record (must carry a site_id)
    """

    if "site_id" not in record:
        return

    if (r.component and r.component.name == "inv_item"):
        T = current.T
        rfooter = TAG[""]()
        component_id = r.component_id
        # Stock adjustment buttons: only when direct stock edits are
        # disabled and the user may update this warehouse
        if not current.deployment_settings.get_inv_direct_stock_edits() and \
           current.auth.s3_has_permission("update", "inv_warehouse", r.id):
            if component_id:
                # Adjust just this inventory item
                asi_btn = A(T("Adjust Stock Item"),
                            _href = URL(c = "inv",
                                        f = "adj",
                                        args = ["create"],
                                        vars = {"site": record.site_id,
                                                "item": component_id,
                                                },
                                        ),
                            _class = "action-btn"
                            )
                rfooter.append(asi_btn)
            else:
                # Adjust the whole site's stock
                as_btn = A(T("Adjust Stock"),
                           _href = URL(c = "inv",
                                       f = "adj",
                                       args = ["create"],
                                       vars = {"site": record.site_id},
                                       ),
                           _class = "action-btn"
                           )
                rfooter.append(as_btn)

        if component_id:
            ts_btn = A(T("Track Shipment"),
                       _href = URL(c = "inv",
                                   f = "track_movement",
                                   vars = {"viewing": "inv_inv_item.%s" % component_id},
                                   ),
                       _class = "action-btn"
                       )
            rfooter.append(ts_btn)

        current.response.s3.rfooter = rfooter
def inv_rfooter(r, record): """ Resource Footer for Warehouses and Inventory Items """ if "site_id" not in record: return if (r.component and r.component.name == "inv_item"): T = current.T rfooter = TAG[""]() component_id = r.component_id if not current.deployment_settings.get_inv_direct_stock_edits() and \ current.auth.s3_has_permission("update", "inv_warehouse", r.id): if component_id: asi_btn = A(T("Adjust Stock Item"), _href = URL(c = "inv", f = "adj", args = ["create"], vars = {"site": record.site_id, "item": component_id, }, ), _class = "action-btn" ) rfooter.append(asi_btn) else: as_btn = A(T("Adjust Stock"), _href = URL(c = "inv", f = "adj", args = ["create"], vars = {"site": record.site_id}, ), _class = "action-btn" ) rfooter.append(as_btn) if component_id: ts_btn = A(T("Track Shipment"), _href = URL(c = "inv", f = "track_movement", vars = {"viewing": "inv_inv_item.%s" % component_id}, ), _class = "action-btn" ) rfooter.append(ts_btn) current.response.s3.rfooter = rfooter
Python
def inv_recv_crud_strings():
    """
        CRUD Strings for inv_recv which need to be visible to menus
        without a model load

        Side effect:
            installs the strings in current.response.s3.crud_strings
    """

    T = current.T

    if current.deployment_settings.get_inv_shipment_name() == "order":
        # Deployment uses "Order" terminology
        crud_strings = Storage(
            label_create = T("Add Order"),
            title_display = T("Order Details"),
            title_list = T("Orders"),
            title_update = T("Edit Order"),
            label_list_button = T("List Orders"),
            label_delete_button = T("Delete Order"),
            msg_record_created = T("Order Created"),
            msg_record_modified = T("Order updated"),
            msg_record_deleted = T("Order canceled"),
            msg_list_empty = T("No Orders registered")
            )
    else:
        # Default "Shipment" terminology
        crud_strings = Storage(
            label_create = T("Receive New Shipment"),
            title_display = T("Received Shipment Details"),
            title_list = T("Received/Incoming Shipments"),
            title_update = T("Shipment to Receive"),
            label_list_button = T("List Received/Incoming Shipments"),
            label_delete_button = T("Delete Received Shipment"),
            msg_record_created = T("Shipment Created"),
            msg_record_modified = T("Received Shipment updated"),
            msg_record_deleted = T("Received Shipment canceled"),
            msg_list_empty = T("No Received Shipments")
            )

    current.response.s3.crud_strings["inv_recv"] = crud_strings
    return
def inv_recv_crud_strings(): """ CRUD Strings for inv_recv which need to be visible to menus without a model load """ T = current.T if current.deployment_settings.get_inv_shipment_name() == "order": #recv_id_label = T("Order") ADD_RECV = T("Add Order") current.response.s3.crud_strings["inv_recv"] = Storage( label_create = ADD_RECV, title_display = T("Order Details"), title_list = T("Orders"), title_update = T("Edit Order"), label_list_button = T("List Orders"), label_delete_button = T("Delete Order"), msg_record_created = T("Order Created"), msg_record_modified = T("Order updated"), msg_record_deleted = T("Order canceled"), msg_list_empty = T("No Orders registered") ) else: #recv_id_label = T("Receive Shipment") ADD_RECV = T("Receive New Shipment") current.response.s3.crud_strings["inv_recv"] = Storage( label_create = ADD_RECV, title_display = T("Received Shipment Details"), title_list = T("Received/Incoming Shipments"), title_update = T("Shipment to Receive"), label_list_button = T("List Received/Incoming Shipments"), label_delete_button = T("Delete Received Shipment"), msg_record_created = T("Shipment Created"), msg_record_modified = T("Received Shipment updated"), msg_record_deleted = T("Received Shipment canceled"), msg_list_empty = T("No Received Shipments") ) return
Python
def qnty_adj_repr(value):
    """
        Make unadjusted quantities show up in bold

        Args:
            value: the quantity (or None if not yet adjusted)
    """

    if value is not None:
        return IS_FLOAT_AMOUNT.represent(value, precision=2)
    # We want the word "None" here, not just a bold dash
    return B(T("None"))
def qnty_adj_repr(value): """ Make unadjusted quantities show up in bold """ if value is None: # We want the word "None" here, not just a bold dash return B(T("None")) else: return IS_FLOAT_AMOUNT.represent(value, precision=2)
Python
def inv_adj_onaccept(form):
    """
        When an adjustment record is created and it is of type inventory
        then an adj_item record for each inv_inv_item in the site will be
        created. If needed, extra adj_item records can be created later.

        Args:
            form: the FORM from the inv_adj create
    """

    record_id = form.vars.id

    db = current.db
    inv_item_table = db.inv_inv_item
    adjitemtable = db.inv_adj_item
    adjtable = db.inv_adj

    # Read only the category (was: adjtable[record_id], which fetched all
    # fields and crashed with AttributeError if the record was missing)
    adj_rec = db(adjtable.id == record_id).select(adjtable.category,
                                                  limitby = (0, 1),
                                                  ).first()
    if adj_rec is None:
        # Record not found => nothing to do
        return

    if adj_rec.category == 1:
        site_id = form.vars.site_id
        # Only get inv. items with a positive quantity
        query = (inv_item_table.site_id == site_id) & \
                (inv_item_table.quantity > 0) & \
                (inv_item_table.deleted == False)
        rows = db(query).select()
        for inv_item in rows:
            # Add an adjustment item record, pre-filled from the stock item
            # (old_* = current values; new_* default to unchanged)
            adjitemtable.insert(reason = 0,
                                adj_id = record_id,
                                inv_item_id = inv_item.id, # original source inv_item
                                item_id = inv_item.item_id, # the supply item
                                item_pack_id = inv_item.item_pack_id,
                                old_quantity = inv_item.quantity,
                                currency = inv_item.currency,
                                old_status = inv_item.status,
                                new_status = inv_item.status,
                                old_pack_value = inv_item.pack_value,
                                new_pack_value = inv_item.pack_value,
                                expiry_date = inv_item.expiry_date,
                                bin = inv_item.bin,
                                old_owner_org_id = inv_item.owner_org_id,
                                new_owner_org_id = inv_item.owner_org_id,
                                )
def inv_adj_onaccept(form): """ When an adjustment record is created and it is of type inventory then an adj_item record for each inv_inv_item in the site will be created. If needed, extra adj_item records can be created later. """ record_id = form.vars.id db = current.db inv_item_table = db.inv_inv_item adjitemtable = db.inv_adj_item adjtable = db.inv_adj adj_rec = adjtable[record_id] if adj_rec.category == 1: site_id = form.vars.site_id # Only get inv. item with a positive quantity query = (inv_item_table.site_id == site_id) & \ (inv_item_table.quantity > 0) & \ (inv_item_table.deleted == False) row = db(query).select() for inv_item in row: # add an adjustment item record adjitemtable.insert(reason = 0, adj_id = record_id, inv_item_id = inv_item.id, # original source inv_item item_id = inv_item.item_id, # the supply item item_pack_id = inv_item.item_pack_id, old_quantity = inv_item.quantity, currency = inv_item.currency, old_status = inv_item.status, new_status = inv_item.status, old_pack_value = inv_item.pack_value, new_pack_value = inv_item.pack_value, expiry_date = inv_item.expiry_date, bin = inv_item.bin, old_owner_org_id = inv_item.owner_org_id, new_owner_org_id = inv_item.owner_org_id, )
Python
def inv_stock_movements(resource, selectors, orderby):
    """
        Extraction method for stock movements report

        Args:
            resource: the CRUDResource (inv_inv_item)
            selectors: the field selectors
            orderby: orderby expression

        Note:
            transactions can be filtered by earliest/latest date
            using a DateFilter with selector="_transaction.date"

        TODO does not take manual stock adjustments into account
        TODO does not represent sites or Waybill/GRN as links
             (breaks PDF export, but otherwise it's useful)
    """

    # Extract the stock item data
    selectors = ["id",
                 "site_id",
                 "site_id$name",
                 "item_id$item_category_id",
                 "bin",
                 "item_id$name",
                 "quantity",
                 ]

    data = resource.select(selectors,
                           limit = None,
                           orderby = orderby,
                           raw_data = True,
                           represent = True,
                           )

    # Get all stock item IDs
    inv_item_ids = [row["_row"]["inv_inv_item.id"] for row in data.rows]

    # Earliest and latest date of the report (read from filter)
    convert = S3TypeConverter.convert
    request = current.request
    get_vars_get = request.get_vars.get
    dtstr = get_vars_get("_transaction.date__ge")
    earliest = convert(datetime.datetime, dtstr) if dtstr else None
    dtstr = get_vars_get("_transaction.date__le")
    latest = convert(datetime.datetime, dtstr) if dtstr else request.utcnow

    def item_dict():
        """ Stock movement data per inventory item """

        return {# Quantity in/out between earliest and latest date
                "quantity_in": 0,
                "quantity_out": 0,
                # Quantity in/out after latest date
                "quantity_in_after": 0,
                "quantity_out_after": 0,
                # Origin/destination sites
                "sites": [],
                # GRN/Waybill numbers
                "documents": [],
                }

    # Dict to collect stock movement data
    movements = {}

    # Set of site IDs for bulk representation
    all_sites = set()

    s3db = current.s3db

    # Incoming shipments
    query = (FS("recv_inv_item_id").belongs(inv_item_ids))
    if earliest:
        query &= (FS("recv_id$date") >= earliest)
    incoming = s3db.resource("inv_track_item", filter=query)
    transactions = incoming.select(["recv_id$date",
                                    "recv_id$from_site_id",
                                    "recv_id$recv_ref",
                                    "recv_inv_item_id",
                                    "recv_quantity",
                                    ],
                                   limit = None,
                                   raw_data = True,
                                   represent = True,
                                   )
    for transaction in transactions.rows:
        raw = transaction["_row"]
        inv_item_id = raw["inv_track_item.recv_inv_item_id"]
        # Get the movement data dict for this item
        if inv_item_id in movements:
            item_data = movements[inv_item_id]
        else:
            movements[inv_item_id] = item_data = item_dict()
        # Incoming quantities
        quantity_in = raw["inv_track_item.recv_quantity"]
        if quantity_in:
            # Guard against a missing receive date before comparing
            # (was: unguarded comparison => TypeError if date is None);
            # undated receipts count as within the reporting period,
            # consistent with the outgoing branch below
            recv_date = raw["inv_recv.date"]
            if recv_date and recv_date > latest:
                item_data["quantity_in_after"] += quantity_in
                continue
            else:
                item_data["quantity_in"] += quantity_in
        # Origin sites
        sites = item_data["sites"]
        from_site = raw["inv_recv.from_site_id"]
        if from_site and from_site not in sites:
            all_sites.add(from_site)
            sites.append(from_site)
        # GRN numbers
        if raw["inv_recv.recv_ref"]:
            documents = item_data["documents"]
            documents.append(raw["inv_recv.recv_ref"])

    # Outgoing shipments
    query = (FS("send_inv_item_id").belongs(inv_item_ids))
    if earliest:
        query &= (FS("send_id$date") >= earliest)
    outgoing = s3db.resource("inv_track_item", filter=query)
    transactions = outgoing.select(["send_id$date",
                                    "send_id$to_site_id",
                                    "send_id$send_ref",
                                    "send_inv_item_id",
                                    "quantity",
                                    ],
                                   limit = None,
                                   raw_data = True,
                                   represent = True,
                                   )
    for transaction in transactions.rows:
        raw = transaction["_row"]
        inv_item_id = raw["inv_track_item.send_inv_item_id"]
        # Get the movement data dict for this item
        if inv_item_id in movements:
            item_data = movements[inv_item_id]
        else:
            movements[inv_item_id] = item_data = item_dict()
        # Outgoing quantities (was misleadingly named quantity_in)
        quantity_out = raw["inv_track_item.quantity"]
        if quantity_out:
            send_date = raw["inv_send.date"]
            if send_date and send_date > latest:
                item_data["quantity_out_after"] += quantity_out
                continue
            else:
                item_data["quantity_out"] += quantity_out
        # Destination sites
        sites = item_data["sites"]
        to_site = raw["inv_send.to_site_id"]
        if to_site and to_site not in sites:
            all_sites.add(to_site)
            sites.append(to_site)
        # Waybill numbers
        if raw["inv_send.send_ref"]:
            documents = item_data["documents"]
            documents.append(raw["inv_send.send_ref"])

    # Bulk-represent sites (stores the representations in represent)
    represent = s3db.inv_inv_item.site_id.represent
    represent.bulk(list(all_sites))

    # Extend the original rows in the data dict
    for row in data.rows:
        raw = row["_row"]
        inv_item_id = raw["inv_inv_item.id"]
        if inv_item_id in movements:
            item_data = movements[inv_item_id]
        else:
            item_data = item_dict()

        # Compute original and final quantity
        # final = stock as of "latest": back out transactions after it;
        # original = final minus net movement within the period
        total_in = item_data["quantity_in"]
        total_out = item_data["quantity_out"]

        current_quantity = raw["inv_inv_item.quantity"]
        final_quantity = current_quantity - \
                         item_data["quantity_in_after"] + \
                         item_data["quantity_out_after"]
        original_quantity = final_quantity - total_in + total_out

        # Write into raw data (for aggregation)
        raw["inv_inv_item.quantity"] = final_quantity
        raw["inv_inv_item.quantity_in"] = total_in
        raw["inv_inv_item.quantity_out"] = total_out
        raw["inv_inv_item.original_quantity"] = original_quantity

        # Copy into represented data (for rendering)
        row["inv_inv_item.quantity"] = final_quantity
        row["inv_inv_item.quantity_in"] = total_in
        row["inv_inv_item.quantity_out"] = total_out
        row["inv_inv_item.original_quantity"] = original_quantity

        # Add sites
        row["inv_inv_item.sites"] = represent.multiple(item_data["sites"],
                                                       show_link = False,
                                                       )
        # Add GRN/Waybill numbers
        row["inv_inv_item.documents"] = ", ".join(item_data["documents"])

    # Return to S3GroupedItemsReport
    return data.rows
def inv_stock_movements(resource, selectors, orderby): """ Extraction method for stock movements report Args: resource: the CRUDResource (inv_inv_item) selectors: the field selectors orderby: orderby expression Note: transactions can be filtered by earliest/latest date using an DateFilter with selector="_transaction.date" TODO does not take manual stock adjustments into account TODO does not represent sites or Waybill/GRN as links (breaks PDF export, but otherwise it's useful) """ # Extract the stock item data selectors = ["id", "site_id", "site_id$name", "item_id$item_category_id", "bin", "item_id$name", "quantity", ] data = resource.select(selectors, limit = None, orderby = orderby, raw_data = True, represent = True, ) # Get all stock item IDs inv_item_ids = [row["_row"]["inv_inv_item.id"] for row in data.rows] # Earliest and latest date of the report (read from filter) convert = S3TypeConverter.convert request = current.request get_vars_get = request.get_vars.get dtstr = get_vars_get("_transaction.date__ge") earliest = convert(datetime.datetime, dtstr) if dtstr else None dtstr = get_vars_get("_transaction.date__le") latest = convert(datetime.datetime, dtstr) if dtstr else request.utcnow def item_dict(): """ Stock movement data per inventory item """ return {# Quantity in/out between earliest and latest date "quantity_in": 0, "quantity_out": 0, # Quantity in/out after latest date "quantity_in_after": 0, "quantity_out_after": 0, # Origin/destination sites "sites": [], # GRN/Waybill numbers "documents": [], } # Dict to collect stock movement data movements = {} # Set of site IDs for bulk representation all_sites = set() s3db = current.s3db # Incoming shipments query = (FS("recv_inv_item_id").belongs(inv_item_ids)) if earliest: query &= (FS("recv_id$date") >= earliest) incoming = s3db.resource("inv_track_item", filter=query) transactions = incoming.select(["recv_id$date", "recv_id$from_site_id", "recv_id$recv_ref", "recv_inv_item_id", "recv_quantity", ], limit = 
None, raw_data = True, represent = True, ) for transaction in transactions.rows: raw = transaction["_row"] inv_item_id = raw["inv_track_item.recv_inv_item_id"] # Get the movement data dict for this item if inv_item_id in movements: item_data = movements[inv_item_id] else: movements[inv_item_id] = item_data = item_dict() # Incoming quantities quantity_in = raw["inv_track_item.recv_quantity"] if quantity_in: if raw["inv_recv.date"] > latest: item_data["quantity_in_after"] += quantity_in continue else: item_data["quantity_in"] += quantity_in # Origin sites sites = item_data["sites"] from_site = raw["inv_recv.from_site_id"] if from_site and from_site not in sites: all_sites.add(from_site) sites.append(from_site) # GRN numbers if raw["inv_recv.recv_ref"]: documents = item_data["documents"] documents.append(raw["inv_recv.recv_ref"]) # Outgoing shipments query = (FS("send_inv_item_id").belongs(inv_item_ids)) if earliest: query &= (FS("send_id$date") >= earliest) outgoing = s3db.resource("inv_track_item", filter=query) transactions = outgoing.select(["send_id$date", "send_id$to_site_id", "send_id$send_ref", "send_inv_item_id", "quantity", ], limit = None, raw_data = True, represent = True, ) for transaction in transactions.rows: raw = transaction["_row"] inv_item_id = raw["inv_track_item.send_inv_item_id"] # Get the movement data dict for this item if inv_item_id in movements: item_data = movements[inv_item_id] else: movements[inv_item_id] = item_data = item_dict() # Outgoing quantities quantity_in = raw["inv_track_item.quantity"] if quantity_in: send_date = raw["inv_send.date"] if send_date and send_date > latest: item_data["quantity_out_after"] += quantity_in continue else: item_data["quantity_out"] += quantity_in # Destination sites sites = item_data["sites"] to_site = raw["inv_send.to_site_id"] if to_site and to_site not in sites: all_sites.add(to_site) sites.append(to_site) # Waybill numbers if raw["inv_send.send_ref"]: documents = item_data["documents"] 
documents.append(raw["inv_send.send_ref"]) # Bulk-represent sites (stores the representations in represent) represent = s3db.inv_inv_item.site_id.represent represent.bulk(list(all_sites)) # Extend the original rows in the data dict for row in data.rows: raw = row["_row"] inv_item_id = raw["inv_inv_item.id"] if inv_item_id in movements: item_data = movements[inv_item_id] else: item_data = item_dict() # Compute original and final quantity total_in = item_data["quantity_in"] total_out = item_data["quantity_out"] current_quantity = raw["inv_inv_item.quantity"] final_quantity = current_quantity - \ item_data["quantity_in_after"] + \ item_data["quantity_out_after"] original_quantity = final_quantity - total_in + total_out # Write into raw data (for aggregation) raw["inv_inv_item.quantity"] = final_quantity raw["inv_inv_item.quantity_in"] = total_in raw["inv_inv_item.quantity_out"] = total_out raw["inv_inv_item.original_quantity"] = original_quantity # Copy into represented data (for rendering) row["inv_inv_item.quantity"] = final_quantity row["inv_inv_item.quantity_in"] = total_in row["inv_inv_item.quantity_out"] = total_out row["inv_inv_item.original_quantity"] = original_quantity # Add sites row["inv_inv_item.sites"] = represent.multiple(item_data["sites"], show_link = False, ) # Add GRN/Waybill numbers row["inv_inv_item.documents"] = ", ".join(item_data["documents"]) # Return to S3GroupedItemsReport return data.rows
Python
def inv_track_item_quantity_needed(row):
    """
        Quantity still needed for a track item - used in Inv Send
        when an Item has come from a Request

        Args:
            row: an inv_track_item Row, or a joined Row containing one

        Returns:
            the outstanding quantity (in base units), or the NONE
            representation if the item is not linked to a request
    """

    if hasattr(row, "inv_track_item"):
        # Joined Row: extract the inv_track_item sub-Row
        row = row.inv_track_item
    try:
        req_item_id = row.req_item_id
    except AttributeError:
        # not available
        req_item_id = None

    if not req_item_id:
        return current.messages["NONE"]

    s3db = current.s3db

    ritable = s3db.req_req_item
    siptable = s3db.supply_item_pack

    query = (ritable.id == req_item_id) & \
            (ritable.item_pack_id == siptable.id)
    row = current.db(query).select(ritable.quantity,
                                   ritable.quantity_transit,
                                   ritable.quantity_fulfil,
                                   siptable.quantity
                                   ).first()

    if row:
        rim = row.req_req_item
        # Whichever of in-transit/fulfilled is larger counts as shipped
        quantity_shipped = max(rim.quantity_transit,
                               rim.quantity_fulfil)
        # Scale the remaining pack quantity by the pack size
        quantity_needed = (rim.quantity - quantity_shipped) * \
                           row.supply_item_pack.quantity
    else:
        return current.messages["NONE"]

    return quantity_needed
def inv_track_item_quantity_needed(row): """ Quantity still needed for a track item - used in Inv Send when an Item has come from a Request """ if hasattr(row, "inv_track_item"): row = row.inv_track_item try: req_item_id = row.req_item_id except AttributeError: # not available req_item_id = None if not req_item_id: return current.messages["NONE"] s3db = current.s3db ritable = s3db.req_req_item siptable = s3db.supply_item_pack query = (ritable.id == req_item_id) & \ (ritable.item_pack_id == siptable.id) row = current.db(query).select(ritable.quantity, ritable.quantity_transit, ritable.quantity_fulfil, siptable.quantity ).first() if row: rim = row.req_req_item quantity_shipped = max(rim.quantity_transit, rim.quantity_fulfil) quantity_needed = (rim.quantity - quantity_shipped) * \ row.supply_item_pack.quantity else: return current.messages["NONE"] return quantity_needed
Python
def convert(cls, a, b):
    """
        Convert b into the data type of a

        Args:
            a: the target - a type, or an instance whose type to match
               (lazyT targets are treated as str)
            b: the value to convert (lists/tuples/sets and comma-separated
               strings are converted element-wise)

        Raises:
            TypeError: if any of the data types are not supported
                       or the types are incompatible
            ValueError: if the value conversion fails
    """

    if isinstance(a, lazyT):
        # Lazy translations convert like plain strings
        a = str(a)

    if b is None:
        return None

    if type(a) is type:
        # a is a type object itself: dispatch by identity
        if a is str:
            return cls._str(b)
        if a is int:
            return cls._int(b)
        if a is bool:
            return cls._bool(b)
        if a is float:
            return cls._float(b)
        if a is datetime.datetime:
            return cls._datetime(b)
        if a is datetime.date:
            return cls._date(b)
        if a is datetime.time:
            return cls._time(b)
        raise TypeError

    if isinstance(b, type(a)):
        # Already the right type - no conversion needed
        return b

    if isinstance(a, (list, tuple, set)):
        # Target is a collection: coerce b into a list first
        if isinstance(b, (list, tuple, set)):
            return b
        elif isinstance(b, str):
            if "," in b:
                b = b.split(",")
            else:
                b = [b]
        else:
            b = [b]
        if len(a):
            # Convert each element to the type of a's first element
            cnv = cls.convert
            return [cnv(a[0], item) for item in b]
        else:
            # Empty target collection: no element type to match
            return b

    if isinstance(b, (list, tuple, set)):
        # Scalar target, collection value: convert element-wise
        cnv = cls.convert
        return [cnv(a, item) for item in b]

    if isinstance(a, str):
        return cls._str(b)
    if isinstance(a, bool):
        # NB bool must be checked before int (bool subclasses int)
        return cls._bool(b)
    if isinstance(a, int):
        return cls._int(b)
    if isinstance(a, float):
        return cls._float(b)
    if isinstance(a, datetime.datetime):
        # NB datetime must be checked before date (datetime subclasses date)
        return cls._datetime(b)
    if isinstance(a, datetime.date):
        return cls._date(b)
    if isinstance(a, datetime.time):
        return cls._time(b)

    raise TypeError
def convert(cls, a, b): """ Convert b into the data type of a Raises: TypeError: if any of the data types are not supported or the types are incompatible ValueError: if the value conversion fails """ if isinstance(a, lazyT): a = str(a) if b is None: return None if type(a) is type: if a is str: return cls._str(b) if a is int: return cls._int(b) if a is bool: return cls._bool(b) if a is float: return cls._float(b) if a is datetime.datetime: return cls._datetime(b) if a is datetime.date: return cls._date(b) if a is datetime.time: return cls._time(b) raise TypeError if isinstance(b, type(a)): return b if isinstance(a, (list, tuple, set)): if isinstance(b, (list, tuple, set)): return b elif isinstance(b, str): if "," in b: b = b.split(",") else: b = [b] else: b = [b] if len(a): cnv = cls.convert return [cnv(a[0], item) for item in b] else: return b if isinstance(b, (list, tuple, set)): cnv = cls.convert return [cnv(a, item) for item in b] if isinstance(a, str): return cls._str(b) if isinstance(a, bool): return cls._bool(b) if isinstance(a, int): return cls._int(b) if isinstance(a, float): return cls._float(b) if isinstance(a, datetime.datetime): return cls._datetime(b) if isinstance(a, datetime.date): return cls._date(b) if isinstance(a, datetime.time): return cls._time(b) raise TypeError
Python
def s3_str(s, encoding="utf-8"):
    """
        Convert an object into a str

        Args:
            s: the object
            encoding: the character encoding (used for bytes input)

        Returns:
            the str representation of s
    """

    if type(s) is bytes:
        return s.decode(encoding, "strict")
    return s if type(s) is str else str(s)
def s3_str(s, encoding="utf-8"): """ Convert an object into a str Args: s: the object encoding: the character encoding """ if type(s) is str: return s elif type(s) is bytes: return s.decode(encoding, "strict") else: return str(s)
Python
def _permitted(self, method=None):
    """
        Check permission for the requested resource

        Args:
            method: method to check, defaults to the actually
                    requested method

        Returns:
            bool: whether the action is allowed for the target resource
    """

    auth = current.auth
    has_permission = auth.s3_has_permission

    r = self.request
    if not method:
        method = self.method

    if r.component is None:
        # Master record is the target
        table = r.table
        record_id = r.id
    else:
        # Component record is the target
        table = r.component.table
        record_id = r.component_id

    if method == "create":
        # Is creating a new component record allowed without
        # permission to update the master record?
        writable = current.s3db.get_config(r.tablename,
                                           "ignore_master_access",
                                           )
        if not isinstance(writable, (tuple, list)) or \
           r.component_name not in writable:
            # Component not exempted => require update access on master
            master_access = has_permission("update",
                                           r.table,
                                           record_id = r.id,
                                           )
            if not master_access:
                return False

    return has_permission(method, table, record_id=record_id)
def _permitted(self, method=None): """ Check permission for the requested resource Args: method: method to check, defaults to the actually requested method Returns: bool: whether the action is allowed for the target resource """ auth = current.auth has_permission = auth.s3_has_permission r = self.request if not method: method = self.method if r.component is None: table = r.table record_id = r.id else: table = r.component.table record_id = r.component_id if method == "create": # Is creating a new component record allowed without # permission to update the master record? writable = current.s3db.get_config(r.tablename, "ignore_master_access", ) if not isinstance(writable, (tuple, list)) or \ r.component_name not in writable: master_access = has_permission("update", r.table, record_id = r.id, ) if not master_access: return False return has_permission(method, table, record_id=record_id)
Python
def _remove_filters(get_vars):
    """
        Remove all filters from URL vars

        Args:
            get_vars: the URL vars as dict

        Returns:
            the filtered URL vars (Storage)
    """

    # Filter keys contain a field selector ("table.field") or a
    # parenthesized expression ("...(...)...")
    is_filter_key = re.compile(r".+\..+|.*\(.+\).*").match
    return Storage({k: v
                    for k, v in get_vars.items()
                    if not is_filter_key(k)})
def _remove_filters(get_vars): """ Remove all filters from URL vars Args: get_vars: the URL vars as dict Returns: the filtered URL vars (Storage) """ regex_filter = re.compile(r".+\..+|.*\(.+\).*") return Storage((k, v) for k, v in get_vars.items() if not regex_filter.match(k))
Python
def _limits(get_vars, default_limit=0): """ Extract page limits (start and limit) from GET vars Args: get_vars: the GET vars default_limit: the default limit, explicit value or: 0 => response.s3.ROWSPERPAGE None => no default limit Returns: a tuple (start, limit) """ start = get_vars.get("start", None) limit = get_vars.get("limit", default_limit) # Deal with overrides (pagination limits come last) if isinstance(start, list): start = start[-1] if isinstance(limit, list): limit = limit[-1] if limit: # Ability to override default limit to "Show All" if isinstance(limit, str) and limit.lower() == "none": #start = None # needed? limit = None else: try: start = int(start) if start is not None else None limit = int(limit) except (ValueError, TypeError): # Fall back to defaults start, limit = None, default_limit else: # Use defaults, assume sspag because this is a # pagination request by definition start = None limit = default_limit return start, limit
def _limits(get_vars, default_limit=0): """ Extract page limits (start and limit) from GET vars Args: get_vars: the GET vars default_limit: the default limit, explicit value or: 0 => response.s3.ROWSPERPAGE None => no default limit Returns: a tuple (start, limit) """ start = get_vars.get("start", None) limit = get_vars.get("limit", default_limit) # Deal with overrides (pagination limits come last) if isinstance(start, list): start = start[-1] if isinstance(limit, list): limit = limit[-1] if limit: # Ability to override default limit to "Show All" if isinstance(limit, str) and limit.lower() == "none": #start = None # needed? limit = None else: try: start = int(start) if start is not None else None limit = int(limit) except (ValueError, TypeError): # Fall back to defaults start, limit = None, default_limit else: # Use defaults, assume sspag because this is a # pagination request by definition start = None limit = default_limit return start, limit
Python
def dummy():
    """
        Dummy Task - can be used to populate a table with a task_id
        (performs no work, returns None)
    """

    return None
def dummy(): """ Dummy Task - can be used to populate a table with a task_id """ return
Python
def s3db_task(function, user_id=None, **kwargs):
    """
        Generic Task
        - can be used to call any s3db.function(**kwargs)
        - saves having to create separate Tasks for many cases

        Args:
            function: name of the s3db function to call
            user_id: run the task as this user (for permissions)
            **kwargs: keyword arguments for the function

        NOTE(review): auth, s3db and db are assumed to be provided by the
        task execution environment (web2py globals) - confirm in scheduler
        setup
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)
    # Run the Task & return the result
    result = s3db[function](**kwargs)
    # Commit so the work persists even though this runs outside a request
    db.commit()
    return result
def s3db_task(function, user_id=None, **kwargs): """ Generic Task - can be used to call any s3db.function(**kwargs) - saves having to create separate Tasks for many cases """ if user_id: # Authenticate auth.s3_impersonate(user_id) # Run the Task & return the result result = s3db[function](**kwargs) db.commit() return result
Python
def maintenance(period="daily"):
    """
    Run all maintenance tasks which should be done daily
        - instantiates and calls the Daily() class defined in the
          template's maintenance.py file - if it exists
        - falls back to the default template's maintenance.py

    Args:
        period: the maintenance period ("daily" is the only one
                currently implemented)
    """
    routine = None
    result = "NotImplementedError"

    templates = settings.get_template()
    if templates != "default":
        # Try to import the maintenance routine from the template(s),
        # checking the last-listed template first
        if not isinstance(templates, (tuple, list)):
            templates = (templates,)
        for template in templates[::-1]:
            try:
                module = __import__("templates.%s" % template,
                                    fromlist=["maintenance"],
                                    )
                routine = module.maintenance
            except (ImportError, AttributeError):
                continue
            else:
                break

    if routine is None:
        # Fall back to the default maintenance routine
        try:
            from templates.default import maintenance as routine
        except ImportError:
            pass

    if routine is not None and period == "daily":
        result = routine.Daily()()

    db.commit()
    return result
Python
def msg_gcm(title, uri, message, registration_ids, user_id=None):
    """
    Push the data relating to google cloud messaging server

    Args:
        title: the notification title
        uri: the notification URI
        message: the message body
        registration_ids: string representation of the list of
                          registration IDs to push to
        user_id: calling request's auth.user.id or None
    """
    import ast

    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    # Use ast.literal_eval rather than eval: registration_ids arrives
    # as a serialized string and must only ever contain a literal list,
    # never executable code (eval here would allow code injection)
    msg.gcm_push(title, uri, message, ast.literal_eval(registration_ids))
Python
def notify_check_subscriptions(user_id=None):
    """
    Scheduled task to check subscriptions for updates; creates
    notify_notify tasks where updates exist

    Args:
        user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    checked = s3base.S3Notifications().check_subscriptions()
    db.commit()
    return checked
Python
def stats_demographic_update_aggregates(records=None, user_id=None):
    """
    Update the stats_demographic_aggregate table for the given
    stats_demographic_data record(s)

    Args:
        records: JSON of Rows of stats_demographic_data records to
                 update aggregates for
        user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    # Delegate to the s3db implementation, then persist
    result = s3db.stats_demographic_update_aggregates(records)
    db.commit()
    return result
Python
def stats_demographic_update_location_aggregate(location_level,
                                                root_location_id,
                                                parameter_id,
                                                start_date,
                                                end_date,
                                                user_id=None,
                                                ):
    """
    Update the stats_demographic_aggregate table for the given
    location and parameter
        - called from within stats_demographic_update_aggregates

    Args:
        location_level: gis level at which the data needs to be
                        accumulated
        root_location_id: id of the location
        parameter_id: parameter for which the stats are being updated
        start_date: start date of the period in question
        end_date: end date of the period in question
        user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    # Delegate to the s3db implementation, then persist
    result = s3db.stats_demographic_update_location_aggregate(location_level,
                                                              root_location_id,
                                                              parameter_id,
                                                              start_date,
                                                              end_date,
                                                              )
    db.commit()
    return result
Python
def disease_stats_update_aggregates(records=None, all=False, user_id=None):
    """
    Update the disease_stats_aggregate table for the given
    disease_stats_data record(s)

    Args:
        records: JSON of Rows of disease_stats_data records to
                 update aggregates for
        all: whether to rebuild all aggregates
             (NB parameter name shadows the builtin, but is part of
             the task signature and cannot be renamed)
        user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    # Delegate to the s3db implementation, then persist
    result = s3db.disease_stats_update_aggregates(records, all)
    db.commit()
    return result
Python
def disease_stats_update_location_aggregates(location_id,
                                             children,
                                             parameter_id,
                                             dates,
                                             user_id=None,
                                             ):
    """
    Update the disease_stats_aggregate table for the given location
    and parameter
        - called from within disease_stats_update_aggregates

    Args:
        location_id: location to aggregate at
        children: locations to aggregate from
        parameter_id: parameter to aggregate
        dates: dates to aggregate for
        user_id: calling request's auth.user.id or None
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    # Delegate to the s3db implementation, then persist
    result = s3db.disease_stats_update_location_aggregates(location_id,
                                                           children,
                                                           parameter_id,
                                                           dates,
                                                           )
    db.commit()
    return result
Python
def sync_synchronize(repository_id, user_id=None, manual=False):
    """
    Run all synchronization tasks for a repository, to be called
    from the scheduler

    Args:
        repository_id: the sync_repository record ID
        user_id: calling request's auth.user.id or None
        manual: whether this is a manually-triggered run
    """
    if user_id:
        # Authenticate
        auth.s3_impersonate(user_id)

    rtable = s3db.sync_repository
    query = (rtable.deleted != True) & \
            (rtable.id == repository_id)
    repository = db(query).select(limitby=(0, 1)).first()

    if repository:
        sync = s3base.S3Sync()

        status = sync.get_status()
        if status.running:
            # Another run is still active => log and skip this one
            message = "Synchronization already active - skipping run"
            sync.log.write(repository_id = repository.id,
                           resource_name = None,
                           transmission = None,
                           mode = None,
                           action = "check",
                           remote = False,
                           result = sync.log.ERROR,
                           message = message,
                           )
            db.commit()
            return sync.log.ERROR

        sync.set_status(running=True, manual=manual)
        try:
            sync.synchronize(repository)
        finally:
            # Always release the running-flag, even on failure
            sync.set_status(running=False, manual=False)

    db.commit()
    return s3base.S3SyncLog.SUCCESS
Python
def define_tables(self, migrate=True, fake_migrate=False):
    """
    Define auth tables, to be called unless tables are defined
    manually

    Examples:
        # defines all needed tables and table files
        # UUID + "_auth_user.table", ...
        auth.define_tables()

        # defines all needed tables and table files
        # "myprefix_auth_user.table", ...
        auth.define_tables(migrate="myprefix_")

        # defines all needed tables without migration/table files
        auth.define_tables(migrate=False)
    """
    db = current.db
    settings = self.settings
    messages = self.messages
    deployment_settings = current.deployment_settings
    define_table = db.define_table

    # User table
    utable = settings.table_user
    uname = settings.table_user_name
    if not utable:
        utable_fields = [
            Field("first_name", length=128, notnull=True, default="",
                  requires = IS_NOT_EMPTY(error_message=messages.is_empty),
                  ),
            Field("last_name", length=128, default=""),
            Field("email", length=255, unique=True, default=""),
            # Used For chat in default deployment config
            Field("username", length=255, default="",
                  readable=False, writable=False),
            Field("language", length=16,
                  default = deployment_settings.get_L10n_default_language()),
            Field("organisation_id", "integer",
                  readable=False, writable=False),
            Field("org_group_id", "integer",
                  readable=False, writable=False),
            Field("site_id", "integer",
                  readable=False, writable=False),
            Field("link_user_to", "list:string",
                  readable=False, writable=False),
            Field("registration_key", length=512, default="",
                  readable=False, writable=False),
            Field("reset_password_key", length=512, default="",
                  readable=False, writable=False),
            Field("deleted", "boolean", default=False,
                  readable=False, writable=False),
            Field("timestmp", "datetime", default="",
                  readable=False, writable=False),
            s3_comments(readable=False, writable=False),
            # Additional meta fields required for sync:
            S3MetaFields.uuid(),
            #S3MetaFields.mci(),
            S3MetaFields.created_on(),
            S3MetaFields.modified_on(),
            ]

        userfield = settings.login_userfield
        if userfield != "email":
            # Use username (not used by default in Sahana)
            utable_fields.insert(2, Field(userfield,
                                          length = 128,
                                          default = "",
                                          unique = True,
                                          ))

        # Insert password field after either email or username
        passfield = settings.password_field
        utable_fields.insert(3, Field(passfield, "password", length=512,
                                      requires = CRYPT(key = settings.hmac_key,
                                                       min_length = deployment_settings.get_auth_password_min_length(),
                                                       digest_alg = "sha512"),
                                      readable = False,
                                      label = messages.label_password,
                                      ))

        define_table(uname,
                     migrate = migrate,
                     fake_migrate = fake_migrate,
                     *utable_fields)
        utable = settings.table_user = db[uname]

    # Group table (roles)
    gtable = settings.table_group
    gname = settings.table_group_name
    if not gtable:
        define_table(gname,
                     # Group unique ID, must be notnull+unique:
                     Field("uuid", length=64, notnull=True, unique=True,
                           readable=False, writable=False),
                     # Group does not appear in the Role Manager:
                     # (can neither assign, nor modify, nor delete)
                     Field("hidden", "boolean",
                           readable=False, writable=False,
                           default=False),
                     # Group cannot be modified in the Role Manager:
                     # (can assign, but neither modify nor delete)
                     Field("system", "boolean",
                           readable=False, writable=False,
                           default=False),
                     # Group cannot be deleted in the Role Manager:
                     # (can assign and modify, but not delete)
                     Field("protected", "boolean",
                           readable=False, writable=False,
                           default=False),
                     # Role name:
                     Field("role", length=255, unique=True,
                           default="",
                           requires = IS_NOT_IN_DB(db, "%s.role" % gname),
                           label = messages.label_role,
                           ),
                     Field("description", "text",
                           label = messages.label_description,
                           ),
                     # Additional meta fields required for sync:
                     S3MetaFields.created_on(),
                     S3MetaFields.modified_on(),
                     S3MetaFields.deleted(),
                     #S3MetaFields.deleted_fk(),
                     #S3MetaFields.deleted_rb(),
                     migrate = migrate,
                     fake_migrate = fake_migrate,
                     )
        gtable = settings.table_group = db[gname]

    # Group membership table (user<->role)
    if not settings.table_membership:
        define_table(
            settings.table_membership_name,
            Field("user_id", utable,
                  requires = IS_IN_DB(db, "%s.id" % uname,
                                      "%(id)s: %(first_name)s %(last_name)s"),
                  label = messages.label_user_id,
                  ),
            Field("group_id", gtable,
                  requires = IS_IN_DB(db, "%s.id" % gname,
                                      "%(id)s: %(role)s"),
                  represent = S3Represent(lookup=gname, fields=["role"]),
                  label = messages.label_group_id,
                  ),
            # Realm
            Field("pe_id", "integer"),
            Field("system", "boolean",
                  default = False,
                  ),
            migrate = migrate,
            fake_migrate = fake_migrate,
            *S3MetaFields.sync_meta_fields())
        settings.table_membership = db[settings.table_membership_name]

    # Define Eden permission table
    # NB The Web2Py permissions table (group<->permission) is deprecated
    #    here - replaced in Eden by S3Permission
    self.permission.define_table(migrate = migrate,
                                 fake_migrate = fake_migrate)

    # Event table (auth_event)
    # Records Logins & ?
    # @ToDo: Move to s3db.auth to prevent it from being defined every
    #        request (lazy tables means no big issue for Production,
    #        but helps Devs)
    # Deprecate?
    #   - date of most recent login is the most useful thing recorded,
    #     which we already record in the main auth_user table
    if not settings.table_event:
        request = current.request
        define_table(
            settings.table_event_name,
            Field("time_stamp", "datetime",
                  default = request.utcnow,
                  #label = messages.label_time_stamp
                  ),
            Field("client_ip",
                  default = request.client,
                  #label=messages.label_client_ip
                  ),
            Field("user_id", utable,
                  default = None,
                  requires = IS_IN_DB(db, "%s.id" % uname,
                                      "%(id)s: %(first_name)s %(last_name)s"),
                  #label=messages.label_user_id
                  ),
            Field("origin", length=512,
                  default = "auth",
                  #label = messages.label_origin,
                  requires = IS_NOT_EMPTY(),
                  ),
            Field("description", "text",
                  default = "",
                  #label = messages.label_description,
                  requires = IS_NOT_EMPTY(),
                  ),
            migrate = migrate,
            fake_migrate = fake_migrate,
            *S3MetaFields.sync_meta_fields())
        settings.table_event = db[settings.table_event_name]
Python
def ignore_min_password_length(self):
    """
    Disable min_length validation for password, e.g. during login
    (where a stored password may predate the current length policy)
    """
    settings = self.settings

    utable = settings.table_user
    requires = utable[settings.password_field].requires
    if requires:
        if isinstance(requires, (list, tuple)):
            # The length-enforcing validator is the last in the chain
            requires = requires[-1]
        try:
            requires.min_length = 0
        except (AttributeError, TypeError):
            # Validator does not support (setting) min_length
            # - narrowed from a bare except, which would also have
            #   swallowed KeyboardInterrupt/SystemExit
            pass
Python
def login_bare(self, username, password):
    """
    Logs a user in without a form
        - extended to understand session.s3.roles

    Args:
        username: the login user name
        password: the clear-text password

    Returns:
        the user record (Storage) on success, else False
    """
    self.ignore_min_password_length()

    settings = self.settings
    utable = settings.table_user
    passfield = settings.password_field

    query = (utable[settings.login_userfield] == username)
    user = current.db(query).select(limitby=(0, 1)).first()

    # Hash the password for comparison with the stored value
    crypted = utable[passfield].validate(password)[0]

    if not user or user.registration_key or user[passfield] != crypted:
        # Unknown user, pending/blocked registration, or wrong password
        return False

    user = Storage(utable._filter_fields(user, id=True))
    current.session.auth = Storage(user = user,
                                   last_visit = current.request.now,
                                   expiration = settings.expiration,
                                   )
    self.user = user
    self.s3_set_roles()
    return user
Python
def change_password(self,
                    next = DEFAULT,
                    onvalidation = DEFAULT,
                    onaccept = DEFAULT,
                    log = DEFAULT,
                    ):
    """
    Returns a form that lets the user change their password

    Args:
        next: URL to redirect to after successful change
        onvalidation: form validation callback
        onaccept: form acceptance callback
        log: log message for the password change event
    """
    if not self.is_logged_in():
        redirect(self.settings.login_url,
                 client_side = self.settings.client_side)

    messages = self.messages
    settings = self.settings

    utable = settings.table_user
    row_set = self.db(utable.id == self.user.id)

    request = current.request
    session = current.session

    # Resolve defaults from settings
    if next is DEFAULT:
        next = self.get_vars_next() or settings.change_password_next
    if onvalidation is DEFAULT:
        onvalidation = settings.change_password_onvalidation
    if onaccept is DEFAULT:
        onaccept = settings.change_password_onaccept
    if log is DEFAULT:
        log = messages["change_password_log"]

    passfield = settings.password_field
    form = SQLFORM.factory(
        Field("old_password", "password",
              label = messages.old_password,
              # No minimum length for old password
              #requires = utable[passfield].requires,
              requires = CRYPT(key = settings.hmac_key,
                               digest_alg = "sha512",
                               ),
              ),
        Field("new_password", "password",
              label = messages.new_password,
              requires = utable[passfield].requires,
              ),
        Field("new_password2", "password",
              label = messages.verify_password,
              requires = [IS_EXPR("value==%s" % repr(request.vars.new_password),
                                  messages.mismatched_password,
                                  ),
                          ],
              ),
        submit_button = messages.password_change_button,
        hidden = {"_next": next},
        formstyle = current.deployment_settings.get_ui_formstyle(),
        separator = settings.label_separator
        )
    form.add_class("auth_change_password")

    if form.accepts(request, session,
                    formname = "change_password",
                    onvalidation = onvalidation,
                    hideerror = settings.hideerror):

        # Verify the old password against the stored hash
        current_hash = row_set.select(limitby = (0, 1),
                                      orderby_on_limitby = False,
                                      ).first()[passfield]
        if not form.vars["old_password"] == current_hash:
            form.errors["old_password"] = messages.invalid_password
        else:
            data = {passfield: str(form.vars.new_password)}
            row_set.update(**data)
            session.confirmation = messages.password_changed
            self.log_event(log, self.user)
            callback(onaccept, form)
            if not next:
                next = self.url(args = request.args)
            else:
                next = replace_id(next, form)
            redirect(next, client_side=settings.client_side)

    return form
Python
def reset_password(self,
                   next = DEFAULT,
                   onvalidation = DEFAULT,
                   onaccept = DEFAULT,
                   log = DEFAULT,
                   ):
    """
    Returns a form to reset the user password; overrides web2py's
    version of the method to not swallow the _next var

    Args:
        next: URL to redirect to after successful reset
        onvalidation: form validation callback
        onaccept: form acceptance callback
        log: unused here, kept for signature-compatibility
    """
    table_user = self.table_user()
    request = current.request
    session = current.session

    messages = self.messages
    settings = self.settings

    if next is DEFAULT:
        next = self.get_vars_next() or settings.reset_password_next

    if settings.prevent_password_reset_attacks:
        # Hide the key from the URL by stashing it in the session
        # and redirecting to a key-less URL
        key = request.vars.key
        if key:
            session._reset_password_key = key
            session._reset_password_next = next
            redirect(self.url(args = "reset_password"))
        else:
            key = session._reset_password_key
            next = session._reset_password_next
    else:
        key = request.vars.key

    try:
        # Key format: "<timestamp>-<token>"; expire after 24 hours
        t0 = int(key.split('-')[0])
        if time.time() - t0 > 60 * 60 * 24:
            raise Exception
        user = table_user(reset_password_key=key)
        if not user:
            raise Exception
    except Exception:
        session.flash = messages.invalid_reset_password
        redirect(next, client_side=settings.client_side)

    # Block reset for accounts that are not (yet) active
    key = user.registration_key
    if key in ("pending", "disabled", "blocked") or (key or "").startswith("pending"):
        session.flash = messages.registration_pending
        redirect(next, client_side=settings.client_side)

    if onvalidation is DEFAULT:
        onvalidation = settings.reset_password_onvalidation
    if onaccept is DEFAULT:
        onaccept = settings.reset_password_onaccept

    passfield = settings.password_field
    form = SQLFORM.factory(
        Field("new_password", "password",
              label = messages.new_password,
              requires = table_user[passfield].requires,
              ),
        Field("new_password2", "password",
              label = messages.verify_password,
              requires = IS_EXPR("value==%s" % repr(request.vars.new_password),
                                 messages.mismatched_password,
                                 ),
              ),
        submit_button = messages.password_change_button,
        hidden = {"_next": next},
        formstyle = current.deployment_settings.get_ui_formstyle(),
        separator = settings.label_separator
        )

    if form.accepts(request, session,
                    onvalidation = onvalidation,
                    hideerror = settings.hideerror):
        # Store the new password and invalidate the reset key
        user.update_record(**{passfield: str(form.vars.new_password),
                              "registration_key": "",
                              "reset_password_key": "",
                              })
        session.flash = messages.password_changed
        if settings.login_after_password_change:
            user = Storage(table_user._filter_fields(user, id=True))
            self.login_user(user)
        callback(onaccept, form)
        redirect(next, client_side=settings.client_side)

    return form
# Python
def request_reset_password(self,
                           next = DEFAULT,
                           onvalidation = DEFAULT,
                           onaccept = DEFAULT,
                           log = DEFAULT,
                           ):
    """
    Returns a form to request a password reset link by email;
    overrides web2py's version of the method to apply Eden formstyles.

    Args:
        next: URL to redirect to after successful form submission
        onvalidation: callback to validate password reset form
        onaccept: callback to post-process password reset request
        log: event description for the log (string)

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    messages = self.messages
    settings = self.settings
    if not settings.mailer:
        # Without a mailer the reset link cannot be sent at all
        current.response.error = messages.function_disabled
        return ""

    utable = settings.table_user
    request = current.request
    session = current.session
    captcha = settings.retrieve_password_captcha or \
              (settings.retrieve_password_captcha != False and settings.captcha)

    if next is DEFAULT:
        next = self.get_vars_next() or settings.request_reset_password_next
    if onvalidation is DEFAULT:
        onvalidation = settings.reset_password_onvalidation
    if onaccept is DEFAULT:
        onaccept = settings.reset_password_onaccept
    if log is DEFAULT:
        log = messages["reset_password_log"]

    # Require the login identifier to exist in the DB so that the
    # form itself reports unknown accounts
    userfield = settings.login_userfield
    if userfield == "email":
        utable.email.requires = [
            IS_EMAIL(error_message=messages.invalid_email),
            IS_IN_DB(self.db, utable.email,
                     error_message=messages.invalid_email)]
    else:
        utable[userfield].requires = [
            IS_IN_DB(self.db, utable[userfield],
                     error_message=messages.invalid_username)]

    form = SQLFORM(utable,
                   fields = [userfield],
                   hidden = {"_next": next},
                   showid = settings.showid,
                   submit_button = messages.password_reset_button,
                   delete_label = messages.delete_label,
                   formstyle = current.deployment_settings.get_ui_formstyle(),
                   separator = settings.label_separator
                   )
    form.add_class("auth_reset_password")

    if captcha:
        s3_addrow(form, captcha.label, captcha, captcha.comment,
                  settings.formstyle, "captcha__row")

    if form.accepts(request, session if self.csrf_prevention else None,
                    formname="reset_password", dbio=False,
                    onvalidation=onvalidation,
                    hideerror=settings.hideerror):
        user = utable(**{userfield: form.vars.get(userfield)})
        if not user:
            session.error = messages["invalid_%s" % userfield]
            redirect(self.url(args=request.args),
                     client_side=settings.client_side)
        elif user.registration_key in ("pending", "disabled", "blocked"):
            # Not-yet-approved accounts cannot request a reset
            session.warning = messages.registration_pending
            redirect(self.url(args=request.args),
                     client_side=settings.client_side)
        if self.email_reset_password(user):
            session.confirmation = messages.email_sent
        else:
            session.error = messages.unable_to_send_email
        self.log_event(log, user)
        callback(onaccept, form)
        if not next:
            next = self.url(args=request.args)
        else:
            next = replace_id(next, form)
        redirect(next, client_side=settings.client_side)

    # old_requires = utable.email.requires
    return form
# Python
def consent(self):
    """
    Consent question form, e.g.
    - when consent requires renewal, or
    - new consent questions need to be asked, or
    - user has been added by ADMIN and shall give consent upon login
    - ...

    Note:
        This form cannot meaningfully prevent the user from simply
        bypassing the question and navigating away. To prevent the
        user from accessing functionality for which consent is
        mandatory, the respective controllers must check for consent
        using ConsentTracking.has_consented, and refuse if not given
        (though they can still redirect to this form where useful).

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    T = current.T
    request = current.request
    response = current.response
    session = current.session
    settings = current.deployment_settings

    # Determine where to go after consent is registered
    next_url = request.get_vars.get("_next")
    if not next_url:
        next_url = settings.get_auth_login_next()
    if callable(next_url):
        next_url = next_url()
    if not next_url:
        next_url = URL(c = "default", f = "index")

    session.s3.pending_consent = False

    # Requires login
    if not self.s3_logged_in():
        session.error = T("Authentication required")
        redirect(URL(c = "default", f = "user",
                     args = ["login"],
                     vars = {"_next": URL(args=current.request.args)},
                     ))

    # Requires person record
    person_id = self.s3_logged_in_person()
    if not person_id:
        session.error = T("No person record for the current user")
        redirect(next_url)

    # Get all pending consent questions for the current user
    pending_consent = settings.get_auth_consent_check()
    if callable(pending_consent):
        pending_consent = pending_consent()
    if not pending_consent:
        session.warning = T("No pending consent questions for the current user")
        redirect(next_url)
    else:
        response.warning = T("Consent required")
        session.s3.pending_consent = True

    # Instantiate Consent Tracker
    consent = ConsentTracking(processing_types=pending_consent)

    # Form fields
    formfields = [Field("consent",
                        label = T("Consent"),
                        widget = consent.widget,
                        ),
                  ]

    # Generate labels (and mark required fields in the process)
    labels, has_required = s3_mark_required(formfields)
    response.s3.has_required = has_required

    # Form buttons
    SUBMIT = T("Submit")
    buttons = [INPUT(_type = "submit",
                     _value = SUBMIT,
                     ),
               ]

    # Construct the form
    response.form_label_separator = ""
    form = SQLFORM.factory(table_name = "auth_consent",
                           record = None,
                           hidden = {"_next": request.vars._next},
                           labels = labels,
                           separator = "",
                           showid = False,
                           submit_button = SUBMIT,
                           delete_label = self.messages.delete_label,
                           formstyle = settings.get_ui_formstyle(),
                           buttons = buttons,
                           *formfields)

    # Identify form for CSS
    form.add_class("auth_consent")

    if form.accepts(current.request.vars,
                    current.session,
                    formname = "consent",
                    ):
        consent.track(person_id, form.vars.get("consent"))
        session.s3.pending_consent = False
        session.confirmation = T("Consent registered")
        redirect(next_url)

    # Remind the user that form should be submitted even if they didn't
    # enter anything:
    response.s3.jquery_ready.append('''S3SetNavigateAwayConfirm();
$('form.auth_consent').submit(S3ClearNavigateAwayConfirm);''')
    return form
# Python
def email_reset_password(self, user):
    """
    Overrides Web2Py's email_reset_password() to modify the message
    structure.

    Args:
        user: the auth_user record (Row)

    Returns:
        True if the reset email was sent (and the key stored on the
        user record), else False

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    mailer = self.settings.mailer
    if not mailer or not mailer.settings.server:
        return False

    # Key format "<timestamp>-<uuid>": the timestamp prefix is used by
    # reset_password() to expire the key after 24 hours
    reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
    reset_password_url = "%s/default/user/reset_password?key=%s" % \
                         (current.response.s3.base_url, reset_password_key)

    message = self.messages.reset_password % {"url": reset_password_url}
    if mailer.send(to = user.email,
                   subject = self.messages.reset_password_subject,
                   message = message):
        # Store the key only after the email went out successfully
        user.update_record(reset_password_key = reset_password_key)
        return True

    return False
# Python
def add_membership(self,
                   group_id = None,
                   user_id = None,
                   role = None,
                   entity = None,
                   ):
    """
    Gives user_id membership of group_id or role
    - extended to support Entities

    Args:
        group_id: the auth_group ID
        user_id: the auth_user ID (defaults to logged-in user)
        role: role name (alternative to group_id)
        entity: the person entity to assign the membership for

    Returns:
        the membership record ID

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    group_id = group_id or self.id_group(role)
    try:
        group_id = int(group_id)
    except (TypeError, ValueError):
        # Bare except narrowed: only int() conversion failures mean
        # "interpret group_id as a role name"
        group_id = self.id_group(group_id)

    if not user_id and self.user:
        user_id = self.user.id

    membership = self.settings.table_membership
    record = membership(user_id=user_id, group_id=group_id, pe_id=entity)
    if record:
        # Already a member for this entity => idempotent
        return record.id
    else:
        membership_id = membership.insert(group_id = group_id,
                                          user_id = user_id,
                                          pe_id = entity)
    # Refresh the role cache of the current user
    self.update_groups()
    self.log_event(self.messages.add_membership_log,
                   {"user_id": user_id,
                    "group_id": group_id,
                    })
    return membership_id
# Python
def verify_email(self, next=DEFAULT, log=DEFAULT):
    """
    Action when user clicks the link in the verification email.

    Args:
        next: URL to redirect to after successful verification
        log: event description for the log (string)

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    settings = self.settings
    request = current.request

    # Customise the resource
    customise = current.deployment_settings.customise_resource("auth_user")
    if customise:
        customise(request, "auth_user")

    # The verification key is the last URL arg
    key = request.args[-1]
    utable = settings.table_user
    query = (utable.registration_key == key)
    user = current.db(query).select(limitby=(0, 1)).first()
    if not user:
        # Invalid/expired key => just redirect, no error message
        redirect(settings.verify_email_next)

    if log == DEFAULT:
        log = self.messages.verify_email_log
    if next == DEFAULT:
        next = settings.verify_email_next

    approved = self.s3_verify_user(user)
    if approved:
        # Log them in
        user = Storage(utable._filter_fields(user, id=True))
        self.login_user(user)

    if log:
        self.log_event(log, user)

    redirect(next)
# Python
def profile(self,
            next = DEFAULT,
            onvalidation = DEFAULT,
            onaccept = DEFAULT,
            log = DEFAULT,
            ):
    """
    Returns a form that lets the user change his/her profile
    - patched for S3 to use s3_mark_required and handle opt_in
      mailing lists

    Args:
        next: URL to redirect to after successful form submission
        onvalidation: form validation callback
        onaccept: callback after successful profile update
        log: event description for the log (string)

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    if not self.is_logged_in():
        redirect(self.settings.login_url)

    messages = self.messages
    settings = self.settings
    utable = settings.table_user
    passfield = settings.password_field
    utable[passfield].writable = False

    request = current.request
    session = current.session
    deployment_settings = current.deployment_settings

    # Users should not be able to change their Org affiliation
    # - also hide popup-link to create a new Org (makes
    #   no sense here if the field is read-only anyway)
    utable.organisation_id.writable = False
    utable.organisation_id.comment = None

    ## Only allowed to select Orgs that the user has update access to
    #utable.organisation_id.requires = \
    #    current.s3db.org_organisation_requires(updateable = True)

    if next == DEFAULT:
        next = request.get_vars._next \
            or request.post_vars._next \
            or settings.profile_next
    if onvalidation == DEFAULT:
        onvalidation = settings.profile_onvalidation
    if onaccept == DEFAULT:
        onaccept = settings.profile_onaccept
    if log == DEFAULT:
        log = messages.profile_log
    labels = s3_mark_required(utable)[0]

    # If we have an opt_in and some post_vars then update the opt_in value
    # @ToDo: Replace with an AuthConsent-integrated solution
    opt_in_to_email = deployment_settings.get_auth_opt_in_to_email()
    if opt_in_to_email:
        team_list = deployment_settings.get_auth_opt_in_team_list()
        if request.post_vars:
            removed = []
            selected = []
            for opt_in in team_list:
                if opt_in in request.post_vars:
                    selected.append(opt_in)
                else:
                    removed.append(opt_in)
            db = current.db
            s3db = current.s3db
            ptable = s3db.pr_person
            putable = s3db.pr_person_user
            query = (putable.user_id == request.post_vars.id) & \
                    (putable.pe_id == ptable.pe_id)
            person_id = db(query).select(ptable.id,
                                         limitby=(0, 1)).first().id
            db(ptable.id == person_id).update(opt_in = selected)

            g_table = s3db["pr_group"]
            gm_table = s3db["pr_group_membership"]
            # Remove them from any team they are a member of in the removed list
            for team in removed:
                query = (g_table.name == team) & \
                        (gm_table.group_id == g_table.id) & \
                        (gm_table.person_id == person_id)
                gm_rec = db(query).select(g_table.id,
                                          limitby=(0, 1)).first()
                if gm_rec:
                    db(gm_table.id == gm_rec.id).delete()
            # Add them to the team (if they are not already a team member)
            for team in selected:
                query = (g_table.name == team) & \
                        (gm_table.group_id == g_table.id) & \
                        (gm_table.person_id == person_id)
                gm_rec = db(query).select(g_table.id,
                                          limitby=(0, 1)).first()
                if not gm_rec:
                    query = (g_table.name == team)
                    team_rec = db(query).select(g_table.id,
                                                limitby=(0, 1)).first()
                    # if the team doesn't exist then add it
                    if team_rec == None:
                        team_id = g_table.insert(name=team, group_type=5)
                    else:
                        team_id = team_rec.id
                    gm_table.insert(group_id = team_id,
                                    person_id = person_id)

    formstyle = deployment_settings.get_ui_formstyle()
    current.response.form_label_separator = ""
    form = SQLFORM(utable,
                   self.user.id,
                   fields = settings.profile_fields,
                   labels = labels,
                   hidden = {"_next": next},
                   showid = settings.showid,
                   submit_button = messages.profile_save_button,
                   delete_label = messages.delete_label,
                   upload = settings.download_url,
                   formstyle = formstyle,
                   separator = ""
                   )
    form.add_class("auth_profile")

    if deployment_settings.get_auth_openid():
        from gluon.contrib.login_methods.openid_auth import OpenIDAuth
        openid_login_form = OpenIDAuth(self)
        form = DIV(form, openid_login_form.list_user_openids())

    if form.accepts(request, session,
                    formname="profile",
                    onvalidation=onvalidation,
                    hideerror=settings.hideerror):
        #self.s3_auth_user_register_onaccept(form.vars.email, self.user.id)
        self.user.update(utable._filter_fields(form.vars))
        session.flash = messages.profile_updated
        if log:
            self.log_event(log, self.user)
        callback(onaccept, form)
        if not next:
            next = self.url(args=request.args)
        elif isinstance(next, (list, tuple)):
            # fix issue with 2.6
            next = next[0]
        elif next and not next[0] == "/" and next[:4] != "http":
            next = self.url(next.replace("[id]", str(form.vars.id)))
        redirect(next)

    if opt_in_to_email:
        # Render the opt_in checkboxes below the form
        # NOTE(review): `s3db` and `db_opt_in_list`'s query depend on names
        # bound only in the post_vars branch above - on a plain GET this
        # path would raise NameError; TODO confirm intended control flow
        T = current.T
        ptable = s3db.pr_person
        ltable = s3db.pr_person_user
        team_list = deployment_settings.get_auth_opt_in_team_list()
        query = (ltable.user_id == form.record.id) & \
                (ltable.pe_id == ptable.pe_id)
        db_opt_in_list = db(query).select(ptable.opt_in,
                                          limitby=(0, 1)).first().opt_in
        for opt_in in team_list:
            # Bugfix: interpolate the current opt_in (was: team_list),
            # which produced identical DOM ids for all checkboxes
            field_id = "%s_opt_in_%s" % (utable, opt_in)
            if opt_in in db_opt_in_list:
                # Any truthy value renders the boolean checked-attribute;
                # "selected" is presumably historical - TODO confirm
                checked = "selected"
            else:
                checked = None
            s3_addrow(form,
                      LABEL(T("Receive %(opt_in)s updates:") % \
                            {"opt_in": opt_in},
                            _for = "opt_in",
                            _id = field_id + SQLFORM.ID_LABEL_SUFFIX,
                            ),
                      INPUT(_name = opt_in,
                            _id = field_id,
                            _type = "checkbox",
                            _checked = checked,
                            ),
                      "",
                      formstyle,
                      field_id + SQLFORM.ID_ROW_SUFFIX,
                      )
    return form
# Python
def user_represent(self):
    """
    Common auth_UserRepresent instance for meta-fields (lazy property).

    Returns:
        S3Represent instance

    Note:
        The source contained two byte-identical copies of this method
        (the second silently shadowing the first); the duplicate has
        been removed.
    """

    # Lazy construction: cached on the instance after first access
    represent = self._user_represent
    if represent is None:
        # Deployment setting decides whether users are represented
        # by name or by email address
        if current.deployment_settings.get_ui_auth_user_represent() == "name":
            show_name = True
            show_email = False
        else:
            show_name = False
            show_email = True
        represent = current.s3db.auth_UserRepresent(show_name = show_name,
                                                    show_email = show_email,
                                                    show_link = False,
                                                    )
        self._user_represent = represent
    return represent
# Python
def add_org(name, parent=None):
    """
    Helper to add a New Organisation.

    Args:
        name: name of the new organisation
        parent: name of the parent organisation (optional); created
                on the fly if it does not exist yet

    Returns:
        tuple (organisation_id, pe_id)
        # pe_id is presumably set on `record` by update_super - TODO confirm

    Note:
        Closure: relies on otable, btable, db, s3db from the enclosing
        scope. The source contained two byte-identical copies of this
        helper; the duplicate has been removed.
    """

    organisation_id = otable.insert(name = name)
    record = Storage(id = organisation_id)
    update_super(otable, record)
    set_record_owner(otable, organisation_id)
    # @ToDo: Call onaccept?
    if parent:
        records = db(otable.name == parent).select(otable.id)
        if len(records) == 1:
            # Add branch link
            link_id = btable.insert(organisation_id = records.first().id,
                                    branch_id = organisation_id)
            onaccept = s3db.get_config("org_organisation_branch", "onaccept")
            callback(onaccept, Storage(vars = Storage(id = link_id)))
        elif len(records) > 1:
            # Ambiguous parent name => skip linking rather than guess
            current.log.debug("Cannot set branch link for new Organisation %s as there are multiple matches for parent %s" % (name, parent))
        else:
            # Create Parent
            parent_id = otable.insert(name = parent)
            update_super(otable, Storage(id = parent_id))
            set_record_owner(otable, parent_id)
            # @ToDo: Call onaccept?
            # Create link
            link_id = btable.insert(organisation_id = parent_id,
                                    branch_id = organisation_id)
            onaccept = s3db.get_config("org_organisation_branch", "onaccept")
            callback(onaccept, Storage(vars = Storage(id = link_id)))
    return (organisation_id, record.pe_id)
# Python
def s3_register_validation(): """ JavaScript client-side validation for Registration / User profile - needed to check for passwords being same, etc """ T = current.T request = current.request appname = request.application settings = current.deployment_settings s3 = current.response.s3 # Static Scripts scripts_append = s3.scripts.append if s3.debug: scripts_append("/%s/static/scripts/jquery.validate.js" % appname) scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.js" % appname) scripts_append("/%s/static/scripts/S3/s3.register_validation.js" % appname) else: scripts_append("/%s/static/scripts/jquery.validate.min.js" % appname) scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.min.js" % appname) scripts_append("/%s/static/scripts/S3/s3.register_validation.min.js" % appname) # Configuration js_global = [] js_append = js_global.append if settings.get_auth_registration_mobile_phone_mandatory(): js_append('''S3.auth_registration_mobile_phone_mandatory=1''') if settings.get_auth_registration_organisation_required(): js_append('''S3.auth_registration_organisation_required=1''') js_append('''i18n.enter_your_organisation="%s"''' % T("Enter your organization")) if settings.get_auth_terms_of_service(): js_append('''S3.auth_terms_of_service=1''') js_append('''i18n.tos_required="%s"''' % T("You must agree to the Terms of Service")) if request.controller != "admin": if settings.get_auth_registration_organisation_hidden(): js_append('''S3.auth_registration_hide_organisation=1''') # Check for Whitelists table = current.s3db.auth_organisation query = (table.organisation_id != None) & \ (table.domain != None) whitelists = current.db(query).select(table.organisation_id, table.domain) if whitelists: domains = [] domains_append = domains.append for whitelist in whitelists: domains_append("'%s':%s" % (whitelist.domain, whitelist.organisation_id)) domains = ''','''.join(domains) domains = '''S3.whitelists={%s}''' % domains js_append(domains) 
js_append('''i18n.enter_first_name="%s"''' % T("Enter your first name")) js_append('''i18n.provide_password="%s"''' % T("Provide a password")) js_append('''i18n.repeat_your_password="%s"''' % T("Repeat your password")) js_append('''i18n.enter_same_password="%s"''' % T("Enter the same password as above")) js_append('''i18n.please_enter_valid_email="%s"''' % T("Please enter a valid email address")) js_append('''S3.password_min_length=%i''' % settings.get_auth_password_min_length()) js_append('''i18n.password_min_chars="%s"''' % T("You must enter a minimum of %d characters")) js_append('''i18n.weak="%s"''' % T("Weak")) js_append('''i18n.normal="%s"''' % T("Normal")) js_append('''i18n.medium="%s"''' % T("Medium")) js_append('''i18n.strong="%s"''' % T("Strong")) js_append('''i18n.very_strong="%s"''' % T("Very Strong")) js_append('''$.extend($.validator.messages, { required: "%s" });''' % T("This field is required.")) script = '''\n'''.join(js_global) s3.js_global.append(script) # Call script after Global config done s3.jquery_ready.append('''s3_register_validation()''')
def s3_register_validation(): """ JavaScript client-side validation for Registration / User profile - needed to check for passwords being same, etc """ T = current.T request = current.request appname = request.application settings = current.deployment_settings s3 = current.response.s3 # Static Scripts scripts_append = s3.scripts.append if s3.debug: scripts_append("/%s/static/scripts/jquery.validate.js" % appname) scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.js" % appname) scripts_append("/%s/static/scripts/S3/s3.register_validation.js" % appname) else: scripts_append("/%s/static/scripts/jquery.validate.min.js" % appname) scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.min.js" % appname) scripts_append("/%s/static/scripts/S3/s3.register_validation.min.js" % appname) # Configuration js_global = [] js_append = js_global.append if settings.get_auth_registration_mobile_phone_mandatory(): js_append('''S3.auth_registration_mobile_phone_mandatory=1''') if settings.get_auth_registration_organisation_required(): js_append('''S3.auth_registration_organisation_required=1''') js_append('''i18n.enter_your_organisation="%s"''' % T("Enter your organization")) if settings.get_auth_terms_of_service(): js_append('''S3.auth_terms_of_service=1''') js_append('''i18n.tos_required="%s"''' % T("You must agree to the Terms of Service")) if request.controller != "admin": if settings.get_auth_registration_organisation_hidden(): js_append('''S3.auth_registration_hide_organisation=1''') # Check for Whitelists table = current.s3db.auth_organisation query = (table.organisation_id != None) & \ (table.domain != None) whitelists = current.db(query).select(table.organisation_id, table.domain) if whitelists: domains = [] domains_append = domains.append for whitelist in whitelists: domains_append("'%s':%s" % (whitelist.domain, whitelist.organisation_id)) domains = ''','''.join(domains) domains = '''S3.whitelists={%s}''' % domains js_append(domains) 
js_append('''i18n.enter_first_name="%s"''' % T("Enter your first name")) js_append('''i18n.provide_password="%s"''' % T("Provide a password")) js_append('''i18n.repeat_your_password="%s"''' % T("Repeat your password")) js_append('''i18n.enter_same_password="%s"''' % T("Enter the same password as above")) js_append('''i18n.please_enter_valid_email="%s"''' % T("Please enter a valid email address")) js_append('''S3.password_min_length=%i''' % settings.get_auth_password_min_length()) js_append('''i18n.password_min_chars="%s"''' % T("You must enter a minimum of %d characters")) js_append('''i18n.weak="%s"''' % T("Weak")) js_append('''i18n.normal="%s"''' % T("Normal")) js_append('''i18n.medium="%s"''' % T("Medium")) js_append('''i18n.strong="%s"''' % T("Strong")) js_append('''i18n.very_strong="%s"''' % T("Very Strong")) js_append('''$.extend($.validator.messages, { required: "%s" });''' % T("This field is required.")) script = '''\n'''.join(js_global) s3.js_global.append(script) # Call script after Global config done s3.jquery_ready.append('''s3_register_validation()''')
Python
def s3_auth_user_register_onaccept(self, email, user_id): """ Allows customisation of the process for creating/updating users - called by s3_approve_user when new users are created or approved Args: email: the user's email address user_id: the auth_user ID """ # Check for any custom functionality onaccept = current.s3db.get_config("auth_user", "register_onaccept") if callable(onaccept): onaccept(user_id) # Default functionality # Handle any OpenFire Chat Server integration if self.settings.login_userfield != "username": deployment_settings = current.deployment_settings chat_server = deployment_settings.get_chat_server() if chat_server: chat_username = email.replace("@", "_") db = current.db db(db.auth_user.id == user_id).update(username = chat_username) chatdb = DAL(deployment_settings.get_chatdb_string(), migrate=False) # Using RawSQL as table not created in web2py sql_query="insert into ofGroupUser values (\'%s\',\'%s\' ,0);" % (chat_server["groupname"], chat_username) chatdb.executesql(sql_query)
def s3_auth_user_register_onaccept(self, email, user_id): """ Allows customisation of the process for creating/updating users - called by s3_approve_user when new users are created or approved Args: email: the user's email address user_id: the auth_user ID """ # Check for any custom functionality onaccept = current.s3db.get_config("auth_user", "register_onaccept") if callable(onaccept): onaccept(user_id) # Default functionality # Handle any OpenFire Chat Server integration if self.settings.login_userfield != "username": deployment_settings = current.deployment_settings chat_server = deployment_settings.get_chat_server() if chat_server: chat_username = email.replace("@", "_") db = current.db db(db.auth_user.id == user_id).update(username = chat_username) chatdb = DAL(deployment_settings.get_chatdb_string(), migrate=False) # Using RawSQL as table not created in web2py sql_query="insert into ofGroupUser values (\'%s\',\'%s\' ,0);" % (chat_server["groupname"], chat_username) chatdb.executesql(sql_query)
Python
def s3_register_onaccept(self, form): """ Sets session.auth.user for authorstamp, etc, and approves user (to set registration groups, such as AUTHENTICATED, link to Person) Designed to be called when a user is created through: - registration via OAuth, LDAP, etc """ user = form.vars current.session.auth = Storage(user=user) self.s3_approve_user(user)
def s3_register_onaccept(self, form): """ Sets session.auth.user for authorstamp, etc, and approves user (to set registration groups, such as AUTHENTICATED, link to Person) Designed to be called when a user is created through: - registration via OAuth, LDAP, etc """ user = form.vars current.session.auth = Storage(user=user) self.s3_approve_user(user)
Python
def s3_user_register_onaccept(form): """ Stores the user's email & profile image in auth_user_temp, to be added to their person record when created on approval Designed to be called when a user is created through: - registration """ temptable = current.s3db.auth_user_temp form_vars = form.vars user_id = form_vars.id if not user_id: return record = {"user_id": user_id} # Store the home_phone ready to go to pr_contact home = form_vars.home if home: record["home"] = home # Store the mobile_phone ready to go to pr_contact mobile = form_vars.mobile if mobile: record["mobile"] = mobile # Store Consent Question Response consent = form_vars.consent if consent: record["consent"] = consent # Store the profile picture ready to go to pr_image image = form_vars.image if image != None and hasattr(image, "file"): # @ToDo: DEBUG!!! source_file = image.file original_filename = image.filename field = temptable.image newfilename = field.store(source_file, original_filename, field.uploadfolder) if isinstance(field.uploadfield, str): form_vars[field.uploadfield] = source_file.read() record["image"] = newfilename if len(record) > 1: temptable.update_or_insert(**record)
def s3_user_register_onaccept(form): """ Stores the user's email & profile image in auth_user_temp, to be added to their person record when created on approval Designed to be called when a user is created through: - registration """ temptable = current.s3db.auth_user_temp form_vars = form.vars user_id = form_vars.id if not user_id: return record = {"user_id": user_id} # Store the home_phone ready to go to pr_contact home = form_vars.home if home: record["home"] = home # Store the mobile_phone ready to go to pr_contact mobile = form_vars.mobile if mobile: record["mobile"] = mobile # Store Consent Question Response consent = form_vars.consent if consent: record["consent"] = consent # Store the profile picture ready to go to pr_image image = form_vars.image if image != None and hasattr(image, "file"): # @ToDo: DEBUG!!! source_file = image.file original_filename = image.filename field = temptable.image newfilename = field.store(source_file, original_filename, field.uploadfolder) if isinstance(field.uploadfield, str): form_vars[field.uploadfield] = source_file.read() record["image"] = newfilename if len(record) > 1: temptable.update_or_insert(**record)
Python
def s3_approve_user_message(user, languages): """ Default construction of Messages to (Org_)Admins to approve a new user """ approve_user_message = \ """Your action is required to approve a New User for %(system_name)s: %(first_name)s %(last_name)s %(email)s Please go to %(url)s to approve this user.""" T = current.T subjects = {} messages = {} first_name = user.first_name last_name = user.last_name email = user.email user_id = user.id base_url = current.response.s3.base_url system_name = current.deployment_settings.get_system_name() for language in languages: T.force(language) subjects[language] = \ s3_str(T("%(system_name)s - New User Registration Approval Pending") % \ {"system_name": system_name}) messages[language] = s3_str(T(approve_user_message) % \ {"system_name": system_name, "first_name": first_name, "last_name": last_name, "email": email, "url": "%(base_url)s/admin/user/%(id)s" % \ {"base_url": base_url, "id": user_id, }, }) # Restore language for UI T.force(current.session.s3.language) return subjects, messages
def s3_approve_user_message(user, languages): """ Default construction of Messages to (Org_)Admins to approve a new user """ approve_user_message = \ """Your action is required to approve a New User for %(system_name)s: %(first_name)s %(last_name)s %(email)s Please go to %(url)s to approve this user.""" T = current.T subjects = {} messages = {} first_name = user.first_name last_name = user.last_name email = user.email user_id = user.id base_url = current.response.s3.base_url system_name = current.deployment_settings.get_system_name() for language in languages: T.force(language) subjects[language] = \ s3_str(T("%(system_name)s - New User Registration Approval Pending") % \ {"system_name": system_name}) messages[language] = s3_str(T(approve_user_message) % \ {"system_name": system_name, "first_name": first_name, "last_name": last_name, "email": email, "url": "%(base_url)s/admin/user/%(id)s" % \ {"base_url": base_url, "id": user_id, }, }) # Restore language for UI T.force(current.session.s3.language) return subjects, messages
Python
def s3_verify_user(self, user): """" Sends a message to the approver to notify them if a user needs approval Designed to be called when a user is verified through: - responding to their verification email - if verification isn't required Returns: boolean - if the user has been approved Notes: - If deployment_settings.auth.always_notify_approver = True, send them notification regardless - If approval isn't required - calls s3_approve_user """ db = current.db deployment_settings = current.deployment_settings session = current.session auth_messages = self.messages utable = self.settings.table_user # Lookup the Approver approver, organisation_id = self.s3_approver(user) if deployment_settings.get_auth_registration_requires_approval() and approver: approved = False db(utable.id == user.id).update(registration_key = "pending") if user.registration_key: # User has just been verified session.information = deployment_settings.get_auth_registration_pending_approval() else: # No Verification needed session.information = deployment_settings.get_auth_registration_pending() message = "approve_user" else: approved = True if organisation_id and not user.get("organisation_id", None): # Use the whitelist user["organisation_id"] = organisation_id db(utable.id == user.id).update(organisation_id = organisation_id) link_user_to = deployment_settings.get_auth_registration_link_user_to_default() if link_user_to and not user.get("link_user_to", None): user["link_user_to"] = link_user_to self.s3_link_user(user) self.s3_approve_user(user) session.confirmation = auth_messages.email_verified session.flash = auth_messages.registration_successful if not deployment_settings.get_auth_always_notify_approver(): return approved message = "new_user" # Ensure that we send out the mails in the language that the approver(s) want if "@" in approver: # Look up language of the user record = db(utable.email == approver).select(utable.language, limitby = (0, 1) ).first() if record: language = record.language 
else: language = deployment_settings.get_L10n_default_language() approvers = [{"email": approver, "language": language, }] languages = [language] else: approvers = [] aappend = approvers.append languages = [] for each_approver in approver: language = each_approver["language"] if language not in languages: languages.append(language) aappend(each_approver) if message == "approve_user": # Customised Message construction? approve_user_message = deployment_settings.get_auth_approve_user_message() if callable(approve_user_message): subjects, messages = approve_user_message(user, languages) else: # Default Message construction subjects, messages = self.s3_approve_user_message(user, languages) elif message == "new_user": # @ToDo: Allow custom Message construction T = current.T subjects = {} messages = {} first_name = user.first_name last_name = user.last_name email = user.email system_name = deployment_settings.get_system_name() for language in languages: T.force(language) subjects[language] = \ s3_str(T("%(system_name)s - New User Registered") % \ {"system_name": system_name}) messages[language] = \ s3_str(auth_messages.new_user % {"system_name": system_name, "first_name": first_name, "last_name": last_name, "email": email }) # Restore language for UI T.force(session.s3.language) mailer = self.settings.mailer if mailer.settings.server: send_email = mailer.send for approver in approvers: language = approver["language"] result = send_email(to = approver["email"], subject = subjects[language], message = messages[language] ) else: # Email system not configured (yet) result = None if not result: # Don't prevent registration just because email not configured #db.rollback() current.response.error = auth_messages.email_send_failed return False return approved
def s3_verify_user(self, user): """" Sends a message to the approver to notify them if a user needs approval Designed to be called when a user is verified through: - responding to their verification email - if verification isn't required Returns: boolean - if the user has been approved Notes: - If deployment_settings.auth.always_notify_approver = True, send them notification regardless - If approval isn't required - calls s3_approve_user """ db = current.db deployment_settings = current.deployment_settings session = current.session auth_messages = self.messages utable = self.settings.table_user # Lookup the Approver approver, organisation_id = self.s3_approver(user) if deployment_settings.get_auth_registration_requires_approval() and approver: approved = False db(utable.id == user.id).update(registration_key = "pending") if user.registration_key: # User has just been verified session.information = deployment_settings.get_auth_registration_pending_approval() else: # No Verification needed session.information = deployment_settings.get_auth_registration_pending() message = "approve_user" else: approved = True if organisation_id and not user.get("organisation_id", None): # Use the whitelist user["organisation_id"] = organisation_id db(utable.id == user.id).update(organisation_id = organisation_id) link_user_to = deployment_settings.get_auth_registration_link_user_to_default() if link_user_to and not user.get("link_user_to", None): user["link_user_to"] = link_user_to self.s3_link_user(user) self.s3_approve_user(user) session.confirmation = auth_messages.email_verified session.flash = auth_messages.registration_successful if not deployment_settings.get_auth_always_notify_approver(): return approved message = "new_user" # Ensure that we send out the mails in the language that the approver(s) want if "@" in approver: # Look up language of the user record = db(utable.email == approver).select(utable.language, limitby = (0, 1) ).first() if record: language = record.language 
else: language = deployment_settings.get_L10n_default_language() approvers = [{"email": approver, "language": language, }] languages = [language] else: approvers = [] aappend = approvers.append languages = [] for each_approver in approver: language = each_approver["language"] if language not in languages: languages.append(language) aappend(each_approver) if message == "approve_user": # Customised Message construction? approve_user_message = deployment_settings.get_auth_approve_user_message() if callable(approve_user_message): subjects, messages = approve_user_message(user, languages) else: # Default Message construction subjects, messages = self.s3_approve_user_message(user, languages) elif message == "new_user": # @ToDo: Allow custom Message construction T = current.T subjects = {} messages = {} first_name = user.first_name last_name = user.last_name email = user.email system_name = deployment_settings.get_system_name() for language in languages: T.force(language) subjects[language] = \ s3_str(T("%(system_name)s - New User Registered") % \ {"system_name": system_name}) messages[language] = \ s3_str(auth_messages.new_user % {"system_name": system_name, "first_name": first_name, "last_name": last_name, "email": email }) # Restore language for UI T.force(session.s3.language) mailer = self.settings.mailer if mailer.settings.server: send_email = mailer.send for approver in approvers: language = approver["language"] result = send_email(to = approver["email"], subject = subjects[language], message = messages[language] ) else: # Email system not configured (yet) result = None if not result: # Don't prevent registration just because email not configured #db.rollback() current.response.error = auth_messages.email_send_failed return False return approved
Python
def s3_approve_user(self, user, password=None): """ Adds user to the 'Authenticated' role, and any default roles Designed to be called when a user is created through: - prepop - approved automatically during registration - approved by admin - added by admin - updated by admin Args: user: the user Storage() or Row password: optional password to include in a custom welcome_email """ user_id = user.id if not user_id: return db = current.db s3db = current.s3db deployment_settings = current.deployment_settings settings = self.settings utable = settings.table_user # Add to 'Authenticated' role authenticated = self.id_group("Authenticated") add_membership = self.add_membership add_membership(authenticated, user_id) organisation_id = user.organisation_id # Add User to required registration roles entity_roles = deployment_settings.get_auth_registration_roles() link_user_to = user.link_user_to or utable.link_user_to.default or [] if entity_roles: gtable = settings.table_group get_pe_id = s3db.pr_get_pe_id for entity, roles in entity_roles.items(): if entity is None and \ not organisation_id or "staff" not in link_user_to: # No default realm => do not assign default realm roles continue # Get User's Organisation or Site pe_id if entity in ("organisation_id", "org_group_id", "site_id"): tablename = "org_%s" % entity.split("_")[0] entity = get_pe_id(tablename, user[entity]) if not entity: continue rows = db(gtable.uuid.belongs(roles)).select(gtable.id) for role in rows: add_membership(role.id, user_id, entity=entity) if organisation_id and \ deployment_settings.get_auth_org_admin_to_first(): # If this is the 1st user to register for an Org, give them ORG_ADMIN for that Org entity = s3db.pr_get_pe_id("org_organisation", organisation_id) gtable = settings.table_group ORG_ADMIN = db(gtable.uuid == "ORG_ADMIN").select(gtable.id, limitby=(0, 1) ).first().id mtable = settings.table_membership query = (mtable.group_id == ORG_ADMIN) & \ (mtable.pe_id == entity) exists = 
db(query).select(mtable.id, limitby=(0, 1)) if not exists: add_membership(ORG_ADMIN, user_id, entity=entity) self.s3_link_user(user) # Track consent if deployment_settings.get_auth_consent_tracking(): ConsentTracking.register_consent(user_id) user_email = db(utable.id == user_id).select(utable.email, ).first().email self.s3_auth_user_register_onaccept(user_email, user_id) if current.response.s3.bulk is True: # Non-interactive imports should stop here return # Allow them to login db(utable.id == user_id).update(registration_key = "") # Approve User's Organisation if organisation_id and \ "org_organisation" in \ deployment_settings.get_auth_record_approval_required_for(): org_resource = s3db.resource("org_organisation", organisation_id, # Do not re-approve (would # overwrite original approver) approved = False, unapproved = True, ) approved = org_resource.approve() if not approved: # User is verifying their email and is not yet # logged-in, so approve by system authority org_resource.approve(approved_by = 0) # Send Welcome mail self.s3_send_welcome_email(user, password)
def s3_approve_user(self, user, password=None): """ Adds user to the 'Authenticated' role, and any default roles Designed to be called when a user is created through: - prepop - approved automatically during registration - approved by admin - added by admin - updated by admin Args: user: the user Storage() or Row password: optional password to include in a custom welcome_email """ user_id = user.id if not user_id: return db = current.db s3db = current.s3db deployment_settings = current.deployment_settings settings = self.settings utable = settings.table_user # Add to 'Authenticated' role authenticated = self.id_group("Authenticated") add_membership = self.add_membership add_membership(authenticated, user_id) organisation_id = user.organisation_id # Add User to required registration roles entity_roles = deployment_settings.get_auth_registration_roles() link_user_to = user.link_user_to or utable.link_user_to.default or [] if entity_roles: gtable = settings.table_group get_pe_id = s3db.pr_get_pe_id for entity, roles in entity_roles.items(): if entity is None and \ not organisation_id or "staff" not in link_user_to: # No default realm => do not assign default realm roles continue # Get User's Organisation or Site pe_id if entity in ("organisation_id", "org_group_id", "site_id"): tablename = "org_%s" % entity.split("_")[0] entity = get_pe_id(tablename, user[entity]) if not entity: continue rows = db(gtable.uuid.belongs(roles)).select(gtable.id) for role in rows: add_membership(role.id, user_id, entity=entity) if organisation_id and \ deployment_settings.get_auth_org_admin_to_first(): # If this is the 1st user to register for an Org, give them ORG_ADMIN for that Org entity = s3db.pr_get_pe_id("org_organisation", organisation_id) gtable = settings.table_group ORG_ADMIN = db(gtable.uuid == "ORG_ADMIN").select(gtable.id, limitby=(0, 1) ).first().id mtable = settings.table_membership query = (mtable.group_id == ORG_ADMIN) & \ (mtable.pe_id == entity) exists = 
db(query).select(mtable.id, limitby=(0, 1)) if not exists: add_membership(ORG_ADMIN, user_id, entity=entity) self.s3_link_user(user) # Track consent if deployment_settings.get_auth_consent_tracking(): ConsentTracking.register_consent(user_id) user_email = db(utable.id == user_id).select(utable.email, ).first().email self.s3_auth_user_register_onaccept(user_email, user_id) if current.response.s3.bulk is True: # Non-interactive imports should stop here return # Allow them to login db(utable.id == user_id).update(registration_key = "") # Approve User's Organisation if organisation_id and \ "org_organisation" in \ deployment_settings.get_auth_record_approval_required_for(): org_resource = s3db.resource("org_organisation", organisation_id, # Do not re-approve (would # overwrite original approver) approved = False, unapproved = True, ) approved = org_resource.approve() if not approved: # User is verifying their email and is not yet # logged-in, so approve by system authority org_resource.approve(approved_by = 0) # Send Welcome mail self.s3_send_welcome_email(user, password)
Python
def s3_link_user(self, user): """ Links the user account to various tables: - Creates (if not existing) User's Organisation and links User - Creates (if not existing) User's Person Record and links User - Creates (if not existing) User's Human Resource Record and links User - Calls s3_link_to_member Designed to be called when a user is created & approved through: - prepop - approved automatically during registration - approved by admin - added by admin - updated by admin Args: user: the user account (auth_user record) """ # Create/Update/Link to organisation, organisation_id = self.s3_link_to_organisation(user) # Add to user Person Registry and Email/Mobile to pr_contact person_id = self.s3_link_to_person(user, organisation_id) if user.org_group_id: self.s3_link_to_org_group(user, person_id) utable = self.settings.table_user link_user_to = user.link_user_to or utable.link_user_to.default if link_user_to: if "staff" in link_user_to: # Add Staff Record self.s3_link_to_human_resource(user, person_id, hr_type=1) if "volunteer" in link_user_to: # Add Volunteer Record self.s3_link_to_human_resource(user, person_id, hr_type=2) if "member" in link_user_to: # Add Member Record self.s3_link_to_member(user, person_id)
def s3_link_user(self, user): """ Links the user account to various tables: - Creates (if not existing) User's Organisation and links User - Creates (if not existing) User's Person Record and links User - Creates (if not existing) User's Human Resource Record and links User - Calls s3_link_to_member Designed to be called when a user is created & approved through: - prepop - approved automatically during registration - approved by admin - added by admin - updated by admin Args: user: the user account (auth_user record) """ # Create/Update/Link to organisation, organisation_id = self.s3_link_to_organisation(user) # Add to user Person Registry and Email/Mobile to pr_contact person_id = self.s3_link_to_person(user, organisation_id) if user.org_group_id: self.s3_link_to_org_group(user, person_id) utable = self.settings.table_user link_user_to = user.link_user_to or utable.link_user_to.default if link_user_to: if "staff" in link_user_to: # Add Staff Record self.s3_link_to_human_resource(user, person_id, hr_type=1) if "volunteer" in link_user_to: # Add Volunteer Record self.s3_link_to_human_resource(user, person_id, hr_type=2) if "member" in link_user_to: # Add Member Record self.s3_link_to_member(user, person_id)
Python
def s3_user_profile_onaccept(form): """ Update the UI locale from user profile """ if form.vars.language: current.session.s3.language = form.vars.language
def s3_user_profile_onaccept(form): """ Update the UI locale from user profile """ if form.vars.language: current.session.s3.language = form.vars.language
Python
def s3_link_to_organisation(self, user): """ Link a user account to an organisation Args: user: the user account record """ db = current.db s3db = current.s3db user_id = user.id # Lookup the organisation_id for the domain of this email address organisation_id = self.s3_approver(user)[1] if organisation_id: user.organisation_id = organisation_id else: # Use what the user has specified organisation_id = user.organisation_id # @ToDo: Is it correct to override the organisation entered by the user? # Ideally (if the deployment_settings.auth.registration_requests_organisation = True) # the org could be selected based on the email and the user could then override if not organisation_id: # Create a new Organisation name = user.get("organisation_name", None) if name: # Create new organisation acronym = user.get("organisation_acronym", None) otable = s3db.org_organisation record = Storage(name=name, acronym=acronym) organisation_id = otable.insert(**record) # Callbacks if organisation_id: record["id"] = organisation_id s3db.update_super(otable, record) s3db.onaccept(otable, record, method="create") self.s3_set_record_owner(otable, organisation_id) # Update user record user.organisation_id = organisation_id utable = self.settings.table_user db(utable.id == user_id).update(organisation_id = organisation_id) if not organisation_id: return None # Update link to Organisation ltable = s3db.org_organisation_user # Update if the User's Organisation has changed query = (ltable.user_id == user_id) rows = db(query).select(ltable.organisation_id, limitby = (0, 2)) if len(rows) == 1: # We know which record to update - this should always be 1 if rows.first().organisation_id != organisation_id: db(query).update(organisation_id=organisation_id) # No more action required return organisation_id else: # Create link (if it doesn't exist) query = (ltable.user_id == user_id) & \ (ltable.organisation_id == organisation_id) row = db(query).select(ltable.id, limitby=(0, 1)).first() if not row: 
ltable.insert(user_id = user_id, organisation_id = organisation_id) return organisation_id
def s3_link_to_organisation(self, user):
    """
        Link a user account to an organisation

        Args:
            user: the user account record

        Returns:
            the organisation_id the user was linked to, or None
            if no organisation could be determined or created

        Side Effects:
            - may update user.organisation_id (both the in-memory record
              and the auth_user table row)
            - may create a new org_organisation record (incl. callbacks)
            - maintains the org_organisation_user link table
    """

    db = current.db
    s3db = current.s3db

    user_id = user.id

    # Lookup the organisation_id for the domain of this email address
    # (s3_approver returns a (approver, organisation_id) tuple)
    organisation_id = self.s3_approver(user)[1]
    if organisation_id:
        # Domain-derived organisation takes precedence
        user.organisation_id = organisation_id
    else:
        # Use what the user has specified
        organisation_id = user.organisation_id
        # @ToDo: Is it correct to override the organisation entered by the user?
        #        Ideally (if the deployment_settings.auth.registration_requests_organisation = True)
        #        the org could be selected based on the email and the user could then override

    if not organisation_id:
        # Create a new Organisation
        name = user.get("organisation_name", None)
        if name:
            # Create new organisation
            acronym = user.get("organisation_acronym", None)
            otable = s3db.org_organisation
            record = Storage(name=name,
                             acronym=acronym)
            organisation_id = otable.insert(**record)

            # Callbacks (super-entity update, onaccept, ownership)
            if organisation_id:
                record["id"] = organisation_id
                s3db.update_super(otable, record)
                s3db.onaccept(otable, record, method="create")
                self.s3_set_record_owner(otable, organisation_id)

            # Update user record
            user.organisation_id = organisation_id
            utable = self.settings.table_user
            db(utable.id == user_id).update(organisation_id = organisation_id)

    if not organisation_id:
        # Nothing to link to
        return None

    # Update link to Organisation
    ltable = s3db.org_organisation_user

    # Update if the User's Organisation has changed
    # NOTE(review): limitby (0, 2) fetches up to two rows only to detect
    # whether the user has exactly one link (the normal case)
    query = (ltable.user_id == user_id)
    rows = db(query).select(ltable.organisation_id,
                            limitby = (0, 2))
    if len(rows) == 1:
        # We know which record to update - this should always be 1
        if rows.first().organisation_id != organisation_id:
            db(query).update(organisation_id=organisation_id)
        # No more action required
        return organisation_id
    else:
        # Zero or multiple links:
        # Create link (if it doesn't exist)
        query = (ltable.user_id == user_id) & \
                (ltable.organisation_id == organisation_id)
        row = db(query).select(ltable.id, limitby=(0, 1)).first()
        if not row:
            ltable.insert(user_id = user_id,
                          organisation_id = organisation_id)

    return organisation_id
Python
def s3_link_to_org_group(user, person_id): """ Link a user account to an organisation group Args: user: the user account record person_id: the person record ID associated with this user """ db = current.db s3db = current.s3db org_group_id = user.get("org_group_id") if not org_group_id or not person_id: return None # Default status to "Member" stable = s3db.org_group_person_status query = (stable.name.lower() == "member") & \ (stable.deleted == False) row = db(query).select(stable.id, limitby=(0, 1)).first() if row: status_id = row.id else: status_id = None # Check if link exists ltable = s3db.org_group_person query = (ltable.person_id == person_id) & \ (ltable.org_group_id == org_group_id) & \ (ltable.deleted == False) row = db(query).select(ltable.id, limitby=(0, 1)).first() if not row: # Make sure person record and org_group record exist ptable = s3db.pr_person gtable = s3db.org_group if ptable[person_id] and gtable[org_group_id]: ltable.insert(person_id = person_id, org_group_id = org_group_id, status_id = status_id, ) return org_group_id
def s3_link_to_org_group(user, person_id): """ Link a user account to an organisation group Args: user: the user account record person_id: the person record ID associated with this user """ db = current.db s3db = current.s3db org_group_id = user.get("org_group_id") if not org_group_id or not person_id: return None # Default status to "Member" stable = s3db.org_group_person_status query = (stable.name.lower() == "member") & \ (stable.deleted == False) row = db(query).select(stable.id, limitby=(0, 1)).first() if row: status_id = row.id else: status_id = None # Check if link exists ltable = s3db.org_group_person query = (ltable.person_id == person_id) & \ (ltable.org_group_id == org_group_id) & \ (ltable.deleted == False) row = db(query).select(ltable.id, limitby=(0, 1)).first() if not row: # Make sure person record and org_group record exist ptable = s3db.pr_person gtable = s3db.org_group if ptable[person_id] and gtable[org_group_id]: ltable.insert(person_id = person_id, org_group_id = org_group_id, status_id = status_id, ) return org_group_id
Python
def s3_link_to_human_resource(self,
                              user,
                              person_id,
                              hr_type,
                              ):
    """
        Link the user to a human resource record and make them owner

        Args:
            user: the user record
            person_id: the person ID linked to that user
            hr_type: the human resource type (staff/volunteer)

        Returns:
            the hrm_human_resource record ID (existing or new),
            or None if the HRM module is disabled or no organisation
            is available where one is required
    """

    db = current.db
    s3db = current.s3db
    settings = current.deployment_settings

    user_id = user.id
    organisation_id = user.organisation_id

    htablename = "hrm_human_resource"
    htable = s3db.table(htablename)

    if not htable or (not organisation_id and \
                      settings.get_hrm_org_required()):
        # Module disabled or no user organisation set
        return None

    def customise(hr_id):
        """ Customise hrm_human_resource """
        customise = settings.customise_resource(htablename)
        if customise:
            request = CRUDRequest("hrm", "human_resource",
                                  current.request,
                                  args = [str(hr_id)] if hr_id else [],
                                  )
            customise(request, htablename)

    # Determine the site ID (only staff, hr_type 1, can have a site)
    site_id = user.site_id if hr_type == 1 else None

    # Get existing active HR record for this user
    # (limitby (0, 2) => only need to know whether there are 0, 1 or more)
    ptable = s3db.pr_person
    ltable = s3db.pr_person_user
    query = (ltable.user_id == user_id) & \
            (ptable.pe_id == ltable.pe_id) & \
            (htable.person_id == ptable.id) & \
            (htable.type == hr_type) & \
            (htable.status == 1) & \
            (htable.deleted == False)
    rows = db(query).select(htable.id, limitby=(0, 2))

    accepted = None
    if len(rows) == 1:
        # Single active HR record of this type
        # => update organisation and site
        record = rows.first()
        hr_id = record.id

        # Update the record
        customise(hr_id)
        db(htable.id == hr_id).update(organisation_id = organisation_id,
                                      site_id = site_id,
                                      )
        accepted = "update"

        # Update or create site link
        hstable = s3db.hrm_human_resource_site
        query = (hstable.human_resource_id == hr_id)
        hstable.update_or_insert(query,
                                 site_id = site_id,
                                 human_resource_id = hr_id,
                                 owned_by_user = user_id,
                                 )
    else:
        # Multiple or no HR records of this type
        if rows:
            # Multiple records
            # => check if there is one for this organisation and site
            if type(person_id) is list:
                person_id = person_id[0]
            query = (htable.person_id == person_id) & \
                    (htable.organisation_id == organisation_id) & \
                    (htable.type == hr_type) & \
                    (htable.site_id == site_id) & \
                    (htable.deleted == False)
            row = db(query).select(htable.id, limitby=(0, 1)).first()
        else:
            # No HR record exists at all
            row = None

        if row:
            # At least one record for this organisation and site exists
            # => pass (no update/onaccept needed, accepted stays None)
            hr_id = row.id
        else:
            # Create new HR record
            customise(hr_id = None)
            record = Storage(person_id = person_id,
                             organisation_id = organisation_id,
                             site_id = site_id,
                             type = hr_type,
                             owned_by_user = user_id,
                             )
            hr_id = htable.insert(**record)
            record["id"] = hr_id
            accepted = "create"

    if hr_id and accepted:
        # NOTE(review): in the "update" branch, record contains only the
        # id field - update_super/onaccept presumably tolerate partial
        # records here; confirm before relying on other fields

        # Update any super-records
        s3db.update_super(htable, record)

        # Set or update the record owner and realm entity
        # (enforce update to change realm if organisation changed)
        self.s3_set_record_owner(htable, hr_id, force_update=True)

        # Run onaccept
        s3db.onaccept(htablename, record, method=accepted)

    return hr_id
def s3_link_to_human_resource(self,
                              user,
                              person_id,
                              hr_type,
                              ):
    """
        Link the user to a human resource record and make them owner

        Args:
            user: the user record
            person_id: the person ID linked to that user
            hr_type: the human resource type (staff/volunteer)

        Returns:
            the hrm_human_resource record ID (existing or new),
            or None if the HRM module is disabled or no organisation
            is available where one is required
    """

    db = current.db
    s3db = current.s3db
    settings = current.deployment_settings

    user_id = user.id
    organisation_id = user.organisation_id

    htablename = "hrm_human_resource"
    htable = s3db.table(htablename)

    if not htable or (not organisation_id and \
                      settings.get_hrm_org_required()):
        # Module disabled or no user organisation set
        return None

    def customise(hr_id):
        """ Customise hrm_human_resource """
        customise = settings.customise_resource(htablename)
        if customise:
            request = CRUDRequest("hrm", "human_resource",
                                  current.request,
                                  args = [str(hr_id)] if hr_id else [],
                                  )
            customise(request, htablename)

    # Determine the site ID (only staff, hr_type 1, can have a site)
    site_id = user.site_id if hr_type == 1 else None

    # Get existing active HR record for this user
    # (limitby (0, 2) => only need to know whether there are 0, 1 or more)
    ptable = s3db.pr_person
    ltable = s3db.pr_person_user
    query = (ltable.user_id == user_id) & \
            (ptable.pe_id == ltable.pe_id) & \
            (htable.person_id == ptable.id) & \
            (htable.type == hr_type) & \
            (htable.status == 1) & \
            (htable.deleted == False)
    rows = db(query).select(htable.id, limitby=(0, 2))

    accepted = None
    if len(rows) == 1:
        # Single active HR record of this type
        # => update organisation and site
        record = rows.first()
        hr_id = record.id

        # Update the record
        customise(hr_id)
        db(htable.id == hr_id).update(organisation_id = organisation_id,
                                      site_id = site_id,
                                      )
        accepted = "update"

        # Update or create site link
        hstable = s3db.hrm_human_resource_site
        query = (hstable.human_resource_id == hr_id)
        hstable.update_or_insert(query,
                                 site_id = site_id,
                                 human_resource_id = hr_id,
                                 owned_by_user = user_id,
                                 )
    else:
        # Multiple or no HR records of this type
        if rows:
            # Multiple records
            # => check if there is one for this organisation and site
            if type(person_id) is list:
                person_id = person_id[0]
            query = (htable.person_id == person_id) & \
                    (htable.organisation_id == organisation_id) & \
                    (htable.type == hr_type) & \
                    (htable.site_id == site_id) & \
                    (htable.deleted == False)
            row = db(query).select(htable.id, limitby=(0, 1)).first()
        else:
            # No HR record exists at all
            row = None

        if row:
            # At least one record for this organisation and site exists
            # => pass (no update/onaccept needed, accepted stays None)
            hr_id = row.id
        else:
            # Create new HR record
            customise(hr_id = None)
            record = Storage(person_id = person_id,
                             organisation_id = organisation_id,
                             site_id = site_id,
                             type = hr_type,
                             owned_by_user = user_id,
                             )
            hr_id = htable.insert(**record)
            record["id"] = hr_id
            accepted = "create"

    if hr_id and accepted:
        # NOTE(review): in the "update" branch, record contains only the
        # id field - update_super/onaccept presumably tolerate partial
        # records here; confirm before relying on other fields

        # Update any super-records
        s3db.update_super(htable, record)

        # Set or update the record owner and realm entity
        # (enforce update to change realm if organisation changed)
        self.s3_set_record_owner(htable, hr_id, force_update=True)

        # Run onaccept
        s3db.onaccept(htablename, record, method=accepted)

    return hr_id
Python
def s3_send_welcome_email(self, user, password=None): """ Send a welcome mail to newly-registered users - suitable e.g. for users from Facebook/Google who don't verify their emails Args: user: the user dict, must contain "email", and can contain "language" for translation of the message password: optional password to include in a custom welcome_email """ settings = current.deployment_settings if not settings.get_auth_registration_welcome_email(): # Welcome-email disabled return messages = self.messages if not settings.get_mail_sender(): current.response.error = messages.unable_send_email return # Ensure that we send out the mails in the language that # the recipient wants (if we know it) T = current.T language = user.get("language") if language: T.force(language) # Compose the message system_name = s3_str(settings.get_system_name()) subject = s3_str(messages.welcome_email_subject % \ {"system_name": system_name}) message = s3_str(messages.welcome_email % \ {"system_name": system_name, "url": settings.get_base_public_url(), "profile": URL("default", "person"), "password": password, }) # Restore language for UI T.force(current.session.s3.language) recipient = user["email"] if settings.has_module("msg"): results = current.msg.send_email(recipient, subject = subject, message = message, ) else: results = current.mail.send(recipient, subject = subject, message = message, ) if not results: current.response.error = messages.unable_send_email
def s3_send_welcome_email(self, user, password=None): """ Send a welcome mail to newly-registered users - suitable e.g. for users from Facebook/Google who don't verify their emails Args: user: the user dict, must contain "email", and can contain "language" for translation of the message password: optional password to include in a custom welcome_email """ settings = current.deployment_settings if not settings.get_auth_registration_welcome_email(): # Welcome-email disabled return messages = self.messages if not settings.get_mail_sender(): current.response.error = messages.unable_send_email return # Ensure that we send out the mails in the language that # the recipient wants (if we know it) T = current.T language = user.get("language") if language: T.force(language) # Compose the message system_name = s3_str(settings.get_system_name()) subject = s3_str(messages.welcome_email_subject % \ {"system_name": system_name}) message = s3_str(messages.welcome_email % \ {"system_name": system_name, "url": settings.get_base_public_url(), "profile": URL("default", "person"), "password": password, }) # Restore language for UI T.force(current.session.s3.language) recipient = user["email"] if settings.has_module("msg"): results = current.msg.send_email(recipient, subject = subject, message = message, ) else: results = current.mail.send(recipient, subject = subject, message = message, ) if not results: current.response.error = messages.unable_send_email
Python
def s3_impersonate(self, user_id): """ S3 framework function - designed to be used within tasks, which are run in a separate request & hence don't have access to current.auth Args: user_id: auth.user.id or auth.user.email """ settings = self.settings utable = settings.table_user query = None if not user_id: # Anonymous user = None elif isinstance(user_id, str) and not user_id.isdigit(): query = (utable[settings.login_userfield] == user_id) else: query = (utable.id == user_id) if query is not None: user = current.db(query).select(limitby=(0, 1)).first() if not user: # Invalid user ID raise ValueError("User not found") else: user = Storage(utable._filter_fields(user, id=True)) self.user = user session = current.session session.auth = Storage(user=user, last_visit=current.request.now, expiration=settings.expiration) self.s3_set_roles() if user: # Set the language from the Profile language = user.language current.T.force(language) session.s3.language = language return user
def s3_impersonate(self, user_id): """ S3 framework function - designed to be used within tasks, which are run in a separate request & hence don't have access to current.auth Args: user_id: auth.user.id or auth.user.email """ settings = self.settings utable = settings.table_user query = None if not user_id: # Anonymous user = None elif isinstance(user_id, str) and not user_id.isdigit(): query = (utable[settings.login_userfield] == user_id) else: query = (utable.id == user_id) if query is not None: user = current.db(query).select(limitby=(0, 1)).first() if not user: # Invalid user ID raise ValueError("User not found") else: user = Storage(utable._filter_fields(user, id=True)) self.user = user session = current.session session.auth = Storage(user=user, last_visit=current.request.now, expiration=settings.expiration) self.s3_set_roles() if user: # Set the language from the Profile language = user.language current.T.force(language) session.s3.language = language return user
Python
def s3_logged_in(self): """ Check whether the user is currently logged-in - tries Basic if not """ if self.override: return True if not self.is_logged_in(): # NB MUST NOT send an HTTP-401 challenge here because otherwise, # negative tests (e.g. if not auth.s3_logged_in()) would always # challenge, and never succeed # => omit basic_auth_realm # => send the challenge in permission.fail() instead # Probe for Master Key Auth if current.deployment_settings.get_auth_masterkey(): success = self.s3_masterkey_login() if success is not None: return success # Basic Auth (default) basic = self.basic() try: return basic[2] except TypeError: # old web2py return basic except: return False return True
def s3_logged_in(self): """ Check whether the user is currently logged-in - tries Basic if not """ if self.override: return True if not self.is_logged_in(): # NB MUST NOT send an HTTP-401 challenge here because otherwise, # negative tests (e.g. if not auth.s3_logged_in()) would always # challenge, and never succeed # => omit basic_auth_realm # => send the challenge in permission.fail() instead # Probe for Master Key Auth if current.deployment_settings.get_auth_masterkey(): success = self.s3_masterkey_login() if success is not None: return success # Basic Auth (default) basic = self.basic() try: return basic[2] except TypeError: # old web2py return basic except: return False return True
Python
def s3_set_roles(self):
    """
        Update pe_id, roles and realms for the current user

        Side Effects:
            - sets self.user["pe_id"] and self.user["realms"]
            - sets session.s3.roles
            - clears permission/table-restriction caches
    """

    session = current.session

    # Invalidate the cached table restrictions
    s3 = current.response.s3
    if "restricted_tables" in s3:
        del s3["restricted_tables"]

    permission = self.permission
    permission.clear_cache()

    system_roles = self.get_system_roles()
    ANONYMOUS = system_roles.ANONYMOUS
    AUTHENTICATED = system_roles.AUTHENTICATED

    # Every session has the anonymous role (if one is defined)
    session_roles = {ANONYMOUS} if ANONYMOUS else set()
    if self.user:
        db = current.db
        s3db = current.s3db

        user_id = self.user.id

        # Set pe_id for current user (from the pr_person_user link)
        ltable = s3db.table("pr_person_user")
        if ltable is not None:
            row = db(ltable.user_id == user_id).select(ltable.pe_id,
                                                       limitby = (0, 1),
                                                       cache = s3db.cache,
                                                       ).first()
            self.user["pe_id"] = row.pe_id if row else None
        else:
            self.user["pe_id"] = None

        # Get all current auth_memberships of the user
        mtable = self.settings.table_membership
        query = (mtable.deleted == False) & \
                (mtable.user_id == user_id) & \
                (mtable.group_id != None)
        rows = db(query).select(mtable.group_id,
                                mtable.pe_id,
                                cacheable = True,
                                )

        # Add all group_ids to session.s3.roles
        session_roles |= {row.group_id for row in rows}
        if AUTHENTICATED:
            session_roles.add(AUTHENTICATED)

        # Realms:
        # Permissions of a group apply only for records owned by any of
        # the entities which belong to the realm of the group membership
        if not permission.entity_realm:
            # All roles apply site-wide (i.e. no realms, policy 5 and below)
            # realm None = unrestricted
            realms = {row.group_id: None for row in rows}
        else:
            # Roles are limited to realms (policy 6 and above)
            default_realm = s3db.pr_default_realms(self.user["pe_id"])

            # Store the realms:
            # - realms maps group_id -> None (unrestricted) or a list of pe_ids
            realms = {}
            for row in rows:
                group_id = row.group_id
                if group_id == system_roles.ADMIN:
                    # Admin is not realm-restrictable
                    realm = realms[group_id] = None
                elif group_id in realms:
                    realm = realms[group_id]
                else:
                    realm = realms[group_id] = []
                if realm is None:
                    # Already unrestricted => no need to process further pe_ids
                    continue
                pe_id = row.pe_id
                if pe_id is None:
                    # Membership without explicit realm => use default realms
                    if default_realm:
                        realm.extend([e for e in default_realm
                                        if e not in realm])
                    if not realm:
                        del realms[group_id]
                elif pe_id == 0:
                    # pe_id 0 = explicit site-wide assignment
                    realms[group_id] = None
                elif pe_id not in realm:
                    realm.append(pe_id)

            if permission.entity_hierarchy:
                # Realms include subsidiaries of the realm entities

                # Get all entities in realms (deduplicated)
                entities = []
                append = entities.append
                for realm in realms.values():
                    if realm is not None:
                        for entity in realm:
                            if entity not in entities:
                                append(entity)

                # Lookup the subsidiaries of all realms and extensions
                descendants = s3db.pr_descendants(entities)

                # Add the subsidiaries to the realms
                # (iterate over a copy of each realm since we append to it)
                for group_id in realms:
                    realm = realms[group_id]
                    if realm is None:
                        continue
                    append = realm.append
                    for entity in list(realm):
                        if entity in descendants:
                            for subsidiary in descendants[entity]:
                                if subsidiary not in realm:
                                    append(subsidiary)

        # These realms apply for every authenticated user:
        for role in (ANONYMOUS, AUTHENTICATED):
            if role:
                realms[role] = None
        self.user["realms"] = Storage(realms)

    session.s3.roles = list(session_roles)
def s3_set_roles(self):
    """
        Update pe_id, roles and realms for the current user

        Side Effects:
            - sets self.user["pe_id"] and self.user["realms"]
            - sets session.s3.roles
            - clears permission/table-restriction caches
    """

    session = current.session

    # Invalidate the cached table restrictions
    s3 = current.response.s3
    if "restricted_tables" in s3:
        del s3["restricted_tables"]

    permission = self.permission
    permission.clear_cache()

    system_roles = self.get_system_roles()
    ANONYMOUS = system_roles.ANONYMOUS
    AUTHENTICATED = system_roles.AUTHENTICATED

    # Every session has the anonymous role (if one is defined)
    session_roles = {ANONYMOUS} if ANONYMOUS else set()
    if self.user:
        db = current.db
        s3db = current.s3db

        user_id = self.user.id

        # Set pe_id for current user (from the pr_person_user link)
        ltable = s3db.table("pr_person_user")
        if ltable is not None:
            row = db(ltable.user_id == user_id).select(ltable.pe_id,
                                                       limitby = (0, 1),
                                                       cache = s3db.cache,
                                                       ).first()
            self.user["pe_id"] = row.pe_id if row else None
        else:
            self.user["pe_id"] = None

        # Get all current auth_memberships of the user
        mtable = self.settings.table_membership
        query = (mtable.deleted == False) & \
                (mtable.user_id == user_id) & \
                (mtable.group_id != None)
        rows = db(query).select(mtable.group_id,
                                mtable.pe_id,
                                cacheable = True,
                                )

        # Add all group_ids to session.s3.roles
        session_roles |= {row.group_id for row in rows}
        if AUTHENTICATED:
            session_roles.add(AUTHENTICATED)

        # Realms:
        # Permissions of a group apply only for records owned by any of
        # the entities which belong to the realm of the group membership
        if not permission.entity_realm:
            # All roles apply site-wide (i.e. no realms, policy 5 and below)
            # realm None = unrestricted
            realms = {row.group_id: None for row in rows}
        else:
            # Roles are limited to realms (policy 6 and above)
            default_realm = s3db.pr_default_realms(self.user["pe_id"])

            # Store the realms:
            # - realms maps group_id -> None (unrestricted) or a list of pe_ids
            realms = {}
            for row in rows:
                group_id = row.group_id
                if group_id == system_roles.ADMIN:
                    # Admin is not realm-restrictable
                    realm = realms[group_id] = None
                elif group_id in realms:
                    realm = realms[group_id]
                else:
                    realm = realms[group_id] = []
                if realm is None:
                    # Already unrestricted => no need to process further pe_ids
                    continue
                pe_id = row.pe_id
                if pe_id is None:
                    # Membership without explicit realm => use default realms
                    if default_realm:
                        realm.extend([e for e in default_realm
                                        if e not in realm])
                    if not realm:
                        del realms[group_id]
                elif pe_id == 0:
                    # pe_id 0 = explicit site-wide assignment
                    realms[group_id] = None
                elif pe_id not in realm:
                    realm.append(pe_id)

            if permission.entity_hierarchy:
                # Realms include subsidiaries of the realm entities

                # Get all entities in realms (deduplicated)
                entities = []
                append = entities.append
                for realm in realms.values():
                    if realm is not None:
                        for entity in realm:
                            if entity not in entities:
                                append(entity)

                # Lookup the subsidiaries of all realms and extensions
                descendants = s3db.pr_descendants(entities)

                # Add the subsidiaries to the realms
                # (iterate over a copy of each realm since we append to it)
                for group_id in realms:
                    realm = realms[group_id]
                    if realm is None:
                        continue
                    append = realm.append
                    for entity in list(realm):
                        if entity in descendants:
                            for subsidiary in descendants[entity]:
                                if subsidiary not in realm:
                                    append(subsidiary)

        # These realms apply for every authenticated user:
        for role in (ANONYMOUS, AUTHENTICATED):
            if role:
                realms[role] = None
        self.user["realms"] = Storage(realms)

    session.s3.roles = list(session_roles)
Python
def s3_create_role(self, role, description=None, *acls, **args): """ Back-end method to create roles with ACLs Args: role: display name for the role description: description of the role (optional) acls: list of initial ACLs to assign to this role Kwargs: name: a unique name for the role hidden: hide this role completely from the RoleManager system: role can be assigned, but neither modified nor deleted in the RoleManager protected: role can be assigned and edited, but not deleted in the RoleManager """ table = self.settings.table_group hidden = args.get("hidden") system = args.get("system") protected = args.get("protected") if isinstance(description, dict): acls = [description] + acls description = None uid = args.get("uid", None) if uid: record = current.db(table.uuid == uid).select(table.id, limitby=(0, 1) ).first() else: record = None uid = uuid4() system_data = {} if hidden is not None: system_data["hidden"] = hidden if protected is not None: system_data["protected"] = protected if system is not None: system_data["system"] = system if record: role_id = record.id record.update_record(deleted = False, role = role, description = description, **system_data) else: role_id = table.insert(uuid = uid, role = role, description = description, **system_data) if role_id: update_acl = self.permission.update_acl for acl in acls: update_acl(role_id, **acl) return role_id
def s3_create_role(self, role, description=None, *acls, **args): """ Back-end method to create roles with ACLs Args: role: display name for the role description: description of the role (optional) acls: list of initial ACLs to assign to this role Kwargs: name: a unique name for the role hidden: hide this role completely from the RoleManager system: role can be assigned, but neither modified nor deleted in the RoleManager protected: role can be assigned and edited, but not deleted in the RoleManager """ table = self.settings.table_group hidden = args.get("hidden") system = args.get("system") protected = args.get("protected") if isinstance(description, dict): acls = [description] + acls description = None uid = args.get("uid", None) if uid: record = current.db(table.uuid == uid).select(table.id, limitby=(0, 1) ).first() else: record = None uid = uuid4() system_data = {} if hidden is not None: system_data["hidden"] = hidden if protected is not None: system_data["protected"] = protected if system is not None: system_data["system"] = system if record: role_id = record.id record.update_record(deleted = False, role = role, description = description, **system_data) else: role_id = table.insert(uuid = uid, role = role, description = description, **system_data) if role_id: update_acl = self.permission.update_acl for acl in acls: update_acl(role_id, **acl) return role_id
Python
def s3_delete_role(self, role_id): """ Remove a role from the system. Args: role_id: the ID or UID of the role Note: Protected roles cannot be deleted with this function, need to reset the protected-flag first to override. """ db = current.db table = self.settings.table_group if isinstance(role_id, str) and not role_id.isdigit(): query = (table.uuid == role_id) else: role_id = int(role_id) query = (table.id == role_id) role = db(query).select(table.id, table.uuid, table.protected, limitby = (0, 1), ).first() if role and not role.protected: group_id = role.id data = {"deleted": True, "group_id": None, "deleted_fk": '{"group_id": %s}' % group_id, } # Remove all memberships for this role mtable = self.settings.table_membership db(mtable.group_id == group_id).update(**data) # Remove all permission rules for this role ptable = self.permission.table db(ptable.group_id == group_id).update(**data) # Remove the role deleted_uuid = "%s-deleted-%s" % (uuid4().hex[-12:], role.uuid[:40]) role.update_record(uuid = deleted_uuid, role = None, deleted = True, )
def s3_delete_role(self, role_id): """ Remove a role from the system. Args: role_id: the ID or UID of the role Note: Protected roles cannot be deleted with this function, need to reset the protected-flag first to override. """ db = current.db table = self.settings.table_group if isinstance(role_id, str) and not role_id.isdigit(): query = (table.uuid == role_id) else: role_id = int(role_id) query = (table.id == role_id) role = db(query).select(table.id, table.uuid, table.protected, limitby = (0, 1), ).first() if role and not role.protected: group_id = role.id data = {"deleted": True, "group_id": None, "deleted_fk": '{"group_id": %s}' % group_id, } # Remove all memberships for this role mtable = self.settings.table_membership db(mtable.group_id == group_id).update(**data) # Remove all permission rules for this role ptable = self.permission.table db(ptable.group_id == group_id).update(**data) # Remove the role deleted_uuid = "%s-deleted-%s" % (uuid4().hex[-12:], role.uuid[:40]) role.update_record(uuid = deleted_uuid, role = None, deleted = True, )