language
stringclasses
6 values
original_string
stringlengths
25
887k
text
stringlengths
25
887k
Python
def as_dict(self):
    """ Return the offering object as a JSON-serializable dict """

    data = {"code": self.code}

    # Operations take precedence; contents are only included
    # when there are no operations
    if self.operations:
        data["operations"] = [op.as_dict() for op in self.operations]
    elif self.contents:
        data["contents"] = [item.as_dict() for item in self.contents]

    return data
Python
def as_dict(self):
    """ Return the operation object as a JSON-serializable dict """

    data = {"code": self.code,
            "method": self.method,
            "href": self.href,
            }

    # Only POST operations carry a request body description
    if self.method == "POST":
        data["request"] = {"type": self.mime,
                           "content": self.body or "",
                           }
    if self.result:
        data["result"] = self.result

    return data
Python
def as_dict(self):
    """ Return the content object as a JSON-serializable dict """

    return {"type": self.mime,
            "content": self.content,
            }
Python
def add_manager_tags():
    """
    Test Station Manager approval tags as filtered components
    - for embedding in form
    """

    # All tags are single-instance components of hrm_human_resource,
    # distinguished by their TAG value
    tags = (("reg_form", "REGFORM"),  # Registration Form
            ("crc", "CRC"),           # Criminal Record Certificate
            ("scp", "SCP"),           # Statement on Criminal Proceedings
            )

    components = tuple({"name": alias,
                        "joinby": "human_resource_id",
                        "filterby": {"tag": tag},
                        "multiple": False,
                        } for alias, tag in tags)

    current.s3db.add_components("hrm_human_resource",
                                hrm_human_resource_tag = components,
                                )
Python
def configure_manager_tags(resource):
    """
    Configure test station manager approval tags
    - labels
    - selectable options
    - representation

    Args:
        resource: the hrm_human_resource resource
                  (with filtered components configured)
    """

    T = current.T

    # Selectable document status options
    status_opts = (("N/A", T("not provided")),
                   ("APPROVED", T("provided / appropriate")),
                   ("REJECT", T("not up to requirements")),
                   )

    # Labels per component alias
    labels = {"reg_form": T("Signed form for registration"),
              "crc": T("Criminal Record Certificate"),
              "scp": T("Statement on Pending Criminal Proceedings"),
              }

    from ..helpers import workflow_tag_represent

    components = resource.components
    for alias in ("reg_form", "crc", "scp"):
        component = components.get(alias)
        if not component:
            continue
        field = component.table.value
        field.label = labels.get(alias)
        field.default = "N/A"
        field.requires = IS_IN_SET(status_opts, sort=False, zero=None)
        field.represent = workflow_tag_represent(dict(status_opts), none="N/A")
Python
def human_resource_onvalidation(form):
    """
    Form validation for human resources
    - make sure there is only one HR record per person
    - organisation contacts require date of birth and contact details
    """

    db = current.db
    s3db = current.s3db

    form_vars = form.vars
    record_id = get_form_record_id(form)
    person_id = form_vars.get("person_id")

    table = s3db.hrm_human_resource

    # Reject a second staff record for the same person
    if person_id:
        query = (table.person_id == person_id) & \
                (table.deleted == False)
        if record_id:
            query &= (table.id != record_id)
        if db(query).select(table.id, limitby=(0, 1)).first():
            form.errors.person_id = current.T("Person already has a staff record")
            return

    # Organisation contacts must have complete personal details
    if form_vars.get("org_contact"):
        ptable = s3db.pr_person
        ctable = s3db.pr_contact

        if not person_id:
            # Look up the person via the HR record
            query = (table.id == record_id)
            join = [ptable.on(ptable.id == table.person_id)]
        else:
            query = (ptable.id == person_id)
            join = None
        query &= (ptable.date_of_birth != None)

        # Include any email/phone contact details of the person
        left = ctable.on((ctable.pe_id == ptable.pe_id) & \
                         (ctable.contact_method.belongs(("EMAIL",
                                                         "SMS",
                                                         "HOME_PHONE",
                                                         "WORK_PHONE",
                                                         ))) & \
                         (ctable.deleted == False))
        rows = db(query).select(ptable.date_of_birth,
                                ctable.value,
                                left = left,
                                join = join,
                                )
        if not rows:
            # No row => date of birth missing
            form.errors.org_contact = current.T("Person details incomplete: date of birth required")
        elif not any(row.pr_contact.value for row in rows):
            # Rows, but no contact values => contact details missing
            form.errors.org_contact = current.T("Contact information incomplete: email address and/or phone number required")
Python
def human_resource_postprocess(form):
    """
    Postprocess for manager HR form:
    - update the MGRINFO tag of the organisation
    """

    record_id = get_form_record_id(form)
    if not record_id:
        return

    # Look up the organisation of the HR record
    table = current.s3db.hrm_human_resource
    row = current.db(table.id == record_id).select(table.organisation_id,
                                                   limitby = (0, 1),
                                                   ).first()
    if row:
        from .org import update_mgrinfo
        update_mgrinfo(row.organisation_id)
Python
def index_alt():
    """
    Module homepage for non-Admin users when no CMS content found
    - just redirects to the Warehouse Summary View
    """

    s3_redirect_default(URL(f="warehouse", args="summary"))
Python
def supplier():
    """ Filtered version of the organisation() REST controller """

    get_vars["organisation_type.name"] = "Supplier"

    # Load model (including normal CRUD strings)
    s3db.org_organisation

    # Modify CRUD Strings
    s3.crud_strings.org_organisation = Storage(
        label_create = T("Create Supplier"),
        title_display = T("Supplier Details"),
        title_list = T("Suppliers"),
        title_update = T("Edit Supplier"),
        title_upload = T("Import Suppliers"),
        label_list_button = T("List Suppliers"),
        label_delete_button = T("Delete Supplier"),
        msg_record_created = T("Supplier added"),
        msg_record_modified = T("Supplier updated"),
        msg_record_deleted = T("Supplier deleted"),
        msg_list_empty = T("No Suppliers currently registered")
        )

    # Open record in this controller after creation
    s3db.configure("org_organisation",
                   create_next = URL(c="inv", f="supplier",
                                     args = ["[id]", "read"],
                                     ),
                   )

    # NB Type gets defaulted in the Custom CRUD form
    # - user needs create permissions for org_organisation_organisation_type
    return s3db.org_organisation_controller()
Python
def import_prep(tree):
    """
    Deletes all Stock records of the organisation/branch
    before processing a new data import
    """

    if s3.import_replace and tree is not None:
        xml = current.xml
        tag = xml.TAG
        att = xml.ATTRIBUTE

        # Find all organisation names in the import tree
        root = tree.getroot()
        expr = "/%s/%s[@%s='org_organisation']/%s[@%s='name']" % \
               (tag.root, tag.resource, att.name, tag.data, att.field)
        orgs = root.xpath(expr)

        otable = s3db.org_organisation
        stable = s3db.org_site
        itable = s3db.inv_inv_item

        for org in orgs:
            org_name = org.get("value", None) or org.text
            if org_name:
                try:
                    org_name = json.loads(xml.xml_decode(org_name))
                except:
                    # Name not JSON-encoded => use as-is
                    pass
            if org_name:
                # Delete all stock items of this organisation's sites
                query = (otable.name == org_name) & \
                        (stable.organisation_id == otable.id) & \
                        (itable.site_id == stable.id)
                resource = s3db.resource("inv_inv_item", filter=query)
                # Use cascade=True so that the deletion gets
                # rolled back if the import fails:
                resource.delete(format="xml", cascade=True)
Python
def inv_item_quantity():
    """
    Returns the stock quantity and pack quantity of an inventory item
    as JSON: {"iquantity": ..., "pquantity": ...}

    Access via the .json representation to avoid work rendering menus, etc
    """

    try:
        item_id = request.args[0]
    except:
        raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))

    table = s3db.inv_inv_item
    ptable = db.supply_item_pack
    query = (table.id == item_id) & \
            (table.item_pack_id == ptable.id)
    record = db(query).select(table.quantity,
                              ptable.quantity,
                              limitby = (0, 1)
                              ).first()
    # Fix: the original crashed with AttributeError when no matching
    # item exists - respond with a proper 404 instead
    if not record:
        raise HTTP(404, current.xml.json_message(False, 404, "Item not found!"))

    d = {"iquantity": record.inv_inv_item.quantity,
         "pquantity": record.supply_item_pack.quantity,
         }
    output = json.dumps(d)

    response.headers["Content-Type"] = "application/json"
    return output
Python
def inv_item_packs():
    """
    Called by OptionsFilter to provide the pack options for a
    particular Item

    Access via the .json representation to avoid work rendering menus, etc
    """

    try:
        item_id = request.args[0]
    except:
        raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))

    # All packs defined for the supply item of this inventory item
    itable = s3db.inv_inv_item
    ptable = db.supply_item_pack
    query = (itable.id == item_id) & \
            (itable.item_id == ptable.item_id)
    packs = db(query).select(ptable.id,
                             ptable.name,
                             ptable.quantity,
                             )

    response.headers["Content-Type"] = "application/json"
    return packs.json()
Python
def send_commit():
    """
    Send a Shipment containing all items in a Commitment
    - thin wrapper around the shared implementation
    """

    return s3db.req_send_commit()
Python
def send_returns():
    """
    This will cancel a shipment that has been sent

    @todo need to roll back commitments
    """

    try:
        send_id = request.args[0]
    except:
        redirect(f="send")

    stable = s3db.inv_send
    if not auth.s3_has_permission("update", stable, record_id=send_id):
        session.error = T("You do not have permission to return this sent shipment.")

    send_record = db(stable.id == send_id).select(stable.status,
                                                  limitby = (0, 1)
                                                  ).first()
    inv_ship_status = s3db.inv_ship_status
    # Fix: the original dereferenced send_record.status without checking
    # that the record exists, crashing on an invalid send_id
    if not send_record:
        session.error = T("This shipment does not exist.")
    elif send_record.status == inv_ship_status["IN_PROCESS"]:
        session.error = T("This shipment has not been sent - it cannot be returned because it can still be edited.")

    if session.error:
        redirect(URL(c="inv", f="send", args=[send_id]))

    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item

    # Okay no error so far, change the status to Returning
    ADMIN = auth.get_system_roles().ADMIN
    stable[send_id] = dict(status = inv_ship_status["RETURNING"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)
                                                        ).first()
    if recv_row:
        recv_id = recv_row.recv_id
        rtable[recv_id] = dict(date = request.utcnow,
                               status = inv_ship_status["RETURNING"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)

    # Set all track items to status of returning
    db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["RETURNING"])
    session.confirmation = T("Sent Shipment has returned, indicate how many items will be returned to Warehouse.")

    redirect(URL(c="inv", f="send", args=[send_id, "track_item"]))
Python
def return_process():
    """ Return some stock from a shipment back into the warehouse """

    try:
        send_id = request.args[0]
    except:
        redirect(f="send")

    stable = s3db.inv_send
    if not auth.s3_has_permission("update", stable, record_id=send_id):
        session.error = T("You do not have permission to return this sent shipment.")

    send_record = db(stable.id == send_id).select(stable.status,
                                                  limitby = (0, 1)
                                                  ).first()
    inv_ship_status = s3db.inv_ship_status
    # Fix: the original dereferenced send_record.status without checking
    # that the record exists, crashing on an invalid send_id
    if not send_record:
        session.error = T("This shipment does not exist.")
    elif send_record.status != inv_ship_status["RETURNING"]:
        session.error = T("This shipment has not been returned.")

    if session.error:
        redirect(URL(c="inv", f="send", args=[send_id]))

    invtable = s3db.inv_inv_item
    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item

    # Okay no error so far, let's move the goods back into the warehouse
    # and then change the status to received
    # Update Receive record & lock for editing
    # Move each item to the site
    track_rows = db(tracktable.send_id == send_id).select(tracktable.id,
                                                          tracktable.quantity,
                                                          tracktable.return_quantity,
                                                          tracktable.send_inv_item_id,
                                                          )
    for track_item in track_rows:
        send_inv_id = track_item.send_inv_item_id
        return_qnty = track_item.return_quantity
        if return_qnty is None:
            return_qnty = 0
        # Update the receive quantity in the tracking record
        tracktable[track_item.id] = dict(recv_quantity = track_item.quantity - return_qnty)
        if return_qnty:
            # Put the returned quantity back into stock
            db(invtable.id == send_inv_id).update(quantity = invtable.quantity + return_qnty)

    ADMIN = auth.get_system_roles().ADMIN
    stable[send_id] = dict(status = inv_ship_status["RECEIVED"],
                           owned_by_user = None,
                           owned_by_group = ADMIN)
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)
                                                        ).first()
    if recv_row:
        recv_id = recv_row.recv_id
        rtable[recv_id] = dict(date = request.utcnow,
                               status = inv_ship_status["RECEIVED"],
                               owned_by_user = None,
                               owned_by_group = ADMIN)

    # Change the status for all track items in this shipment to Received
    db(tracktable.send_id == send_id).update(status = s3db.inv_tracking_status["RECEIVED"])

    redirect(URL(f = "send", args = [send_id]))
Python
def send_cancel():
    """
    This will cancel a shipment that has been sent

    @todo need to roll back commitments
    """

    try:
        send_id = request.args[0]
    except:
        redirect(f="send")

    stable = s3db.inv_send
    if not auth.s3_has_permission("delete", stable, record_id=send_id):
        session.error = T("You do not have permission to cancel this sent shipment.")

    send_record = db(stable.id == send_id).select(stable.status,
                                                  limitby = (0, 1)
                                                  ).first()
    inv_ship_status = s3db.inv_ship_status
    # Fix: the original dereferenced send_record.status without checking
    # that the record exists, crashing on an invalid send_id
    if not send_record:
        session.error = T("This shipment does not exist.")
    elif send_record.status != inv_ship_status["SENT"]:
        session.error = T("This shipment has not been sent - it has NOT been canceled because it can still be edited.")

    if session.error:
        redirect(URL(c="inv", f="send", args=[send_id]))

    rtable = s3db.inv_recv
    tracktable = s3db.inv_track_item

    # Okay no error so far, let's delete that baby
    # Change the send and recv status to cancelled
    ADMIN = auth.get_system_roles().ADMIN
    db(stable.id == send_id).update(status = inv_ship_status["CANCEL"],
                                    owned_by_user = None,
                                    owned_by_group = ADMIN)
    recv_row = db(tracktable.send_id == send_id).select(tracktable.recv_id,
                                                        limitby = (0, 1)
                                                        ).first()
    if recv_row:
        recv_id = recv_row.recv_id
        db(rtable.id == recv_id).update(date = request.utcnow,
                                        status = inv_ship_status["CANCEL"],
                                        owned_by_user = None,
                                        owned_by_group = ADMIN)

    # Change the track items status to canceled and then delete them
    # If they are linked to a request then the in transit total will also be reduced
    # Records can only be deleted if the status is In Process (or preparing)
    # so change the status before we delete
    tracking_status = s3db.inv_tracking_status
    db(tracktable.send_id == send_id).update(status = tracking_status["IN_PROCESS"])
    track_rows = db(tracktable.send_id == send_id).select(tracktable.id)
    for track_item in track_rows:
        s3db.inv_track_item_deleting(track_item.id)
    # Now change the status to (cancelled)
    db(tracktable.send_id == send_id).update(status = tracking_status["CANCEL"])

    session.confirmation = T("Sent Shipment canceled and items returned to Warehouse")

    redirect(URL(f = "send", args = [send_id]))
Python
def req_item_in_shipment(shipment_item,
                         shipment_type,
                         req_items,
                         ):
    """
    Checks if a shipment item is in a request and updates
    req_item and the shipment.
    """

    shipment_item_table = "inv_%s_item" % shipment_type
    try:
        item_id = shipment_item[shipment_item_table].item_id
    except:
        item_id = shipment_item.inv_inv_item.item_id

    # Guard clause: item not requested from this inventory
    if item_id not in req_items:
        return None, None

    # recv-shipments update quantity_fulfil, send-shipments quantity_transit
    quantity_req_type = "quantity_%s" % {"recv": "fulfil",
                                         "send": "transit",
                                         }[shipment_type]

    req_item = req_items[item_id]
    req_item_id = req_item.id

    # Update the req quantity: convert the shipment item quantity
    # into request item packs, capped at the requested quantity
    quantity = req_item[quantity_req_type] + \
               (shipment_item[shipment_item_table].pack_quantity / \
                req_item.pack_quantity) * \
               shipment_item[shipment_item_table].quantity
    quantity = min(quantity, req_item.quantity)
    s3db.req_req_item[req_item_id] = {quantity_req_type: quantity}

    # Link the shipment_item to the req_item
    s3db[shipment_item_table][shipment_item[shipment_item_table].id] = \
                                        {"req_item_id": req_item_id}

    # Flag req record to update status_fulfil
    return req_item.req_id, req_item.id
Python
def recv_item_json():
    """
    Used by s3.supply.js

    Access via the .json representation to avoid work rendering menus, etc
    """

    try:
        item_id = request.args[0]
    except:
        raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))

    inv_ship_status = s3db.inv_ship_status

    stable = s3db.org_site
    rtable = s3db.inv_recv
    ittable = s3db.inv_track_item

    # Date-only representation
    rtable.date.represent = lambda dt: dt[:10]

    # All received shipment items for this request item
    query = (ittable.req_item_id == item_id) & \
            (rtable.id == ittable.recv_id) & \
            (rtable.site_id == stable.id) & \
            (rtable.status == inv_ship_status["RECEIVED"]) & \
            (ittable.deleted == False)
    records = db(query).select(rtable.id,
                               rtable.date,
                               stable.name,
                               ittable.quantity,
                               )

    # Prepend a header entry to the rows-JSON
    output = "[%s,%s" % (json.dumps({"id": s3_str(T("Received")),
                                     "quantity": "#",
                                     }),
                         records.json()[1:])

    response.headers["Content-Type"] = "application/json"
    return output
Python
def send_item_json():
    """
    Used by s3.supply.js

    Access via the .json representation to avoid work rendering menus, etc
    """

    try:
        item_id = request.args[0]
    except:
        raise HTTP(400, current.xml.json_message(False, 400, "No value provided!"))

    inv_ship_status = s3db.inv_ship_status

    stable = s3db.org_site
    istable = s3db.inv_send
    ittable = s3db.inv_track_item

    # Date-only representation
    istable.date.represent = lambda dt: dt[:10]

    # All sent/received shipment items for this request item
    query = (ittable.req_item_id == item_id) & \
            (istable.id == ittable.send_id) & \
            (istable.site_id == stable.id) & \
            ((istable.status == inv_ship_status["SENT"]) | \
             (istable.status == inv_ship_status["RECEIVED"])) & \
            (ittable.deleted == False)
    records = db(query).select(istable.id,
                               istable.date,
                               stable.name,
                               ittable.quantity,
                               )

    # Prepend a header entry to the rows-JSON
    output = "[%s,%s" % (json.dumps({"id": s3_str(T("Sent")),
                                     "quantity": "#",
                                     }),
                         records.json()[1:],
                         )

    response.headers["Content-Type"] = "application/json"
    return output
Python
def project(): """ Simpler version of Projects for use within Inventory module """ # Load default Model s3db.project_project crud_form = s3base.S3SQLCustomForm("organisation_id", "code", "name", "end_date", ) list_fields = ["organisation_id", "code", "name", "end_date", ] s3db.configure("project_project", crud_form = crud_form, filter_widgets = None, list_fields = list_fields, ) return crud_controller("project")
def project(): """ Simpler version of Projects for use within Inventory module """ # Load default Model s3db.project_project crud_form = s3base.S3SQLCustomForm("organisation_id", "code", "name", "end_date", ) list_fields = ["organisation_id", "code", "name", "end_date", ] s3db.configure("project_project", crud_form = crud_form, filter_widgets = None, list_fields = list_fields, ) return crud_controller("project")
Python
def incoming(): """ Incoming Shipments for Sites Used from Requests rheader when looking at Transport Status """ # @ToDo: Create this function! return s3db.inv_incoming()
def incoming(): """ Incoming Shipments for Sites Used from Requests rheader when looking at Transport Status """ # @ToDo: Create this function! return s3db.inv_incoming()
Python
def as_dict(self): """ Return the time series as JSON-serializable dict """ rfields = self.rfields # Fact Data fact_data = [] for fact in self.facts: fact_data.append((str(fact.label), fact.method, fact.base, fact.slope, fact.interval, )) # Event start and end selectors rfield = rfields.get("event_start") if rfield: event_start = rfield.selector else: event_start = None rfield = rfields.get("event_end") if rfield: event_end = rfield.selector else: event_end = None # Rows rows = rfields.get("rows") if rows: rows_sorted = self._represent_axis(rows, self.rows_keys) rows_keys = [row[0] for row in rows_sorted] rows_data = {"s": rows.selector, "l": str(rows.label), "v": rows_sorted, } else: rows_keys = None rows_data = None # Columns cols = rfields.get("cols") if cols: cols_sorted = self._represent_axis(cols, self.cols_keys) cols_keys = [col[0] for col in cols_sorted] cols_data = {"s": cols.selector, "l": str(cols.label), "v": cols_sorted, } else: cols_keys = None cols_data = None # Iterate over the event frame to collect aggregates event_frame = self.event_frame periods_data = [] append = periods_data.append #fact = self.facts[0] for period in event_frame: # Aggregate period.aggregate(self.facts) # Extract item = period.as_dict(rows = rows_keys, cols = cols_keys, ) append(item) # Baseline rfield = rfields.get("baseline") if rfield: baseline = (rfield.selector, str(rfield.label), event_frame.baseline, ) else: baseline = None # Output dict data = {"f": fact_data, "t": (event_start, event_end), "s": event_frame.slots, "e": event_frame.empty, "l": self.title, "r": rows_data, "c": cols_data, "p": periods_data, "z": baseline, } return data
def as_dict(self): """ Return the time series as JSON-serializable dict """ rfields = self.rfields # Fact Data fact_data = [] for fact in self.facts: fact_data.append((str(fact.label), fact.method, fact.base, fact.slope, fact.interval, )) # Event start and end selectors rfield = rfields.get("event_start") if rfield: event_start = rfield.selector else: event_start = None rfield = rfields.get("event_end") if rfield: event_end = rfield.selector else: event_end = None # Rows rows = rfields.get("rows") if rows: rows_sorted = self._represent_axis(rows, self.rows_keys) rows_keys = [row[0] for row in rows_sorted] rows_data = {"s": rows.selector, "l": str(rows.label), "v": rows_sorted, } else: rows_keys = None rows_data = None # Columns cols = rfields.get("cols") if cols: cols_sorted = self._represent_axis(cols, self.cols_keys) cols_keys = [col[0] for col in cols_sorted] cols_data = {"s": cols.selector, "l": str(cols.label), "v": cols_sorted, } else: cols_keys = None cols_data = None # Iterate over the event frame to collect aggregates event_frame = self.event_frame periods_data = [] append = periods_data.append #fact = self.facts[0] for period in event_frame: # Aggregate period.aggregate(self.facts) # Extract item = period.as_dict(rows = rows_keys, cols = cols_keys, ) append(item) # Baseline rfield = rfields.get("baseline") if rfield: baseline = (rfield.selector, str(rfield.label), event_frame.baseline, ) else: baseline = None # Output dict data = {"f": fact_data, "t": (event_start, event_end), "s": event_frame.slots, "e": event_frame.empty, "l": self.title, "r": rows_data, "c": cols_data, "p": periods_data, "z": baseline, } return data
Python
def _represent_axis(rfield, values): """ Represent and sort the values of a pivot axis (rows or cols) Args: rfield: the axis rfield values: iterable of values """ if rfield.virtual: representations = [] append = representations.append() stripper = MarkupStripper() represent = rfield.represent if not represent: represent = s3_str for value in values: if value is None: append((value, "-")) text = represent(value) if "<" in text: stripper.feed(text) append((value, stripper.stripped())) else: append((value, text)) else: field = rfield.field represent = field.represent if represent and hasattr(represent, "bulk"): representations = represent.bulk(list(values), list_type = False, show_link = False, ).items() else: representations = [] for value in values: append((value, s3_represent_value(field, value, strip_markup = True, ))) return sorted(representations, key = lambda item: item[1])
def _represent_axis(rfield, values): """ Represent and sort the values of a pivot axis (rows or cols) Args: rfield: the axis rfield values: iterable of values """ if rfield.virtual: representations = [] append = representations.append() stripper = MarkupStripper() represent = rfield.represent if not represent: represent = s3_str for value in values: if value is None: append((value, "-")) text = represent(value) if "<" in text: stripper.feed(text) append((value, stripper.stripped())) else: append((value, text)) else: field = rfield.field represent = field.represent if represent and hasattr(represent, "bulk"): representations = represent.bulk(list(values), list_type = False, show_link = False, ).items() else: representations = [] for value in values: append((value, s3_represent_value(field, value, strip_markup = True, ))) return sorted(representations, key = lambda item: item[1])
Python
def _represent_method(self, field): """ Get the representation method for a field in the report Args: field: the field selector """ rfields = self.rfields default = lambda value: None if field and field in rfields: rfield = rfields[field] if rfield.field: def repr_method(value): return s3_represent_value(rfield.field, value, strip_markup = True, ) elif rfield.virtual: stripper = MarkupStripper() def repr_method(val): if val is None: return "-" text = s3_str(val) if "<" in text: stripper.feed(text) return stripper.stripped() # = totally naked ;) else: return text else: repr_method = default else: repr_method = default return repr_method
def _represent_method(self, field): """ Get the representation method for a field in the report Args: field: the field selector """ rfields = self.rfields default = lambda value: None if field and field in rfields: rfield = rfields[field] if rfield.field: def repr_method(value): return s3_represent_value(rfield.field, value, strip_markup = True, ) elif rfield.virtual: stripper = MarkupStripper() def repr_method(val): if val is None: return "-" text = s3_str(val) if "<" in text: stripper.feed(text) return stripper.stripped() # = totally naked ;) else: return text else: repr_method = default else: repr_method = default return repr_method
Python
def _event_frame(self, start = None, end = None, slots = None, ): """ Create an event frame for this report Args: start: the start date/time (string, date or datetime) end: the end date/time (string, date or datetime) slots: the slot length (string) Returns: the event frame """ from ..resource import FS resource = self.resource rfields = self.rfields STANDARD_SLOT = "1 day" now = tp_tzsafe(datetime.datetime.utcnow()) # Parse start and end time dtparse = self.dtparse start_dt = end_dt = None if start: if isinstance(start, str): start_dt = dtparse(start, start=now) else: if isinstance(start, datetime.datetime): start_dt = tp_tzsafe(start) else: # Date only => start at midnight start_dt = tp_tzsafe(datetime.datetime.fromordinal(start.toordinal())) if end: if isinstance(end, str): relative_to = start_dt if start_dt else now end_dt = dtparse(end, start=relative_to) else: if isinstance(end, datetime.datetime): end_dt = tp_tzsafe(end) else: # Date only => end at midnight end_dt = tp_tzsafe(datetime.datetime.fromordinal(end.toordinal())) # Fall back to now if end is not specified if not end_dt: end_dt = now event_start = rfields["event_start"] if not start_dt and event_start and event_start.field: # No interval start => fall back to first event start query = FS(event_start.selector) != None resource.add_filter(query) rows = resource.select([event_start.selector], limit = 1, orderby = event_start.field, as_rows = True, ) # Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None if rows: first_event = rows.first()[event_start.colname] if isinstance(first_event, datetime.date): first_event = tp_tzsafe(datetime.datetime.fromordinal(first_event.toordinal())) start_dt = first_event event_end = rfields["event_end"] if not start_dt and event_end and event_end.field: # No interval start => fall back to first event end minus # one standard slot length: query = FS(event_end.selector) != None 
resource.add_filter(query) rows = resource.select([event_end.selector], limit=1, orderby=event_end.field, as_rows=True) # Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None if rows: last_event = rows.first()[event_end.colname] if isinstance(last_event, datetime.date): last_event = tp_tzsafe(datetime.datetime.fromordinal(last_event.toordinal())) start_dt = dtparse("-%s" % STANDARD_SLOT, start=last_event) if not start_dt: # No interval start => fall back to interval end minus # one slot length: if not slots: slots = STANDARD_SLOT try: start_dt = dtparse("-%s" % slots, start=end_dt) except (SyntaxError, ValueError): slots = STANDARD_SLOT start_dt = dtparse("-%s" % slots, start=end_dt) # Fall back for slot length if not slots: # No slot length specified => determine optimum automatically # @todo: determine from density of events rather than from # total interval length? seconds = abs(end_dt - start_dt).total_seconds() day = 86400 if seconds < day: slots = "hours" elif seconds < 3 * day: slots = "6 hours" elif seconds < 28 * day: slots = "days" elif seconds < 90 * day: slots = "weeks" elif seconds < 730 * day: slots = "months" elif seconds < 2190 * day: slots = "3 months" else: slots = "years" # Create event frame ef = TimeSeriesEventFrame(start_dt, end_dt, slots) return ef
def _event_frame(self, start = None, end = None, slots = None, ): """ Create an event frame for this report Args: start: the start date/time (string, date or datetime) end: the end date/time (string, date or datetime) slots: the slot length (string) Returns: the event frame """ from ..resource import FS resource = self.resource rfields = self.rfields STANDARD_SLOT = "1 day" now = tp_tzsafe(datetime.datetime.utcnow()) # Parse start and end time dtparse = self.dtparse start_dt = end_dt = None if start: if isinstance(start, str): start_dt = dtparse(start, start=now) else: if isinstance(start, datetime.datetime): start_dt = tp_tzsafe(start) else: # Date only => start at midnight start_dt = tp_tzsafe(datetime.datetime.fromordinal(start.toordinal())) if end: if isinstance(end, str): relative_to = start_dt if start_dt else now end_dt = dtparse(end, start=relative_to) else: if isinstance(end, datetime.datetime): end_dt = tp_tzsafe(end) else: # Date only => end at midnight end_dt = tp_tzsafe(datetime.datetime.fromordinal(end.toordinal())) # Fall back to now if end is not specified if not end_dt: end_dt = now event_start = rfields["event_start"] if not start_dt and event_start and event_start.field: # No interval start => fall back to first event start query = FS(event_start.selector) != None resource.add_filter(query) rows = resource.select([event_start.selector], limit = 1, orderby = event_start.field, as_rows = True, ) # Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None if rows: first_event = rows.first()[event_start.colname] if isinstance(first_event, datetime.date): first_event = tp_tzsafe(datetime.datetime.fromordinal(first_event.toordinal())) start_dt = first_event event_end = rfields["event_end"] if not start_dt and event_end and event_end.field: # No interval start => fall back to first event end minus # one standard slot length: query = FS(event_end.selector) != None 
resource.add_filter(query) rows = resource.select([event_end.selector], limit=1, orderby=event_end.field, as_rows=True) # Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None if rows: last_event = rows.first()[event_end.colname] if isinstance(last_event, datetime.date): last_event = tp_tzsafe(datetime.datetime.fromordinal(last_event.toordinal())) start_dt = dtparse("-%s" % STANDARD_SLOT, start=last_event) if not start_dt: # No interval start => fall back to interval end minus # one slot length: if not slots: slots = STANDARD_SLOT try: start_dt = dtparse("-%s" % slots, start=end_dt) except (SyntaxError, ValueError): slots = STANDARD_SLOT start_dt = dtparse("-%s" % slots, start=end_dt) # Fall back for slot length if not slots: # No slot length specified => determine optimum automatically # @todo: determine from density of events rather than from # total interval length? seconds = abs(end_dt - start_dt).total_seconds() day = 86400 if seconds < day: slots = "hours" elif seconds < 3 * day: slots = "6 hours" elif seconds < 28 * day: slots = "days" elif seconds < 90 * day: slots = "weeks" elif seconds < 730 * day: slots = "months" elif seconds < 2190 * day: slots = "3 months" else: slots = "years" # Create event frame ef = TimeSeriesEventFrame(start_dt, end_dt, slots) return ef
Python
def _select(self): """ Select records from the resource and store them as events in this time series """ from ..resource import FS resource = self.resource rfields = self.rfields # Fields to extract cumulative = False event_start = rfields.get("event_start") fields = {event_start.selector} event_end = rfields.get("event_end") if event_end: fields.add(event_end.selector) rows_rfield = rfields.get("rows") if rows_rfield: fields.add(rows_rfield.selector) cols_rfield = rfields.get("cols") if cols_rfield: fields.add(cols_rfield.selector) fact_columns = [] for fact in self.facts: if fact.method == "cumulate": cumulative = True if fact.resource is None: fact.resolve(resource) for rfield in (fact.base_rfield, fact.slope_rfield): if rfield: fact_columns.append(rfield.colname) fields.add(rfield.selector) fields.add(resource._id.name) # Get event frame event_frame = self.event_frame # Filter by event frame start: if not cumulative and event_end: # End date of events must be after the event frame start date end_selector = FS(event_end.selector) start = event_frame.start query = (end_selector == None) | (end_selector >= start) else: # No point if events have no end date, and wrong if # method is cumulative query = None # Filter by event frame end: # Start date of events must be before event frame end date start_selector = FS(event_start.selector) end = event_frame.end q = (start_selector == None) | (start_selector <= end) query = query & q if query is not None else q # Add as temporary filter resource.add_filter(query) # Compute baseline value = None baseline_rfield = rfields.get("baseline") if baseline_rfield: baseline_table = current.db[baseline_rfield.tname] pkey = str(baseline_table._id) colname = baseline_rfield.colname rows = resource.select([baseline_rfield.selector], groupby = [pkey, colname], as_rows = True, ) value = 0 for row in rows: v = row[colname] if v is not None: value += v event_frame.baseline = value # Extract the records data = resource.select(fields) # 
Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None # Do we need to convert dates into datetimes? convert_start = True if event_start.ftype == "date" else False convert_end = True if event_start.ftype == "date" else False fromordinal = datetime.datetime.fromordinal convert_date = lambda d: fromordinal(d.toordinal()) # Column names for extractions pkey = str(resource._id) start_colname = event_start.colname end_colname = event_end.colname if event_end else None rows_colname = rows_rfield.colname if rows_rfield else None cols_colname = cols_rfield.colname if cols_rfield else None # Create the events events = [] add_event = events.append rows_keys = set() cols_keys = set() for row in data.rows: # Extract values values = dict((colname, row[colname]) for colname in fact_columns) # Extract grouping keys grouping = {} if rows_colname: grouping["row"] = row[rows_colname] if cols_colname: grouping["col"] = row[cols_colname] # Extract start/end date start = row[start_colname] if convert_start and start: start = convert_date(start) end = row[end_colname] if end_colname else None if convert_end and end: end = convert_date(end) # values = (base, slope) event = TimeSeriesEvent(row[pkey], start = start, end = end, values = values, **grouping) add_event(event) rows_keys |= event.rows cols_keys |= event.cols # Extend the event frame with these events if events: event_frame.extend(events) # Store the grouping keys self.rows_keys = rows_keys self.cols_keys = cols_keys return data
def _select(self): """ Select records from the resource and store them as events in this time series """ from ..resource import FS resource = self.resource rfields = self.rfields # Fields to extract cumulative = False event_start = rfields.get("event_start") fields = {event_start.selector} event_end = rfields.get("event_end") if event_end: fields.add(event_end.selector) rows_rfield = rfields.get("rows") if rows_rfield: fields.add(rows_rfield.selector) cols_rfield = rfields.get("cols") if cols_rfield: fields.add(cols_rfield.selector) fact_columns = [] for fact in self.facts: if fact.method == "cumulate": cumulative = True if fact.resource is None: fact.resolve(resource) for rfield in (fact.base_rfield, fact.slope_rfield): if rfield: fact_columns.append(rfield.colname) fields.add(rfield.selector) fields.add(resource._id.name) # Get event frame event_frame = self.event_frame # Filter by event frame start: if not cumulative and event_end: # End date of events must be after the event frame start date end_selector = FS(event_end.selector) start = event_frame.start query = (end_selector == None) | (end_selector >= start) else: # No point if events have no end date, and wrong if # method is cumulative query = None # Filter by event frame end: # Start date of events must be before event frame end date start_selector = FS(event_start.selector) end = event_frame.end q = (start_selector == None) | (start_selector <= end) query = query & q if query is not None else q # Add as temporary filter resource.add_filter(query) # Compute baseline value = None baseline_rfield = rfields.get("baseline") if baseline_rfield: baseline_table = current.db[baseline_rfield.tname] pkey = str(baseline_table._id) colname = baseline_rfield.colname rows = resource.select([baseline_rfield.selector], groupby = [pkey, colname], as_rows = True, ) value = 0 for row in rows: v = row[colname] if v is not None: value += v event_frame.baseline = value # Extract the records data = resource.select(fields) # 
Remove the filter we just added rfilter = resource.rfilter rfilter.filters.pop() rfilter.query = None rfilter.transformed = None # Do we need to convert dates into datetimes? convert_start = True if event_start.ftype == "date" else False convert_end = True if event_start.ftype == "date" else False fromordinal = datetime.datetime.fromordinal convert_date = lambda d: fromordinal(d.toordinal()) # Column names for extractions pkey = str(resource._id) start_colname = event_start.colname end_colname = event_end.colname if event_end else None rows_colname = rows_rfield.colname if rows_rfield else None cols_colname = cols_rfield.colname if cols_rfield else None # Create the events events = [] add_event = events.append rows_keys = set() cols_keys = set() for row in data.rows: # Extract values values = dict((colname, row[colname]) for colname in fact_columns) # Extract grouping keys grouping = {} if rows_colname: grouping["row"] = row[rows_colname] if cols_colname: grouping["col"] = row[cols_colname] # Extract start/end date start = row[start_colname] if convert_start and start: start = convert_date(start) end = row[end_colname] if end_colname else None if convert_end and end: end = convert_date(end) # values = (base, slope) event = TimeSeriesEvent(row[pkey], start = start, end = end, values = values, **grouping) add_event(event) rows_keys |= event.rows cols_keys |= event.cols # Extend the event frame with these events if events: event_frame.extend(events) # Store the grouping keys self.rows_keys = rows_keys self.cols_keys = cols_keys return data
Python
def default_timestamp(table, event_end=None): """ Get the default timestamp for a table Args: table: the Table event_end: event_end, if not default (field selector) Returns: tuple (event_start, event_end), field selectors """ event_start = None for fname in ("date", "start_date", "created_on"): if fname in table.fields: event_start = fname break if event_start and not event_end: for fname in ("end_date",): if fname in table.fields: event_end = fname break return event_start, event_end
def default_timestamp(table, event_end=None): """ Get the default timestamp for a table Args: table: the Table event_end: event_end, if not default (field selector) Returns: tuple (event_start, event_end), field selectors """ event_start = None for fname in ("date", "start_date", "created_on"): if fname in table.fields: event_start = fname break if event_start and not event_end: for fname in ("end_date",): if fname in table.fields: event_end = fname break return event_start, event_end
Python
def resolve_timestamp(self, event_start, event_end): """ Resolve the event_start and event_end field selectors Args: event_start: the field selector for the event start field event_end: the field selector for the event end field """ resource = self.resource rfields = self.rfields # Defaults table = resource.table if not event_start: event_start, event_end = self.default_timestamp(table) if not event_start: raise SyntaxError("No time stamps found in %s" % table) # Get the fields start_rfield = resource.resolve_selector(event_start) if event_end: end_rfield = resource.resolve_selector(event_end) else: end_rfield = None rfields["event_start"] = start_rfield rfields["event_end"] = end_rfield
def resolve_timestamp(self, event_start, event_end): """ Resolve the event_start and event_end field selectors Args: event_start: the field selector for the event start field event_end: the field selector for the event end field """ resource = self.resource rfields = self.rfields # Defaults table = resource.table if not event_start: event_start, event_end = self.default_timestamp(table) if not event_start: raise SyntaxError("No time stamps found in %s" % table) # Get the fields start_rfield = resource.resolve_selector(event_start) if event_end: end_rfield = resource.resolve_selector(event_end) else: end_rfield = None rfields["event_start"] = start_rfield rfields["event_end"] = end_rfield
Python
def resolve_axes(self, rows, cols): """ Resolve the grouping axes field selectors Args: rows: the rows field selector cols: the columns field selector """ resource = self.resource rfields = self.rfields # Resolve rows selector rows_rfield = None if rows: try: rows_rfield = resource.resolve_selector(rows) except (AttributeError, SyntaxError): rows_rfield = None # Resolve columns selector cols_rfield = None if cols: try: cols_rfield = resource.resolve_selector(cols) except (AttributeError, SyntaxError): cols_rfield = None rfields["rows"] = rows_rfield rfields["cols"] = cols_rfield
def resolve_axes(self, rows, cols): """ Resolve the grouping axes field selectors Args: rows: the rows field selector cols: the columns field selector """ resource = self.resource rfields = self.rfields # Resolve rows selector rows_rfield = None if rows: try: rows_rfield = resource.resolve_selector(rows) except (AttributeError, SyntaxError): rows_rfield = None # Resolve columns selector cols_rfield = None if cols: try: cols_rfield = resource.resolve_selector(cols) except (AttributeError, SyntaxError): cols_rfield = None rfields["rows"] = rows_rfield rfields["cols"] = cols_rfield
Python
def dtparse(timestr, start=None): """ Parse a string for start/end date(time) of an interval Args: timestr: the time string start: the start datetime to relate relative times to """ if start is None: start = tp_tzsafe(datetime.datetime.utcnow()) if not timestr: return start # Relative to start: [+|-]{n}[year|month|week|day|hour]s match = dt_regex.DELTA.match(timestr) if match: groups = match.groups() intervals = {"y": "years", "m": "months", "w": "weeks", "d": "days", "h": "hours", } length = intervals.get(groups[3]) if not length: raise SyntaxError("Invalid date/time: %s" % timestr) num = int(groups[2]) if groups[1] == "-": num *= -1 delta = {length: num} end = groups[0] if end == "<": delta.update(minute=0, second=0, microsecond=0) if length != "hours": delta.update(hour=0) if length == "weeks": delta.update(weeks=num-1, weekday=0) elif length == "months": delta.update(day=1) elif length == "years": delta.update(month=1, day=1) elif end == ">": delta.update(minute=59, second=59, microsecond=999999) if length != "hours": delta.update(hour=23) if length == "weeks": delta.update(weekday=6) elif length == "months": delta.update(day=31) elif length == "years": delta.update(month=12, day=31) return start + relativedelta(**delta) # Month/Year, e.g. "5/2001" match = dt_regex.MONTH_YEAR.match(timestr) if match: groups = match.groups() year = int(groups[1]) month = int(groups[0]) return tp_datetime(year, month, 1, 0, 0, 0) # Year-Month, e.g. "2001-05" match = dt_regex.YEAR_MONTH.match(timestr) if match: groups = match.groups() month = int(groups[1]) year = int(groups[0]) return tp_datetime(year, month, 1, 0, 0, 0) # Year only, e.g. "1996" match = dt_regex.YEAR.match(timestr) if match: groups = match.groups() year = int(groups[0]) return tp_datetime(year, 1, 1, 0, 0, 0) # Date, e.g. 
"2013-01-04" match = dt_regex.DATE.match(timestr) if match: groups = match.groups() year = int(groups[0]) month = int(groups[1]) day = int(groups[2]) try: return tp_datetime(year, month, day) except ValueError: # Day out of range return tp_datetime(year, month, 1) + \ datetime.timedelta(days = day-1) # ISO datetime dt = s3_decode_iso_datetime(str(timestr)) return s3_utc(dt)
def dtparse(timestr, start=None): """ Parse a string for start/end date(time) of an interval Args: timestr: the time string start: the start datetime to relate relative times to """ if start is None: start = tp_tzsafe(datetime.datetime.utcnow()) if not timestr: return start # Relative to start: [+|-]{n}[year|month|week|day|hour]s match = dt_regex.DELTA.match(timestr) if match: groups = match.groups() intervals = {"y": "years", "m": "months", "w": "weeks", "d": "days", "h": "hours", } length = intervals.get(groups[3]) if not length: raise SyntaxError("Invalid date/time: %s" % timestr) num = int(groups[2]) if groups[1] == "-": num *= -1 delta = {length: num} end = groups[0] if end == "<": delta.update(minute=0, second=0, microsecond=0) if length != "hours": delta.update(hour=0) if length == "weeks": delta.update(weeks=num-1, weekday=0) elif length == "months": delta.update(day=1) elif length == "years": delta.update(month=1, day=1) elif end == ">": delta.update(minute=59, second=59, microsecond=999999) if length != "hours": delta.update(hour=23) if length == "weeks": delta.update(weekday=6) elif length == "months": delta.update(day=31) elif length == "years": delta.update(month=12, day=31) return start + relativedelta(**delta) # Month/Year, e.g. "5/2001" match = dt_regex.MONTH_YEAR.match(timestr) if match: groups = match.groups() year = int(groups[1]) month = int(groups[0]) return tp_datetime(year, month, 1, 0, 0, 0) # Year-Month, e.g. "2001-05" match = dt_regex.YEAR_MONTH.match(timestr) if match: groups = match.groups() month = int(groups[1]) year = int(groups[0]) return tp_datetime(year, month, 1, 0, 0, 0) # Year only, e.g. "1996" match = dt_regex.YEAR.match(timestr) if match: groups = match.groups() year = int(groups[0]) return tp_datetime(year, 1, 1, 0, 0, 0) # Date, e.g. 
"2013-01-04" match = dt_regex.DATE.match(timestr) if match: groups = match.groups() year = int(groups[0]) month = int(groups[1]) day = int(groups[2]) try: return tp_datetime(year, month, day) except ValueError: # Day out of range return tp_datetime(year, month, 1) + \ datetime.timedelta(days = day-1) # ISO datetime dt = s3_decode_iso_datetime(str(timestr)) return s3_utc(dt)
Python
def series(value): """ Convert a field value into a set of series keys Args: value: the field value """ if value is DEFAULT: series = set() elif value is None: series = {None} elif type(value) is list: series = set(s3_flatlist(value)) else: series = {value} return series
def series(value): """ Convert a field value into a set of series keys Args: value: the field value """ if value is DEFAULT: series = set() elif value is None: series = {None} elif type(value) is list: series = set(s3_flatlist(value)) else: series = {value} return series
Python
def compute(self, values):
    """
    Aggregate a list of values using this fact's aggregation method

    Args:
        values: iterable of values; for the "cumulate" method an
                iterable of (base, slope, duration) tuples

    Returns:
        the aggregated value, or None if it cannot be computed
        (e.g. non-numeric values, empty input for min/max)
    """

    if values is None:
        return None

    method = self.method
    # Drop missing values; identity check ("is not None") is the
    # correct way to filter out None
    values = [v for v in values if v is not None]

    result = None

    if method == "count":
        result = len(values)

    elif method == "min":
        try:
            result = min(values)
        except (TypeError, ValueError):
            # Unorderable types or empty sequence
            result = None

    elif method == "max":
        try:
            result = max(values)
        except (TypeError, ValueError):
            result = None

    elif method == "sum":
        try:
            result = sum(values)
        except (TypeError, ValueError):
            result = None

    elif method == "avg":
        try:
            num = len(values)
            if num:
                result = sum(values) / float(num)
        except (TypeError, ValueError):
            result = None

    elif method == "cumulate":
        try:
            result = sum(base + slope * duration
                         for base, slope, duration in values)
        except (TypeError, ValueError):
            result = None

    return result
Python
def resolve(self, resource):
    """
    Resolve the base and slope field selectors against a resource

    Args:
        resource: the resource to resolve the selectors against

    Returns:
        self (for method chaining)

    Raises:
        SyntaxError: if no selector can be resolved, or if a resolved
                     field is not numeric for a numeric method
    """

    self.resource = None

    base = self.base
    self.base_rfield = None
    self.base_column = base

    slope = self.slope
    self.slope_rfield = None
    self.slope_column = slope

    # Resolve base selector (failures are tolerated here and
    # checked after both selectors have been tried)
    base_rfield = None
    if base:
        try:
            base_rfield = resource.resolve_selector(base)
        except (AttributeError, SyntaxError):
            base_rfield = None

    # Resolve slope selector
    slope_rfield = None
    if slope:
        try:
            slope_rfield = resource.resolve_selector(slope)
        except (AttributeError, SyntaxError):
            slope_rfield = None

    method = self.method

    # At least one field parameter must be resolvable;
    # "cumulate" can work with just the slope field
    if base_rfield is None:
        if method != "cumulate" or slope_rfield is None:
            raise SyntaxError("Invalid fact parameter")

    # All methods except count require numeric input values
    if method != "count":
        numeric_types = NUMERIC_TYPES
        if base_rfield and base_rfield.ftype not in numeric_types:
            raise SyntaxError("Fact field type not numeric: %s (%s)" %
                              (base, base_rfield.ftype))
        if slope_rfield and slope_rfield.ftype not in numeric_types:
            raise SyntaxError("Fact field type not numeric: %s (%s)" %
                              (slope, slope_rfield.ftype))

    if base_rfield:
        self.base_rfield = base_rfield
        self.base_column = base_rfield.colname

    if slope_rfield:
        self.slope_rfield = slope_rfield
        self.slope_column = slope_rfield.colname

    if not self.label:
        # Lookup the label from the timeplot options
        label = self.lookup_label(resource, method, base, slope, self.interval)
        if not label:
            # Generate a default label
            label = self.default_label(base_rfield, self.method)
        self.label = label

    self.resource = resource
    return self
Python
def lookup_label(cls, resource, method, base, slope=None, interval=None):
    """
    Lookup the fact label from the timeplot options of a resource

    Args:
        resource: the resource (CRUDResource)
        method: the aggregation method (string)
        base: the base field selector (string)
        slope: the slope field selector (string)
        interval: the interval expression (string)

    Returns:
        the label (string), or None if no matching fact option exists
    """

    # Get the configured fact options, if any
    fact_opts = None
    if resource:
        config = resource.get_config("timeplot_options")
        if config:
            fact_opts = config.get("fact")

    label = None
    if fact_opts:
        parse = cls.parse
        for opt in fact_opts:
            # Options are either a (title, facts) tuple or just facts
            if isinstance(opt, tuple):
                title, facts = opt
            else:
                title, facts = None, opt
            facts = parse(facts)

            # Find a fact that matches all given parameters
            match = None
            for fact in facts:
                if fact.method == method and \
                   fact.base == base and \
                   fact.slope == slope and \
                   fact.interval == interval:
                    match = fact
                    break

            if match:
                if match.label:
                    label = match.label
                elif len(facts) == 1:
                    # The option title labels a single fact
                    label = title
            if label:
                break

    return label
Python
def extend(self, events):
    """
    Extend this time frame with events, assigning each event to all
    periods it overlaps (as "current") and to later periods as
    "previous" once it has ended

    Args:
        events: iterable of events

    TODO integrate in constructor
    TODO handle self.rule == None
    """

    if not events:
        return
    empty = self.empty

    # Order events by start datetime
    events = sorted(events)

    rule = self.rule
    periods = self.periods

    # No point to loop over periods before the first event:
    start = events[0].start
    if start is None or start <= self.start:
        first = rule[0]
    else:
        first = rule.before(start, inc=True)

    current_events = {}
    previous_events = {}
    for start in rule.between(first, self.end, inc=True):

        # Compute end of this period
        end = rule.after(start)
        if not end:
            if start < self.end:
                end = self.end
            else:
                # Period start is at the end of the event frame
                break

        # Find all current events
        last_index = None
        for index, event in enumerate(events):
            last_index = index
            if event.end and event.end < start:
                # Event ended before this period
                previous_events[event.event_id] = event
            elif event.start is None or event.start < end:
                # Event starts before or during this period
                current_events[event.event_id] = event
            else:
                # Event starts only after this period
                break

        # Add current events to current period
        period = periods.get(start)
        if period is None:
            period = periods[start] = TimeSeriesPeriod(start, end=end)
        for event in current_events.values():
            period.add_current(event)
        for event in previous_events.values():
            period.add_previous(event)

        empty = False

        # Remaining events (keep the last inspected one - it may
        # still overlap subsequent periods)
        events = events[last_index:] if last_index is not None else None
        if not events:
            # No more events
            break

        # Remove events which end during this period
        remaining = {}
        for event_id, event in current_events.items():
            if not event.end or event.end > end:
                remaining[event_id] = event
            else:
                previous_events[event_id] = event
        current_events = remaining

    self.empty = empty
    return
Python
def as_dict(self, rows=None, cols=None, isoformat=True):
    """
    Convert the aggregated results into a JSON-serializable dict

    Args:
        rows: the row keys for the result (ordered)
        cols: the column keys for the result (ordered)
        isoformat: convert datetimes into ISO-formatted strings

    Returns:
        dict with keys "t" (start/end), "v" (totals), "r" (row
        totals), "c" (column totals) and "x" (matrix); "r", "c"
        and "x" are None if the respective keys were not given
    """

    # Start and end datetime
    start = self.start
    if start and isoformat:
        start = start.isoformat()
    end = self.end
    if end and isoformat:
        end = end.isoformat()

    # Row totals (in the order given by rows)
    row_totals = None
    if rows is not None:
        row_data = self.rows
        row_totals = [row_data.get(key) for key in rows]

    # Column totals (in the order given by cols)
    col_totals = None
    if cols is not None:
        col_data = self.cols
        col_totals = [col_data.get(key) for key in cols]

    # Matrix (one list per row, one cell per column)
    matrix = None
    if rows is not None and cols is not None:
        matrix_data = self.matrix
        matrix = [[matrix_data.get((row, col)) for col in cols]
                  for row in rows]

    # Output
    return {"t": (start, end),
            "v": self.totals,
            "r": row_totals,
            "c": col_totals,
            "x": matrix,
            }
Python
def group(self, cumulative=False):
    """
    Group events by their row and col axis values; populates
    self._rows, self._cols and self._matrix with
    {key: (current_ids, previous_ids)} mappings

    Args:
        cumulative: include previous events (index 1 of each
                    (current, previous) set-pair)
    """

    from itertools import product

    event_sets = [self.cevents]
    if cumulative:
        event_sets.append(self.pevents)

    rows = {}
    cols = {}
    matrix = {}
    for index, events in enumerate(event_sets):
        for event_id, event in events.items():
            # Group by row keys
            for key in event.rows:
                row = rows.get(key)
                if row is None:
                    row = rows[key] = (set(), set())
                row[index].add(event_id)
            # Group by column keys
            for key in event.cols:
                col = cols.get(key)
                if col is None:
                    col = cols[key] = (set(), set())
                col[index].add(event_id)
            # Group by (row, col) key pairs
            for key in product(event.rows, event.cols):
                cell = matrix.get(key)
                if cell is None:
                    cell = matrix[key] = (set(), set())
                cell[index].add(event_id)

    self._rows = rows
    self._cols = cols
    self._matrix = matrix
Python
def aggregate(self, facts):
    """
    Group and aggregate the events in this period

    Args:
        facts: a fact or list of facts to aggregate

    Returns:
        list: the period totals, one per fact (in facts order);
              also stored in self.totals, with per-axis results
              in self.rows, self.cols and self.matrix
    """

    # Reset
    self._reset()

    rows = self.rows = {}
    cols = self.cols = {}
    matrix = self.matrix = {}

    totals = []

    if not isinstance(facts, (list, tuple)):
        facts = [facts]

    # Grouping must include previous events if any fact cumulates
    if any(fact.method == "cumulate" for fact in facts):
        self.group(cumulative=True)
    else:
        self.group()

    def aggregate_axis(grouped, target, events, cumulative, aggregate):
        # Aggregate each group of events, appending one result per
        # fact to the target dict (in facts order)
        for key, event_sets in grouped.items():
            event_ids = event_sets[0]
            if cumulative:
                # NOTE: in-place merge of previous into current IDs,
                # as in the original per-axis loops (idempotent across
                # multiple cumulative facts)
                event_ids |= event_sets[1]
            items = [events[event_id] for event_id in event_ids]
            target.setdefault(key, []).append(aggregate(self, items))

    for fact in facts:

        # Select events (cumulative facts include previous events)
        if fact.method == "cumulate":
            events = dict(self.pevents)
            events.update(self.cevents)
            cumulative = True
        else:
            events = self.cevents
            cumulative = False

        aggregate = fact.aggregate

        # Aggregate rows, columns and matrix cells
        aggregate_axis(self._rows, rows, events, cumulative, aggregate)
        aggregate_axis(self._cols, cols, events, cumulative, aggregate)
        aggregate_axis(self._matrix, matrix, events, cumulative, aggregate)

        # Aggregate total
        totals.append(aggregate(self, list(events.values())))

    self.totals = totals
    return totals
Python
def duration(self, event, interval):
    """
    Compute the total duration of the given event before the end
    of this period, in number of intervals

    Args:
        event: the TimeSeriesEvent
        interval: the interval expression (string)

    Returns:
        int: the number of intervals (0 if the event starts after
             the relevant end date)
    """

    # Clip the event end at the period end
    if event.end is None or event.end > self.end:
        end_date = self.end
    else:
        end_date = event.end

    if event.start is None or event.start >= end_date:
        # No duration before end_date
        result = 0
    else:
        rule = self.get_rule(event.start, end_date, interval)
        if rule:
            result = rule.count()
        else:
            # No recurrence rule => a single interval
            result = 1
    return result
Python
def restrict_data_formats(r):
    """
    Restrict data exports (prevent S3XML/S3JSON of records)

    Args:
        r: the CRUDRequest

    Note:
        Responds with HTTP 403 (via r.error) if the requested
        representation is not allowed
    """

    settings = current.deployment_settings

    allowed = ("html", "iframe", "popup", "aadata", "plain", "geojson",
               "pdf", "xlsx")
    if r.method in ("report", "timeplot", "filter", "validate"):
        # Generic JSON required by these widgets/methods
        allowed += ("json",)
    if r.method == "options":
        # S3JSON required for field options lookups
        allowed += ("s3json",)

    settings.ui.export_formats = ("pdf", "xlsx")

    if r.representation not in allowed:
        r.error(403, current.ERROR.NOT_PERMITTED)
Python
def render_list(self, value, labels, show_link=True):
    """
    Helper method to render list-type representations from
    bulk()-results.

    Args:
        value: the list of field values
        labels: the labels as returned from bulk()
        show_link: render references as links, should be the same
                   as used with bulk()

    Returns:
        a comma-separated string for spreadsheet exports, otherwise
        an UL helper (empty string if there are no values)
    """

    show_link = show_link and self.show_link

    values = [v for v in value if v is not None]
    if not values:
        # Nothing to render
        return ""

    if show_link:
        labels_ = (labels.get(v, self.default) for v in values)
    else:
        # Plain strings, sorted for stable output
        labels_ = sorted(s3_str(labels.get(v, self.default))
                         for v in values)

    if current.auth.permission.format in ("xlsx", "xls"):
        # Comma-separated list for spreadsheet exports
        return ", ".join(labels_)

    html = UL(_class="service-list")
    for label in labels_:
        html.append(LI(label))
    return html
Python
def widget(self, resource, values):
    """
    Render this widget as HTML helper object(s)

    Args:
        resource: the resource
        values: the search values from the URL query

    Returns:
        INPUT: the text input widget
    """

    attr = self.attr

    if "_size" not in attr:
        attr.update(_size="40")

    css = attr.get("class")
    attr["_class"] = "%s %s" % (css, self.css_base) if css else self.css_base
    attr["_type"] = "text"

    # Match any or all of the strings entered?
    data = attr.get("data", {})
    data["match"] = "any" if self.opts.get("match_any") else "all"
    attr["data"] = data

    # Strip wildcards from the values for display
    values = [v.strip("*") for v in values if v is not None]
    if values:
        attr["_value"] = " ".join(values)

    return INPUT(**attr)
Python
def menu_oauth(cls, **attr):
    """
    Menu for authentication with external services
    - used in default/user controller

    Args:
        **attr: not evaluated here (kept for API compatibility)
    """

    T = current.T
    settings = current.deployment_settings

    return MOA(c="default")(
            MOA("Login with Facebook", f="facebook",
                args=["login"],
                api = "facebook",
                check = lambda item: current.s3db.msg_facebook_login(),
                title = T("Login using Facebook account"),
                ),
            MOA("Login with Google", f="google",
                args=["login"],
                api = "google",
                check = lambda item: settings.get_auth_google(),
                title = T("Login using Google account"),
                ),
            MOA("Login with Humanitarian.ID", f="humanitarian_id",
                args=["login"],
                api = "humanitarianid",
                check = lambda item: settings.get_auth_humanitarian_id(),
                title = T("Login using Humanitarian.ID account"),
                ),
            )
Python
def disease():
    """ Disease Case Tracking and Contact Tracing """

    return M(c="disease")(
                M("Cases", c="disease", f="case", m="summary")(
                    M("Create", m="create"),
                    M("Watch List", m="summary",
                      vars={"~.monitoring_level__belongs": "OBSERVATION,DIAGNOSTICS"}),
                    ),
                M("Contact Tracing", c="disease", f="tracing")(
                    M("Create", m="create"),
                    ),
                M("Statistics Data", c="disease", f="stats_data", args="summary")(
                    M("Create", m="create"),
                    M("Time Plot", m="timeplot"),
                    M("Import", m="import"),
                    ),
                M("Statistics", c="disease", f="statistic")(
                    M("Create", m="create"),
                    ),
                M("Diseases", c="disease", f="disease")(
                    M("Create", m="create"),
                    ),
                )
Python
def hms():
    """ HMS / Hospital Status Assessment and Request Management """

    #s3 = current.response.s3

    return M(c="hms")(
                M("Hospitals", f="hospital")(
                    M("Create", m="create"),
                    M("Map", m="map"),
                    M("Report", m="report"),
                    M("Import", m="import", p="create"),
                    #SEP(),
                    #M("Show Map", c="gis", f="map_viewing_client",
                    #vars={"kml_feed" : "%s/hms/hospital.kml" %
                    #s3.base_url, "kml_name" : "Hospitals_"})
                    )
                )
Python
def project():
    """
    PROJECT / Project Tracking & Management

    Builds the menu for one of three deployment modes: 3W
    (who-does-what-where), task tracking, or a minimal default.
    """

    settings = current.deployment_settings

    # Deferred checks for conditional menu items
    activities = lambda i: settings.get_project_activities()
    activity_types = lambda i: settings.get_project_activity_types()
    community = settings.get_project_community()
    if community:
        IMPORT = "Import Project Communities"
    else:
        IMPORT = "Import Project Locations"
    community_volunteers = lambda i: settings.get_project_community_volunteers()
    demographics = lambda i: settings.get_project_demographics()
    hazards = lambda i: settings.get_project_hazards()
    sectors = lambda i: settings.get_project_sectors()
    stats = lambda i: settings.has_module("stats")
    themes = lambda i: settings.get_project_themes()

    menu = M(c="project")

    if settings.get_project_mode_3w():
        if community:
            menu(
                 M("Projects", f="project")(
                    M("Create", m="create"),
                 ),
                 M("Communities", f="location")(
                    # Better created from tab (otherwise Activity Type filter won't work)
                    #M("Create", m="create"),
                    M("Map", m="map"),
                    M("Community Contacts", f="location_contact"),
                    M("Community Volunteers", f="volunteer",
                      check=community_volunteers),
                 ),
                )
        else:
            menu(
                 M("Projects", f="project")(
                    M("Create", m="create"),
                    M("Map", f="location", m="map"),
                 )
                )
        menu(
             M("Reports", f="location", m="report")(
                M("3W", f="location", m="report"),
                M("Beneficiaries", f="beneficiary", m="report",
                  check=stats,
                  ),
                M("Funding", f="organisation", m="report"),
             ),
             M("Import", f="project", m="import", p="create")(
                M("Import Projects", m="import", p="create"),
                M("Import Project Organizations", f="organisation",
                  m="import", p="create"),
                M(IMPORT, f="location",
                  m="import", p="create"),
                M("Import Activities", f="activity",
                  m="import", p="create",
                  check=activities,
                  ),
             ),
             M("Partner Organizations", f="partners")(
                M("Create", m="create"),
                M("Import", m="import", p="create"),
             ),
             M("Activity Types", f="activity_type",
               check=activity_types)(
                M("Create", m="create"),
             ),
             M("Beneficiary Types", f="beneficiary_type",
               check=stats)(
                M("Create", m="create"),
             ),
             M("Demographics", f="demographic",
               check=demographics)(
                M("Create", m="create"),
             ),
             M("Hazards", f="hazard",
               check=hazards)(
                M("Create", m="create"),
             ),
             M("Sectors", f="sector",
               check=sectors)(
                M("Create", m="create"),
             ),
             M("Themes", f="theme",
               check=themes)(
                M("Create", m="create"),
             ),
            )

    elif settings.get_project_mode_task():
        menu(
             M("Projects", f="project")(
                M("Create", m="create"),
                M("Open Tasks for Project", vars={"tasks":1}),
             ),
             M("Tasks", f="task")(
                M("Create", m="create"),
             ),
            )
        if current.auth.s3_has_role("STAFF"):
            ADMIN = current.session.s3.system_roles.ADMIN
            menu(
                 M("Daily Work", f="time")(
                    M("My Logged Hours", vars={"mine":1}),
                    M("My Open Tasks", f="task", vars={"mine":1}),
                 ),
                 M("Admin", restrict=[ADMIN])(
                    M("Activity Types", f="activity_type"),
                    M("Import Tasks", f="task", m="import", p="create"),
                 ),
                 M("Reports", f="report")(
                    M("Activity Report", f="activity", m="report"),
                    M("Last Week's Work", f="time", m="report",
                      vars=Storage(rows="person_id",
                                   cols="day",
                                   fact="sum(hours)",
                                   week=1)),
                    M("Last Month's Work", f="time", m="report",
                      vars=Storage(rows="person_id",
                                   cols="week",
                                   fact="sum(hours)",
                                   month=1)),
                    M("Project Time Report", f="time", m="report"),
                 ),
                )
    else:
        menu(
             M("Projects", f="project")(
                M("Create", m="create"),
                M("Import", m="import", p="create"),
             ),
            )

    return menu
Python
def breadcrumbs(cls):
    """
    Breadcrumbs from the current options menu

    Returns:
        the breadcrumbs layout: homepage link, module homepage,
        then one entry per level of the active options-menu branch
    """

    # Configure the layout:
    layout = S3BreadcrumbsLayout

    request = current.request
    controller = request.controller
    function = request.function
    all_modules = current.deployment_settings.modules

    # Start with a link to the homepage - always:
    breadcrumbs = layout()(
        layout(all_modules["default"].name_nice)
    )

    # Append the current module's homepage - always:
    # @note: this may give a breadcrumb for which there's no menu item
    # and should therefore perhaps be replaced by a real path-check in
    # the main menu?
    if controller != "default":
        try:
            name_nice = all_modules[controller].get("name_nice", controller)
        except KeyError:
            # Module not defined
            pass
        else:
            breadcrumbs(layout(name_nice, c = controller))

    # This checks the path in the options menu, omitting the top-level item
    # (because that's the menu itself which doesn't have a linked label):
    menu = current.menu.options
    if menu and function != "index":
        branch = menu.branch()
        if branch:
            path = branch.path()
            if len(path) > 1:
                for item in path[1:]:
                    breadcrumbs(
                        layout(item.label,
                               c = item.get("controller"),
                               f = item.get("function"),
                               args = item.args,
                               # Should we retain the request vars in case
                               # the item has no vars? Or shall we merge them
                               # in any case? Didn't see the use-case yet
                               # anywhere...
                               vars = item.vars,
                               ))
    return breadcrumbs
Python
def s3_include_debug_css():
    """
    Generates html to include the css listed in
    /modules/templates/<theme>/css.cfg

    Returns:
        XML with one <link> element per configured stylesheet

    Raises:
        HTTP: 500 if the theme configuration file is missing
    """

    request = current.request

    location = current.response.s3.theme_config
    filename = "%s/modules/templates/%s/css.cfg" % (request.folder, location)
    if not os.path.isfile(filename):
        raise HTTP(500, "Theme configuration file missing: modules/templates/%s/css.cfg" % location)

    link_template = '<link href="/%s/static/styles/%%s" rel="stylesheet" type="text/css" />' % \
                    request.application

    with open(filename, "r") as css_cfg:
        # Strip each line and skip blank lines as well as #-comments:
        # previously, blank lines would produce broken <link> elements
        # pointing at the styles directory itself
        names = (line.strip() for line in css_cfg)
        links = "\n".join(link_template % name
                          for name in names
                          if name and not name.startswith("#"))

    return XML(links)
Python
def s3_include_debug_js():
    """
    Generates html to include the js scripts listed in
    /static/scripts/tools/sahana.js.cfg

    Returns:
        XML with one <script> element per configured script
    """

    request = current.request
    scripts_dir = os.path.join(request.folder, "static", "scripts")

    import mergejsmf

    # All config-file sections resolve to the scripts directory
    config_dict = {".": scripts_dir,
                   "ui": scripts_dir,
                   "web2py": scripts_dir,
                   "S3": scripts_dir,
                   }
    config_filename = "%s/tools/sahana.js.cfg" % scripts_dir
    files = mergejsmf.getFiles(config_dict, config_filename)[1]

    template = '<script src="/%s/static/scripts/%%s"></script>' % \
               request.application

    return XML("\n".join(template % name for name in files))
Python
def s3_include_ext():
    """
    Add ExtJS CSS & JS into a page for a Map

    - since this is normally run from MAP.xml() it is too late to
      insert into s3.[external_]stylesheets, so must inject sheets
      into correct order
    """

    s3 = current.response.s3
    if s3.ext_included:
        # Ext already included
        return

    request = current.request
    appname = request.application

    xtheme = current.deployment_settings.get_base_xtheme()
    if xtheme:
        xtheme = "%smin.css" % xtheme[:-3]
        xtheme = \
            "<link href='/%s/static/themes/%s' rel='stylesheet' type='text/css' />" % \
            (appname, xtheme)

    # For Sites Hosted on the Public Internet, using a CDN may provide
    # better performance
    PATH = "//cdn.sencha.com/ext/gpl/3.4.1.1" if s3.cdn else \
           "/%s/static/scripts/ext" % appname

    if s3.debug:
        # Provide debug versions of CSS / JS
        adapter = "%s/adapter/jquery/ext-jquery-adapter-debug.js" % PATH
        main_js = "%s/ext-all-debug.js" % PATH
        main_css = \
            "<link href='%s/resources/css/ext-all-notheme.css' rel='stylesheet' type='text/css' />" % PATH
        if not xtheme:
            xtheme = \
                "<link href='%s/resources/css/xtheme-gray.css' rel='stylesheet' type='text/css' />" % PATH
    else:
        adapter = "%s/adapter/jquery/ext-jquery-adapter.js" % PATH
        main_js = "%s/ext-all.js" % PATH
        if xtheme:
            main_css = \
                "<link href='/%s/static/scripts/ext/resources/css/ext-notheme.min.css' rel='stylesheet' type='text/css' />" % appname
        else:
            main_css = \
                "<link href='/%s/static/scripts/ext/resources/css/ext-gray.min.css' rel='stylesheet' type='text/css' />" % appname

    s3.scripts.extend((adapter, main_js))

    # Append the Ext locale file if available for the current language
    langfile = "ext-lang-%s.js" % s3.language
    if os.path.exists(os.path.join(request.folder, "static", "scripts",
                                   "ext", "src", "locale", langfile)):
        s3.scripts.append("%s/src/locale/%s" % (PATH, langfile))

    # Inject the stylesheets after the first <link> to preserve ordering
    if xtheme:
        s3.jquery_ready.append('''$('link:first').after("%s").after("%s")''' % (xtheme, main_css))
    else:
        s3.jquery_ready.append('''$('link:first').after("%s")''' % main_css)

    s3.ext_included = True
Python
def s3_include_simile():
    """ Add Simile CSS & JS into a page for a Timeline """

    s3 = current.response.s3
    if s3.simile_included:
        # Simile already included
        return

    appname = current.request.application

    def script_url(path):
        # Build an app-absolute URL for a static script
        return "/%s/static/scripts/%s" % (appname, path)

    # Template for stylesheet links under /static/scripts
    css_link = "<link href='/%s/static/scripts/%%s' rel='stylesheet' type='text/css' />" % appname

    if s3.debug:
        # Provide debug versions of CSS / JS
        ajax_scripts = ["platform", "debug", "xmlhttp", "json", "dom",
                        "graphics", "date-time", "string", "html",
                        "data-structure", "units", "ajax", "history",
                        "window-manager", "remoteLog",
                        ]
        timeline_scripts = ["timeline", "band", "themes", "ethers",
                            "ether-painters", "event-utils", "labellers",
                            "sources", "original-painter", "detailed-painter",
                            "overview-painter", "compact-painter",
                            "decorators",
                            ]
        scripts = [script_url("S3/s3.simile.js")]
        scripts.extend(script_url("simile/ajax/scripts/%s.js" % name)
                       for name in ajax_scripts)
        scripts.append(script_url("S3/s3.timeline.js"))
        scripts.extend(script_url("simile/timeline/scripts/%s.js" % name)
                       for name in timeline_scripts)
        scripts.extend((script_url("simile/timeline/scripts/l10n/en/timeline.js"),
                        script_url("simile/timeline/scripts/l10n/en/labellers.js"),
                        ))
        s3.scripts += scripts
        css = "".join((css_link % "simile/ajax/styles/graphics.css",
                       css_link % "simile/timeline/styles/ethers.css",
                       css_link % "simile/timeline/styles/events.css",
                       css_link % "simile/timeline/styles/timeline.css",
                       ))
    else:
        s3.scripts.append(script_url("S3/s3.timeline.min.js"))
        css = "".join((css_link % "simile/ajax/styles/graphics.css",
                       css_link % "simile/timeline/timeline-bundle.css",
                       ))

    # Inject the stylesheets after the first <link>
    s3.jquery_ready.append('''$('link:first').after("%s")''' % css)

    # Timeline label localization
    supported_locales = {
        "cs",   # Czech
        "de",   # German
        "en",   # English
        "es",   # Spanish
        "fr",   # French
        "it",   # Italian
        "nl",   # Dutch (The Netherlands)
        "pl",   # Polish
        "ru",   # Russian
        "se",   # Swedish
        "tr",   # Turkish
        "vi",   # Vietnamese
        "zh",   # Chinese
    }
    locale = s3.language if s3.language in supported_locales else "en"
    s3.scripts += [script_url("simile/timeline/scripts/l10n/%s/timeline.js" % locale),
                   script_url("simile/timeline/scripts/l10n/%s/labellers.js" % locale),
                   ]

    s3.simile_included = True
Python
def s3_include_underscore():
    """
    Add Underscore JS into a page
    - for Map templates
    - for templates in GroupedOptsWidget comment
    """

    s3 = current.response.s3
    scripts = s3.scripts

    if s3.cdn:
        # Served from CDN; debug mode loads the unminified source
        base = "//cdnjs.cloudflare.com/ajax/libs/underscore.js/1.6.0"
        script = "%s/underscore.js" % base if s3.debug else \
                 "%s/underscore-min.js" % base
    else:
        # Served locally
        filename = "scripts/underscore.js" if s3.debug else \
                   "scripts/underscore-min.js"
        script = URL(c="static", f=filename)

    # Avoid duplicate inclusion
    if script not in scripts:
        scripts.append(script)
Python
def validate(form):
    """
    Validate the test result registration form
    - personal details are required for reporting to CWA by name
    - make sure the required consent option is checked
    - make sure the selected device matches the selected disease

    Args:
        form: the FORM (errors are written to form.errors)
    """

    T = current.T

    formvars = form.vars
    errors = form.errors

    consent = ConsentTracking.parse(formvars.get("consent"))

    # Verify that we have the data and consent required for the
    # selected CWA reporting mode
    cwa = formvars.get("report_to_cwa")
    if cwa == "PERSONAL":
        # Reporting by name requires personal details
        for fn in ("first_name", "last_name", "date_of_birth"):
            if not formvars.get(fn):
                errors[fn] = T("Enter a value")
        consent_option = "CWA_PERSONAL"
    elif cwa == "ANONYMOUS":
        consent_option = "CWA_ANONYMOUS"
    else:
        consent_option = None

    # The consent option matching the reporting mode must be checked
    if consent_option:
        c = consent.get(consent_option)
        if not c or not c[1]:
            errors.consent = T("Consent required")

    # A device ID must be specified when the DCC option is selected
    if formvars.get("dcc_option") and not formvars.get("device_id"):
        errors.device_id = T("Enter a value")

    # Verify that the selected testing device matches the selected
    # disease (only if disease is selectable - otherwise, the device
    # list is pre-filtered anyway):
    if "disease_id" in formvars:
        device_id = formvars.get("device_id")
        if device_id:
            table = current.s3db.disease_testing_device
            query = (table.id == device_id) & \
                    (table.disease_id == formvars["disease_id"]) & \
                    (table.deleted == False)
            match = current.db(query).select(table.id,
                                             limitby = (0, 1),
                                             ).first()
            if not match:
                errors.device_id = T("Device not applicable for selected disease")
Python
def accept(self, r, form):
    """
    Accept the test result form, and report to CWA if selected

    Args:
        r: the CRUDRequest
        form: the test result form

    Returns:
        output dict for view, or None when redirecting
    """

    T = current.T
    auth = current.auth
    s3db = current.s3db

    response = current.response

    formvars = form.vars

    # Create disease_case_diagnostics record
    testresult = {"result": formvars.get("result"),
                  }
    # Copy over optional fields only when present in the form
    for fn in ("site_id",
               "disease_id",
               "probe_date",
               "device_id",
               "demographic_id",
               ):
        if fn in formvars:
            testresult[fn] = formvars[fn]

    table = s3db.disease_case_diagnostics
    testresult["id"] = record_id = table.insert(**testresult)
    if not record_id:
        raise RuntimeError("Could not create testresult record")

    # Set ownership and run the create-onaccept for the new record
    auth.s3_set_record_owner(table, record_id)
    auth.s3_make_session_owner(table, record_id)
    s3db.onaccept(table, testresult, method="create")
    response.confirmation = T("Test Result registered")

    # Report to CWA?
    report_to_cwa = formvars.get("report_to_cwa")
    dcc_option = False
    if report_to_cwa == "ANONYMOUS":
        processing_type = "CWA_ANONYMOUS"
        cwa_report = CWAReport(record_id)
    elif report_to_cwa == "PERSONAL":
        # DCC option only available with personal reporting
        dcc_option = formvars.get("dcc_option")
        processing_type = "CWA_PERSONAL"
        cwa_report = CWAReport(record_id,
                               anonymous = False,
                               first_name = formvars.get("first_name"),
                               last_name = formvars.get("last_name"),
                               dob = formvars.get("date_of_birth"),
                               dcc = dcc_option,
                               )
    else:
        # No CWA reporting requested
        processing_type = cwa_report = None

    if cwa_report:
        # Register consent
        cwa_report.register_consent(processing_type,
                                    formvars.get("consent"),
                                    )

        # Send to CWA
        if cwa_report.send():
            response.information = T("Result reported to %(system)s") % CWA
            retry = False
        else:
            response.error = T("Report to %(system)s failed") % CWA
            retry = True

        # Store DCC data
        if dcc_option:
            cwa_data = cwa_report.data
            try:
                hcert = DCC.from_result(cwa_data.get("hash"),
                                        record_id,
                                        cwa_data.get("fn"),
                                        cwa_data.get("ln"),
                                        cwa_data.get("dob"),
                                        )
            except ValueError as e:
                hcert = None
                response.warning = str(e)
            if hcert:
                hcert.save()
            else:
                # Remove DCC flag if hcert could not be generated
                cwa_report.dcc = False

        CustomController._view("RLPPTM", "certificate.html")

        # Title
        field = table.disease_id
        if cwa_report.disease_id and field.represent:
            disease = field.represent(cwa_report.disease_id)
            title = "%s %s" % (disease, T("Test Result"))
        else:
            title = T("Test Result")

        output = {"title": title,
                  "intro": None,
                  "form": cwa_report.formatted(retry=retry),
                  }
    else:
        # No CWA report => redirect to the read view of the record
        self.next = r.url(id=record_id, method="read")
        output = None

    return output
def accept(self, r, form):
    """
    Accept the test result form, and report to CWA if selected

    Args:
        r: the CRUDRequest
        form: the test result form

    Returns:
        output dict for view, or None when redirecting
    """

    T = current.T
    auth = current.auth
    s3db = current.s3db

    response = current.response

    formvars = form.vars

    # Create disease_case_diagnostics record
    testresult = {"result": formvars.get("result"),
                  }
    # Copy over optional fields only when present in the form
    for fn in ("site_id",
               "disease_id",
               "probe_date",
               "device_id",
               "demographic_id",
               ):
        if fn in formvars:
            testresult[fn] = formvars[fn]

    table = s3db.disease_case_diagnostics
    testresult["id"] = record_id = table.insert(**testresult)
    if not record_id:
        raise RuntimeError("Could not create testresult record")

    # Set ownership and run the create-onaccept for the new record
    auth.s3_set_record_owner(table, record_id)
    auth.s3_make_session_owner(table, record_id)
    s3db.onaccept(table, testresult, method="create")
    response.confirmation = T("Test Result registered")

    # Report to CWA?
    report_to_cwa = formvars.get("report_to_cwa")
    dcc_option = False
    if report_to_cwa == "ANONYMOUS":
        processing_type = "CWA_ANONYMOUS"
        cwa_report = CWAReport(record_id)
    elif report_to_cwa == "PERSONAL":
        # DCC option only available with personal reporting
        dcc_option = formvars.get("dcc_option")
        processing_type = "CWA_PERSONAL"
        cwa_report = CWAReport(record_id,
                               anonymous = False,
                               first_name = formvars.get("first_name"),
                               last_name = formvars.get("last_name"),
                               dob = formvars.get("date_of_birth"),
                               dcc = dcc_option,
                               )
    else:
        # No CWA reporting requested
        processing_type = cwa_report = None

    if cwa_report:
        # Register consent
        cwa_report.register_consent(processing_type,
                                    formvars.get("consent"),
                                    )

        # Send to CWA
        if cwa_report.send():
            response.information = T("Result reported to %(system)s") % CWA
            retry = False
        else:
            response.error = T("Report to %(system)s failed") % CWA
            retry = True

        # Store DCC data
        if dcc_option:
            cwa_data = cwa_report.data
            try:
                hcert = DCC.from_result(cwa_data.get("hash"),
                                        record_id,
                                        cwa_data.get("fn"),
                                        cwa_data.get("ln"),
                                        cwa_data.get("dob"),
                                        )
            except ValueError as e:
                hcert = None
                response.warning = str(e)
            if hcert:
                hcert.save()
            else:
                # Remove DCC flag if hcert could not be generated
                cwa_report.dcc = False

        CustomController._view("RLPPTM", "certificate.html")

        # Title
        field = table.disease_id
        if cwa_report.disease_id and field.represent:
            disease = field.represent(cwa_report.disease_id)
            title = "%s %s" % (disease, T("Test Result"))
        else:
            title = T("Test Result")

        output = {"title": title,
                  "intro": None,
                  "form": cwa_report.formatted(retry=retry),
                  }
    else:
        # No CWA report => redirect to the read view of the record
        self.next = r.url(id=record_id, method="read")
        output = None

    return output
Python
def certify(cls, r, **attr):
    """
    Generate a test certificate (PDF) for download

    Args:
        r: the CRUDRequest instance
        attr: controller attributes
    """

    record = r.record
    if not record:
        r.error(400, current.ERROR.BAD_REQUEST)
    if r.representation != "pdf":
        r.error(415, current.ERROR.BAD_FORMAT)

    testid = record.uuid
    site_id = record.site_id
    probe_date = record.probe_date
    result = record.result
    disease_id = record.disease_id

    item = {"testid": testid,
            "result_raw": result,
            }

    if r.http == "POST":

        post_vars = r.post_vars

        # Extract and check formkey from post data
        formkey = post_vars.get("_formkey")
        keyname = "_formkey[testresult/%s]" % r.id
        if not formkey or formkey not in current.session.get(keyname, []):
            r.error(403, current.ERROR.NOT_PERMITTED)

        # Extract cwadata
        cwadata = post_vars.get("cwadata")
        if not cwadata:
            r.error(400, current.ERROR.BAD_REQUEST)
        try:
            cwadata = json.loads(cwadata)
        except JSONERRORS:
            r.error(400, current.ERROR.BAD_REQUEST)

        # Generate the CWAReport (implicitly validates the hash)
        anonymous = "fn" not in cwadata
        try:
            cwareport = CWAReport(r.id,
                                  anonymous = anonymous,
                                  first_name = cwadata.get("fn"),
                                  last_name = cwadata.get("ln"),
                                  dob = cwadata.get("dob"),
                                  dcc = post_vars.get("dcc") == "1",
                                  salt = cwadata.get("salt"),
                                  dhash = cwadata.get("hash"),
                                  )
        except ValueError:
            r.error(400, current.ERROR.BAD_RECORD)

        # Generate the data item
        item["link"] = cwareport.get_link()
        if not anonymous:
            # Include personal details (dates in local format)
            for k in ("ln", "fn", "dob"):
                value = cwadata.get(k)
                if k == "dob":
                    value = CWAReport.to_local_dtfmt(value)
                item[k] = value
    else:
        cwareport = None

    s3db = current.s3db

    # Test Station
    table = s3db.disease_case_diagnostics
    field = table.site_id
    if field.represent:
        item["site_name"] = field.represent(site_id)
    if site_id:
        item.update(cls.get_site_details(site_id))

    # Probe date and test result
    field = table.probe_date
    if field.represent:
        item["test_date"] = field.represent(probe_date)
    field = table.result
    if field.represent:
        item["result"] = field.represent(result)

    # Title
    T = current.T
    field = table.disease_id
    if disease_id and field.represent:
        disease = field.represent(disease_id)
        title = "%s %s" % (disease, T("Test Result"))
    else:
        title = T("Test Result")
    item["title"] = pdf_title = title

    from core import DataExporter
    from gluon.contenttype import contenttype

    # Export PDF
    output = DataExporter.pdfcard([item],
                                  layout = CWACardLayout,
                                  title = pdf_title,
                                  )

    # Deliver as attachment download
    response = current.response
    disposition = "attachment; filename=\"certificate.pdf\""
    response.headers["Content-Type"] = contenttype(".pdf")
    response.headers["Content-disposition"] = disposition

    return output
def certify(cls, r, **attr):
    """
    Generate a test certificate (PDF) for download

    Args:
        r: the CRUDRequest instance
        attr: controller attributes
    """

    record = r.record
    if not record:
        r.error(400, current.ERROR.BAD_REQUEST)
    if r.representation != "pdf":
        r.error(415, current.ERROR.BAD_FORMAT)

    testid = record.uuid
    site_id = record.site_id
    probe_date = record.probe_date
    result = record.result
    disease_id = record.disease_id

    item = {"testid": testid,
            "result_raw": result,
            }

    if r.http == "POST":

        post_vars = r.post_vars

        # Extract and check formkey from post data
        formkey = post_vars.get("_formkey")
        keyname = "_formkey[testresult/%s]" % r.id
        if not formkey or formkey not in current.session.get(keyname, []):
            r.error(403, current.ERROR.NOT_PERMITTED)

        # Extract cwadata
        cwadata = post_vars.get("cwadata")
        if not cwadata:
            r.error(400, current.ERROR.BAD_REQUEST)
        try:
            cwadata = json.loads(cwadata)
        except JSONERRORS:
            r.error(400, current.ERROR.BAD_REQUEST)

        # Generate the CWAReport (implicitly validates the hash)
        anonymous = "fn" not in cwadata
        try:
            cwareport = CWAReport(r.id,
                                  anonymous = anonymous,
                                  first_name = cwadata.get("fn"),
                                  last_name = cwadata.get("ln"),
                                  dob = cwadata.get("dob"),
                                  dcc = post_vars.get("dcc") == "1",
                                  salt = cwadata.get("salt"),
                                  dhash = cwadata.get("hash"),
                                  )
        except ValueError:
            r.error(400, current.ERROR.BAD_RECORD)

        # Generate the data item
        item["link"] = cwareport.get_link()
        if not anonymous:
            # Include personal details (dates in local format)
            for k in ("ln", "fn", "dob"):
                value = cwadata.get(k)
                if k == "dob":
                    value = CWAReport.to_local_dtfmt(value)
                item[k] = value
    else:
        cwareport = None

    s3db = current.s3db

    # Test Station
    table = s3db.disease_case_diagnostics
    field = table.site_id
    if field.represent:
        item["site_name"] = field.represent(site_id)
    if site_id:
        item.update(cls.get_site_details(site_id))

    # Probe date and test result
    field = table.probe_date
    if field.represent:
        item["test_date"] = field.represent(probe_date)
    field = table.result
    if field.represent:
        item["result"] = field.represent(result)

    # Title
    T = current.T
    field = table.disease_id
    if disease_id and field.represent:
        disease = field.represent(disease_id)
        title = "%s %s" % (disease, T("Test Result"))
    else:
        title = T("Test Result")
    item["title"] = pdf_title = title

    from core import DataExporter
    from gluon.contenttype import contenttype

    # Export PDF
    output = DataExporter.pdfcard([item],
                                  layout = CWACardLayout,
                                  title = pdf_title,
                                  )

    # Deliver as attachment download
    response = current.response
    disposition = "attachment; filename=\"certificate.pdf\""
    response.headers["Content-Type"] = contenttype(".pdf")
    response.headers["Content-disposition"] = disposition

    return output
Python
def cwaretry(r, **attr):
    """
    Retry sending test result to CWA result server

    Args:
        r: the CRUDRequest instance
        attr: controller attributes

    Returns:
        JSON message confirming the report
    """

    # Guard clauses: record must exist, request must be POST/JSON
    if not r.record:
        r.error(400, current.ERROR.BAD_REQUEST)
    if r.http != "POST":
        r.error(405, current.ERROR.BAD_METHOD)
    if r.representation != "json":
        r.error(415, current.ERROR.BAD_FORMAT)

    T = current.T

    # Parse the request options from the JSON body
    body = r.body
    body.seek(0)
    try:
        options = json.load(body)
    except JSONERRORS:
        options = None
    if not isinstance(options, dict):
        r.error(400, "Invalid request options")

    # Verify formkey
    formkey = options.get("formkey")
    keyname = "_formkey[testresult/%s]" % r.id
    if not formkey or formkey not in current.session.get(keyname, []):
        r.error(403, current.ERROR.NOT_PERMITTED)

    # Reconstruct the CWAReport from the submitted data
    # (implicitly validates the hash)
    cwadata = options.get("cwadata", {})
    try:
        cwareport = CWAReport(r.id,
                              anonymous = "fn" not in cwadata,
                              first_name = cwadata.get("fn"),
                              last_name = cwadata.get("ln"),
                              dob = cwadata.get("dob"),
                              dcc = options.get("dcc") == "1",
                              salt = cwadata.get("salt"),
                              dhash = cwadata.get("hash"),
                              )
    except ValueError:
        r.error(400, current.ERROR.BAD_RECORD)

    if cwareport.send():
        message = T("Result reported to %(system)s") % CWA
        output = current.xml.json_message(message=message)
    else:
        r.error(503, T("Report to %(system)s failed") % CWA)

    return output
Python
def to_local_dtfmt(dtstr): """ Helper to convert an ISO-formatted date to local format Args: dtstr: the ISO-formatted date as string Returns: the date in local format as string """ c = current.calendar dt = c.parse_date(dtstr) return c.format_date(dt, local=True) if dt else dtstr
def to_local_dtfmt(dtstr): """ Helper to convert an ISO-formatted date to local format Args: dtstr: the ISO-formatted date as string Returns: the date in local format as string """ c = current.calendar dt = c.parse_date(dtstr) return c.format_date(dt, local=True) if dt else dtstr
Python
def register_consent(self, processing_type, response): """ Register consent assertion using the current hash as reference Args: processing type: the data processing type for which consent is required response: the consent response """ data = self.data dhash = data.get("hash") if not dhash: raise ValueError("Missing context hash") ConsentTracking.assert_consent(dhash, processing_type, response)
def register_consent(self, processing_type, response): """ Register consent assertion using the current hash as reference Args: processing type: the data processing type for which consent is required response: the consent response """ data = self.data dhash = data.get("hash") if not dhash: raise ValueError("Missing context hash") ConsentTracking.assert_consent(dhash, processing_type, response)
Python
def draw_box_with_label(self, x, y, width=120, height=20, label=None): """ Draw a box with a label inside (paper form element) Args: x: the horizontal position (from left) y: the vertical position (from bottom) width: the width of the box height: the height of the box label: the label """ label_size = 7 c = self.canv c.saveState() c.setLineWidth(0.5) c.rect(x, y, width, height) if label: c.setFont("Helvetica", label_size) c.setFillGray(0.3) c.drawString(x + 4, y + height - label_size - 1, s3_str(label)) c.restoreState()
def draw_box_with_label(self, x, y, width=120, height=20, label=None): """ Draw a box with a label inside (paper form element) Args: x: the horizontal position (from left) y: the vertical position (from bottom) width: the width of the box height: the height of the box label: the label """ label_size = 7 c = self.canv c.saveState() c.setLineWidth(0.5) c.rect(x, y, width, height) if label: c.setFont("Helvetica", label_size) c.setFillGray(0.3) c.drawString(x + 4, y + height - label_size - 1, s3_str(label)) c.restoreState()
Python
def draw_line_with_label(self, x, y, width=120, label=None): """ Draw a placeholder line with label underneath (paper form element) Args: x: the horizontal position (from left) y: the vertical position (from bottom) width: the horizontal length of the line label: the label """ label_size = 7 c = self.canv c.saveState() c.setLineWidth(0.5) c.line(x, y, x + width, y) if label: c.setFont("Helvetica", label_size) c.setFillGray(0.3) c.drawString(x, y - label_size - 1, s3_str(label)) c.restoreState()
def draw_line_with_label(self, x, y, width=120, label=None): """ Draw a placeholder line with label underneath (paper form element) Args: x: the horizontal position (from left) y: the vertical position (from bottom) width: the horizontal length of the line label: the label """ label_size = 7 c = self.canv c.saveState() c.setLineWidth(0.5) c.line(x, y, x + width, y) if label: c.setFont("Helvetica", label_size) c.setFillGray(0.3) c.drawString(x, y - label_size - 1, s3_str(label)) c.restoreState()
Python
def drk_default_shelter(): """ Lazy getter for the default shelter_id """ s3 = current.response.s3 shelter_id = s3.drk_default_shelter if not shelter_id: default_site = current.deployment_settings.get_org_default_site() # Get the shelter_id for default site if default_site: stable = current.s3db.cr_shelter query = (stable.site_id == default_site) shelter = current.db(query).select(stable.id, limitby=(0, 1), ).first() if shelter: shelter_id = shelter.id s3.drk_default_shelter = shelter_id return shelter_id
def drk_default_shelter(): """ Lazy getter for the default shelter_id """ s3 = current.response.s3 shelter_id = s3.drk_default_shelter if not shelter_id: default_site = current.deployment_settings.get_org_default_site() # Get the shelter_id for default site if default_site: stable = current.s3db.cr_shelter query = (stable.site_id == default_site) shelter = current.db(query).select(stable.id, limitby=(0, 1), ).first() if shelter: shelter_id = shelter.id s3.drk_default_shelter = shelter_id return shelter_id
Python
def _options(self, r, **attr): """ Get the updated options for the filter form for the target resource as JSON. - GET filter.options Args: r: the CRUDRequest attr: additional controller parameters (ignored currently) Note: These use a fresh resource, so filter vars are not respected. s3.filter if respected, so if you need to filter the options, then can apply filter vars to s3.filter in customise() if the controller is not the same as the calling one! """ resource = self.resource options = {} filter_widgets = resource.get_config("filter_widgets", None) if filter_widgets: fresource = current.s3db.resource(resource.tablename, filter = current.response.s3.filter, ) for widget in filter_widgets: if hasattr(widget, "ajax_options"): opts = widget.ajax_options(fresource) if opts and isinstance(opts, dict): options.update(opts) options = json.dumps(options, separators=JSONSEPARATORS) current.response.headers["Content-Type"] = "application/json" return options
def _options(self, r, **attr): """ Get the updated options for the filter form for the target resource as JSON. - GET filter.options Args: r: the CRUDRequest attr: additional controller parameters (ignored currently) Note: These use a fresh resource, so filter vars are not respected. s3.filter if respected, so if you need to filter the options, then can apply filter vars to s3.filter in customise() if the controller is not the same as the calling one! """ resource = self.resource options = {} filter_widgets = resource.get_config("filter_widgets", None) if filter_widgets: fresource = current.s3db.resource(resource.tablename, filter = current.response.s3.filter, ) for widget in filter_widgets: if hasattr(widget, "ajax_options"): opts = widget.ajax_options(fresource) if opts and isinstance(opts, dict): options.update(opts) options = json.dumps(options, separators=JSONSEPARATORS) current.response.headers["Content-Type"] = "application/json" return options
Python
def _delete(r, **attr): """ Delete a filter, responds to POST filter.json?delete= Args: r: the CRUDRequest attr: additional controller parameters """ # Authorization, get pe_id auth = current.auth if auth.s3_logged_in(): pe_id = current.auth.user.pe_id else: pe_id = None if not pe_id: r.unauthorised() # Read the source source = r.body source.seek(0) try: data = json.load(source) except ValueError: # Syntax error: no JSON data r.error(400, current.ERROR.BAD_SOURCE) # Try to find the record db = current.db s3db = current.s3db table = s3db.pr_filter record = None record_id = data.get("id") if record_id: query = (table.id == record_id) & \ (table.pe_id == pe_id) record = db(query).select(table.id, limitby = (0, 1) ).first() if not record: r.error(404, current.ERROR.BAD_RECORD) resource = s3db.resource("pr_filter", id = record_id) success = resource.delete(format = r.representation) if not success: r.error(400, resource.error) current.response.headers["Content-Type"] = "application/json" return current.xml.json_message(deleted = record_id)
def _delete(r, **attr): """ Delete a filter, responds to POST filter.json?delete= Args: r: the CRUDRequest attr: additional controller parameters """ # Authorization, get pe_id auth = current.auth if auth.s3_logged_in(): pe_id = current.auth.user.pe_id else: pe_id = None if not pe_id: r.unauthorised() # Read the source source = r.body source.seek(0) try: data = json.load(source) except ValueError: # Syntax error: no JSON data r.error(400, current.ERROR.BAD_SOURCE) # Try to find the record db = current.db s3db = current.s3db table = s3db.pr_filter record = None record_id = data.get("id") if record_id: query = (table.id == record_id) & \ (table.pe_id == pe_id) record = db(query).select(table.id, limitby = (0, 1) ).first() if not record: r.error(404, current.ERROR.BAD_RECORD) resource = s3db.resource("pr_filter", id = record_id) success = resource.delete(format = r.representation) if not success: r.error(400, resource.error) current.response.headers["Content-Type"] = "application/json" return current.xml.json_message(deleted = record_id)
Python
def _save(self, r, **attr): """ Save a filter, responds to POST filter.json Args: r: the CRUDRequest attr: additional controller parameters """ # Authorization, get pe_id auth = current.auth if auth.s3_logged_in(): pe_id = current.auth.user.pe_id else: pe_id = None if not pe_id: r.unauthorised() # Read the source source = r.body source.seek(0) try: data = json.load(source) except ValueError: r.error(501, current.ERROR.BAD_SOURCE) # Try to find the record db = current.db s3db = current.s3db table = s3db.pr_filter record_id = data.get("id") record = None if record_id: query = (table.id == record_id) & \ (table.pe_id == pe_id) record = db(query).select(table.id, limitby = (0, 1) ).first() if not record: r.error(404, current.ERROR.BAD_RECORD) # Build new record resource = self.resource filter_data = { "pe_id": pe_id, "controller": r.controller, "function": r.function, "resource": resource.tablename, "deleted": False, } for attribute in ("title", "description", "url"): value = data.get(attribute) if value: filter_data[attribute] = value # Client-side filter queries query = data.get("query") if query is not None: queries = [item for item in query if item[1] != None] filter_data["query"] = json.dumps(queries) else: queries = [] filter_data["query"] = None # Server-side filters filters = {} for f in resource.rfilter.filters: filters.update(f.serialize_url(resource)) queries.extend(filters.items()) filter_data["serverside"] = queries if queries else [] # Store record form = Storage(vars=filter_data) if record: record.update_record(**filter_data) current.audit("update", "pr", "filter", form, record_id, "json") s3db.onaccept(table, record, method="update") info = {"updated": record_id} else: filter_data["id"] = record_id = table.insert(**filter_data) current.audit("create", "pr", "filter", form, record_id, "json") auth.s3_set_record_owner(table, record_id) s3db.onaccept(table, record, method="create") info = {"created": record_id} # Success/Error response 
current.response.headers["Content-Type"] = "application/json" return current.xml.json_message(**info)
def _save(self, r, **attr): """ Save a filter, responds to POST filter.json Args: r: the CRUDRequest attr: additional controller parameters """ # Authorization, get pe_id auth = current.auth if auth.s3_logged_in(): pe_id = current.auth.user.pe_id else: pe_id = None if not pe_id: r.unauthorised() # Read the source source = r.body source.seek(0) try: data = json.load(source) except ValueError: r.error(501, current.ERROR.BAD_SOURCE) # Try to find the record db = current.db s3db = current.s3db table = s3db.pr_filter record_id = data.get("id") record = None if record_id: query = (table.id == record_id) & \ (table.pe_id == pe_id) record = db(query).select(table.id, limitby = (0, 1) ).first() if not record: r.error(404, current.ERROR.BAD_RECORD) # Build new record resource = self.resource filter_data = { "pe_id": pe_id, "controller": r.controller, "function": r.function, "resource": resource.tablename, "deleted": False, } for attribute in ("title", "description", "url"): value = data.get(attribute) if value: filter_data[attribute] = value # Client-side filter queries query = data.get("query") if query is not None: queries = [item for item in query if item[1] != None] filter_data["query"] = json.dumps(queries) else: queries = [] filter_data["query"] = None # Server-side filters filters = {} for f in resource.rfilter.filters: filters.update(f.serialize_url(resource)) queries.extend(filters.items()) filter_data["serverside"] = queries if queries else [] # Store record form = Storage(vars=filter_data) if record: record.update_record(**filter_data) current.audit("update", "pr", "filter", form, record_id, "json") s3db.onaccept(table, record, method="update") info = {"updated": record_id} else: filter_data["id"] = record_id = table.insert(**filter_data) current.audit("create", "pr", "filter", form, record_id, "json") auth.s3_set_record_owner(table, record_id) s3db.onaccept(table, record, method="create") info = {"created": record_id} # Success/Error response 
current.response.headers["Content-Type"] = "application/json" return current.xml.json_message(**info)
Python
def posts(): """ Function accessed by AJAX to handle a Series of Posts """ try: series_id = request.args[0] except: raise HTTP(400) try: recent = request.args[1] except: recent = 5 table = s3db.cms_post # List of Posts in this Series query = (table.series_id == series_id) posts = db(query).select(table.name, table.body, table.avatar, table.created_by, table.created_on, limitby = (0, recent) ) output = UL(_id="comments") import hashlib for post in posts: author = T("Anonymous") if post.created_by: utable = s3db.auth_user ptable = s3db.pr_person ltable = s3db.pr_person_user query = (utable.id == post.created_by) left = [ltable.on(ltable.user_id == utable.id), ptable.on(ptable.pe_id == ltable.pe_id)] row = db(query).select(utable.email, ptable.first_name, ptable.middle_name, ptable.last_name, left = left, limitby = (0, 1) ).first() if row: person = row.pr_person user = row[utable._tablename] username = s3_fullname(person) email = user.email.strip().lower() hash = hashlib.md5(email.encode("utf-8")).hexdigest() url = "http://www.gravatar.com/%s" % hash author = A(username, _href=url, _target="top") header = H4(post.name) if post.avatar: avatar = s3base.s3_avatar_represent(post.created_by) else: avatar = "" row = LI(DIV(avatar, DIV(DIV(header, _class = "comment-header", ), DIV(XML(post.body), _class = "comment-body", ), _class="comment-text"), DIV(DIV(post.created_on, _class = "comment-date", ), _class="fright"), DIV(author, _class = "comment-footer", ), _class = "comment-box", )) output.append(row) return XML(output)
def posts(): """ Function accessed by AJAX to handle a Series of Posts """ try: series_id = request.args[0] except: raise HTTP(400) try: recent = request.args[1] except: recent = 5 table = s3db.cms_post # List of Posts in this Series query = (table.series_id == series_id) posts = db(query).select(table.name, table.body, table.avatar, table.created_by, table.created_on, limitby = (0, recent) ) output = UL(_id="comments") import hashlib for post in posts: author = T("Anonymous") if post.created_by: utable = s3db.auth_user ptable = s3db.pr_person ltable = s3db.pr_person_user query = (utable.id == post.created_by) left = [ltable.on(ltable.user_id == utable.id), ptable.on(ptable.pe_id == ltable.pe_id)] row = db(query).select(utable.email, ptable.first_name, ptable.middle_name, ptable.last_name, left = left, limitby = (0, 1) ).first() if row: person = row.pr_person user = row[utable._tablename] username = s3_fullname(person) email = user.email.strip().lower() hash = hashlib.md5(email.encode("utf-8")).hexdigest() url = "http://www.gravatar.com/%s" % hash author = A(username, _href=url, _target="top") header = H4(post.name) if post.avatar: avatar = s3base.s3_avatar_represent(post.created_by) else: avatar = "" row = LI(DIV(avatar, DIV(DIV(header, _class = "comment-header", ), DIV(XML(post.body), _class = "comment-body", ), _class="comment-text"), DIV(DIV(post.created_on, _class = "comment-date", ), _class="fright"), DIV(author, _class = "comment-footer", ), _class = "comment-box", )) output.append(row) return XML(output)
Python
def configure_newsletter_attachments(file_icons=False): """ Configure newsletter attachments (doc_document) Args: file_icons: show files as icon+size rather than filename """ dtable = s3db.doc_document # Document Title is required field = dtable.name requires = field.requires field.requires = IS_NOT_EMPTY() if requires: field.requires = [field.requires, requires] # Hide URL and Date field = dtable.url field.readable = field.writable = False field = dtable.date field.readable = field.writable = False if file_icons: # Represent files as icon+size from core import represent_file field = dtable.file field.represent = represent_file # Reduced list fields s3db.configure("doc_document", list_fields = ["name", "file", "comments"], )
def configure_newsletter_attachments(file_icons=False): """ Configure newsletter attachments (doc_document) Args: file_icons: show files as icon+size rather than filename """ dtable = s3db.doc_document # Document Title is required field = dtable.name requires = field.requires field.requires = IS_NOT_EMPTY() if requires: field.requires = [field.requires, requires] # Hide URL and Date field = dtable.url field.readable = field.writable = False field = dtable.date field.readable = field.writable = False if file_icons: # Represent files as icon+size from core import represent_file field = dtable.file field.represent = represent_file # Reduced list fields s3db.configure("doc_document", list_fields = ["name", "file", "comments"], )
Python
def widget(self, resource, values): """ Render this widget as HTML helper object(s) Args: resource: the resource values: the search values from the URL query """ T = current.T css_base = self.css_base attr = self.attr css = attr.get("class") attr["_class"] = "%s %s" % (css, css_base) if css else css_base input_class = "%s-%s" % (css_base, "input") input_labels = self.input_labels input_elements = DIV(_class="range-filter") ie_append = input_elements.append _id = attr["_id"] _variable = self._variable selector = self.selector for operator in self.operator: input_id = "%s-%s" % (_id, operator) input_box = INPUT(_name = input_id, _id = input_id, _type = "text", _class = input_class, ) variable = _variable(selector, operator) # Populate with the value, if given # if user has not set any of the limits, we get [] in values. value = values.get(variable, None) if value not in [None, []]: if type(value) is list: value = value[0] input_box["_value"] = value input_box["value"] = value ie_append(DIV(DIV(LABEL("%s:" % T(input_labels[operator]), _for = input_id, ), _class = "range-filter-label", ), DIV(input_box, _class = "range-filter-widget", ), _class = "range-filter-field", )) return input_elements
def widget(self, resource, values): """ Render this widget as HTML helper object(s) Args: resource: the resource values: the search values from the URL query """ T = current.T css_base = self.css_base attr = self.attr css = attr.get("class") attr["_class"] = "%s %s" % (css, css_base) if css else css_base input_class = "%s-%s" % (css_base, "input") input_labels = self.input_labels input_elements = DIV(_class="range-filter") ie_append = input_elements.append _id = attr["_id"] _variable = self._variable selector = self.selector for operator in self.operator: input_id = "%s-%s" % (_id, operator) input_box = INPUT(_name = input_id, _id = input_id, _type = "text", _class = input_class, ) variable = _variable(selector, operator) # Populate with the value, if given # if user has not set any of the limits, we get [] in values. value = values.get(variable, None) if value not in [None, []]: if type(value) is list: value = value[0] input_box["_value"] = value input_box["value"] = value ie_append(DIV(DIV(LABEL("%s:" % T(input_labels[operator]), _for = input_id, ), _class = "range-filter-label", ), DIV(input_box, _class = "range-filter-widget", ), _class = "range-filter-field", )) return input_elements
Python
def data_element(self, variable): """ Overrides FilterWidget.data_element(), constructs multiple hidden INPUTs (one per variable) with element IDs of the form <id>-<operator>-data (where no operator is translated as "eq"). Args: variable: the variable(s) """ if variable is None: operators = self.operator if type(operators) is not list: operators = [operators] variable = self._variable(self.selector, operators) else: # Split the operators off the ends of the variables. if type(variable) is not list: variable = [variable] parse_key = S3URLQuery.parse_key operators = [parse_key(v)[1] for v in variable] elements = [] widget_id = self.attr["_id"] for o, v in zip(operators, variable): elements.append( INPUT(_type = "hidden", _id = "%s-%s-data" % (widget_id, o), _class = "filter-widget-data %s-data" % self.css_base, _value = v, )) return elements
def data_element(self, variable): """ Overrides FilterWidget.data_element(), constructs multiple hidden INPUTs (one per variable) with element IDs of the form <id>-<operator>-data (where no operator is translated as "eq"). Args: variable: the variable(s) """ if variable is None: operators = self.operator if type(operators) is not list: operators = [operators] variable = self._variable(self.selector, operators) else: # Split the operators off the ends of the variables. if type(variable) is not list: variable = [variable] parse_key = S3URLQuery.parse_key operators = [parse_key(v)[1] for v in variable] elements = [] widget_id = self.attr["_id"] for o, v in zip(operators, variable): elements.append( INPUT(_type = "hidden", _id = "%s-%s-data" % (widget_id, o), _class = "filter-widget-data %s-data" % self.css_base, _value = v, )) return elements
Python
def ajax_options(self, resource): """ Method to Ajax-retrieve the current options of this widget Args: resource: the CRUDResource """ minimum, maximum = self._options(resource) attr = self._attr(resource) options = {attr["_id"]: {"min": minimum, "max": maximum, }} return options
def ajax_options(self, resource): """ Method to Ajax-retrieve the current options of this widget Args: resource: the CRUDResource """ minimum, maximum = self._options(resource) attr = self._attr(resource) options = {attr["_id"]: {"min": minimum, "max": maximum, }} return options
Python
def _options(self, resource): """ Helper function to retrieve the current options for this filter widget Args: resource: the CRUDResource """ # Find only values linked to records the user is # permitted to read, and apply any resource filters # (= use the resource query) query = resource.get_query() # Must include rfilter joins when using the resource # query (both inner and left): rfilter = resource.rfilter if rfilter: join = rfilter.get_joins() left = rfilter.get_joins(left = True) else: join = left = None rfield = S3ResourceField(resource, self.field) field = rfield.field row = current.db(query).select(field.min(), field.max(), join = join, left = left, ).first() minimum = row[field.min()] maximum = row[field.max()] return minimum, maximum
def _options(self, resource): """ Helper function to retrieve the current options for this filter widget Args: resource: the CRUDResource """ # Find only values linked to records the user is # permitted to read, and apply any resource filters # (= use the resource query) query = resource.get_query() # Must include rfilter joins when using the resource # query (both inner and left): rfilter = resource.rfilter if rfilter: join = rfilter.get_joins() left = rfilter.get_joins(left = True) else: join = left = None rfield = S3ResourceField(resource, self.field) field = rfield.field row = current.db(query).select(field.min(), field.max(), join = join, left = left, ).first() minimum = row[field.min()] maximum = row[field.max()] return minimum, maximum
Python
def widget(self, resource, values): """ Render this widget as HTML helper object(s) Args: resource: the resource values: the search values from the URL query """ T = current.T attr = self.attr opts = self.opts # CSS classes css_base = self.css_base css = attr.get("class") attr["_class"] = "%s %s range-filter-slider" % (css, css_base) if css else css_base input_class = "%s-%s" % (css_base, "input") # Widget widget = DIV(**attr) widget_id = attr["_id"] # Slider slider_id = "%s_slider" % str(self.field).replace(".", "_") slider = DIV(_id=slider_id) widget.append(slider) # Selectable range minimum = opts.get("minimum") maximum = opts.get("maximum") if minimum is None or maximum is None: min_value, max_value, empty = self._options(resource) if minimum is not None: min_value = minimum elif min_value is None or min_value > 0 and empty: min_value = 0 if maximum is not None: max_value = maximum elif max_value is None: max_value = 0 # Input fields input_ids = [] selected = [] for operator in self.operator: input_id = "%s-%s" % (widget_id, operator) input_ids.append(input_id) input_box = INPUT(_name = input_id, _id = input_id, _type = "text", _class = input_class, ) variable = self._variable(self.selector, operator) value = values.get(variable) if value or value == 0: if type(value) is list: value = value[0] input_box["_value"] = input_box["value"] = value selected.append(value) else: if operator == "ge": selected.append(min_value) else: selected.append(max_value) label = "%s:" % T(self.input_labels[operator]) widget.append(DIV(DIV(LABEL(label, _for = input_id, ), _class = "range-filter-label", ), DIV(input_box, _class = "range-filter-widget", ), _class = "range-filter-field", )) s3 = current.response.s3 # Inject script script = '''i18n.slider_help="%s"''' % \ current.T("Click on the slider to choose a value") s3.js_global.append(script) datatype = opts.get("type", "int") if datatype == "int": script = '''S3.range_slider('%s','%s','%s',%i,%i,%i,[%i,%i])''' else: script = 
'''S3.range_slider('%s','%s','%s',%f,%f,%f,[%i,%i])''' params = (slider_id, input_ids[0], input_ids[1], min_value, max_value, opts.get("step", 1), selected[0], selected[1], ) s3.jquery_ready.append(script % params) return widget
def widget(self, resource, values): """ Render this widget as HTML helper object(s) Args: resource: the resource values: the search values from the URL query """ T = current.T attr = self.attr opts = self.opts # CSS classes css_base = self.css_base css = attr.get("class") attr["_class"] = "%s %s range-filter-slider" % (css, css_base) if css else css_base input_class = "%s-%s" % (css_base, "input") # Widget widget = DIV(**attr) widget_id = attr["_id"] # Slider slider_id = "%s_slider" % str(self.field).replace(".", "_") slider = DIV(_id=slider_id) widget.append(slider) # Selectable range minimum = opts.get("minimum") maximum = opts.get("maximum") if minimum is None or maximum is None: min_value, max_value, empty = self._options(resource) if minimum is not None: min_value = minimum elif min_value is None or min_value > 0 and empty: min_value = 0 if maximum is not None: max_value = maximum elif max_value is None: max_value = 0 # Input fields input_ids = [] selected = [] for operator in self.operator: input_id = "%s-%s" % (widget_id, operator) input_ids.append(input_id) input_box = INPUT(_name = input_id, _id = input_id, _type = "text", _class = input_class, ) variable = self._variable(self.selector, operator) value = values.get(variable) if value or value == 0: if type(value) is list: value = value[0] input_box["_value"] = input_box["value"] = value selected.append(value) else: if operator == "ge": selected.append(min_value) else: selected.append(max_value) label = "%s:" % T(self.input_labels[operator]) widget.append(DIV(DIV(LABEL(label, _for = input_id, ), _class = "range-filter-label", ), DIV(input_box, _class = "range-filter-widget", ), _class = "range-filter-field", )) s3 = current.response.s3 # Inject script script = '''i18n.slider_help="%s"''' % \ current.T("Click on the slider to choose a value") s3.js_global.append(script) datatype = opts.get("type", "int") if datatype == "int": script = '''S3.range_slider('%s','%s','%s',%i,%i,%i,[%i,%i])''' else: script = 
'''S3.range_slider('%s','%s','%s',%f,%f,%f,[%i,%i])''' params = (slider_id, input_ids[0], input_ids[1], min_value, max_value, opts.get("step", 1), selected[0], selected[1], ) s3.jquery_ready.append(script % params) return widget
Python
def ajax_options(self, resource): """ Method to Ajax-retrieve the current options of this widget Args: resource: the CRUDResource """ minimum, maximum = self._options(resource)[:2] attr = self._attr(resource) options = {attr["_id"]: {"min": minimum, "max": maximum, }} return options
def ajax_options(self, resource): """ Method to Ajax-retrieve the current options of this widget Args: resource: the CRUDResource """ minimum, maximum = self._options(resource)[:2] attr = self._attr(resource) options = {attr["_id"]: {"min": minimum, "max": maximum, }} return options
Python
def _options(self, resource): """ Helper function to retrieve the current options for this filter widget Args: resource: the CRUDResource Returns: tuple (min_value, max_value, empty), with "empty" indicating whether there are records with None-values """ db = current.db # Find only values linked to records the user is # permitted to read, and apply any resource filters # (= use the resource query) query = resource.get_query() # Must include rfilter joins when using the resource # query (both inner and left): rfilter = resource.rfilter if rfilter: join = rfilter.get_joins() left = rfilter.get_joins(left = True) else: join = left = None rfield = S3ResourceField(resource, self.field) # If the filter field is in a joined table itself, # include the join for that table joins = rfield.join for tname in joins: query &= joins[tname] field = rfield.field row = db(query).select(field.min(), field.max(), join = join, left = left, ).first() minimum = row[field.min()] maximum = row[field.max()] # Check if there are records with no value empty = db(query & (field == None)).select(resource.table.id, join = join, left = left, limitby = (0, 1) ).first() return minimum, maximum, bool(empty)
def _options(self, resource):
    """
    Helper function to retrieve the current options for this
    filter widget

    Args:
        resource: the CRUDResource

    Returns:
        tuple (min_value, max_value, empty), with "empty"
        indicating whether there are records with None-values
    """

    db = current.db

    # Find only values linked to records the user is
    # permitted to read, and apply any resource filters
    # (= use the resource query)
    query = resource.get_query()

    # Must include rfilter joins when using the resource
    # query (both inner and left):
    rfilter = resource.rfilter
    if rfilter:
        join = rfilter.get_joins()
        left = rfilter.get_joins(left = True)
    else:
        join = left = None

    rfield = S3ResourceField(resource, self.field)

    # If the filter field is in a joined table itself,
    # include the join for that table
    joins = rfield.join
    for tname in joins:
        query &= joins[tname]

    field = rfield.field

    # Aggregate min and max in a single query
    row = db(query).select(field.min(),
                           field.max(),
                           join = join,
                           left = left,
                           ).first()
    minimum = row[field.min()]
    maximum = row[field.max()]

    # Check if there are records with no value
    empty = db(query & (field == None)).select(resource.table.id,
                                               join = join,
                                               left = left,
                                               limitby = (0, 1)
                                               ).first()

    return minimum, maximum, bool(empty)
Python
def drk_absence(row):
    """
    Field method to display duration of absence in
    dvr/person list view and rheader

    Args:
        row: the Row (pr_person, optionally joined with
             cr_shelter_registration)

    Returns:
        string or SPAN with the absence duration, or the
        default "NONE" representation
    """

    if hasattr(row, "cr_shelter_registration"):
        registration = row.cr_shelter_registration
    else:
        registration = None

    result = current.messages["NONE"]

    if registration is None or \
       not hasattr(registration, "registration_status") or \
       not hasattr(registration, "check_out_date"):
        # must reload
        db = current.db
        s3db = current.s3db

        person = row.pr_person if hasattr(row, "pr_person") else row
        person_id = person.id
        if not person_id:
            return result
        table = s3db.cr_shelter_registration
        query = (table.person_id == person_id) & \
                (table.deleted != True)
        registration = db(query).select(table.registration_status,
                                        table.check_out_date,
                                        limitby = (0, 1),
                                        ).first()

    # NOTE(review): status 3 presumably means "checked-out" -
    # confirm against the shelter registration status options
    if registration and \
       registration.registration_status == 3:

        T = current.T

        check_out_date = registration.check_out_date
        if check_out_date:

            # Compute number of whole days since check-out
            delta = (current.request.utcnow - check_out_date).total_seconds()
            if delta < 0:
                delta = 0
            days = int(delta / 86400)

            if days < 1:
                result = "<1 %s" % T("Day")
            elif days == 1:
                result = "1 %s" % T("Day")
            else:
                result = "%s %s" % (days, T("Days"))

            # Highlight when the absence exceeds the limit
            if days >= ABSENCE_LIMIT:
                result = SPAN(result, _class="overdue")

        else:
            result = SPAN(T("Date unknown"), _class="overdue")

    return result
def drk_absence(row):
    """
    Field method to display duration of absence in
    dvr/person list view and rheader

    Args:
        row: the Row (pr_person, optionally joined with
             cr_shelter_registration)

    Returns:
        string or SPAN with the absence duration, or the
        default "NONE" representation
    """

    if hasattr(row, "cr_shelter_registration"):
        registration = row.cr_shelter_registration
    else:
        registration = None

    result = current.messages["NONE"]

    if registration is None or \
       not hasattr(registration, "registration_status") or \
       not hasattr(registration, "check_out_date"):
        # must reload
        db = current.db
        s3db = current.s3db

        person = row.pr_person if hasattr(row, "pr_person") else row
        person_id = person.id
        if not person_id:
            return result
        table = s3db.cr_shelter_registration
        query = (table.person_id == person_id) & \
                (table.deleted != True)
        registration = db(query).select(table.registration_status,
                                        table.check_out_date,
                                        limitby = (0, 1),
                                        ).first()

    # NOTE(review): status 3 presumably means "checked-out" -
    # confirm against the shelter registration status options
    if registration and \
       registration.registration_status == 3:

        T = current.T

        check_out_date = registration.check_out_date
        if check_out_date:

            # Compute number of whole days since check-out
            delta = (current.request.utcnow - check_out_date).total_seconds()
            if delta < 0:
                delta = 0
            days = int(delta / 86400)

            if days < 1:
                result = "<1 %s" % T("Day")
            elif days == 1:
                result = "1 %s" % T("Day")
            else:
                result = "%s %s" % (days, T("Days"))

            # Highlight when the absence exceeds the limit
            if days >= ABSENCE_LIMIT:
                result = SPAN(result, _class="overdue")

        else:
            result = SPAN(T("Date unknown"), _class="overdue")

    return result
Python
def event_overdue(code, interval):
    """
    Get cases (person_ids) for which a certain event is overdue

    Args:
        code: the event code (a trailing "*" matches it as prefix)
        interval: the interval in days

    Returns:
        set of person_ids of open cases older than the interval
        without a matching event within the interval
    """

    db = current.db
    s3db = current.s3db

    ttable = s3db.dvr_case_event_type
    ctable = s3db.dvr_case
    stable = s3db.dvr_case_status
    etable = s3db.dvr_case_event

    # Get event type ID
    if code[-1] == "*":
        # Prefix
        query = (ttable.code.like("%s%%" % code[:-1]))
        limitby = None
    else:
        query = (ttable.code == code)
        limitby = (0, 1)
    query &= (ttable.deleted == False)
    rows = db(query).select(ttable.id, limitby=limitby)
    if not rows:
        # No such event type
        return set()
    elif limitby:
        type_query = (etable.type_id == rows.first().id)
    else:
        type_query = (etable.type_id.belongs(set(row.id for row in rows)))

    # Determine deadline
    now = current.request.utcnow
    then = now - datetime.timedelta(days=interval)

    # Check only open cases
    join = stable.on((stable.id == ctable.status_id) & \
                     (stable.is_closed == False))

    # Join only events after the deadline
    left = etable.on((etable.person_id == ctable.person_id) & \
                     type_query & \
                     (etable.date != None) & \
                     (etable.date >= then) & \
                     (etable.deleted == False))

    # ...and then select the rows which don't have any
    # (anti-join via HAVING max(date) IS NULL)
    query = (ctable.archived == False) & \
            (ctable.date < then.date()) & \
            (ctable.deleted == False)
    rows = db(query).select(ctable.person_id,
                            left = left,
                            join = join,
                            groupby = ctable.person_id,
                            having = (etable.date.max() == None),
                            )
    return set(row.person_id for row in rows)
def event_overdue(code, interval):
    """
    Get cases (person_ids) for which a certain event is overdue

    Args:
        code: the event code (a trailing "*" matches it as prefix)
        interval: the interval in days

    Returns:
        set of person_ids of open cases older than the interval
        without a matching event within the interval
    """

    db = current.db
    s3db = current.s3db

    ttable = s3db.dvr_case_event_type
    ctable = s3db.dvr_case
    stable = s3db.dvr_case_status
    etable = s3db.dvr_case_event

    # Get event type ID
    if code[-1] == "*":
        # Prefix
        query = (ttable.code.like("%s%%" % code[:-1]))
        limitby = None
    else:
        query = (ttable.code == code)
        limitby = (0, 1)
    query &= (ttable.deleted == False)
    rows = db(query).select(ttable.id, limitby=limitby)
    if not rows:
        # No such event type
        return set()
    elif limitby:
        type_query = (etable.type_id == rows.first().id)
    else:
        type_query = (etable.type_id.belongs(set(row.id for row in rows)))

    # Determine deadline
    now = current.request.utcnow
    then = now - datetime.timedelta(days=interval)

    # Check only open cases
    join = stable.on((stable.id == ctable.status_id) & \
                     (stable.is_closed == False))

    # Join only events after the deadline
    left = etable.on((etable.person_id == ctable.person_id) & \
                     type_query & \
                     (etable.date != None) & \
                     (etable.date >= then) & \
                     (etable.deleted == False))

    # ...and then select the rows which don't have any
    # (anti-join via HAVING max(date) IS NULL)
    query = (ctable.archived == False) & \
            (ctable.date < then.date()) & \
            (ctable.deleted == False)
    rows = db(query).select(ctable.person_id,
                            left = left,
                            join = join,
                            groupby = ctable.person_id,
                            having = (etable.date.max() == None),
                            )
    return set(row.person_id for row in rows)
Python
def configure_person_tags():
    """
    Configure filtered pr_person_tag components for registration
    numbers:
        - BAMF Registration Number (tag=BAMF)
    """

    # Single-instance tag component for the BAMF number
    bamf = {"name": "bamf",
            "joinby": "person_id",
            "filterby": {"tag": "BAMF"},
            "multiple": False,
            }

    current.s3db.add_components("pr_person",
                                pr_person_tag = (bamf,),
                                )
def configure_person_tags():
    """
    Configure filtered pr_person_tag components for registration
    numbers:
        - BAMF Registration Number (tag=BAMF)
    """

    # Single-instance tag component for the BAMF number
    bamf = {"name": "bamf",
            "joinby": "person_id",
            "filterby": {"tag": "BAMF"},
            "multiple": False,
            }

    current.s3db.add_components("pr_person",
                                pr_person_tag = (bamf,),
                                )
Python
def __normalize(path):
    """
    Normalize a path into a sequence of non-recurrent paths

    Args:
        path: the path as a list of node IDs

    Returns:
        list of paths (lists of node ID strings), with no path
        being a prefix of another
    """

    seq = [str(item) for item in path]
    if len(seq) < 2:
        # Single node or empty: nothing to resolve
        return [path]
    # Presumably splits the sequence at recurring nodes - TODO confirm
    seq = S3MultiPath.__resolve(seq)
    pop = seq.pop
    paths = []
    append = paths.append
    while len(seq):
        p = pop(0)
        # Compare against all other paths (kept and remaining)
        s = paths + seq
        contained = False
        lp = len(p)
        for i in s:
            # Discard p if it is a prefix of any other path
            if i[:lp] == p:
                contained = True
                break
        if not contained:
            append(p)
    return paths
def __normalize(path):
    """
    Normalize a path into a sequence of non-recurrent paths

    Args:
        path: the path as a list of node IDs

    Returns:
        list of paths (lists of node ID strings), with no path
        being a prefix of another
    """

    seq = [str(item) for item in path]
    if len(seq) < 2:
        # Single node or empty: nothing to resolve
        return [path]
    # Presumably splits the sequence at recurring nodes - TODO confirm
    seq = S3MultiPath.__resolve(seq)
    pop = seq.pop
    paths = []
    append = paths.append
    while len(seq):
        p = pop(0)
        # Compare against all other paths (kept and remaining)
        s = paths + seq
        contained = False
        lp = len(p)
        for i in s:
            # Discard p if it is a prefix of any other path
            if i[:lp] == p:
                contained = True
                break
        if not contained:
            append(p)
    return paths
Python
def add_org_tags():
    """
    Adds organisation tags as filtered components,
    for embedding in form, filtering and as report axis
    """

    # Single-instance tag component for the district ID
    district = {"name": "district_id",
                "joinby": "organisation_id",
                "filterby": {"tag": "DistrictID"},
                "multiple": False,
                }

    current.s3db.add_components("org_organisation",
                                org_organisation_tag = (district,),
                                )
def add_org_tags():
    """
    Adds organisation tags as filtered components,
    for embedding in form, filtering and as report axis
    """

    # Single-instance tag component for the district ID
    district = {"name": "district_id",
                "joinby": "organisation_id",
                "filterby": {"tag": "DistrictID"},
                "multiple": False,
                }

    current.s3db.add_components("org_organisation",
                                org_organisation_tag = (district,),
                                )
Python
def html(self,
         start=None,
         limit=None,
         pagesize=None,
         rowsize=None,
         ajaxurl=None,
         empty=None,
         popup_url=None,
         popup_title=None,
         ):
    """
    Render list data as HTML (nested DIVs)

    Args:
        start: index of the first item (in this page)
        limit: total number of available items
        pagesize: maximum number of items per page
        rowsize: number of items per row
        ajaxurl: the URL to Ajax-update the datalist
        empty: message to display if the list is empty
        popup_url: the URL for the modal used for the 'more'
                   button (=> we deactivate InfiniteScroll)
        popup_title: the title for the modal

    Returns:
        the data list as DIV
    """

    T = current.T
    resource = self.resource
    list_fields = self.list_fields
    rfields = resource.resolve_selectors(list_fields)[0]

    list_id = self.list_id
    render = self.layout
    render_row = self.row_layout

    if not rowsize:
        rowsize = 1

    pkey = str(resource._id)

    records = self.records
    if records is not None:

        # Call prep if present
        if hasattr(render, "prep"):
            render.prep(resource, records)

        if current.response.s3.dl_no_header:
            items = []
        else:
            items = [DIV(T("Total Records: %(numrows)s") % \
                            {"numrows": self.total},
                         _class = "dl-header",
                         _id = "%s-header" % list_id,
                         )
                     ]

        # Empty-list message (hidden unless the list is empty)
        if empty is None:
            empty = get_crud_string(resource.tablename, "msg_no_match")
        empty = DIV(empty, _class="dl-empty")
        if self.total > 0:
            empty.update(_style="display:none")
        items.append(empty)

        # 1-based row index across pages (used for even/odd striping)
        row_idx = int(self.start / rowsize) + 1
        for group in self.groups(records, rowsize):
            row = []
            col_idx = 0
            for record in group:
                if pkey in record:
                    item_id = "%s-%s" % (list_id, record[pkey])
                else:
                    # template
                    item_id = "%s-[id]" % list_id
                item = render(list_id, item_id, resource, rfields, record)
                if hasattr(item, "add_class"):
                    _class = "dl-item dl-%s-cols dl-col-%s" % (rowsize, col_idx)
                    item.add_class(_class)
                row.append(item)
                col_idx += 1

            _class = "dl-row %s" % ((row_idx % 2) and "even" or "odd")
            if render_row:
                row = render_row(list_id, resource, rowsize, row)
                if hasattr(row, "add_class"):
                    row.add_class(_class)
            else:
                row = DIV(row, _class=_class)

            items.append(row)
            row_idx += 1
    else:
        # template
        raise NotImplementedError

    dl = DIV(items,
             _class="dl",
             _id=list_id,
             )

    # Pagination data for the client-side datalist script
    dl_data = {"startindex": start,
               "maxitems": limit,
               "totalitems": self.total,
               "pagesize": pagesize,
               "rowsize": rowsize,
               "ajaxurl": ajaxurl,
               }
    if popup_url:
        # Modal "more" button => no infinite scroll
        input_class = "dl-pagination"
        a_class = "s3_modal dl-more"
        #dl_data["popup_url"] = popup_url
        #dl_data["popup_title"] = popup_title
    else:
        input_class = "dl-pagination dl-scroll"
        a_class = "dl-more"
    from gluon.serializers import json as jsons
    dl_data = jsons(dl_data)
    dl.append(DIV(INPUT(_type = "hidden",
                        _class = input_class,
                        _value = dl_data,
                        ),
                  A(T("more..."),
                    _href = popup_url or ajaxurl,
                    _class = a_class,
                    _title = popup_title,
                    ),
                  _class = "dl-navigation",
                  ))

    return dl
def html(self,
         start=None,
         limit=None,
         pagesize=None,
         rowsize=None,
         ajaxurl=None,
         empty=None,
         popup_url=None,
         popup_title=None,
         ):
    """
    Render list data as HTML (nested DIVs)

    Args:
        start: index of the first item (in this page)
        limit: total number of available items
        pagesize: maximum number of items per page
        rowsize: number of items per row
        ajaxurl: the URL to Ajax-update the datalist
        empty: message to display if the list is empty
        popup_url: the URL for the modal used for the 'more'
                   button (=> we deactivate InfiniteScroll)
        popup_title: the title for the modal

    Returns:
        the data list as DIV
    """

    T = current.T
    resource = self.resource
    list_fields = self.list_fields
    rfields = resource.resolve_selectors(list_fields)[0]

    list_id = self.list_id
    render = self.layout
    render_row = self.row_layout

    if not rowsize:
        rowsize = 1

    pkey = str(resource._id)

    records = self.records
    if records is not None:

        # Call prep if present
        if hasattr(render, "prep"):
            render.prep(resource, records)

        if current.response.s3.dl_no_header:
            items = []
        else:
            items = [DIV(T("Total Records: %(numrows)s") % \
                            {"numrows": self.total},
                         _class = "dl-header",
                         _id = "%s-header" % list_id,
                         )
                     ]

        # Empty-list message (hidden unless the list is empty)
        if empty is None:
            empty = get_crud_string(resource.tablename, "msg_no_match")
        empty = DIV(empty, _class="dl-empty")
        if self.total > 0:
            empty.update(_style="display:none")
        items.append(empty)

        # 1-based row index across pages (used for even/odd striping)
        row_idx = int(self.start / rowsize) + 1
        for group in self.groups(records, rowsize):
            row = []
            col_idx = 0
            for record in group:
                if pkey in record:
                    item_id = "%s-%s" % (list_id, record[pkey])
                else:
                    # template
                    item_id = "%s-[id]" % list_id
                item = render(list_id, item_id, resource, rfields, record)
                if hasattr(item, "add_class"):
                    _class = "dl-item dl-%s-cols dl-col-%s" % (rowsize, col_idx)
                    item.add_class(_class)
                row.append(item)
                col_idx += 1

            _class = "dl-row %s" % ((row_idx % 2) and "even" or "odd")
            if render_row:
                row = render_row(list_id, resource, rowsize, row)
                if hasattr(row, "add_class"):
                    row.add_class(_class)
            else:
                row = DIV(row, _class=_class)

            items.append(row)
            row_idx += 1
    else:
        # template
        raise NotImplementedError

    dl = DIV(items,
             _class="dl",
             _id=list_id,
             )

    # Pagination data for the client-side datalist script
    dl_data = {"startindex": start,
               "maxitems": limit,
               "totalitems": self.total,
               "pagesize": pagesize,
               "rowsize": rowsize,
               "ajaxurl": ajaxurl,
               }
    if popup_url:
        # Modal "more" button => no infinite scroll
        input_class = "dl-pagination"
        a_class = "s3_modal dl-more"
        #dl_data["popup_url"] = popup_url
        #dl_data["popup_title"] = popup_title
    else:
        input_class = "dl-pagination dl-scroll"
        a_class = "dl-more"
    from gluon.serializers import json as jsons
    dl_data = jsons(dl_data)
    dl.append(DIV(INPUT(_type = "hidden",
                        _class = input_class,
                        _value = dl_data,
                        ),
                  A(T("more..."),
                    _href = popup_url or ajaxurl,
                    _class = a_class,
                    _title = popup_title,
                    ),
                  _class = "dl-navigation",
                  ))

    return dl
Python
def project_map(r, **attr):
    """
    Display a filterable set of Projects on a Map
    - assumes mode_3w
    - currently assumes that theme_percentages=True

    @ToDo: Browse by Year

    Args:
        r: the CRUDRequest
        attr: controller attributes
    """

    if r.representation == "html" and \
       r.name == "project":

        T = current.T

        # Search Widget: multi-select over all (undeleted) themes
        themes_dropdown = SELECT(_multiple = True,
                                 _id = "project_theme_id",
                                 _style = "height:80px",
                                 )
        append = themes_dropdown.append
        ttable = current.s3db.project_theme
        themes = current.db(ttable.deleted == False).select(ttable.id,
                                                            ttable.name,
                                                            orderby = ttable.name,
                                                            )
        for theme in themes:
            append(OPTION(theme.name,
                          _value = theme.id,
                          #_selected = "selected",
                          ))
        form = FORM(themes_dropdown)

        # Map
        # The Layer of Projects to show on the Map
        # @ToDo: Create a URL to the project_polygons custom method & use that
        # @ToDo: Pass through attributes that we don't need for the 1st level of mapping
        #        so that they can be used without a screen refresh
        url = URL(f="location", extension="geojson")
        layer = {"name" : T("Projects"),
                 "id" : "projects",
                 "tablename" : "project_location",
                 "url" : url,
                 "active" : True,
                 #"marker" : None,
                 }

        the_map = current.gis.show_map(collapsed = True,
                                       feature_resources = [layer],
                                       )

        output = {"title": T("Projects Map"),
                  "form": form,
                  "map": the_map,
                  }

        # Add Static JS
        response = current.response
        response.s3.scripts.append(URL(c="static",
                                       f="scripts",
                                       args=["S3", "s3.project_map.js"]))

        response.view = "map.html"
    else:
        # NOTE(review): presumably r.error raises; otherwise "output"
        # below would be undefined - confirm
        r.error(405, current.ERROR.BAD_METHOD)

    return output
def project_map(r, **attr):
    """
    Display a filterable set of Projects on a Map
    - assumes mode_3w
    - currently assumes that theme_percentages=True

    @ToDo: Browse by Year

    Args:
        r: the CRUDRequest
        attr: controller attributes
    """

    if r.representation == "html" and \
       r.name == "project":

        T = current.T

        # Search Widget: multi-select over all (undeleted) themes
        themes_dropdown = SELECT(_multiple = True,
                                 _id = "project_theme_id",
                                 _style = "height:80px",
                                 )
        append = themes_dropdown.append
        ttable = current.s3db.project_theme
        themes = current.db(ttable.deleted == False).select(ttable.id,
                                                            ttable.name,
                                                            orderby = ttable.name,
                                                            )
        for theme in themes:
            append(OPTION(theme.name,
                          _value = theme.id,
                          #_selected = "selected",
                          ))
        form = FORM(themes_dropdown)

        # Map
        # The Layer of Projects to show on the Map
        # @ToDo: Create a URL to the project_polygons custom method & use that
        # @ToDo: Pass through attributes that we don't need for the 1st level of mapping
        #        so that they can be used without a screen refresh
        url = URL(f="location", extension="geojson")
        layer = {"name" : T("Projects"),
                 "id" : "projects",
                 "tablename" : "project_location",
                 "url" : url,
                 "active" : True,
                 #"marker" : None,
                 }

        the_map = current.gis.show_map(collapsed = True,
                                       feature_resources = [layer],
                                       )

        output = {"title": T("Projects Map"),
                  "form": form,
                  "map": the_map,
                  }

        # Add Static JS
        response = current.response
        response.s3.scripts.append(URL(c="static",
                                       f="scripts",
                                       args=["S3", "s3.project_map.js"]))

        response.view = "map.html"
    else:
        # NOTE(review): presumably r.error raises; otherwise "output"
        # below would be undefined - confirm
        r.error(405, current.ERROR.BAD_METHOD)

    return output
Python
def project_polygons(r, **attr):
    """
    Export Projects as GeoJSON Polygons to view on the map
    - currently assumes that theme_percentages=True

    @ToDo: complete

    Args:
        r: the CRUDRequest
        attr: controller attributes
    """

    db = current.db
    s3db = current.s3db
    ptable = s3db.project_project
    ttable = s3db.project_theme
    tptable = s3db.project_theme_project
    pltable = s3db.project_location
    ltable = s3db.gis_location

    #get_vars = r.get_vars

    themes = db(ttable.deleted == False).select(ttable.id,
                                                ttable.name,
                                                orderby = ttable.name)

    # Total the Budget spent by Theme for each country
    countries = {}
    query = (ptable.deleted == False) & \
            (tptable.project_id == ptable.id) & \
            (ptable.id == pltable.project_id) & \
            (ltable.id == pltable.location_id)
    #if "theme_id" in get_vars:
    #    query = query & (tptable.id.belongs(get_vars.theme_id))
    projects = db(query).select()
    for project in projects:
        # Only show those projects which are only within 1 country
        # @ToDo
        # NOTE(review): this treats location_id as a list of country
        # IDs - confirm, a scalar foreign key would make len() fail
        _countries = project.location_id
        if len(_countries) == 1:
            country = _countries[0]
            if country in countries:
                budget = project.project_project.total_annual_budget()
                theme = project.project_theme_project.theme_id
                percentage = project.project_theme_project.percentage
                countries[country][theme] += budget * percentage
            else:
                name = db(ltable.id == country).select(ltable.name).first().name
                countries[country] = {"name": name}
                # Init all themes to 0
                for theme in themes:
                    countries[country][theme.id] = 0
                # Add value for this record
                budget = project.project_project.total_annual_budget()
                theme = project.project_theme_project.theme_id
                percentage = project.project_theme_project.percentage
                countries[country][theme] += budget * percentage

    #query = (ltable.id.belongs(countries))
    #locations = db(query).select(ltable.id,
    #                             ltable.wkt)
    #for location in locations:
    #    pass

    # Convert to GeoJSON
    # NOTE(review): stub - currently always returns an empty object
    output = json.dumps({})

    current.response.headers["Content-Type"] = "application/json"
    return output
def project_polygons(r, **attr):
    """
    Export Projects as GeoJSON Polygons to view on the map
    - currently assumes that theme_percentages=True

    @ToDo: complete

    Args:
        r: the CRUDRequest
        attr: controller attributes
    """

    db = current.db
    s3db = current.s3db
    ptable = s3db.project_project
    ttable = s3db.project_theme
    tptable = s3db.project_theme_project
    pltable = s3db.project_location
    ltable = s3db.gis_location

    #get_vars = r.get_vars

    themes = db(ttable.deleted == False).select(ttable.id,
                                                ttable.name,
                                                orderby = ttable.name)

    # Total the Budget spent by Theme for each country
    countries = {}
    query = (ptable.deleted == False) & \
            (tptable.project_id == ptable.id) & \
            (ptable.id == pltable.project_id) & \
            (ltable.id == pltable.location_id)
    #if "theme_id" in get_vars:
    #    query = query & (tptable.id.belongs(get_vars.theme_id))
    projects = db(query).select()
    for project in projects:
        # Only show those projects which are only within 1 country
        # @ToDo
        # NOTE(review): this treats location_id as a list of country
        # IDs - confirm, a scalar foreign key would make len() fail
        _countries = project.location_id
        if len(_countries) == 1:
            country = _countries[0]
            if country in countries:
                budget = project.project_project.total_annual_budget()
                theme = project.project_theme_project.theme_id
                percentage = project.project_theme_project.percentage
                countries[country][theme] += budget * percentage
            else:
                name = db(ltable.id == country).select(ltable.name).first().name
                countries[country] = {"name": name}
                # Init all themes to 0
                for theme in themes:
                    countries[country][theme.id] = 0
                # Add value for this record
                budget = project.project_project.total_annual_budget()
                theme = project.project_theme_project.theme_id
                percentage = project.project_theme_project.percentage
                countries[country][theme] += budget * percentage

    #query = (ltable.id.belongs(countries))
    #locations = db(query).select(ltable.id,
    #                             ltable.wkt)
    #for location in locations:
    #    pass

    # Convert to GeoJSON
    # NOTE(review): stub - currently always returns an empty object
    output = json.dumps({})

    current.response.headers["Content-Type"] = "application/json"
    return output
Python
def project_timeline(r, **attr):
    """
    Display the project on a Simile Timeline

    http://www.simile-widgets.org/wiki/Reference_Documentation_for_Timeline

    Currently this just displays a Google Calendar

    @ToDo: Add Milestones
    @ToDo: Filters for different 'layers'
    @ToDo: export milestones/tasks as .ics
    """

    if r.representation == "html" and r.name == "project":

        response = current.response
        s3 = response.s3

        # Pass vars to our JS code
        s3.js_global.append('''S3.timeline.calendar="%s"''' % r.record.calendar)

        # Add core Simile Code
        s3_include_simile()

        # Create the DIV
        output = {"item": DIV(_id = "s3timeline",
                              _class = "s3-timeline",
                              ),
                  "title": current.T("Project Calendar"),
                  }

        # Maintain RHeader for consistency
        if "rheader" in attr:
            rheader = attr["rheader"](r)
            if rheader:
                output["rheader"] = rheader

        response.view = "timeline.html"
    else:
        r.error(405, current.ERROR.BAD_METHOD)

    return output
def project_timeline(r, **attr):
    """
    Display the project on a Simile Timeline

    http://www.simile-widgets.org/wiki/Reference_Documentation_for_Timeline

    Currently this just displays a Google Calendar

    @ToDo: Add Milestones
    @ToDo: Filters for different 'layers'
    @ToDo: export milestones/tasks as .ics
    """

    if r.representation == "html" and r.name == "project":

        response = current.response
        s3 = response.s3

        # Pass vars to our JS code
        s3.js_global.append('''S3.timeline.calendar="%s"''' % r.record.calendar)

        # Add core Simile Code
        s3_include_simile()

        # Create the DIV
        output = {"item": DIV(_id = "s3timeline",
                              _class = "s3-timeline",
                              ),
                  "title": current.T("Project Calendar"),
                  }

        # Maintain RHeader for consistency
        if "rheader" in attr:
            rheader = attr["rheader"](r)
            if rheader:
                output["rheader"] = rheader

        response.view = "timeline.html"
    else:
        r.error(405, current.ERROR.BAD_METHOD)

    return output
Python
def project_human_resource_onvalidation(form):
    """
    Prevent the same human_resource record being added more than
    once
    """

    table = current.s3db.project_human_resource_project
    # NOTE(review): reads form.request_vars rather than form.vars -
    # confirm this is intended for this form type
    formvars = form.request_vars

    # Any other link between the same project and staff member?
    query = (table.human_resource_id == formvars.human_resource_id) & \
            (table.project_id == formvars.project_id) & \
            (table.id != formvars.id)
    duplicate = current.db(query).select(table.id,
                                         limitby = (0, 1),
                                         ).first()

    if duplicate:
        # We have a duplicate. Return an error to the user.
        form.errors.human_resource_id = current.T("Record already exists")
def project_human_resource_onvalidation(form):
    """
    Prevent the same human_resource record being added more than
    once
    """

    table = current.s3db.project_human_resource_project
    # NOTE(review): reads form.request_vars rather than form.vars -
    # confirm this is intended for this form type
    formvars = form.request_vars

    # Any other link between the same project and staff member?
    query = (table.human_resource_id == formvars.human_resource_id) & \
            (table.project_id == formvars.project_id) & \
            (table.id != formvars.id)
    duplicate = current.db(query).select(table.id,
                                         limitby = (0, 1),
                                         ).first()

    if duplicate:
        # We have a duplicate. Return an error to the user.
        form.errors.human_resource_id = current.T("Record already exists")
Python
def project_location_contact_onaccept(form):
    """
    If the Contact has no Realm, then set it to that of this record
    """

    db = current.db
    formvars = form.vars

    person_id = formvars.get("person_id")
    realm_entity = formvars.get("realm_entity")

    if not person_id or not realm_entity:
        # Reload the record to get the missing values
        table = db.project_location_contact
        row = db(table.id == formvars.get("id")).select(table.person_id,
                                                        table.realm_entity,
                                                        limitby = (0, 1),
                                                        ).first()
        if not row:
            return
        person_id = row.person_id
        realm_entity = row.realm_entity

    if not realm_entity:
        return

    # Assign the realm entity only if the person has none yet
    ptable = db.pr_person
    person = db(ptable.id == person_id).select(ptable.id,
                                               ptable.realm_entity,
                                               limitby = (0, 1),
                                               ).first()
    if person and not person.realm_entity:
        person.update_record(realm_entity = realm_entity)
def project_location_contact_onaccept(form):
    """
    If the Contact has no Realm, then set it to that of this record
    """

    db = current.db
    formvars = form.vars

    person_id = formvars.get("person_id")
    realm_entity = formvars.get("realm_entity")

    if not person_id or not realm_entity:
        # Reload the record to get the missing values
        table = db.project_location_contact
        row = db(table.id == formvars.get("id")).select(table.person_id,
                                                        table.realm_entity,
                                                        limitby = (0, 1),
                                                        ).first()
        if not row:
            return
        person_id = row.person_id
        realm_entity = row.realm_entity

    if not realm_entity:
        return

    # Assign the realm entity only if the person has none yet
    ptable = db.pr_person
    person = db(ptable.id == person_id).select(ptable.id,
                                               ptable.realm_entity,
                                               limitby = (0, 1),
                                               ).first()
    if person and not person.realm_entity:
        person.update_record(realm_entity = realm_entity)
Python
def project_organisation_realm_entity(table, record):
    """
    Set the realm entity to the project's realm entity
    """

    db = current.db
    otable = db.project_organisation
    ptable = db.project_project

    # Look up the realm entity of the project this link belongs to
    query = (otable.id == record.id) & \
            (otable.project_id == ptable.id)
    project = db(query).select(ptable.realm_entity,
                               limitby = (0, 1),
                               ).first()

    return project.realm_entity if project else None
def project_organisation_realm_entity(table, record):
    """
    Set the realm entity to the project's realm entity
    """

    db = current.db
    otable = db.project_organisation
    ptable = db.project_project

    # Look up the realm entity of the project this link belongs to
    query = (otable.id == record.id) & \
            (otable.project_id == ptable.id)
    project = db(query).select(ptable.realm_entity,
                               limitby = (0, 1),
                               ).first()

    return project.realm_entity if project else None
Python
def project_activity_year(row):
    """
    Virtual field for the project_activity table: the calendar
    years the activity falls into

    @ToDo: Deprecate: replace with computed field

    Args:
        row: the Row (project_activity)

    Returns:
        list of years (ints), possibly empty
    """

    if hasattr(row, "project_activity"):
        row = row.project_activity

    try:
        activity_id = row.id
    except AttributeError:
        return []

    # Take the dates from the row if present, otherwise reload
    start_date = row.date if hasattr(row, "date") else False
    end_date = row.end_date if hasattr(row, "end_date") else False

    if start_date is False or end_date is False:
        s3db = current.s3db
        table = s3db.project_activity
        activity = current.db(table.id == activity_id).select(table.date,
                                                              table.end_date,
                                                              cache = s3db.cache,
                                                              limitby = (0, 1)
                                                              ).first()
        if activity:
            start_date = activity.date
            end_date = activity.end_date

    if not start_date and not end_date:
        return []
    if not end_date:
        return [start_date.year]
    if not start_date:
        return [end_date.year]
    return list(range(start_date.year, end_date.year + 1))
def project_activity_year(row):
    """
    Virtual field for the project_activity table: the calendar
    years the activity falls into

    @ToDo: Deprecate: replace with computed field

    Args:
        row: the Row (project_activity)

    Returns:
        list of years (ints), possibly empty
    """

    if hasattr(row, "project_activity"):
        row = row.project_activity

    try:
        activity_id = row.id
    except AttributeError:
        return []

    # Take the dates from the row if present, otherwise reload
    start_date = row.date if hasattr(row, "date") else False
    end_date = row.end_date if hasattr(row, "end_date") else False

    if start_date is False or end_date is False:
        s3db = current.s3db
        table = s3db.project_activity
        activity = current.db(table.id == activity_id).select(table.date,
                                                              table.end_date,
                                                              cache = s3db.cache,
                                                              limitby = (0, 1)
                                                              ).first()
        if activity:
            start_date = activity.date
            end_date = activity.end_date

    if not start_date and not end_date:
        return []
    if not end_date:
        return [start_date.year]
    if not start_date:
        return [end_date.year]
    return list(range(start_date.year, end_date.year + 1))
Python
def project_activity_activity_type_onaccept(form):
    """
    Ensure the Activity Location is a Project Location with the
    Activity's Activity Types in (as a minimum).

    Args:
        form: the FORM (expects vars.activity_id and
              vars.activity_type_id)

    @ToDo: deployment_setting to allow project Locations to be
           read-only & have data editable only at the Activity
           level
    """

    db = current.db

    form_vars_get = form.vars.get
    activity_id = form_vars_get("activity_id")

    # Find the Project & Location
    atable = db.project_activity
    activity = db(atable.id == activity_id).select(atable.project_id,
                                                   atable.location_id,
                                                   limitby=(0, 1)
                                                   ).first()
    try:
        project_id = activity.project_id
        location_id = activity.location_id
    except AttributeError:
        # Nothing we can do
        return

    if not project_id or not location_id:
        # Nothing we can do
        return

    # Find the Project Location
    s3db = current.s3db
    ltable = s3db.project_location
    query = (ltable.project_id == project_id) &\
            (ltable.location_id == location_id)
    location = db(query).select(ltable.id,
                                limitby=(0, 1)
                                ).first()

    if location:
        pl_id = location.id
    else:
        # Create it
        pl_id = ltable.insert(project_id = project_id,
                              location_id = location_id,
                              )

    # Ensure we have the Activity Type in
    activity_type_id = form_vars_get("activity_type_id")
    latable = s3db.project_activity_type_location
    query = (latable.project_location_id == pl_id) &\
            (latable.activity_type_id == activity_type_id)
    exists = db(query).select(latable.id,
                              limitby=(0, 1)
                              ).first()
    if not exists:
        # Create it
        latable.insert(project_location_id = pl_id,
                       activity_type_id = activity_type_id,
                       )
def project_activity_activity_type_onaccept(form):
    """
    Ensure the Activity Location is a Project Location with the
    Activity's Activity Types in (as a minimum).

    Args:
        form: the FORM (expects vars.activity_id and
              vars.activity_type_id)

    @ToDo: deployment_setting to allow project Locations to be
           read-only & have data editable only at the Activity
           level
    """

    db = current.db

    form_vars_get = form.vars.get
    activity_id = form_vars_get("activity_id")

    # Find the Project & Location
    atable = db.project_activity
    activity = db(atable.id == activity_id).select(atable.project_id,
                                                   atable.location_id,
                                                   limitby=(0, 1)
                                                   ).first()
    try:
        project_id = activity.project_id
        location_id = activity.location_id
    except AttributeError:
        # Nothing we can do
        return

    if not project_id or not location_id:
        # Nothing we can do
        return

    # Find the Project Location
    s3db = current.s3db
    ltable = s3db.project_location
    query = (ltable.project_id == project_id) &\
            (ltable.location_id == location_id)
    location = db(query).select(ltable.id,
                                limitby=(0, 1)
                                ).first()

    if location:
        pl_id = location.id
    else:
        # Create it
        pl_id = ltable.insert(project_id = project_id,
                              location_id = location_id,
                              )

    # Ensure we have the Activity Type in
    activity_type_id = form_vars_get("activity_type_id")
    latable = s3db.project_activity_type_location
    query = (latable.project_location_id == pl_id) &\
            (latable.activity_type_id == activity_type_id)
    exists = db(query).select(latable.id,
                              limitby=(0, 1)
                              ).first()
    if not exists:
        # Create it
        latable.insert(project_location_id = pl_id,
                       activity_type_id = activity_type_id,
                       )
Python
def project_activity_realm_entity(table, record):
    """
    Set the realm entity to the project's realm entity
    """

    db = current.db
    atable = db.project_activity
    ptable = db.project_project

    # Look up the realm entity of the activity's project
    query = (atable.id == record.id) & \
            (atable.project_id == ptable.id)
    project = db(query).select(ptable.realm_entity,
                               limitby = (0, 1),
                               ).first()

    return project.realm_entity if project else None
def project_activity_realm_entity(table, record): """ Set the realm entity to the project's realm entity """ activity_id = record.id db = current.db table = db.project_activity ptable = db.project_project query = (table.id == activity_id) & \ (table.project_id == ptable.id) project = db(query).select(ptable.realm_entity, limitby=(0, 1)).first() try: return project.realm_entity except AttributeError: return None
Python
def defaults(): """ Safe defaults for model-global names if module is disabled """ return {"project_task_id": S3ReusableField.dummy("task_id"), "project_task_active_statuses": [], }
def defaults(): """ Safe defaults for model-global names if module is disabled """ return {"project_task_id": S3ReusableField.dummy("task_id"), "project_task_active_statuses": [], }
Python
def project_task_project_opts(): """ Provide the options for the Project search filter - all Projects with Tasks """ db = current.db ptable = db.project_project ttable = db.project_task join = ttable.on((ttable.project_id == ptable.id) & \ (ttable.deleted == False)) query = ptable.deleted == False rows = db(query).select(ptable.id, ptable.name, join=join) return {row.id: row.name for row in rows}
def project_task_project_opts(): """ Provide the options for the Project search filter - all Projects with Tasks """ db = current.db ptable = db.project_project ttable = db.project_task join = ttable.on((ttable.project_id == ptable.id) & \ (ttable.deleted == False)) query = ptable.deleted == False rows = db(query).select(ptable.id, ptable.name, join=join) return {row.id: row.name for row in rows}
Python
def project_task_activity_opts(): """ Provide the options for the Activity search filter - all Activities with Tasks """ db = current.db atable = db.project_activity ttable = db.project_task join = ttable.on((ttable.project_id == atable.id) & \ (ttable.deleted == False)) query = atable.deleted == False rows = db(query).select(atable.id, atable.name, join=join) return {row.id: row.name for row in rows}
def project_task_activity_opts(): """ Provide the options for the Activity search filter - all Activities with Tasks """ db = current.db atable = db.project_activity ttable = db.project_task join = ttable.on((ttable.project_id == atable.id) & \ (ttable.deleted == False)) query = atable.deleted == False rows = db(query).select(atable.id, atable.name, join=join) return {row.id: row.name for row in rows}
Python
def project_task_milestone_opts(): """ Provide the options for the Milestone search filter - all Milestones with Tasks """ db = current.db mtable = db.project_milestone ttable = db.project_task join = ttable.on((ttable.project_id == mtable.id) & \ (ttable.deleted == False)) query = mtable.deleted == False rows = db(query).select(mtable.id, mtable.name, join=join) return {row.id: row.name for row in rows}
def project_task_milestone_opts(): """ Provide the options for the Milestone search filter - all Milestones with Tasks """ db = current.db mtable = db.project_milestone ttable = db.project_task join = ttable.on((ttable.project_id == mtable.id) & \ (ttable.deleted == False)) query = mtable.deleted == False rows = db(query).select(mtable.id, mtable.name, join=join) return {row.id: row.name for row in rows}
Python
def project_task_realm_entity(table, record): """ Set the task realm entity to the project's realm entity """ task_id = record.id db = current.db ptable = db.project_project ttable = db.project_task join = ptable.on((ptable.id == ttable.project_id) & \ (ptable.deleted == False)) query = (ttable.id == task_id) project = db(query).select(ptable.realm_entity, join=join, limitby=(0, 1)).first() if project: return project.realm_entity else: return None
def project_task_realm_entity(table, record): """ Set the task realm entity to the project's realm entity """ task_id = record.id db = current.db ptable = db.project_project ttable = db.project_task join = ptable.on((ptable.id == ttable.project_id) & \ (ptable.deleted == False)) query = (ttable.id == task_id) project = db(query).select(ptable.realm_entity, join=join, limitby=(0, 1)).first() if project: return project.realm_entity else: return None
Python
def project_task_create_onaccept(form): """ When a Task is created: * inherit the project_id from activity if task is created under activity * notify assignee """ db = current.db s3db = current.s3db form_vars = form.vars try: record_id = form_vars.id except AttributeError: record_id = None if not record_id: return table = s3db.project_task project_id = form_vars.get("project_id") if not project_id: project_id = table.project_id.default if not project_id: activity_id = form_vars.get("activity_id") if not activity_id: activity_id = table.activity_id.default if activity_id: atable = s3db.project_activity activity = db(atable.id == activity_id).select(atable.project_id, limitby = (0, 1), ).first() if activity and activity.project_id: db(table.id == record_id).update(project_id=project_id) # Notify Assignee task_notify(form)
def project_task_create_onaccept(form): """ When a Task is created: * inherit the project_id from activity if task is created under activity * notify assignee """ db = current.db s3db = current.s3db form_vars = form.vars try: record_id = form_vars.id except AttributeError: record_id = None if not record_id: return table = s3db.project_task project_id = form_vars.get("project_id") if not project_id: project_id = table.project_id.default if not project_id: activity_id = form_vars.get("activity_id") if not activity_id: activity_id = table.activity_id.default if activity_id: atable = s3db.project_activity activity = db(atable.id == activity_id).select(atable.project_id, limitby = (0, 1), ).first() if activity and activity.project_id: db(table.id == record_id).update(project_id=project_id) # Notify Assignee task_notify(form)
Python
def project_task_update_onaccept(form): """ * Process the additional fields: Project/Activity/Milestone * Log changes as comments * If the task is assigned to someone then notify them """ db = current.db s3db = current.s3db form_vars = form.vars task_id = form_vars.id record = form.record table = db.project_task if record: # Not True for a record merger changed = {} for var in form_vars: vvar = form_vars[var] if isinstance(vvar, Field): # modified_by/modified_on continue rvar = record[var] if vvar != rvar: type_ = table[var].type if type_ == "integer" or \ type_.startswith("reference"): if vvar: vvar = int(vvar) if vvar == rvar: continue represent = table[var].represent if not represent: represent = lambda o: o if rvar: changed[var] = "%s changed from %s to %s" % \ (table[var].label, represent(rvar), represent(vvar)) else: changed[var] = "%s changed to %s" % \ (table[var].label, represent(vvar)) if changed: table = db.project_comment text = s3db.auth_UserRepresent(show_link = False)(current.auth.user.id) for var in changed: text = "%s\n%s" % (text, changed[var]) table.insert(task_id = task_id, body = text, ) # Notify Assignee task_notify(form) # Resolve shelter inspection flags linked to this task if current.deployment_settings.get_cr_shelter_inspection_tasks(): s3db.cr_resolve_shelter_flags(task_id)
def project_task_update_onaccept(form): """ * Process the additional fields: Project/Activity/Milestone * Log changes as comments * If the task is assigned to someone then notify them """ db = current.db s3db = current.s3db form_vars = form.vars task_id = form_vars.id record = form.record table = db.project_task if record: # Not True for a record merger changed = {} for var in form_vars: vvar = form_vars[var] if isinstance(vvar, Field): # modified_by/modified_on continue rvar = record[var] if vvar != rvar: type_ = table[var].type if type_ == "integer" or \ type_.startswith("reference"): if vvar: vvar = int(vvar) if vvar == rvar: continue represent = table[var].represent if not represent: represent = lambda o: o if rvar: changed[var] = "%s changed from %s to %s" % \ (table[var].label, represent(rvar), represent(vvar)) else: changed[var] = "%s changed to %s" % \ (table[var].label, represent(vvar)) if changed: table = db.project_comment text = s3db.auth_UserRepresent(show_link = False)(current.auth.user.id) for var in changed: text = "%s\n%s" % (text, changed[var]) table.insert(task_id = task_id, body = text, ) # Notify Assignee task_notify(form) # Resolve shelter inspection flags linked to this task if current.deployment_settings.get_cr_shelter_inspection_tasks(): s3db.cr_resolve_shelter_flags(task_id)
Python
def project_task_dispatch(r, **attr): """ Send a Task Dispatch notice from a Task - if a location is supplied, this will be formatted as an OpenGeoSMS """ if r.representation == "html" and \ r.name == "task" and r.id and not r.component: record = r.record text = "%s: %s" % (record.name, record.description) # Encode the message as an OpenGeoSMS msg = current.msg message = msg.prepare_opengeosms(record.location_id, code = "ST", map = "google", text = text) # URL to redirect to after message sent url = URL(c="project", f="task", args=r.id) # Create the form if record.pe_id: opts = {"recipient": record.pe_id} else: opts = {"recipient_type": "pr_person"} output = msg.compose(type = "SMS", message = message, url = url, **opts) # Maintain RHeader for consistency if "rheader" in attr: rheader = attr["rheader"](r) if rheader: output["rheader"] = rheader output["title"] = current.T("Send Task Notification") current.response.view = "msg/compose.html" else: r.error(405, current.ERROR.BAD_METHOD) return output
def project_task_dispatch(r, **attr): """ Send a Task Dispatch notice from a Task - if a location is supplied, this will be formatted as an OpenGeoSMS """ if r.representation == "html" and \ r.name == "task" and r.id and not r.component: record = r.record text = "%s: %s" % (record.name, record.description) # Encode the message as an OpenGeoSMS msg = current.msg message = msg.prepare_opengeosms(record.location_id, code = "ST", map = "google", text = text) # URL to redirect to after message sent url = URL(c="project", f="task", args=r.id) # Create the form if record.pe_id: opts = {"recipient": record.pe_id} else: opts = {"recipient_type": "pr_person"} output = msg.compose(type = "SMS", message = message, url = url, **opts) # Maintain RHeader for consistency if "rheader" in attr: rheader = attr["rheader"](r) if rheader: output["rheader"] = rheader output["title"] = current.T("Send Task Notification") current.response.view = "msg/compose.html" else: r.error(405, current.ERROR.BAD_METHOD) return output
Python
def project_time_onaccept(cls, form): """ When a project_time entry is newly created or updated: - updates the total hours in both task and activity Args: form: the FORM """ form_vars = form.vars try: record_id = form_vars.id except AttributeError: record_id = None if not record_id: return task_id = form_vars.get("task_id") if not task_id: table = current.s3db.project_time row = current.db(table.id == record_id).select(table.task_id, limitby = (0, 1), ).first() if row: task_id = row.task_id if task_id: cls.update_total_hours(task_id)
def project_time_onaccept(cls, form): """ When a project_time entry is newly created or updated: - updates the total hours in both task and activity Args: form: the FORM """ form_vars = form.vars try: record_id = form_vars.id except AttributeError: record_id = None if not record_id: return task_id = form_vars.get("task_id") if not task_id: table = current.s3db.project_time row = current.db(table.id == record_id).select(table.task_id, limitby = (0, 1), ).first() if row: task_id = row.task_id if task_id: cls.update_total_hours(task_id)
Python
def project_time_ondelete(cls, row): """ When a project_time entry is deleted: - updates the total hours in both task and activity Args: row: the deleted project_time Row """ if row.task_id: cls.update_total_hours(row.task_id)
def project_time_ondelete(cls, row): """ When a project_time entry is deleted: - updates the total hours in both task and activity Args: row: the deleted project_time Row """ if row.task_id: cls.update_total_hours(row.task_id)
Python
def update_total_hours(task_id): """ Updates the total hours in both task and corresponding activity Args: task_id: the project_task record ID """ db = current.db s3db = current.s3db htable = s3db.project_time ttable = s3db.project_task atable = s3db.project_activity # Look up the task query = ttable.id == task_id task = db(query).select(ttable.id, ttable.activity_id, limitby = (0, 1), ).first() # Update the total hours of the task query = (htable.task_id == task_id) & \ (htable.deleted == False) total_hours = htable.hours.sum() row = db(query).select(total_hours).first() task.update_record(time_actual = row[total_hours]) # Update the total hours of the activity activity_id = task.activity_id if activity_id: join = htable.on((htable.task_id == ttable.id) & \ (htable.deleted == False)) query = (ttable.activity_id == activity_id) & \ (ttable.deleted == False) row = db(query).select(total_hours, join=join).first() db(atable.id == activity_id).update(time_actual=row[total_hours])
def update_total_hours(task_id): """ Updates the total hours in both task and corresponding activity Args: task_id: the project_task record ID """ db = current.db s3db = current.s3db htable = s3db.project_time ttable = s3db.project_task atable = s3db.project_activity # Look up the task query = ttable.id == task_id task = db(query).select(ttable.id, ttable.activity_id, limitby = (0, 1), ).first() # Update the total hours of the task query = (htable.task_id == task_id) & \ (htable.deleted == False) total_hours = htable.hours.sum() row = db(query).select(total_hours).first() task.update_record(time_actual = row[total_hours]) # Update the total hours of the activity activity_id = task.activity_id if activity_id: join = htable.on((htable.task_id == ttable.id) & \ (htable.deleted == False)) query = (ttable.activity_id == activity_id) & \ (ttable.deleted == False) row = db(query).select(total_hours, join=join).first() db(atable.id == activity_id).update(time_actual=row[total_hours])
Python
def lookup_rows(self, key, values, fields=None): """ Custom lookup method for organisation rows, does a join with the projects and locations. Args: values: the project_location IDs """ db = current.db ltable = current.s3db.project_location gtable = db.gis_location fields = [ltable.id, # pkey is needed for the cache gtable.name, gtable.level, gtable.L0, gtable.L1, gtable.L2, gtable.L3, gtable.L4, gtable.L5, ] if len(values) == 1: query = (ltable.id == values[0]) & \ (ltable.location_id == gtable.id) limitby = (0, 1) else: query = (ltable.id.belongs(values)) & \ (ltable.location_id == gtable.id) limitby = None if not self.community: ptable = db.project_project query &= (ltable.project_id == ptable.id) fields.append(ptable.name) if self.use_codes: fields.append(ptable.code) rows = db(query).select(*fields, limitby=limitby) self.queries += 1 return rows
def lookup_rows(self, key, values, fields=None): """ Custom lookup method for organisation rows, does a join with the projects and locations. Args: values: the project_location IDs """ db = current.db ltable = current.s3db.project_location gtable = db.gis_location fields = [ltable.id, # pkey is needed for the cache gtable.name, gtable.level, gtable.L0, gtable.L1, gtable.L2, gtable.L3, gtable.L4, gtable.L5, ] if len(values) == 1: query = (ltable.id == values[0]) & \ (ltable.location_id == gtable.id) limitby = (0, 1) else: query = (ltable.id.belongs(values)) & \ (ltable.location_id == gtable.id) limitby = None if not self.community: ptable = db.project_project query &= (ltable.project_id == ptable.id) fields.append(ptable.name) if self.use_codes: fields.append(ptable.code) rows = db(query).select(*fields, limitby=limitby) self.queries += 1 return rows